gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package ca.uhn.fhir.jaxrs.server.example;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jaxrs.client.JaxRsRestfulClientFactory;
import ca.uhn.fhir.model.primitive.BoundCodeDt;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.valueset.BundleEntryTransactionMethodEnum;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.test.utilities.JettyUtil;
import ca.uhn.fhir.util.TestUtil;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.hl7.fhir.r4.model.*;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.List;
import static org.junit.jupiter.api.Assertions.*;
/**
 * Integration tests for the JAX-RS (RESTEasy) R4 patient provider.
 * <p>
 * A Jetty server hosting the JAX-RS providers is started once for the whole
 * class on a random free port; a HAPI FHIR generic client talks to it over
 * HTTP. Tests therefore require the provider classes on the classpath and a
 * working loopback network.
 */
public class JaxRsPatientProviderR4Test {

    private static IGenericClient client;
    private static FhirContext ourCtx = FhirContext.forR4();
    private static final String PATIENT_NAME = "Van Houte";
    private static int ourPort;
    private static Server jettyServer;

    /** Stops the embedded Jetty server and clears HAPI's static test state. */
    @AfterAll
    public static void afterClassClearContext() throws Exception {
        JettyUtil.closeServer(jettyServer);
        TestUtil.clearAllStaticFieldsForUnitTest();
    }

    /**
     * Boots an embedded Jetty server with the RESTEasy dispatcher servlet,
     * registers the JAX-RS providers, then builds the shared generic client.
     */
    @BeforeAll
    public static void setUpClass() throws Exception {
        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
        context.setContextPath("/");
        // Port 0 asks Jetty for any free port; the real port is read back below.
        jettyServer = new Server(0);
        jettyServer.setHandler(context);
        ServletHolder jerseyServlet = context.addServlet(org.jboss.resteasy.plugins.server.servlet.HttpServletDispatcher.class, "/*");
        jerseyServlet.setInitOrder(0);
        //@formatter:off
        jerseyServlet.setInitParameter("resteasy.resources",
            StringUtils.join(Arrays.asList(
                JaxRsConformanceProvider.class.getCanonicalName(),
                JaxRsPatientRestProvider.class.getCanonicalName(),
                JaxRsPageProvider.class.getCanonicalName()
            ), ","));
        //@formatter:on
        JettyUtil.startServer(jettyServer);
        ourPort = JettyUtil.getPortForStartedServer(jettyServer);
        ourCtx.setRestfulClientFactory(new JaxRsRestfulClientFactory(ourCtx));
        // Skip the metadata round-trip and use a generous socket timeout so
        // slow CI machines do not produce spurious failures.
        ourCtx.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER);
        ourCtx.getRestfulClientFactory().setSocketTimeout(1200 * 1000);
        client = ourCtx.newRestfulGenericClient("http://localhost:" + ourPort + "/");
        client.setEncoding(EncodingEnum.JSON);
        client.registerInterceptor(new LoggingInterceptor(true));
    }

    /** Search/Query - Type */
    @Test
    public void findUsingGenericClientBySearch() {
        // Perform a search
        final Bundle results = client
            .search()
            .forResource(Patient.class)
            .where(Patient.NAME.matchesExactly().value(PATIENT_NAME))
            .returnBundle(Bundle.class)
            .execute();
        System.out.println(results.getEntry().get(0));
        // JUnit 5 convention: expected value first, actual second.
        assertEquals(1, results.getEntry().size());
    }

    /** Search - Multi-valued Parameters (ANY/OR) */
    @Test
    public void findUsingGenericClientBySearchWithMultiValues() {
        final Bundle response = client
            .search()
            .forResource(Patient.class)
            .where(Patient.ADDRESS.matches().values("Toronto")).and(Patient.ADDRESS.matches().values("Ontario"))
            .and(Patient.ADDRESS.matches().values("Canada"))
            .where(Patient.IDENTIFIER.exactly().systemAndIdentifier("SHORTNAME", "TOYS"))
            .returnBundle(Bundle.class)
            .execute();
        System.out.println(response.getEntry().get(0));
    }

    /** Search - Paging */
    @Test
    public void findWithPaging() {
        // Perform a search limited to a small page so a "next" link may exist.
        final Bundle results = client.search().forResource(Patient.class).limitTo(8).returnBundle(Bundle.class).execute();
        System.out.println(results.getEntry().size());
        if (results.getLink(Bundle.LINK_NEXT) != null) {
            // load next page
            final Bundle nextPage = client.loadPage().next(results).execute();
            System.out.println(nextPage.getEntry().size());
        }
    }

    /** Search using HTTP POST instead of GET. */
    @Test
    public void testSearchPost() {
        Bundle response = client.search()
            .forResource("Patient")
            .usingStyle(SearchStyleEnum.POST)
            .returnBundle(Bundle.class)
            .execute();
        assertFalse(response.getEntry().isEmpty());
    }

    /** Search - Compartments */
    @Test
    public void testSearchCompartements() {
        Bundle response = client.search()
            .forResource(Patient.class)
            .withIdAndCompartment("1", "Condition")
            .returnBundle(Bundle.class)
            .execute();
        assertFalse(response.getEntry().isEmpty());
    }

    /** Search - Subsetting (_summary and _elements) */
    @Test
    @Disabled
    public void testSummary() {
        client.search()
            .forResource(Patient.class)
            .returnBundle(Bundle.class)
            .execute();
    }

    /** Create, then read back the created patient by its server-assigned id. */
    @Test
    public void testCreatePatient() {
        final Patient existing = new Patient();
        existing.setId((IdDt) null);
        existing.getNameFirstRep().setFamily("Created Patient 54");
        client.setEncoding(EncodingEnum.JSON);
        final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute();
        System.out.println(results.getId());
        final Patient patient = (Patient) results.getResource();
        System.out.println(patient);
        assertNotNull(client.read().resource(Patient.class).withId(patient.getId()));
        client.setEncoding(EncodingEnum.JSON);
    }

    /** Conditional Creates */
    @Test
    public void testConditionalCreate() {
        final Patient existing = new Patient();
        existing.setId((IdDt) null);
        existing.getNameFirstRep().setFamily("Created Patient 54");
        client.setEncoding(EncodingEnum.XML);
        final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute();
        System.out.println(results.getId());
        final Patient patient = (Patient) results.getResource();
        // Re-create conditionally on the same identifier; the server should
        // treat this as "create only if no match exists".
        client.create()
            .resource(patient)
            .conditional()
            .where(Patient.IDENTIFIER.exactly().identifier(patient.getIdentifierFirstRep().toString()))
            .execute();
    }

    /** Find By Id */
    @Test
    public void findUsingGenericClientById() {
        final Patient results = client.read().resource(Patient.class).withId("1").execute();
        assertEquals(1L, results.getIdElement().getIdPartAsLong().longValue());
    }

    /** Read, mutate a name, and update the resource by id. */
    @Test
    public void testUpdateById() {
        final Patient existing = client.read().resource(Patient.class).withId("1").execute();
        final List<HumanName> name = existing.getName();
        name.get(0).addSuffix("The Second");
        existing.setName(name);
        client.setEncoding(EncodingEnum.XML);
        client.update().resource(existing).withId("1").execute();
    }

    /** Create a patient, delete it, and verify a subsequent read fails. */
    @Test
    public void testDeletePatient() {
        final Patient existing = new Patient();
        existing.getNameFirstRep().setFamily("Created Patient XYZ");
        final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute();
        System.out.println(results.getId());
        final Patient patient = (Patient) results.getResource();
        client.delete().resource(patient).execute();
        try {
            client.read().resource(Patient.class).withId(patient.getId()).execute();
            fail();
        } catch (final Exception expected) {
            // Intentionally swallowed: the read MUST throw once the resource is
            // deleted (typically an HTTP 404). The exact exception type varies
            // by server, so no further assertion is made here.
            // assertEquals(e.getStatusCode(), Constants.STATUS_HTTP_404_NOT_FOUND);
        }
    }

    /** Transaction - Server */
    @Disabled
    @Test
    public void testTransaction() {
        Bundle bundle = new Bundle();
        Bundle.BundleEntryComponent entry = bundle.addEntry();
        final Patient existing = new Patient();
        existing.getNameFirstRep().setFamily("Created with bundle");
        entry.setResource(existing);
        Bundle response = client.transaction().withBundle(bundle).execute();
    }

    /** Conformance - Server */
    @Test
    @Disabled
    public void testConformance() {
        final CapabilityStatement caps = client.capabilities().ofType(CapabilityStatement.class).execute();
        System.out.println(caps.getRest().get(0).getResource().get(0).getType());
        assertEquals("Patient", caps.getRest().get(0).getResource().get(0).getType().toString());
    }

    /** Extended Operations */
    // Create a client to talk to the HeathIntersections server
    @Test
    public void testExtendedOperations() {
        client.registerInterceptor(new LoggingInterceptor(true));
        // Create the input parameters to pass to the server
        Parameters inParams = new Parameters();
        inParams.addParameter().setName("start").setValue(new DateTimeType("2001-01-01"));
        inParams.addParameter().setName("end").setValue(new DateTimeType("2015-03-01"));
        inParams.addParameter().setName("dummy").setValue(new StringType("myAwesomeDummyValue"));
        // Invoke $firstVersion on "Patient/1"
        Parameters outParams = client
            .operation()
            .onInstance(new IdDt("Patient", "1"))
            .named("$firstVersion")
            .withParameters(inParams)
            // .useHttpGet() // Use HTTP GET instead of POST
            .execute();
        String resultValue = outParams.getParameter().get(0).getValue().toString();
        System.out.println(resultValue);
        // assertTrue replaces the old assertEquals(x.contains(..), true, ..) idiom.
        assertTrue(resultValue.contains("myAwesomeDummyValue"), "expected but found : " + resultValue);
    }

    /** Same extended operation as above, but invoked via HTTP GET. */
    @Test
    public void testExtendedOperationsUsingGet() {
        // Create the input parameters to pass to the server
        Parameters inParams = new Parameters();
        inParams.addParameter().setName("start").setValue(new DateTimeType("2001-01-01"));
        inParams.addParameter().setName("end").setValue(new DateTimeType("2015-03-01"));
        inParams.addParameter().setName("dummy").setValue(new StringType("myAwesomeDummyValue"));
        // Invoke $firstVersion on "Patient/1"
        Parameters outParams = client
            .operation()
            .onInstance(new IdDt("Patient", "1"))
            .named("$firstVersion")
            .withParameters(inParams)
            .useHttpGet() // Use HTTP GET instead of POST
            .execute();
        String resultValue = outParams.getParameter().get(0).getValue().toString();
        System.out.println(resultValue);
        assertTrue(resultValue.contains("myAwesomeDummyValue"), "expected but found : " + resultValue);
    }

    /** Version-specific read (vread). */
    @Test
    public void testVRead() {
        final Patient patient = client.read().resource(Patient.class).withIdAndVersion("1", "1").execute();
        System.out.println(patient);
    }

    /** Plain read by id. */
    @Test
    public void testRead() {
        final Patient patient = client.read().resource(Patient.class).withId("1").execute();
        System.out.println(patient);
    }

    /** Instance-level history; the provider returns a bundle with id "myTestId". */
    @Test
    public void testInstanceHistory() {
        final Bundle history = client.history().onInstance(new IdDt("Patient", 1L)).returnBundle(Bundle.class).execute();
        assertEquals("myTestId", history.getIdElement().getIdPart());
    }

    /** Type-level history; the provider returns a bundle with id "myTestId". */
    @Test
    public void testTypeHistory() {
        final Bundle history = client.history().onType(Patient.class).returnBundle(Bundle.class).execute();
        assertEquals("myTestId", history.getIdElement().getIdPart());
    }
}
| |
/**
* Copyright 2010 Wallace Wadge
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package com.jolbox.benchmark;
import java.awt.Color;
import java.beans.PropertyVetoException;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.lang.reflect.InvocationTargetException;
import java.sql.SQLException;
import javax.naming.NamingException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.jfree.chart.ChartFactory;
import org.jfree.chart.ChartUtilities;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.chart.plot.XYPlot;
import org.jfree.chart.renderer.xy.XYLineAndShapeRenderer;
import org.jfree.data.category.DefaultCategoryDataset;
import org.jfree.data.xy.XYSeries;
import org.jfree.data.xy.XYSeriesCollection;
import com.jolbox.bonecp.MockJDBCDriver;
/**
* @author Wallace
*
*/
public class BenchmarkLaunch {

    /**
     * Entry point: parses command-line options, runs the BoneCP benchmark
     * suite, and writes line/bar charts (PNG) plus raw CSV dumps of every run
     * to the system temp directory.
     *
     * @param args command-line arguments: -t/--threads, -s/--stepping,
     *             -p/--poolsize, -h/--help
     * @throws ClassNotFoundException
     * @throws PropertyVetoException
     * @throws SQLException
     * @throws NoSuchMethodException
     * @throws InvocationTargetException
     * @throws IllegalAccessException
     * @throws InterruptedException
     * @throws SecurityException
     * @throws IllegalArgumentException
     * @throws NamingException
     * @throws ParseException
     * @throws IOException
     */
    public static void main(String[] args) throws ClassNotFoundException, SQLException, PropertyVetoException, IllegalArgumentException, SecurityException, InterruptedException, IllegalAccessException, InvocationTargetException, NoSuchMethodException, NamingException, ParseException, IOException {
        Options options = new Options();
        options.addOption("t", "threads", true, "Max number of threads");
        options.addOption("s", "stepping", true, "Stepping of threads");
        options.addOption("p", "poolsize", true, "Pool size");
        options.addOption("h", "help", false, "Help");
        CommandLineParser parser = new PosixParser();
        CommandLine cmd = parser.parse(options, args);
        if (cmd.hasOption("h")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("benchmark.jar", options);
            System.exit(1);
        }
        // Defaults; each may be overridden by the matching CLI option below.
        BenchmarkTests.threads = 400;
        BenchmarkTests.stepping = 5;
        BenchmarkTests.pool_size = 200;
        // The getOptionValue fallbacks are only reached if the option is
        // present without a value; they are kept in sync with the defaults.
        if (cmd.hasOption("t")) {
            BenchmarkTests.threads = Integer.parseInt(cmd.getOptionValue("t", "400"));
        }
        if (cmd.hasOption("s")) {
            BenchmarkTests.stepping = Integer.parseInt(cmd.getOptionValue("s", "5"));
        }
        if (cmd.hasOption("p")) {
            BenchmarkTests.pool_size = Integer.parseInt(cmd.getOptionValue("p", "200"));
        }
        System.out.println("Starting benchmark tests with "
            + BenchmarkTests.threads + " threads (stepping "
            + BenchmarkTests.stepping + ") using pool size of " + BenchmarkTests.pool_size + " connections");
        // Registers the mock JDBC driver via its constructor side effect.
        new MockJDBCDriver();
        BenchmarkTests tests = new BenchmarkTests();
        // Multi-threaded runs at increasing artificial per-query delays.
        plotLineGraph(tests.testMultiThreadedConstantDelay(0), 0, false);
        plotLineGraph(tests.testMultiThreadedConstantDelay(10), 10, false);
        plotLineGraph(tests.testMultiThreadedConstantDelay(25), 25, false);
        plotLineGraph(tests.testMultiThreadedConstantDelay(50), 50, false);
        plotLineGraph(tests.testMultiThreadedConstantDelay(75), 75, false);
        plotBarGraph("Single Thread", "bonecp-singlethread-poolsize-" + BenchmarkTests.pool_size + "-threads-" + BenchmarkTests.threads + ".png", tests.testSingleThread());
        plotBarGraph("Prepared Statement\nSingle Threaded", "bonecp-preparedstatement-single-poolsize-" + BenchmarkTests.pool_size + "-threads-" + BenchmarkTests.threads + ".png", tests.testPreparedStatementSingleThread());
        // Same delays again, this time exercising PreparedStatements.
        plotLineGraph(tests.testMultiThreadedConstantDelayWithPreparedStatements(0), 0, true);
        plotLineGraph(tests.testMultiThreadedConstantDelayWithPreparedStatements(10), 10, true);
        plotLineGraph(tests.testMultiThreadedConstantDelayWithPreparedStatements(25), 25, true);
        plotLineGraph(tests.testMultiThreadedConstantDelayWithPreparedStatements(50), 50, true);
        plotLineGraph(tests.testMultiThreadedConstantDelayWithPreparedStatements(75), 75, true);
    }

    /**
     * Plots one line graph per pool type for a multi-threaded run.
     *
     * @param results            [poolType][threadCount] timings in ns
     * @param delay              artificial per-query delay in ms (title only)
     * @param statementBenchmark whether this run used PreparedStatements
     * @throws IOException
     */
    private static void plotLineGraph(long[][] results, int delay, boolean statementBenchmark) throws IOException {
        // doPlotLineGraph(results, delay, statementBenchmark, true);
        doPlotLineGraph(results, delay, statementBenchmark, false);
    }

    /**
     * Writes a CSV dump and renders a PNG line chart of the given results.
     *
     * @param results            [poolType][threadCount] timings in ns
     * @param delay              artificial per-query delay in ms (title only)
     * @param statementBenchmark whether this run used PreparedStatements
     * @param noC3P0             when true, exclude the C3P0 series
     * @throws IOException
     */
    private static void doPlotLineGraph(long[][] results, int delay, boolean statementBenchmark, boolean noC3P0) throws IOException {
        String title = "Multi-Thread test (" + delay + "ms delay)";
        if (statementBenchmark) {
            title += "\n(with PreparedStatements tests)";
        }
        String fname = System.getProperty("java.io.tmpdir") + File.separator + "bonecp-multithread-" + delay + "ms-delay";
        if (statementBenchmark) {
            fname += "-with-preparedstatements";
        }
        fname += "-poolsize-" + BenchmarkTests.pool_size + "-threads-" + BenchmarkTests.threads;
        if (noC3P0) {
            fname += "-noC3P0";
        }
        XYSeriesCollection dataset = new XYSeriesCollection();
        ConnectionPoolType[] poolTypes = ConnectionPoolType.values();
        // try-with-resources guarantees the CSV writer is closed even if a
        // series throws mid-way (the old code leaked the PrintWriter).
        try (PrintWriter out = new PrintWriter(new FileWriter(fname + ".txt"))) {
            for (int i = 0; i < poolTypes.length; i++) {
                if (!poolTypes[i].isEnabled() || (noC3P0 && poolTypes[i].equals(ConnectionPoolType.C3P0))) {
                    continue;
                }
                XYSeries series = new XYSeries(poolTypes[i].toString());
                out.println(poolTypes[i].toString());
                for (int j = 1 + BenchmarkTests.stepping; j < results[i].length; j += BenchmarkTests.stepping) {
                    series.add(j, results[i][j]);
                    out.println(j + "," + results[i][j]);
                }
                dataset.addSeries(series);
            }
        }
        fname += ".png";
        // Generate the graph
        JFreeChart chart = ChartFactory.createXYLineChart(title, // Title
            "threads", // x-axis Label
            "time (ns)", // y-axis Label
            dataset, // Dataset
            PlotOrientation.VERTICAL, // Plot Orientation
            true, // Show Legend
            true, // Use tooltips
            false // Configure chart to generate URLs?
        );
        XYPlot plot = (XYPlot) chart.getPlot();
        XYLineAndShapeRenderer renderer = new XYLineAndShapeRenderer(true, false);
        plot.setRenderer(renderer);
        renderer.setSeriesPaint(0, Color.BLUE);
        renderer.setSeriesPaint(1, Color.YELLOW);
        renderer.setSeriesPaint(2, Color.BLACK);
        renderer.setSeriesPaint(3, Color.DARK_GRAY);
        renderer.setSeriesPaint(4, Color.MAGENTA);
        renderer.setSeriesPaint(5, Color.RED);
        renderer.setSeriesPaint(6, Color.LIGHT_GRAY);
        // renderer.setSeriesShapesVisible(1, true);
        // renderer.setSeriesShapesVisible(2, true);
        try {
            ChartUtilities.saveChartAsPNG(new File(fname), chart, 1024, 768);
            System.out.println("******* Saved chart to: " + fname);
        } catch (IOException e) {
            // Keep going: a failed chart render should not abort the benchmark,
            // but do surface the cause (the old code discarded it).
            e.printStackTrace();
            System.err.println("Problem occurred creating chart.");
        }
    }

    /**
     * Writes a CSV dump and renders a PNG bar chart for a single-thread run.
     *
     * @param title    chart title
     * @param filename output file name (already includes the .png suffix)
     * @param results  one timing (ms) per {@link ConnectionPoolType} ordinal
     * @throws IOException
     */
    private static void plotBarGraph(String title, String filename, long[] results) throws IOException {
        String fname = System.getProperty("java.io.tmpdir") + File.separator + filename;
        DefaultCategoryDataset dataset = new DefaultCategoryDataset();
        // try-with-resources: the writer is closed even on failure.
        try (PrintWriter out = new PrintWriter(new FileWriter(fname + ".txt"))) {
            for (ConnectionPoolType poolType : ConnectionPoolType.values()) {
                dataset.setValue(results[poolType.ordinal()], "ms", poolType);
                out.println(results[poolType.ordinal()] + "," + poolType);
            }
        }
        JFreeChart chart = ChartFactory.createBarChart(title,
            "Connection Pool", "Time (ms)", dataset, PlotOrientation.VERTICAL, false,
            true, false);
        try {
            ChartUtilities.saveChartAsPNG(new File(fname), chart, 1024,
                768);
            System.out.println("******* Saved chart to: " + fname);
        } catch (IOException e) {
            e.printStackTrace();
            System.err.println("Problem occurred creating chart.");
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.azure.queue;
import java.util.EnumSet;
import com.microsoft.azure.storage.queue.CloudQueue;
import com.microsoft.azure.storage.queue.CloudQueueMessage;
import com.microsoft.azure.storage.queue.MessageUpdateFields;
import com.microsoft.azure.storage.queue.QueueListingDetails;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.component.azure.blob.BlobServiceConstants;
import org.apache.camel.component.azure.common.ExchangeUtil;
import org.apache.camel.support.DefaultProducer;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.URISupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A Producer which sends messages to the Azure Storage Queue Service
*/
/**
 * A Producer which sends messages to the Azure Storage Queue Service.
 * <p>
 * The operation to perform is taken from the exchange header (or the endpoint
 * configuration); when none is specified the producer defaults to
 * {@code listQueues}.
 */
public class QueueServiceProducer extends DefaultProducer {

    private static final Logger LOG = LoggerFactory.getLogger(QueueServiceProducer.class);

    public QueueServiceProducer(final Endpoint endpoint) {
        super(endpoint);
    }

    /**
     * Dispatches the exchange to the requested queue operation.
     * <p>
     * Bug fix: the previous version assigned the {@code listQueues} default
     * inside an {@code if} whose {@code else} held the switch, so an exchange
     * with no operation was silently dropped. The default now actually runs.
     */
    @Override
    public void process(final Exchange exchange) throws Exception {
        QueueServiceOperations operation = determineOperation(exchange);
        if (ObjectHelper.isEmpty(operation)) {
            // Least-destructive default when no operation is specified.
            operation = QueueServiceOperations.listQueues;
        }
        switch (operation) {
            case retrieveMessage:
                retrieveMessage(exchange);
                break;
            case peekMessage:
                peekMessage(exchange);
                break;
            case createQueue:
                createQueue(exchange);
                break;
            case deleteQueue:
                deleteQueue(exchange);
                break;
            case addMessage:
                addMessage(exchange);
                break;
            case updateMessage:
                updateMessage(exchange);
                break;
            case deleteMessage:
                deleteMessage(exchange);
                break;
            case listQueues:
                listQueues(exchange);
                break;
            default:
                throw new IllegalArgumentException("Unsupported operation");
        }
    }

    /** Lists queues (filtered by the configured prefix) into the response body. */
    private void listQueues(Exchange exchange) throws Exception {
        CloudQueue client = QueueServiceUtil.createQueueClient(getConfiguration());
        QueueServiceRequestOptions opts = QueueServiceUtil.getRequestOptions(exchange);
        QueueListingDetails details
            = (QueueListingDetails) exchange.getIn().getHeader(QueueServiceConstants.QUEUE_LISTING_DETAILS);
        if (details == null) {
            details = QueueListingDetails.ALL;
        }
        Iterable<CloudQueue> list = client.getServiceClient().listQueues(
            getConfiguration().getQueuePrefix(), details,
            opts.getRequestOpts(), opts.getOpContext());
        ExchangeUtil.getMessageForResponse(exchange).setBody(list);
    }

    /** Creates the configured queue if it does not already exist. */
    private void createQueue(Exchange exchange) throws Exception {
        CloudQueue client = QueueServiceUtil.createQueueClient(getConfiguration());
        QueueServiceRequestOptions opts = QueueServiceUtil.getRequestOptions(exchange);
        doCreateQueue(client, opts, exchange);
    }

    /** Creates the queue (idempotent) and marks the exchange as QUEUE_CREATED. */
    private void doCreateQueue(CloudQueue client, QueueServiceRequestOptions opts, Exchange exchange) throws Exception {
        LOG.trace("Creating the queue [{}] from exchange [{}]...",
            getConfiguration().getQueueName(), exchange);
        client.createIfNotExists(opts.getRequestOpts(), opts.getOpContext());
        ExchangeUtil.getMessageForResponse(exchange)
            .setHeader(QueueServiceConstants.QUEUE_CREATED, Boolean.TRUE);
    }

    /** Deletes the configured queue. */
    private void deleteQueue(Exchange exchange) throws Exception {
        LOG.trace("Deleting the queue [{}] from exchange [{}]...",
            getConfiguration().getQueueName(), exchange);
        CloudQueue client = QueueServiceUtil.createQueueClient(getConfiguration());
        QueueServiceRequestOptions opts = QueueServiceUtil.getRequestOptions(exchange);
        client.delete(opts.getRequestOpts(), opts.getOpContext());
    }

    /**
     * Adds the exchange body as a message, creating the queue first unless the
     * QUEUE_CREATED header says it already exists.
     */
    private void addMessage(Exchange exchange) throws Exception {
        LOG.trace("Putting the message into the queue [{}] from exchange [{}]...",
            getConfiguration().getQueueName(), exchange);
        CloudQueue client = QueueServiceUtil.createQueueClient(getConfiguration());
        QueueServiceRequestOptions opts = QueueServiceUtil.getRequestOptions(exchange);
        Boolean queueCreated = exchange.getIn().getHeader(QueueServiceConstants.QUEUE_CREATED,
            Boolean.class);
        if (Boolean.TRUE != queueCreated) {
            doCreateQueue(client, opts, exchange);
        }
        CloudQueueMessage message = getCloudQueueMessage(exchange);
        client.addMessage(message,
            getConfiguration().getMessageTimeToLive(),
            getConfiguration().getMessageVisibilityDelay(),
            opts.getRequestOpts(), opts.getOpContext());
    }

    /**
     * Updates an existing message; the fields to update come from the
     * MESSAGE_UPDATE_FIELDS header (a single value or an EnumSet).
     */
    private void updateMessage(Exchange exchange) throws Exception {
        CloudQueue client = QueueServiceUtil.createQueueClient(getConfiguration());
        QueueServiceRequestOptions opts = QueueServiceUtil.getRequestOptions(exchange);
        CloudQueueMessage message = getCloudQueueMessage(exchange);
        LOG.trace("Updating the message in the queue [{}] from exchange [{}]...",
            getConfiguration().getQueueName(), exchange);
        EnumSet<MessageUpdateFields> fields = null;
        Object fieldsObject = exchange.getIn().getHeader(QueueServiceConstants.MESSAGE_UPDATE_FIELDS);
        if (fieldsObject instanceof EnumSet) {
            @SuppressWarnings("unchecked")
            EnumSet<MessageUpdateFields> theFields = (EnumSet<MessageUpdateFields>) fieldsObject;
            fields = theFields;
        } else if (fieldsObject instanceof MessageUpdateFields) {
            fields = EnumSet.of((MessageUpdateFields) fieldsObject);
        }
        client.updateMessage(message,
            getConfiguration().getMessageVisibilityDelay(),
            fields,
            opts.getRequestOpts(), opts.getOpContext());
    }

    /** Deletes the message carried in the exchange body. */
    private void deleteMessage(Exchange exchange) throws Exception {
        LOG.trace("Deleting the message from the queue [{}] from exchange [{}]...",
            getConfiguration().getQueueName(), exchange);
        CloudQueue client = QueueServiceUtil.createQueueClient(getConfiguration());
        QueueServiceRequestOptions opts = QueueServiceUtil.getRequestOptions(exchange);
        CloudQueueMessage message = getCloudQueueMessage(exchange);
        client.deleteMessage(message, opts.getRequestOpts(), opts.getOpContext());
    }

    /** Retrieves (dequeues) a message; delegates to the shared util. */
    private void retrieveMessage(Exchange exchange) throws Exception {
        QueueServiceUtil.retrieveMessage(exchange, getConfiguration());
    }

    /** Peeks at the front message without dequeuing it. */
    private void peekMessage(Exchange exchange) throws Exception {
        CloudQueue client = QueueServiceUtil.createQueueClient(getConfiguration());
        QueueServiceRequestOptions opts = QueueServiceUtil.getRequestOptions(exchange);
        CloudQueueMessage message = client.peekMessage(opts.getRequestOpts(), opts.getOpContext());
        ExchangeUtil.getMessageForResponse(exchange).setBody(message);
    }

    /**
     * Converts the mandatory exchange body into a CloudQueueMessage.
     *
     * @throws IllegalArgumentException if the body is neither a
     *         CloudQueueMessage nor a String
     */
    private CloudQueueMessage getCloudQueueMessage(Exchange exchange) throws Exception {
        Object body = exchange.getIn().getMandatoryBody();
        CloudQueueMessage message = null;
        if (body instanceof CloudQueueMessage) {
            message = (CloudQueueMessage) body;
        } else if (body instanceof String) {
            message = new CloudQueueMessage((String) body);
        }
        if (message == null) {
            throw new IllegalArgumentException("Unsupported queue message type:" + body.getClass().getName());
        }
        return message;
    }

    /** Resolves the operation from the exchange header, else the endpoint config. */
    private QueueServiceOperations determineOperation(Exchange exchange) {
        // NOTE(review): this reads BlobServiceConstants.OPERATION rather than a
        // queue-specific constant — presumably the two share the same header
        // name; verify before changing.
        QueueServiceOperations operation = exchange.getIn().getHeader(BlobServiceConstants.OPERATION,
            QueueServiceOperations.class);
        if (operation == null) {
            operation = getConfiguration().getOperation();
        }
        return operation;
    }

    protected QueueServiceConfiguration getConfiguration() {
        return getEndpoint().getConfiguration();
    }

    @Override
    public String toString() {
        return "StorageQueueProducer[" + URISupport.sanitizeUri(getEndpoint().getEndpointUri()) + "]";
    }

    @Override
    public QueueServiceEndpoint getEndpoint() {
        return (QueueServiceEndpoint) super.getEndpoint();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast;
import java.io.IOException;
import java.util.Random;
import org.apache.hadoop.hive.ql.exec.JoinUtil;
import org.apache.hadoop.hive.ql.exec.mapjoin.MapJoinMemoryExhaustionError;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMapResult;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastHashTable.VerifyFastLongHashMap;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VectorMapJoinFastLongHashMap;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKeyType;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.*;
public class TestVectorMapJoinFastLongHashMap extends CommonFastHashTable {
@Test
public void testOneKey() throws Exception {
    random = new Random(33221);
    VectorMapJoinFastLongHashMap hashMap =
        new VectorMapJoinFastLongHashMap(
            false, false, HashTableKeyType.LONG, CAPACITY, LOAD_FACTOR, WB_SIZE, -1);
    VerifyFastLongHashMap expected = new VerifyFastLongHashMap();
    long key = random.nextLong();
    // Insert three values under the same key and verify after each put;
    // the map must accumulate all values for a key.
    for (int round = 0; round < 3; round++) {
        byte[] value = new byte[random.nextInt(MAX_VALUE_LENGTH)];
        random.nextBytes(value);
        hashMap.testPutRow(key, value);
        expected.add(key, value);
        expected.verify(hashMap);
    }
}
@Test
public void testMultipleKeysSingleValue() throws Exception {
    random = new Random(900);
    VectorMapJoinFastLongHashMap hashMap =
        new VectorMapJoinFastLongHashMap(
            false, false, HashTableKeyType.LONG, CAPACITY, LOAD_FACTOR, WB_SIZE, -1);
    VerifyFastLongHashMap expected = new VerifyFastLongHashMap();
    int keyCount = 100 + random.nextInt(1000);
    for (int i = 0; i < keyCount; i++) {
        // Draw until we get a key not used yet — each key appears exactly once.
        long key;
        do {
            key = random.nextLong();
        } while (expected.contains(key));
        byte[] value = new byte[random.nextInt(MAX_VALUE_LENGTH)];
        random.nextBytes(value);
        hashMap.testPutRow(key, value);
        expected.add(key, value);
    }
    expected.verify(hashMap);
}
@Test
public void testGetNonExistent() throws Exception {
    random = new Random(450);
    VectorMapJoinFastLongHashMap map =
        new VectorMapJoinFastLongHashMap(
            false, false, HashTableKeyType.LONG, CAPACITY, LOAD_FACTOR, WB_SIZE, -1);
    VerifyFastLongHashMap verifyTable = new VerifyFastLongHashMap();

    long key1 = random.nextLong();
    byte[] value = new byte[random.nextInt(MAX_VALUE_LENGTH)];
    random.nextBytes(value);
    map.testPutRow(key1, value);
    verifyTable.add(key1, value);
    verifyTable.verify(map);

    // A neighbouring key that was never inserted must report NOMATCH.
    long key2 = key1 + 1;
    VectorMapJoinHashMapResult hashMapResult = map.createHashMapResult();
    JoinUtil.JoinResult joinResult = map.lookup(key2, hashMapResult);
    assertEquals(JoinUtil.JoinResult.NOMATCH, joinResult);
    assertFalse(hashMapResult.hasRows());

    // Insert it and probe the next neighbour — still absent, still NOMATCH.
    map.testPutRow(key2, value);
    verifyTable.add(key2, value);
    verifyTable.verify(map);
    long key3 = key2 + 1;
    hashMapResult = map.createHashMapResult();
    joinResult = map.lookup(key3, hashMapResult);
    assertEquals(JoinUtil.JoinResult.NOMATCH, joinResult);
    assertFalse(hashMapResult.hasRows());
}
@Test
public void testFullMap() throws Exception {
    random = new Random(93440);
    // Load factor 1.0 so the map fills to exactly CAPACITY without expanding;
    // it should still be able to place every entry.
    VectorMapJoinFastLongHashMap map =
        new VectorMapJoinFastLongHashMap(
            false, false, HashTableKeyType.LONG, CAPACITY, 1f, WB_SIZE, -1);
    VerifyFastLongHashMap verifyTable = new VerifyFastLongHashMap();
    for (int i = 0; i < CAPACITY; i++) {
        // Unique keys for this test.
        long key;
        do {
            key = random.nextLong();
        } while (verifyTable.contains(key));
        byte[] value = new byte[random.nextInt(MAX_VALUE_LENGTH)];
        random.nextBytes(value);
        map.testPutRow(key, value);
        verifyTable.add(key, value);
    }
    verifyTable.verify(map);

    // Even with the map completely full, an absent key must report NOMATCH
    // rather than looping or matching a wrong slot.
    long anotherKey;
    do {
        anotherKey = random.nextLong();
    } while (verifyTable.contains(anotherKey));
    VectorMapJoinHashMapResult hashMapResult = map.createHashMapResult();
    JoinUtil.JoinResult joinResult = map.lookup(anotherKey, hashMapResult);
    assertEquals(JoinUtil.JoinResult.NOMATCH, joinResult);
}
@Test
public void testExpand() throws Exception {
    random = new Random(5227);
    // Capacity 1 with a near-zero load factor: every put should trigger an
    // expansion, and lookups must survive all the rehashes.
    VectorMapJoinFastLongHashMap map =
        new VectorMapJoinFastLongHashMap(
            false, false, HashTableKeyType.LONG, 1, 0.0000001f, WB_SIZE, -1);
    VerifyFastLongHashMap verifyTable = new VerifyFastLongHashMap();
    for (int i = 0; i < 18; ++i) {
        // Unique keys for this test.
        long key;
        do {
            key = random.nextLong();
        } while (verifyTable.contains(key));
        byte[] value = new byte[random.nextInt(MAX_VALUE_LENGTH)];
        random.nextBytes(value);
        map.testPutRow(key, value);
        verifyTable.add(key, value);
    }
    verifyTable.verify(map);
}
/**
 * Convenience overload of
 * {@link #addAndVerifyMultipleKeyMultipleValue(int, VectorMapJoinFastLongHashMap, VerifyFastLongHashMap, int)}
 * that uses randomly sized values (a fixed length of -1 means "random").
 */
public void addAndVerifyMultipleKeyMultipleValue(int keyCount,
    VectorMapJoinFastLongHashMap map, VerifyFastLongHashMap verifyTable)
    throws HiveException, IOException {
  final int randomValueLengths = -1;
  addAndVerifyMultipleKeyMultipleValue(keyCount, map, verifyTable, randomValueLengths);
}
/**
 * Inserts {@code keyCount} rows into {@code map}, randomly either creating a
 * brand-new key or appending another value to an existing key, and checks the
 * map against the {@code verifyTable} mirror after every insert.
 *
 * @param fixedValueLength length of every generated value, or -1 to use a
 *                         random length per row
 */
public void addAndVerifyMultipleKeyMultipleValue(int keyCount,
    VectorMapJoinFastLongHashMap map, VerifyFastLongHashMap verifyTable, int fixedValueLength)
    throws HiveException, IOException {
  for (int i = 0; i < keyCount; i++) {
    int valueLength =
        (fixedValueLength == -1) ? generateLargeCount() - 1 : fixedValueLength;
    byte[] value = new byte[valueLength];
    random.nextBytes(value);

    // Flip a coin: new key, or extra value on an existing key? (Forced to
    // "new key" while the table is still empty.)
    boolean useNewKey = random.nextBoolean() || verifyTable.getCount() == 0;
    if (useNewKey) {
      long key;
      do {
        key = random.nextLong();
      } while (verifyTable.contains(key));  // keys must be unique here
      map.testPutRow(key, value);
      verifyTable.add(key, value);
      verifyTable.verify(map);
    } else {
      long existingKey = verifyTable.addRandomExisting(value, random);
      map.testPutRow(existingKey, value);
    }
    verifyTable.verify(map);
  }
}
@Test
public void testMultipleKeysMultipleValue() throws Exception {
  random = new Random(8);

  // LARGE_CAPACITY is big enough that inserting 1000 keys needs no expansion.
  VectorMapJoinFastLongHashMap map =
      new VectorMapJoinFastLongHashMap(
          false, false, HashTableKeyType.LONG, LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1);
  addAndVerifyMultipleKeyMultipleValue(1000, map, new VerifyFastLongHashMap());
}
@Test
public void testLargeAndExpand() throws Exception {
  random = new Random(20);

  // Uses MODERATE_CAPACITY with 1000 keys; given the test name this is
  // presumably small enough to trigger expansion (the comment in the
  // original looked copy-pasted from testMultipleKeysMultipleValue).
  VectorMapJoinFastLongHashMap map =
      new VectorMapJoinFastLongHashMap(
          false, false, HashTableKeyType.LONG, MODERATE_CAPACITY, LOAD_FACTOR, MODERATE_WB_SIZE, -1);
  addAndVerifyMultipleKeyMultipleValue(1000, map, new VerifyFastLongHashMap());
}
@Ignore
@Test
public void testOutOfBounds() throws Exception {
  random = new Random(42662);

  int HIGHEST_INT_POWER_OF_2 = 1073741824;
  boolean error = false;
  try {
    // The c'tor should throw the error; the created map is intentionally
    // never used.
    VectorMapJoinFastMultiKeyHashMap map =
        new VectorMapJoinFastMultiKeyHashMap(
            false, HIGHEST_INT_POWER_OF_2, LOAD_FACTOR, MODERATE_WB_SIZE, -1);
  } catch (MapJoinMemoryExhaustionError e) {
    error = true;
  }
  // Bug fix: the original used the Java `assert` keyword, which is a no-op
  // unless the JVM runs with -ea, so the check could never fail in a normal
  // test run. Use the JUnit assertion instead.
  assertTrue(error);
}
/*
// Doesn't finish in a reasonable amount of time....
@Test
public void testKeyCountLimit() throws Exception {
random = new Random(28400);
// Use a large capacity that doesn't require expansion, yet.
VectorMapJoinFastLongHashMap map =
new VectorMapJoinFastLongHashMap(
false, false, HashTableKeyType.LONG, MODERATE_CAPACITY, LOAD_FACTOR, MODERATE_WB_SIZE, 10000000);
VerifyFastLongHashMap verifyTable = new VerifyFastLongHashMap();
int keyCount = Integer.MAX_VALUE;
try {
addAndVerifyMultipleKeyMultipleValue(keyCount, map, verifyTable, 1);
} catch (RuntimeException re) {
System.out.println(re.toString());
assertTrue(re.toString().startsWith("Vector MapJoin Long Hash Table cannot grow any more"));
}
}
*/
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.util;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableSortedSet;
import org.junit.jupiter.api.Assertions;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
import java.util.Objects;
import java.util.SortedSet;
import java.util.function.Supplier;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Static utilities for JUnit tests.
*/
/**
 * Static utilities for JUnit tests.
 */
public abstract class TestUtil {
  //~ Static fields/initializers ---------------------------------------------

  private static final Pattern LINE_BREAK_PATTERN =
      Pattern.compile("\r\n|\r|\n");
  private static final Pattern TAB_PATTERN = Pattern.compile("\t");
  private static final String LINE_BREAK =
      "\\\\n\"" + Util.LINE_SEPARATOR + " + \"";
  private static final String JAVA_VERSION =
      System.getProperties().getProperty("java.version");
  private static final Supplier<Integer> GUAVA_MAJOR_VERSION =
      Suppliers.memoize(TestUtil::computeGuavaMajorVersion)::get;

  /** Matches a number with at least four zeros after the point. */
  private static final Pattern TRAILING_ZERO_PATTERN =
      Pattern.compile("-?[0-9]+\\.([0-9]*[1-9])?(00000*[0-9][0-9]?)");

  /** Matches a number with at least four nines after the point. */
  private static final Pattern TRAILING_NINE_PATTERN =
      Pattern.compile("-?[0-9]+\\.([0-9]*[0-8])?(99999*[0-9][0-9]?)");

  /** This is to be used by {@link #rethrow(Throwable, String)} to add extra information via
   * {@link Throwable#addSuppressed(Throwable)}. */
  private static class ExtraInformation extends Throwable {
    ExtraInformation(String message) {
      super(message);
    }
  }

  //~ Methods ----------------------------------------------------------------

  /** Asserts that two strings are equal; on failure the message also renders
   * the actual value as a Java string literal, ready to paste back into the
   * test as the new expected value. */
  public static void assertEqualsVerbose(
      String expected,
      String actual) {
    Assertions.assertEquals(expected, actual,
        () -> "Expected:\n"
            + expected
            + "\nActual:\n"
            + actual
            + "\nActual java:\n"
            + toJavaString(actual) + '\n');
  }

  /**
   * Converts a string (which may contain quotes and newlines) into a java
   * literal.
   *
   * <p>For example,
   * <pre><code>string with "quotes" split
   * across lines</code></pre>
   *
   * <p>becomes
   *
   * <blockquote><pre><code>"string with \"quotes\" split" + NL +
   * "across lines"</code></pre></blockquote>
   */
  public static String quoteForJava(String s) {
    s = Util.replace(s, "\\", "\\\\");
    s = Util.replace(s, "\"", "\\\"");
    s = LINE_BREAK_PATTERN.matcher(s).replaceAll(LINE_BREAK);
    s = TAB_PATTERN.matcher(s).replaceAll("\\\\t");
    s = "\"" + s + "\"";
    final String spurious = " + \n\"\"";
    if (s.endsWith(spurious)) {
      s = s.substring(0, s.length() - spurious.length());
    }
    return s;
  }

  /**
   * Converts a string (which may contain quotes and newlines) into a java
   * literal.
   *
   * <p>For example,</p>
   *
   * <blockquote><pre><code>string with "quotes" split
   * across lines</code></pre></blockquote>
   *
   * <p>becomes</p>
   *
   * <blockquote><pre><code>TestUtil.fold(
   *  "string with \"quotes\" split\n",
   *  + "across lines")</code></pre></blockquote>
   */
  public static String toJavaString(String s) {
    // Convert [string with "quotes" split
    // across lines]
    // into [fold(
    // "string with \"quotes\" split\n"
    // + "across lines")]
    //
    s = Util.replace(s, "\"", "\\\"");
    s = LINE_BREAK_PATTERN.matcher(s).replaceAll(LINE_BREAK);
    s = TAB_PATTERN.matcher(s).replaceAll("\\\\t");
    s = "\"" + s + "\"";
    String spurious = "\n \\+ \"\"";
    if (s.endsWith(spurious)) {
      s = s.substring(0, s.length() - spurious.length());
    }
    return s;
  }

  /**
   * Combines an array of strings, each representing a line, into a single
   * string containing line separators.
   */
  public static String fold(String... strings) {
    StringBuilder buf = new StringBuilder();
    for (String string : strings) {
      buf.append(string);
      buf.append('\n');
    }
    return buf.toString();
  }

  /** Quotes a string for Java or JSON. */
  public static String escapeString(String s) {
    return escapeString(new StringBuilder(), s).toString();
  }

  /** Quotes a string for Java or JSON, into a builder. */
  public static StringBuilder escapeString(StringBuilder buf, String s) {
    buf.append('"');
    int n = s.length();
    char lastChar = 0;
    for (int i = 0; i < n; ++i) {
      char c = s.charAt(i);
      switch (c) {
      case '\\':
        buf.append("\\\\");
        break;
      case '"':
        buf.append("\\\"");
        break;
      case '\n':
        buf.append("\\n");
        break;
      case '\r':
        // A "\r\n" pair was already emitted as "\n" by the previous
        // iteration; only a lone "\r" is escaped.
        if (lastChar != '\n') {
          buf.append("\\r");
        }
        break;
      default:
        buf.append(c);
        break;
      }
      lastChar = c;
    }
    return buf.append('"');
  }

  /**
   * Quotes a pattern, escaping every regex metacharacter so the result
   * matches the input literally.
   */
  public static String quotePattern(String s) {
    return s.replace("\\", "\\\\")
        .replace(".", "\\.")
        .replace("+", "\\+")
        .replace("{", "\\{")
        .replace("}", "\\}")
        // Bug fix: was "\\||", which produced "\||" and injected a spurious
        // unescaped alternation bar into the generated pattern.
        .replace("|", "\\|")
        .replace("$", "\\$")
        .replace("?", "\\?")
        .replace("*", "\\*")
        .replace("(", "\\(")
        .replace(")", "\\)")
        .replace("[", "\\[")
        .replace("]", "\\]");
  }

  /** Removes floating-point rounding errors from the end of a string.
   *
   * <p>{@code 12.300000006} becomes {@code 12.3};
   * {@code -12.37999999991} becomes {@code -12.38}. */
  public static String correctRoundedFloat(String s) {
    if (s == null) {
      return s;
    }
    final Matcher m = TRAILING_ZERO_PATTERN.matcher(s);
    if (m.matches()) {
      s = s.substring(0, s.length() - m.group(2).length());
    }
    final Matcher m2 = TRAILING_NINE_PATTERN.matcher(s);
    if (m2.matches()) {
      s = s.substring(0, s.length() - m2.group(2).length());
      if (s.length() > 0) {
        final char c = s.charAt(s.length() - 1);
        switch (c) {
        case '0':
        case '1':
        case '2':
        case '3':
        case '4':
        case '5':
        case '6':
        case '7':
        case '8':
          // '12.3499999996' became '12.34', now we make it '12.35'
          s = s.substring(0, s.length() - 1) + (char) (c + 1);
          break;
        case '.':
          // '12.9999991' became '12.', which we leave as is.
          break;
        }
      }
    }
    return s;
  }

  /**
   * Returns the Java major version: 7 for JDK 1.7, 8 for JDK 8, 10 for
   * JDK 10, etc. depending on current system property {@code java.version}.
   */
  public static int getJavaMajorVersion() {
    return majorVersionFromString(JAVA_VERSION);
  }

  /**
   * Detects java major version given long format of full JDK version.
   * See <a href="http://openjdk.java.net/jeps/223">JEP 223: New Version-String Scheme</a>.
   *
   * @param version current version as string usually from {@code java.version} property.
   * @return major java version ({@code 8, 9, 10, 11} etc.)
   */
  @VisibleForTesting
  static int majorVersionFromString(String version) {
    Objects.requireNonNull(version, "version");
    if (version.startsWith("1.")) {
      // running on version <= 8 (expecting string of type: x.y.z*)
      final String[] versions = version.split("\\.");
      return Integer.parseInt(versions[1]);
    }
    // probably running on > 8 (just get first integer which is major version)
    Matcher matcher = Pattern.compile("^\\d+").matcher(version);
    if (!matcher.lookingAt()) {
      throw new IllegalArgumentException("Can't parse (detect) JDK version from " + version);
    }
    return Integer.parseInt(matcher.group());
  }

  /** Returns the Guava major version. */
  public static int getGuavaMajorVersion() {
    return GUAVA_MAJOR_VERSION.get();
  }

  /** Computes the Guava major version. */
  private static int computeGuavaMajorVersion() {
    // A list of classes and the Guava version that they were introduced.
    // The list should not contain any classes that are removed in future
    // versions of Guava.
    return new VersionChecker()
        .tryClass(2, "com.google.common.collect.ImmutableList")
        .tryClass(14, "com.google.common.reflect.Parameter")
        .tryClass(17, "com.google.common.base.VerifyException")
        .tryClass(21, "com.google.common.io.RecursiveDeleteOption")
        .tryClass(23, "com.google.common.util.concurrent.FluentFuture")
        .tryClass(26, "com.google.common.util.concurrent.ExecutionSequencer")
        .bestVersion;
  }

  /** Returns the JVM vendor. */
  public static String getJavaVirtualMachineVendor() {
    return System.getProperty("java.vm.vendor");
  }

  /** Given a list, returns the elements that are not between an
   * element that is less and an element that is greater. */
  public static <E extends Comparable<E>> SortedSet<E> outOfOrderItems(List<E> list) {
    E previous = null;
    final ImmutableSortedSet.Builder<E> b = ImmutableSortedSet.naturalOrder();
    for (E e : list) {
      if (previous != null && previous.compareTo(e) > 0) {
        b.add(e);
      }
      previous = e;
    }
    return b.build();
  }

  /** Checks whether an exception, or any exception in its cause chain,
   * has a message containing the given substring. That is handy to prevent
   * logging SQL text twice. */
  public static boolean hasMessage(Throwable t, String substring) {
    while (t != null) {
      String message = t.getMessage();
      if (message != null && message.contains(substring)) {
        return true;
      }
      t = t.getCause();
    }
    return false;
  }

  /** Rethrows given exception keeping stacktraces clean and compact. */
  @SuppressWarnings("unchecked")
  public static <E extends Throwable> RuntimeException rethrow(Throwable e) throws E {
    if (e instanceof InvocationTargetException) {
      // Unwrap the reflective wrapper so callers see the real cause.
      e = e.getCause();
    }
    throw (E) e;
  }

  /** Rethrows given exception keeping stacktraces clean and compact,
   * attaching the given message as a suppressed throwable. */
  @SuppressWarnings("unchecked")
  public static <E extends Throwable> RuntimeException rethrow(Throwable e,
      String message) throws E {
    e.addSuppressed(new ExtraInformation(message));
    throw (E) e;
  }

  /** Returns string representation of the given {@link Throwable}. */
  public static String printStackTrace(Throwable t) {
    StringWriter sw = new StringWriter();
    PrintWriter pw = new PrintWriter(sw);
    t.printStackTrace(pw);
    pw.flush();
    return sw.toString();
  }

  /** Checks whether a given class exists, and updates a version if it does. */
  private static class VersionChecker {
    int bestVersion = -1;

    VersionChecker tryClass(int version, String className) {
      try {
        Class.forName(className);
        bestVersion = Math.max(version, bestVersion);
      } catch (ClassNotFoundException e) {
        // ignore: class absent simply means this Guava version marker is missing
      }
      return this;
    }
  }
}
| |
/*
* Copyright (c) 2014-2015 VMware, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, without warranties or
* conditions of any kind, EITHER EXPRESS OR IMPLIED. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.vmware.xenon.services.common;
import static org.junit.Assert.assertTrue;
import java.util.Date;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.concurrent.TimeoutException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.vmware.xenon.common.BasicTestCase;
import com.vmware.xenon.common.Operation;
import com.vmware.xenon.common.Service.Action;
import com.vmware.xenon.common.SynchronizationTaskService;
import com.vmware.xenon.common.UriUtils;
import com.vmware.xenon.common.Utils;
/**
 * Exercises the operation-index (operation tracing) service: verifies that
 * traced operations are indexed, that toggling tracing works across restarts,
 * and that requests to black-listed URIs are not indexed.
 */
public class TestOperationIndexService extends BasicTestCase {

    /**
     * Command line argument specifying request count
     */
    public int updateCount = 100;

    @Before
    public void setUp() throws Exception {
        try {
            // Start the tracing service
            this.host.toggleOperationTracing(this.host.getUri(), true);
        } catch (Throwable e) {
            throw new Exception(e);
        }
    }

    @After
    public void tearDown() throws Throwable {
        this.host.toggleOperationTracing(this.host.getUri(), false);
    }

    @Test
    public void testRestart() throws Throwable {
        // Toggling off then on again must succeed (service restart).
        this.host.toggleOperationTracing(this.host.getUri(), false);
        this.host.toggleOperationTracing(this.host.getUri(), true);
    }

    @Test
    public void testPost() throws Throwable {
        // POST updateCount example documents; each name is tracked with a
        // per-name counter of how many times the query returns it.
        this.host.testStart(this.updateCount);
        HashMap<String, Object> stateCountMap = new HashMap<>();
        for (int i = 0; i < this.updateCount; i++) {
            ExampleService.ExampleServiceState state = new ExampleService.ExampleServiceState();
            state.counter = (long) i;
            state.name = String.format("0x%08x", i);
            stateCountMap.put(state.name, 0);
            Operation op = Operation
                    .createPost(UriUtils.buildFactoryUri(this.host, ExampleService.class))
                    .setBody(state)
                    .setCompletion(this.host.getCompletion())
                    .setReferer(this.host.getReferer());
            this.host.sendRequest(op);
        }
        this.host.testWait();
        this.host.logThroughput();

        // Now query the operation index for the POSTs we just traced.
        QueryTask q = new QueryTask();
        q.querySpec = new QueryTask.QuerySpecification();
        q.querySpec.options = EnumSet.of(QueryTask.QuerySpecification.QueryOption.EXPAND_CONTENT);
        q.taskInfo.isDirect = true;
        QueryTask.Query pathClause = new QueryTask.Query()
                .setTermPropertyName("path")
                .setTermMatchValue(ExampleService.FACTORY_LINK);
        QueryTask.Query actionTypeClause = new QueryTask.Query()
                .setTermPropertyName("action")
                .setTermMatchValue(Action.POST.toString());
        q.querySpec.query.addBooleanClause(pathClause).addBooleanClause(actionTypeClause);
        q.indexLink = ServiceUriPaths.CORE_OPERATION_INDEX;

        // We need to poll even when testWait tells us the POST is done:
        // indexing is asynchronous relative to request completion.
        final boolean[] foundAllExpectedResults = { false };
        Operation queryOp = Operation
                .createPost(UriUtils.buildUri(this.host, ServiceUriPaths.CORE_QUERY_TASKS))
                .setCompletion((o, e) -> {
                    if (e != null) {
                        this.host.failIteration(e);
                        return;
                    }
                    QueryTask query = o.getBody(QueryTask.class);
                    if (query.results == null ||
                            query.results.documentLinks == null ||
                            query.results.documentLinks.size() != this.updateCount * 2) {
                        // didn't return all results. Try again.
                        this.host.completeIteration();
                        return;
                    }
                    foundAllExpectedResults[0] = true;
                    for (Object d : query.results.documents.values()) {
                        Operation.SerializedOperation sop = Utils.fromJson(d,
                                Operation.SerializedOperation.class);
                        if (!sop.documentKind.equals(Operation.SerializedOperation.KIND)) {
                            this.host.failIteration(new IllegalStateException("kind not equal"));
                            return;
                        }
                        ExampleService.ExampleServiceState state = Utils.fromJson(
                                sop.jsonBody, ExampleService.ExampleServiceState.class);
                        int curCount = (int) stateCountMap.get(state.name);
                        if (curCount != (int) stateCountMap.replace(state.name,
                                curCount + 1)) {
                            this.host
                                    .failIteration(new IllegalStateException("curCount not equal"));
                            return;
                        }
                    }
                    this.host.completeIteration();
                });

        while (new Date().before(this.host.getTestExpiration())) {
            this.host.testStart(1);
            this.host.send(queryOp.setBody(Utils.clone(q)));
            this.host.testWait();
            if (foundAllExpectedResults[0]) {
                break;
            }
            Thread.sleep(250);
        }

        // Bug fix: the original built "new TimeoutException()" without
        // throwing it, so an expired poll loop fell through silently. Fail
        // explicitly when the loop exited without finding all results.
        if (!foundAllExpectedResults[0]) {
            throw new TimeoutException("results not found before test expiration");
        }

        // Each operation sent by the test should be indexed twice (once in sendRequest, and
        // once in handleRequest). Verify that for each state we POSTED, the query returned 2 entries.
        for (Object v : stateCountMap.values()) {
            assertTrue((int) v == 2);
        }

        int c = 100;
        // Generate some traffic to black-listed URIs to check the operations don't get indexed.
        this.host.testStart(c);
        for (int i = 0; i < c; i++) {
            this.host.sendRequest(Operation
                    .createGet(UriUtils.buildUri(this.host, ServiceUriPaths.DEFAULT_NODE_GROUP))
                    .setReferer(this.host.getUri())
                    .setCompletion(this.host.getCompletion()));
        }
        this.host.testWait();

        // Verify the blacklist by querying for everything in the op index.
        pathClause.setTermMatchType(QueryTask.QueryTerm.MatchType.WILDCARD);
        pathClause.setTermMatchValue("*");
        queryOp.setBody(q)
                .setCompletion((o, e) -> {
                    try {
                        if (e != null) {
                            throw e;
                        }
                        QueryTask query = o.getBody(QueryTask.class);
                        if (query.results == null) {
                            throw new IllegalStateException("no results");
                        }
                        // Exclude all serialized operations specific to
                        // the synchronization-task.
                        int actualDocLinkSize = 0;
                        for (Object obj : query.results.documents.values()) {
                            String so = Utils.toJson(obj);
                            if (!so.contains(SynchronizationTaskService.FACTORY_LINK)) {
                                actualDocLinkSize++;
                            }
                        }
                        // we have at least updateCount * 2 worth of documents
                        if (actualDocLinkSize < this.updateCount * 2) {
                            throw new IllegalStateException("expected more operations");
                        }
                        // Check that there are no greater than the above + some fudge factor. We don't want
                        // too many documents in the index (thereby verifying the blacklist is working as intended).
                        // Use 10% of updateCount as the fudge factor. Anything greater than that just sounds unreasonable.
                        int maxDocLinkSize = this.updateCount * 2 + (this.updateCount / 10);
                        if (actualDocLinkSize > maxDocLinkSize) {
                            for (Object l : query.results.documents.values()) {
                                this.host.log("%s", l);
                            }
                            String msg = String
                                    .format("too many operations found. expected less than %d, but was %d. \n%s",
                                            maxDocLinkSize, actualDocLinkSize,
                                            Utils.toJsonHtml(query.results));
                            throw new IllegalStateException(msg);
                        }
                        this.host.completeIteration();
                    } catch (Throwable e1) {
                        this.host.failIteration(e1);
                    }
                });
        this.host.testStart(1);
        this.host.send(queryOp);
        this.host.testWait();
    }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.indexing.overlord.autoscaling.ec2;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.model.DescribeInstancesRequest;
import com.amazonaws.services.ec2.model.DescribeInstancesResult;
import com.amazonaws.services.ec2.model.Filter;
import com.amazonaws.services.ec2.model.Instance;
import com.amazonaws.services.ec2.model.InstanceNetworkInterfaceSpecification;
import com.amazonaws.services.ec2.model.Placement;
import com.amazonaws.services.ec2.model.Reservation;
import com.amazonaws.services.ec2.model.RunInstancesRequest;
import com.amazonaws.services.ec2.model.RunInstancesResult;
import com.amazonaws.services.ec2.model.TerminateInstancesRequest;
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Function;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.Lists;
import com.metamx.emitter.EmittingLogger;
import io.druid.indexing.overlord.autoscaling.AutoScaler;
import io.druid.indexing.overlord.autoscaling.AutoScalingData;
import io.druid.indexing.overlord.autoscaling.SimpleWorkerResourceManagementConfig;
import java.util.List;
/**
*/
/**
 * An {@link AutoScaler} that provisions and terminates Druid workers as
 * Amazon EC2 instances. Lookup methods chunk their requests to stay under
 * the AWS filter-value limit.
 */
public class EC2AutoScaler implements AutoScaler<EC2EnvironmentConfig>
{
  private static final EmittingLogger log = new EmittingLogger(EC2AutoScaler.class);

  /** Maximum number of values AWS accepts in a single DescribeInstances filter. */
  public static final int MAX_AWS_FILTER_VALUES = 100;

  private final int minNumWorkers;
  private final int maxNumWorkers;
  private final EC2EnvironmentConfig envConfig;
  private final AmazonEC2 amazonEC2Client;
  private final SimpleWorkerResourceManagementConfig config;

  @JsonCreator
  public EC2AutoScaler(
      @JsonProperty("minNumWorkers") int minNumWorkers,
      @JsonProperty("maxNumWorkers") int maxNumWorkers,
      @JsonProperty("envConfig") EC2EnvironmentConfig envConfig,
      @JacksonInject AmazonEC2 amazonEC2Client,
      @JacksonInject SimpleWorkerResourceManagementConfig config
  )
  {
    this.minNumWorkers = minNumWorkers;
    this.maxNumWorkers = maxNumWorkers;
    this.envConfig = envConfig;
    this.amazonEC2Client = amazonEC2Client;
    this.config = config;
  }

  @Override
  @JsonProperty
  public int getMinNumWorkers()
  {
    return minNumWorkers;
  }

  @Override
  @JsonProperty
  public int getMaxNumWorkers()
  {
    return maxNumWorkers;
  }

  @Override
  @JsonProperty
  public EC2EnvironmentConfig getEnvConfig()
  {
    return envConfig;
  }

  /**
   * Launches new EC2 instances per the configured node data.
   *
   * @return the ids of the launched instances, or null on failure
   */
  @Override
  public AutoScalingData provision()
  {
    try {
      final EC2NodeData workerConfig = envConfig.getNodeData();

      // Resolve user data, stamping in the worker version when one is set.
      final String userDataBase64;
      if (envConfig.getUserData() == null) {
        userDataBase64 = null;
      } else {
        if (config.getWorkerVersion() == null) {
          userDataBase64 = envConfig.getUserData().getUserDataBase64();
        } else {
          userDataBase64 = envConfig.getUserData()
                                    .withVersion(config.getWorkerVersion())
                                    .getUserDataBase64();
        }
      }

      RunInstancesRequest request = new RunInstancesRequest(
          workerConfig.getAmiId(),
          workerConfig.getMinInstances(),
          workerConfig.getMaxInstances()
      )
          .withInstanceType(workerConfig.getInstanceType())
          .withPlacement(new Placement(envConfig.getAvailabilityZone()))
          .withKeyName(workerConfig.getKeyName())
          .withIamInstanceProfile(
              workerConfig.getIamProfile() == null
              ? null
              : workerConfig.getIamProfile().toIamInstanceProfileSpecification()
          )
          .withUserData(userDataBase64);

      // InstanceNetworkInterfaceSpecification.getAssociatePublicIpAddress may be
      // true or false by default in EC2, depending on the subnet.
      // Setting EC2NodeData.getAssociatePublicIpAddress explicitly will use that value instead,
      // leaving it null uses the EC2 default.
      if (workerConfig.getAssociatePublicIpAddress() != null) {
        request.withNetworkInterfaces(
            new InstanceNetworkInterfaceSpecification()
                .withAssociatePublicIpAddress(workerConfig.getAssociatePublicIpAddress())
                .withSubnetId(workerConfig.getSubnetId())
                .withGroups(workerConfig.getSecurityGroupIds())
                .withDeviceIndex(0)
        );
      } else {
        request
            .withSecurityGroupIds(workerConfig.getSecurityGroupIds())
            .withSubnetId(workerConfig.getSubnetId());
      }

      final RunInstancesResult result = amazonEC2Client.runInstances(request);

      final List<String> instanceIds = Lists.transform(
          result.getReservation().getInstances(),
          new Function<Instance, String>()
          {
            @Override
            public String apply(Instance input)
            {
              return input.getInstanceId();
            }
          }
      );

      log.info("Created instances: %s", instanceIds);

      // Reuse the id list computed above; the original duplicated the
      // identical Lists.transform a second time here.
      return new AutoScalingData(instanceIds);
    }
    catch (Exception e) {
      log.error(e, "Unable to provision any EC2 instances.");
    }

    return null;
  }

  /**
   * Terminates the instances with the given private IP addresses by first
   * resolving them to instance ids.
   *
   * @return the terminated ids, or null on failure
   */
  @Override
  public AutoScalingData terminate(List<String> ips)
  {
    if (ips.isEmpty()) {
      return new AutoScalingData(Lists.<String>newArrayList());
    }

    DescribeInstancesResult result = amazonEC2Client.describeInstances(
        new DescribeInstancesRequest()
            .withFilters(
                new Filter("private-ip-address", ips)
            )
    );

    List<Instance> instances = Lists.newArrayList();
    for (Reservation reservation : result.getReservations()) {
      instances.addAll(reservation.getInstances());
    }

    try {
      return terminateWithIds(
          Lists.transform(
              instances,
              new Function<Instance, String>()
              {
                @Override
                public String apply(Instance input)
                {
                  return input.getInstanceId();
                }
              }
          )
      );
    }
    catch (Exception e) {
      log.error(e, "Unable to terminate any instances.");
    }

    return null;
  }

  /**
   * Terminates the instances with the given instance ids.
   *
   * @return the terminated ids, or null on failure
   */
  @Override
  public AutoScalingData terminateWithIds(List<String> ids)
  {
    if (ids.isEmpty()) {
      return new AutoScalingData(Lists.<String>newArrayList());
    }

    try {
      log.info("Terminating instances[%s]", ids);
      amazonEC2Client.terminateInstances(
          new TerminateInstancesRequest(ids)
      );

      return new AutoScalingData(ids);
    }
    catch (Exception e) {
      log.error(e, "Unable to terminate any instances.");
    }

    return null;
  }

  /** Resolves private IP addresses to EC2 instance ids. */
  @Override
  public List<String> ipToIdLookup(List<String> ips)
  {
    final List<String> retVal = FluentIterable
        // chunk requests to avoid hitting default AWS limits on filters
        .from(Lists.partition(ips, MAX_AWS_FILTER_VALUES))
        .transformAndConcat(new Function<List<String>, Iterable<Reservation>>()
        {
          @Override
          public Iterable<Reservation> apply(List<String> input)
          {
            return amazonEC2Client.describeInstances(
                new DescribeInstancesRequest().withFilters(new Filter("private-ip-address", input))
            ).getReservations();
          }
        })
        .transformAndConcat(new Function<Reservation, Iterable<Instance>>()
        {
          @Override
          public Iterable<Instance> apply(Reservation reservation)
          {
            return reservation.getInstances();
          }
        })
        .transform(new Function<Instance, String>()
        {
          @Override
          public String apply(Instance instance)
          {
            return instance.getInstanceId();
          }
        }).toList();

    log.debug("Performing lookup: %s --> %s", ips, retVal);

    return retVal;
  }

  /** Resolves EC2 instance ids to private IP addresses. */
  @Override
  public List<String> idToIpLookup(List<String> nodeIds)
  {
    final List<String> retVal = FluentIterable
        // chunk requests to avoid hitting default AWS limits on filters
        .from(Lists.partition(nodeIds, MAX_AWS_FILTER_VALUES))
        .transformAndConcat(new Function<List<String>, Iterable<Reservation>>()
        {
          @Override
          public Iterable<Reservation> apply(List<String> input)
          {
            return amazonEC2Client.describeInstances(
                new DescribeInstancesRequest().withFilters(new Filter("instance-id", input))
            ).getReservations();
          }
        })
        .transformAndConcat(new Function<Reservation, Iterable<Instance>>()
        {
          @Override
          public Iterable<Instance> apply(Reservation reservation)
          {
            return reservation.getInstances();
          }
        })
        .transform(new Function<Instance, String>()
        {
          @Override
          public String apply(Instance instance)
          {
            return instance.getPrivateIpAddress();
          }
        }).toList();

    log.debug("Performing lookup: %s --> %s", nodeIds, retVal);

    return retVal;
  }

  @Override
  public String toString()
  {
    return "EC2AutoScaler{" +
           "envConfig=" + envConfig +
           ", maxNumWorkers=" + maxNumWorkers +
           ", minNumWorkers=" + minNumWorkers +
           '}';
  }

  // Equality intentionally covers only min/max workers and envConfig;
  // the injected client and config are runtime collaborators, not identity.
  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }

    EC2AutoScaler that = (EC2AutoScaler) o;

    if (maxNumWorkers != that.maxNumWorkers) {
      return false;
    }
    if (minNumWorkers != that.minNumWorkers) {
      return false;
    }
    if (envConfig != null ? !envConfig.equals(that.envConfig) : that.envConfig != null) {
      return false;
    }

    return true;
  }

  @Override
  public int hashCode()
  {
    int result = minNumWorkers;
    result = 31 * result + maxNumWorkers;
    result = 31 * result + (envConfig != null ? envConfig.hashCode() : 0);
    return result;
  }
}
| |
/*
* Generated by the Jasper component of Apache Tomcat
* Version: JspC/ApacheTomcat8
* Generated at: 2016-08-23 16:29:30 UTC
* Note: The last modified time of this file was set to
* the last modified time of the source file after
* generation to assist with modification tracking.
*/
package org.jivesoftware.openfire.admin;
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
import org.jivesoftware.openfire.XMPPServer;
import org.jivesoftware.openfire.handler.IQAuthHandler;
import org.jivesoftware.openfire.handler.IQRegisterHandler;
import org.jivesoftware.openfire.session.LocalClientSession;
import org.jivesoftware.util.ParamUtils;
import java.util.regex.Pattern;
import java.util.*;
public final class reg_002dsettings_jsp extends org.apache.jasper.runtime.HttpJspBase
implements org.apache.jasper.runtime.JspSourceDependent {
private static final javax.servlet.jsp.JspFactory _jspxFactory =
javax.servlet.jsp.JspFactory.getDefaultFactory();
private static java.util.Map<java.lang.String,java.lang.Long> _jspx_dependants;
private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody;
private javax.el.ExpressionFactory _el_expressionfactory;
private org.apache.tomcat.InstanceManager _jsp_instancemanager;
// Generated accessor: returns this page's source-dependency map
// (presumably file path -> last-modified time, per the generation header's
// note on modification tracking — TODO confirm); may be null.
public java.util.Map<java.lang.String,java.lang.Long> getDependants() {
return _jspx_dependants;
}
// Generated lifecycle hook: acquires the pooled fmt:message tag handler, the
// EL expression factory, and the Tomcat instance manager for this page.
// (Generated by Jasper — do not edit by hand; regenerate from the JSP.)
public void _jspInit() {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
_el_expressionfactory = _jspxFactory.getJspApplicationContext(getServletConfig().getServletContext()).getExpressionFactory();
_jsp_instancemanager = org.apache.jasper.runtime.InstanceManagerFactory.getInstanceManager(getServletConfig());
}
// Servlet shutdown hook: return all pooled fmt:message tag handlers so the
// pool can release them when this page is taken out of service.
public void _jspDestroy() {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.release();
}
// Renders the Openfire admin "Registration &amp; Login" settings page
// (reg-settings.jsp). On a "save" submit it applies in-band registration,
// password-change and anonymous-login flags plus the IP whitelists/blacklist,
// then re-reads the effective values and re-renders the form.
// The exact sequence of out.write()/out.print() calls IS the page markup;
// do not reorder.
public void _jspService(final javax.servlet.http.HttpServletRequest request, final javax.servlet.http.HttpServletResponse response)
throws java.io.IOException, javax.servlet.ServletException {
final javax.servlet.jsp.PageContext pageContext;
javax.servlet.http.HttpSession session = null;
final javax.servlet.ServletContext application;
final javax.servlet.ServletConfig config;
javax.servlet.jsp.JspWriter out = null;
final java.lang.Object page = this;
javax.servlet.jsp.JspWriter _jspx_out = null;
javax.servlet.jsp.PageContext _jspx_page_context = null;
try {
// Standard Jasper preamble: acquire a PageContext (error page "error.jsp",
// session enabled, 8K buffer, autoflush) and unpack its implicit objects.
response.setContentType("text/html");
pageContext = _jspxFactory.getPageContext(this, request, response,
"error.jsp", true, 8192, true);
_jspx_page_context = pageContext;
application = pageContext.getServletContext();
config = pageContext.getServletConfig();
session = pageContext.getSession();
out = pageContext.getOut();
_jspx_out = out;
out.write("\n\n\n\n\n\n\n\n\n");
// <jsp:useBean id="webManager" scope="page"> equivalent: reuse or create.
org.jivesoftware.util.WebManager webManager = null;
webManager = (org.jivesoftware.util.WebManager) _jspx_page_context.getAttribute("webManager", javax.servlet.jsp.PageContext.PAGE_SCOPE);
if (webManager == null){
webManager = new org.jivesoftware.util.WebManager();
_jspx_page_context.setAttribute("webManager", webManager, javax.servlet.jsp.PageContext.PAGE_SCOPE);
}
out.write('\n');
webManager.init(request, response, session, application, out );
out.write("\n\n<html>\n<head>\n<title>");
if (_jspx_meth_fmt_005fmessage_005f0(_jspx_page_context))
return;
out.write("</title>\n<meta name=\"pageID\" content=\"server-reg-and-login\"/>\n<meta name=\"helpPage\" content=\"manage_registration_and_login_settings.html\"/>\n</head>\n<body>\n\n");
// Get parameters
boolean save = request.getParameter("save") != null;
boolean inbandEnabled = ParamUtils.getBooleanParameter(request, "inbandEnabled");
boolean canChangePassword = ParamUtils.getBooleanParameter(request, "canChangePassword");
boolean anonLogin = ParamUtils.getBooleanParameter(request, "anonLogin");
String allowedIPs = request.getParameter("allowedIPs");
String allowedAnonymIPs = request.getParameter("allowedAnonymIPs");
String blockedIPs = request.getParameter("blockedIPs");
// Get an IQRegisterHandler:
IQRegisterHandler regHandler = XMPPServer.getInstance().getIQRegisterHandler();
IQAuthHandler authHandler = XMPPServer.getInstance().getIQAuthHandler();
if (save) {
// Apply the three boolean settings directly to the XMPP handlers.
regHandler.setInbandRegEnabled(inbandEnabled);
regHandler.setCanChangePassword(canChangePassword);
authHandler.setAllowAnonymous(anonLogin);
// Build a Map with the allowed IP addresses
// Pattern accepts dotted-quad IPv4 where the last three octets may be "*".
Pattern pattern = Pattern.compile("(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.)" +
"(?:(?:\\*|25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){2}" +
"(?:\\*|25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)");
// NOTE(review): StringTokenizer throws NPE if the "allowedIPs" parameter is
// missing from a save request — assumes the form always posts all three
// textareas; verify against the form markup below.
Set<String> allowedSet = new HashSet<String>();
StringTokenizer tokens = new StringTokenizer(allowedIPs, ", ");
while (tokens.hasMoreTokens()) {
String address = tokens.nextToken().trim();
if (pattern.matcher(address).matches()) {
allowedSet.add( address );
}
}
Set<String> allowedAnonymousSet = new HashSet<String>();
StringTokenizer tokens1 = new StringTokenizer(allowedAnonymIPs, ", ");
while (tokens1.hasMoreTokens()) {
String address = tokens1.nextToken().trim();
if (pattern.matcher(address).matches()) {
allowedAnonymousSet.add( address );
}
}
Set<String> blockedSet = new HashSet<String>();
StringTokenizer tokens2 = new StringTokenizer(blockedIPs, ", ");
while (tokens2.hasMoreTokens()) {
String address = tokens2.nextToken().trim();
if (pattern.matcher(address).matches()) {
blockedSet.add( address );
}
}
// Invalid entries were silently dropped above; only validated addresses
// reach the session-level filters.
LocalClientSession.setWhitelistedIPs( allowedSet );
LocalClientSession.setWhitelistedAnonymousIPs( allowedAnonymousSet );
LocalClientSession.setBlacklistedIPs( blockedSet );
// Log the event
// NOTE(review): the audit entry omits allowedAnonymIPs — looks like an
// oversight in the source JSP; confirm before relying on the audit log.
webManager.logEvent("edited registration settings", "inband enabled = "+inbandEnabled+"\ncan change password = "+canChangePassword+"\nanon login = "+anonLogin+"\nallowed ips = "+allowedIPs+"\nblocked ips = "+blockedIPs);
}
// Reset the value of page vars:
// Re-read the effective settings so the form shows what is actually stored,
// not the raw request parameters.
inbandEnabled = regHandler.isInbandRegEnabled();
canChangePassword = regHandler.canChangePassword();
anonLogin = authHandler.isAnonymousAllowed();
// Encode the allowed IP addresses
// Join each stored set back into a comma-separated display string.
StringBuilder buf = new StringBuilder();
Iterator<String> iter = org.jivesoftware.openfire.session.LocalClientSession.getWhitelistedIPs().iterator();
if (iter.hasNext()) {
buf.append(iter.next());
}
while (iter.hasNext()) {
buf.append(", ").append(iter.next());
}
allowedIPs = buf.toString();
StringBuilder buf1 = new StringBuilder();
Iterator<String> iter1 = org.jivesoftware.openfire.session.LocalClientSession.getWhitelistedAnonymousIPs().iterator();
if (iter1.hasNext()) {
buf1.append(iter1.next());
}
while (iter1.hasNext()) {
buf1.append(", ").append(iter1.next());
}
allowedAnonymIPs = buf1.toString();
StringBuilder buf2 = new StringBuilder();
Iterator<String> iter2 = org.jivesoftware.openfire.session.LocalClientSession.getBlacklistedIPs().iterator();
if (iter2.hasNext()) {
buf2.append(iter2.next());
}
while (iter2.hasNext()) {
buf2.append(", ").append(iter2.next());
}
blockedIPs = buf2.toString();
// From here on: static markup interleaved with localized <fmt:message> tags
// (each _jspx_meth_* helper returns true when the page must stop rendering)
// and the current setting values.
out.write("\n\n<p>\n");
if (_jspx_meth_fmt_005fmessage_005f1(_jspx_page_context))
return;
out.write("\n</p>\n\n<form action=\"reg-settings.jsp\">\n\n");
if (save) {
out.write("\n\n <div class=\"jive-success\">\n <table cellpadding=\"0\" cellspacing=\"0\" border=\"0\">\n <tbody>\n <tr><td class=\"jive-icon\"><img src=\"images/success-16x16.gif\" width=\"16\" height=\"16\" border=\"0\" alt=\"\"></td>\n <td class=\"jive-icon-label\">\n ");
if (_jspx_meth_fmt_005fmessage_005f2(_jspx_page_context))
return;
out.write("\n </td></tr>\n </tbody>\n </table>\n </div><br>\n\n");
}
out.write("\n\n<!-- BEGIN registration settings -->\n\t<!--<div class=\"jive-contentBoxHeader\">\n\n\t</div>-->\n\t<div class=\"jive-contentBox\" style=\"-moz-border-radius: 3px;\">\n\n\t<h4>");
if (_jspx_meth_fmt_005fmessage_005f3(_jspx_page_context))
return;
out.write("</h4>\n\t<p>\n ");
if (_jspx_meth_fmt_005fmessage_005f4(_jspx_page_context))
return;
out.write("\n </p>\n <table cellpadding=\"3\" cellspacing=\"0\" border=\"0\" width=\"100%\">\n <tbody>\n <tr>\n <td width=\"1%\">\n <input type=\"radio\" name=\"inbandEnabled\" value=\"true\" id=\"rb01\"\n ");
out.print( ((inbandEnabled) ? "checked" : "") );
out.write(">\n </td>\n <td width=\"99%\">\n <label for=\"rb01\"><b>");
if (_jspx_meth_fmt_005fmessage_005f5(_jspx_page_context))
return;
out.write("</b> -\n ");
if (_jspx_meth_fmt_005fmessage_005f6(_jspx_page_context))
return;
out.write("</label>\n </td>\n </tr>\n <tr>\n <td width=\"1%\">\n <input type=\"radio\" name=\"inbandEnabled\" value=\"false\" id=\"rb02\"\n ");
out.print( ((!inbandEnabled) ? "checked" : "") );
out.write(">\n </td>\n <td width=\"99%\">\n <label for=\"rb02\"><b>");
if (_jspx_meth_fmt_005fmessage_005f7(_jspx_page_context))
return;
out.write("</b> - ");
if (_jspx_meth_fmt_005fmessage_005f8(_jspx_page_context))
return;
out.write("</label>\n </td>\n </tr>\n </tbody>\n </table>\n\n\t<br>\n\t<br>\n\n\t<h4>");
if (_jspx_meth_fmt_005fmessage_005f9(_jspx_page_context))
return;
out.write("</h4>\n\t<p>\n ");
if (_jspx_meth_fmt_005fmessage_005f10(_jspx_page_context))
return;
out.write("\n </p>\n <table cellpadding=\"3\" cellspacing=\"0\" border=\"0\" width=\"100%\">\n <tbody>\n <tr>\n <td width=\"1%\">\n <input type=\"radio\" name=\"canChangePassword\" value=\"true\" id=\"rb03\"\n ");
out.print( ((canChangePassword) ? "checked" : "") );
out.write(">\n </td>\n <td width=\"99%\">\n <label for=\"rb03\"><b>");
if (_jspx_meth_fmt_005fmessage_005f11(_jspx_page_context))
return;
out.write("</b> - ");
if (_jspx_meth_fmt_005fmessage_005f12(_jspx_page_context))
return;
out.write("</label>\n </td>\n </tr>\n <tr>\n <td width=\"1%\">\n <input type=\"radio\" name=\"canChangePassword\" value=\"false\" id=\"rb04\"\n ");
out.print( ((!canChangePassword) ? "checked" : "") );
out.write(">\n </td>\n <td width=\"99%\">\n <label for=\"rb04\"><b>");
if (_jspx_meth_fmt_005fmessage_005f13(_jspx_page_context))
return;
out.write("</b> - ");
if (_jspx_meth_fmt_005fmessage_005f14(_jspx_page_context))
return;
out.write("</label>\n </td>\n </tr>\n </tbody>\n </table>\n\n\t<br>\n\t<br>\n\n\t<h4>");
if (_jspx_meth_fmt_005fmessage_005f15(_jspx_page_context))
return;
out.write("</h4>\n\t<p>\n ");
if (_jspx_meth_fmt_005fmessage_005f16(_jspx_page_context))
return;
out.write("\n </p>\n <table cellpadding=\"3\" cellspacing=\"0\" border=\"0\" width=\"100%\">\n <tbody>\n <tr>\n <td width=\"1%\">\n <input type=\"radio\" name=\"anonLogin\" value=\"true\" id=\"rb05\"\n ");
out.print( ((anonLogin) ? "checked" : "") );
out.write(">\n </td>\n <td width=\"99%\">\n <label for=\"rb05\"><b>");
if (_jspx_meth_fmt_005fmessage_005f17(_jspx_page_context))
return;
out.write("</b> - ");
if (_jspx_meth_fmt_005fmessage_005f18(_jspx_page_context))
return;
out.write("</label>\n </td>\n </tr>\n <tr>\n <td width=\"1%\">\n <input type=\"radio\" name=\"anonLogin\" value=\"false\" id=\"rb06\"\n ");
out.print( ((!anonLogin) ? "checked" : "") );
out.write(">\n </td>\n <td width=\"99%\">\n <label for=\"rb06\"><b>");
if (_jspx_meth_fmt_005fmessage_005f19(_jspx_page_context))
return;
out.write("</b> - ");
if (_jspx_meth_fmt_005fmessage_005f20(_jspx_page_context))
return;
out.write("</label>\n </td>\n </tr>\n </tbody>\n </table>\n\n\t<br>\n\t<br>\n\n\t<h4>");
if (_jspx_meth_fmt_005fmessage_005f21(_jspx_page_context))
return;
out.write("</h4>\n <p>\n ");
if (_jspx_meth_fmt_005fmessage_005f22(_jspx_page_context))
return;
out.write("\n </p>\n <table cellpadding=\"3\" cellspacing=\"0\" border=\"0\" width=\"100%\">\n <tbody>\n <tr>\n <td valign='top'><b>");
if (_jspx_meth_fmt_005fmessage_005f23(_jspx_page_context))
return;
out.write("</b></td>\n <td>\n <textarea name=\"blockedIPs\" cols=\"40\" rows=\"3\" wrap=\"virtual\">");
out.print( ((blockedIPs != null) ? blockedIPs : "") );
out.write("</textarea>\n </td>\n </tr>\n </tbody>\n </table>\n\n\t<p>\n ");
if (_jspx_meth_fmt_005fmessage_005f24(_jspx_page_context))
return;
out.write("\n </p>\n <table cellpadding=\"3\" cellspacing=\"0\" border=\"0\" width=\"100%\">\n <tbody>\n <tr>\n <td valign='top'><b>");
if (_jspx_meth_fmt_005fmessage_005f25(_jspx_page_context))
return;
out.write("</b></td>\n <td>\n <textarea name=\"allowedIPs\" cols=\"40\" rows=\"3\" wrap=\"virtual\">");
out.print( ((allowedIPs != null) ? allowedIPs : "") );
out.write("</textarea>\n </td>\n </tr>\n <tr>\n <td valign='top'><b>");
if (_jspx_meth_fmt_005fmessage_005f26(_jspx_page_context))
return;
out.write("</b></td>\n <td>\n <textarea name=\"allowedAnonymIPs\" cols=\"40\" rows=\"3\" wrap=\"virtual\">");
out.print( ((allowedAnonymIPs != null) ? allowedAnonymIPs : "") );
out.write("</textarea>\n </td>\n </tr>\n </tbody>\n </table>\n\t\n\t</div>\n <input type=\"submit\" name=\"save\" value=\"");
if (_jspx_meth_fmt_005fmessage_005f27(_jspx_page_context))
return;
out.write("\">\n<!-- END registration settings -->\n\n</form>\n\n\n</body>\n\n</html>\n");
} catch (java.lang.Throwable t) {
// Standard Jasper error handling: on any non-SkipPage throwable, discard or
// flush buffered output and delegate to the page's error handler.
if (!(t instanceof javax.servlet.jsp.SkipPageException)){
out = _jspx_out;
if (out != null && out.getBufferSize() != 0)
try {
if (response.isCommitted()) {
out.flush();
} else {
out.clearBuffer();
}
} catch (java.io.IOException e) {}
if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
else throw new ServletException(t);
}
} finally {
// Always return the PageContext to the factory, even after an error.
_jspxFactory.releasePageContext(_jspx_page_context);
}
}
// Emits <fmt:message key="reg.settings.title"/> (the page title) using a
// pooled JSTL MessageTag handler.
// Returns true when the tag signals SKIP_PAGE, i.e. the caller must abort
// rendering. Cleanup vs. the generated original: the unused pageContext/out/
// eval locals are removed and the duplicated pool-release call is collapsed
// into one; a handler that throws is still (intentionally) not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f0(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
// Borrow a tag handler from the shared pool instead of allocating per request.
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.title");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Emits <fmt:message key="reg.settings.info"/> using a pooled JSTL MessageTag.
// Returns true when the tag signals SKIP_PAGE (caller must stop rendering).
// Cleanup vs. the generated original: unused locals removed, duplicated
// pool-release collapsed; a handler that throws is still not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f1(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.info");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Emits <fmt:message key="reg.settings.update"/> (save-success banner text)
// using a pooled JSTL MessageTag.
// Returns true when the tag signals SKIP_PAGE (caller must stop rendering).
// Cleanup vs. the generated original: unused locals removed, duplicated
// pool-release collapsed; a handler that throws is still not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f2(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.update");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Emits <fmt:message key="reg.settings.inband_account"/> (section heading)
// using a pooled JSTL MessageTag.
// Returns true when the tag signals SKIP_PAGE (caller must stop rendering).
// Cleanup vs. the generated original: unused locals removed, duplicated
// pool-release collapsed; a handler that throws is still not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f3(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.inband_account");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Emits <fmt:message key="reg.settings.inband_account_info"/> using a pooled
// JSTL MessageTag.
// Returns true when the tag signals SKIP_PAGE (caller must stop rendering).
// Cleanup vs. the generated original: unused locals removed, duplicated
// pool-release collapsed; a handler that throws is still not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f4(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.inband_account_info");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Emits <fmt:message key="reg.settings.enable"/> (inband-registration radio
// label) using a pooled JSTL MessageTag.
// Returns true when the tag signals SKIP_PAGE (caller must stop rendering).
// Cleanup vs. the generated original: unused locals removed, duplicated
// pool-release collapsed; a handler that throws is still not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f5(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.enable");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Emits <fmt:message key="reg.settings.auto_create_user"/> using a pooled
// JSTL MessageTag.
// Returns true when the tag signals SKIP_PAGE (caller must stop rendering).
// Cleanup vs. the generated original: unused locals removed, duplicated
// pool-release collapsed; a handler that throws is still not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f6(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.auto_create_user");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Emits <fmt:message key="reg.settings.disable"/> (inband-registration radio
// label) using a pooled JSTL MessageTag.
// Returns true when the tag signals SKIP_PAGE (caller must stop rendering).
// Cleanup vs. the generated original: unused locals removed, duplicated
// pool-release collapsed; a handler that throws is still not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f7(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.disable");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Emits <fmt:message key="reg.settings.not_auto_create"/> using a pooled
// JSTL MessageTag.
// Returns true when the tag signals SKIP_PAGE (caller must stop rendering).
// Cleanup vs. the generated original: unused locals removed, duplicated
// pool-release collapsed; a handler that throws is still not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f8(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.not_auto_create");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Emits <fmt:message key="reg.settings.change_password"/> (section heading)
// using a pooled JSTL MessageTag.
// Returns true when the tag signals SKIP_PAGE (caller must stop rendering).
// Cleanup vs. the generated original: unused locals removed, duplicated
// pool-release collapsed; a handler that throws is still not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f9(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.change_password");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Emits <fmt:message key="reg.settings.change_password_info"/> using a pooled
// JSTL MessageTag.
// Returns true when the tag signals SKIP_PAGE (caller must stop rendering).
// Cleanup vs. the generated original: unused locals removed, duplicated
// pool-release collapsed; a handler that throws is still not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f10(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.change_password_info");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Emits <fmt:message key="reg.settings.enable"/> (change-password radio
// label) using a pooled JSTL MessageTag.
// Returns true when the tag signals SKIP_PAGE (caller must stop rendering).
// Cleanup vs. the generated original: unused locals removed, duplicated
// pool-release collapsed; a handler that throws is still not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f11(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.enable");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Emits <fmt:message key="reg.settings.can_change"/> using a pooled JSTL
// MessageTag.
// Returns true when the tag signals SKIP_PAGE (caller must stop rendering).
// Cleanup vs. the generated original: unused locals removed, duplicated
// pool-release collapsed; a handler that throws is still not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f12(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.can_change");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Emits <fmt:message key="reg.settings.disable"/> (change-password radio
// label) using a pooled JSTL MessageTag.
// Returns true when the tag signals SKIP_PAGE (caller must stop rendering).
// Cleanup vs. the generated original: unused locals removed, duplicated
// pool-release collapsed; a handler that throws is still not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f13(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.disable");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Emits <fmt:message key="reg.settings.cannot_change"/> using a pooled JSTL
// MessageTag.
// Returns true when the tag signals SKIP_PAGE (caller must stop rendering).
// Cleanup vs. the generated original: unused locals removed, duplicated
// pool-release collapsed; a handler that throws is still not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f14(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.cannot_change");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Emits <fmt:message key="reg.settings.anonymous_login"/> (section heading)
// using a pooled JSTL MessageTag.
// Returns true when the tag signals SKIP_PAGE (caller must stop rendering).
// Cleanup vs. the generated original: unused locals removed, duplicated
// pool-release collapsed; a handler that throws is still not re-pooled.
private boolean _jspx_meth_fmt_005fmessage_005f15(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
tag.setPageContext(_jspx_page_context);
tag.setParent(null);
tag.setKey("reg.settings.anonymous_login");
// doStartTag() must precede doEndTag(); its result is irrelevant for a bodyless tag.
tag.doStartTag();
boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
return skipPage;
}
// Jasper-generated handler for <fmt:message key="reg.settings.anonymous_login_info"/> in /reg-settings.jsp.
// Returns true only when the tag handler requests SKIP_PAGE; the pooled tag is always reused.
private boolean _jspx_meth_fmt_005fmessage_005f16(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f16 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f16.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f16.setParent(null);
// /reg-settings.jsp(227,4) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f16.setKey("reg.settings.anonymous_login_info");
int _jspx_eval_fmt_005fmessage_005f16 = _jspx_th_fmt_005fmessage_005f16.doStartTag();
if (_jspx_th_fmt_005fmessage_005f16.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f16);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f16);
return false;
}
// Jasper-generated handler for <fmt:message key="reg.settings.enable"/> in /reg-settings.jsp.
// Returns true only when the tag handler requests SKIP_PAGE; the pooled tag is always reused.
private boolean _jspx_meth_fmt_005fmessage_005f17(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f17 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f17.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f17.setParent(null);
// /reg-settings.jsp(237,37) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f17.setKey("reg.settings.enable");
int _jspx_eval_fmt_005fmessage_005f17 = _jspx_th_fmt_005fmessage_005f17.doStartTag();
if (_jspx_th_fmt_005fmessage_005f17.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f17);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f17);
return false;
}
// Jasper-generated handler for <fmt:message key="reg.settings.anyone_login"/> in /reg-settings.jsp.
// Returns true only when the tag handler requests SKIP_PAGE; the pooled tag is always reused.
private boolean _jspx_meth_fmt_005fmessage_005f18(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f18 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f18.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f18.setParent(null);
// /reg-settings.jsp(237,85) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f18.setKey("reg.settings.anyone_login");
int _jspx_eval_fmt_005fmessage_005f18 = _jspx_th_fmt_005fmessage_005f18.doStartTag();
if (_jspx_th_fmt_005fmessage_005f18.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f18);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f18);
return false;
}
// Jasper-generated handler for <fmt:message key="reg.settings.disable"/> in /reg-settings.jsp.
// Returns true only when the tag handler requests SKIP_PAGE; the pooled tag is always reused.
private boolean _jspx_meth_fmt_005fmessage_005f19(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f19 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f19.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f19.setParent(null);
// /reg-settings.jsp(246,37) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f19.setKey("reg.settings.disable");
int _jspx_eval_fmt_005fmessage_005f19 = _jspx_th_fmt_005fmessage_005f19.doStartTag();
if (_jspx_th_fmt_005fmessage_005f19.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f19);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f19);
return false;
}
// Jasper-generated handler for <fmt:message key="reg.settings.only_registered_login"/> in /reg-settings.jsp.
// Returns true only when the tag handler requests SKIP_PAGE; the pooled tag is always reused.
private boolean _jspx_meth_fmt_005fmessage_005f20(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f20 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f20.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f20.setParent(null);
// /reg-settings.jsp(246,86) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f20.setKey("reg.settings.only_registered_login");
int _jspx_eval_fmt_005fmessage_005f20 = _jspx_th_fmt_005fmessage_005f20.doStartTag();
if (_jspx_th_fmt_005fmessage_005f20.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f20);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f20);
return false;
}
// Jasper-generated handler for <fmt:message key="reg.settings.allowed_ips"/> in /reg-settings.jsp.
// Returns true only when the tag handler requests SKIP_PAGE; the pooled tag is always reused.
private boolean _jspx_meth_fmt_005fmessage_005f21(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f21 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f21.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f21.setParent(null);
// /reg-settings.jsp(255,5) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f21.setKey("reg.settings.allowed_ips");
int _jspx_eval_fmt_005fmessage_005f21 = _jspx_th_fmt_005fmessage_005f21.doStartTag();
if (_jspx_th_fmt_005fmessage_005f21.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f21);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f21);
return false;
}
// Jasper-generated handler for <fmt:message key="reg.settings.allowed_ips_blocked_info"/> in /reg-settings.jsp.
// Returns true only when the tag handler requests SKIP_PAGE; the pooled tag is always reused.
private boolean _jspx_meth_fmt_005fmessage_005f22(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f22 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f22.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f22.setParent(null);
// /reg-settings.jsp(257,8) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f22.setKey("reg.settings.allowed_ips_blocked_info");
int _jspx_eval_fmt_005fmessage_005f22 = _jspx_th_fmt_005fmessage_005f22.doStartTag();
if (_jspx_th_fmt_005fmessage_005f22.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f22);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f22);
return false;
}
// Jasper-generated handler for <fmt:message key="reg.settings.ips_blocked"/> in /reg-settings.jsp.
// Returns true only when the tag handler requests SKIP_PAGE; the pooled tag is always reused.
private boolean _jspx_meth_fmt_005fmessage_005f23(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f23 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f23.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f23.setParent(null);
// /reg-settings.jsp(262,32) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f23.setKey("reg.settings.ips_blocked");
int _jspx_eval_fmt_005fmessage_005f23 = _jspx_th_fmt_005fmessage_005f23.doStartTag();
if (_jspx_th_fmt_005fmessage_005f23.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f23);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f23);
return false;
}
// Jasper-generated handler for <fmt:message key="reg.settings.allowed_ips_info"/> in /reg-settings.jsp.
// Returns true only when the tag handler requests SKIP_PAGE; the pooled tag is always reused.
private boolean _jspx_meth_fmt_005fmessage_005f24(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f24 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f24.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f24.setParent(null);
// /reg-settings.jsp(271,4) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f24.setKey("reg.settings.allowed_ips_info");
int _jspx_eval_fmt_005fmessage_005f24 = _jspx_th_fmt_005fmessage_005f24.doStartTag();
if (_jspx_th_fmt_005fmessage_005f24.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f24);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f24);
return false;
}
// Jasper-generated handler for <fmt:message key="reg.settings.ips_all"/> in /reg-settings.jsp.
// Returns true only when the tag handler requests SKIP_PAGE; the pooled tag is always reused.
private boolean _jspx_meth_fmt_005fmessage_005f25(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f25 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f25.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f25.setParent(null);
// /reg-settings.jsp(276,32) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f25.setKey("reg.settings.ips_all");
int _jspx_eval_fmt_005fmessage_005f25 = _jspx_th_fmt_005fmessage_005f25.doStartTag();
if (_jspx_th_fmt_005fmessage_005f25.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f25);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f25);
return false;
}
// Jasper-generated handler for <fmt:message key="reg.settings.ips_anonymous"/> in /reg-settings.jsp.
// Returns true only when the tag handler requests SKIP_PAGE; the pooled tag is always reused.
private boolean _jspx_meth_fmt_005fmessage_005f26(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f26 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f26.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f26.setParent(null);
// /reg-settings.jsp(282,32) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f26.setKey("reg.settings.ips_anonymous");
int _jspx_eval_fmt_005fmessage_005f26 = _jspx_th_fmt_005fmessage_005f26.doStartTag();
if (_jspx_th_fmt_005fmessage_005f26.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f26);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f26);
return false;
}
// Jasper-generated handler for <fmt:message key="global.save_settings"/> in /reg-settings.jsp.
// Returns true only when the tag handler requests SKIP_PAGE; the pooled tag is always reused.
private boolean _jspx_meth_fmt_005fmessage_005f27(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f27 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f27.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f27.setParent(null);
// /reg-settings.jsp(291,44) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f27.setKey("global.save_settings");
int _jspx_eval_fmt_005fmessage_005f27 = _jspx_th_fmt_005fmessage_005f27.doStartTag();
if (_jspx_th_fmt_005fmessage_005f27.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f27);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f27);
return false;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.jsonpath;
import java.io.CharConversionException;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.Arrays;
/**
* Special stream for JSON streams. Determines from the first 4 bytes the JSON
* encoding according to JSON specification RFC-4627 or newer. In addition BOMs
* are taken into account.
* <p>
* This class is not thread safe.
*/
public class JsonStream extends FilterInputStream {

    // Byte-order marks used for encoding detection (RFC 4627 and newer JSON specs).
    private static final byte[] BOM_UTF_32BE = new byte[] {0x00, 0x00, (byte) 0xFE, (byte) 0xFF};
    private static final byte[] BOM_UTF_32LE = new byte[] {(byte) 0xFF, (byte) 0xFE, 0x00, 0x00};
    private static final byte[] BOM_UTF_32_2143 = new byte[] {0x00, 0x00, (byte) 0xFF, (byte) 0xFE};
    private static final byte[] BOM_UTF_32_3412 = new byte[] {(byte) 0xFE, (byte) 0xFF, 0x00, 0x00};
    private static final byte[] BOM_UTF_16BE = new byte[] {(byte) 0xFE, (byte) 0xFF};
    private static final byte[] BOM_UTF_16LE = new byte[] {(byte) 0xFF, (byte) 0xFE};
    private static final byte[] BOM_UTF_8 = new byte[] {(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};

    /* look-ahead buffer holding the first (up to four) bytes used for encoding detection */
    private final byte[] fourByteBuffer = new byte[4];

    /* input index of the four byte buffer (BOMs are skipped) */
    private int inputIndex;

    /* number of valid bytes read into the buffer */
    private int inputEnd;

    private final Charset encoding;

    /**
     * Constructor. Determines the encoding during the instantiation according
     * to JSON specification RFC-4627 or newer. In addition BOMs are taken into
     * account.
     *
     * @param in
     *            input stream must contain a JSON content
     * @throws IOException
     *             if an error occurs during the determination of the encoding
     * @throws CharConversionException
     *             if the UCS4 endianess 2143 or 3412 is used
     * @throws IllegalArgumentException
     *             if the input stream is <code>null</code>
     */
    public JsonStream(InputStream in) throws IOException {
        super(in);
        if (in == null) {
            throw new IllegalArgumentException("input stream is null");
        }
        inputEnd = inputIndex = 0;
        Charset enc = null;
        if (loadAtLeast(4)) {
            enc = getEncodingFromBOM();
            if (enc == null) {
                // no BOM: infer UTF-16/UTF-32 from the null-byte pattern of the first characters
                enc = getUTF32EncodingFromNullPattern();
                if (enc == null) {
                    enc = getUTF16EncodingFromNullPattern();
                }
            }
        } else if (loadAtLeast(2)) {
            enc = getUTF16EncodingFromNullPattern();
        }
        if (enc == null) {
            // not found; as per specification, this means it must be UTF-8.
            enc = Charset.forName("UTF-8");
        }
        encoding = enc;
    }

    /** Returns the encoding detected from the first bytes of the stream. */
    public Charset getEncoding() {
        return encoding;
    }

    /**
     * Fills the look-ahead buffer until it contains at least {@code minimum}
     * unread bytes. Returns {@code false} if the underlying stream ends first.
     */
    private boolean loadAtLeast(int minimum) throws IOException {
        int received = inputEnd - inputIndex;
        while (received < minimum) {
            int count = in.read(fourByteBuffer, inputEnd, fourByteBuffer.length - inputEnd);
            if (count < 1) {
                return false;
            }
            inputEnd += count;
            received += count;
        }
        return true;
    }

    /**
     * Detects the encoding from a leading BOM, advancing {@code inputIndex}
     * past the BOM bytes. Returns {@code null} if no BOM is present.
     *
     * @throws CharConversionException for the unsupported UCS-4 2143/3412 orders
     */
    private Charset getEncodingFromBOM() throws IOException {
        // 32-bit encoding BOMs
        if (Arrays.equals(fourByteBuffer, BOM_UTF_32BE)) {
            inputIndex = 4;
            return Charset.forName("UTF-32BE");
        } else if (Arrays.equals(fourByteBuffer, BOM_UTF_32LE)) {
            inputIndex = 4;
            return Charset.forName("UTF-32LE");
        } else if (Arrays.equals(fourByteBuffer, BOM_UTF_32_2143)) {
            throw getExceptionUnsupportedUCS4("2143");
        } else if (Arrays.equals(fourByteBuffer, BOM_UTF_32_3412)) {
            throw getExceptionUnsupportedUCS4("3412");
        }
        byte[] firstTwoBytes = Arrays.copyOf(fourByteBuffer, 2);
        // 16-bit encoding BOMs
        if (Arrays.equals(firstTwoBytes, BOM_UTF_16BE)) {
            inputIndex = 2;
            return Charset.forName("UTF-16BE");
        }
        if (Arrays.equals(firstTwoBytes, BOM_UTF_16LE)) {
            inputIndex = 2;
            return Charset.forName("UTF-16LE");
        }
        byte[] firstThreeBytes = Arrays.copyOf(fourByteBuffer, 3);
        // UTF-8 BOM?
        if (Arrays.equals(firstThreeBytes, BOM_UTF_8)) {
            inputIndex = 3;
            return Charset.forName("UTF-8");
        }
        return null;
    }

    /**
     * Detects a UTF-32 encoding of BOM-less content from the pattern of null
     * bytes in the first four bytes (JSON text starts with ASCII characters).
     * Returns {@code null} if the pattern does not match any UTF-32 variant.
     */
    private Charset getUTF32EncodingFromNullPattern() throws IOException {
        // content without BOM
        if (fourByteBuffer[0] == 0 && fourByteBuffer[1] == 0 && fourByteBuffer[2] == 0) {
            // 00 00 00 xx
            return Charset.forName("UTF-32BE");
        } else if (fourByteBuffer[1] == 0 && fourByteBuffer[2] == 0 && fourByteBuffer[3] == 0) {
            // xx 00 00 00
            return Charset.forName("UTF-32LE");
        } else if (fourByteBuffer[0] == 0 && fourByteBuffer[2] == 0 && fourByteBuffer[3] == 0) {
            // 00 xx 00 00
            throw getExceptionUnsupportedUCS4("3412");
        } else if (fourByteBuffer[0] == 0 && fourByteBuffer[1] == 0 && fourByteBuffer[3] == 0) {
            // 00 00 xx 00
            throw getExceptionUnsupportedUCS4("2143");
        } else {
            // Cannot be valid UTF-32 encoded JSON...
            return null;
        }
    }

    /**
     * Detects a UTF-16 encoding of BOM-less content from a null byte in the
     * first two bytes. Returns {@code null} if the content is not UTF-16.
     */
    private Charset getUTF16EncodingFromNullPattern() {
        if (fourByteBuffer[0] == 0) {
            return Charset.forName("UTF-16BE");
        } else if (fourByteBuffer[1] == 0) {
            return Charset.forName("UTF-16LE");
        } else { // not UTF-16
            return null;
        }
    }

    private CharConversionException getExceptionUnsupportedUCS4(String type) {
        return new CharConversionException("Unsupported UCS-4 endianness (" + type + ") detected");
    }

    @Override
    public int read() throws IOException {
        if (inputIndex < inputEnd) {
            // Mask to an unsigned value: InputStream.read() must return 0..255, never a
            // sign-extended negative byte (0xFF would otherwise be mistaken for EOF).
            return fourByteBuffer[inputIndex++] & 0xFF;
        }
        try {
            return in.read();
        } catch (java.io.EOFException ex) {
            return -1;
        }
    }

    @Override
    public int read(byte[] b) throws IOException {
        // Delegate so the look-ahead buffer handling lives in one place.
        return read(b, 0, b.length);
    }

    @Override
    public int read(byte[] b, int off, int len) throws IOException {
        if (len == 0) {
            return 0;
        }
        if (inputIndex < inputEnd) {
            // Drain buffered look-ahead bytes first; copy at most len bytes
            // (not b.length, which could overrun the caller's requested range).
            int buffered = Math.min(len, inputEnd - inputIndex);
            for (int i = 0; i < buffered; i++) {
                b[off + i] = fourByteBuffer[inputIndex++];
            }
            int rest = len - buffered;
            if (rest == 0) {
                return buffered;
            }
            try {
                int additionalRead = in.read(b, off + buffered, rest);
                return additionalRead < 0 ? buffered : buffered + additionalRead;
            } catch (java.io.EOFException ex) {
                return buffered;
            }
        }
        try {
            return in.read(b, off, len);
        } catch (java.io.EOFException ex) {
            return -1;
        }
    }

    @Override
    public int available() throws IOException {
        // Buffered look-ahead bytes are available in addition to the underlying stream's.
        return (inputEnd - inputIndex) + in.available();
    }

    @Override
    public long skip(long n) throws IOException {
        if (n <= 0) {
            // Per the InputStream contract, a non-positive request skips nothing.
            return 0;
        }
        if (inputIndex < inputEnd) {
            // Consume buffered look-ahead bytes before skipping on the underlying stream.
            int buffered = (int) Math.min(n, inputEnd - inputIndex);
            inputIndex += buffered;
            long rest = n - buffered;
            if (rest == 0) {
                return buffered;
            }
            return buffered + in.skip(rest);
        }
        return in.skip(n);
    }

    @Override
    public synchronized void reset() throws IOException {
        throw new IOException("reset not supported");
    }

    @Override
    public boolean markSupported() {
        return false;
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.operator;
import com.google.common.collect.ImmutableListMultimap;
import io.airlift.http.client.HttpStatus;
import io.airlift.http.client.Request;
import io.airlift.http.client.Response;
import io.airlift.http.client.testing.TestingHttpClient;
import io.airlift.http.client.testing.TestingResponse;
import io.airlift.testing.TestingTicker;
import io.airlift.units.DataSize;
import io.airlift.units.DataSize.Unit;
import io.airlift.units.Duration;
import io.trino.FeaturesConfig.DataIntegrityVerification;
import io.trino.execution.StageId;
import io.trino.execution.TaskId;
import io.trino.execution.buffer.PagesSerde;
import io.trino.execution.buffer.SerializedPage;
import io.trino.operator.HttpPageBufferClient.ClientCallback;
import io.trino.spi.HostAddress;
import io.trino.spi.Page;
import io.trino.spi.TrinoException;
import io.trino.spi.block.RunLengthEncodedBlock;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import static com.google.common.net.HttpHeaders.CONTENT_TYPE;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.testing.Assertions.assertContains;
import static io.airlift.testing.Assertions.assertInstanceOf;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static io.trino.TrinoMediaTypes.TRINO_PAGES;
import static io.trino.execution.buffer.TestingPagesSerdeFactory.testingPagesSerde;
import static io.trino.spi.StandardErrorCode.EXCEEDED_LOCAL_MEMORY_LIMIT;
import static io.trino.spi.StandardErrorCode.PAGE_TOO_LARGE;
import static io.trino.spi.StandardErrorCode.PAGE_TRANSPORT_ERROR;
import static io.trino.spi.StandardErrorCode.PAGE_TRANSPORT_TIMEOUT;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.util.Failures.WORKER_NODE_ERROR;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
public class TestHttpPageBufferClient
{
private ScheduledExecutorService scheduler;
private ExecutorService pageBufferClientCallbackExecutor;
private static final PagesSerde PAGES_SERDE = testingPagesSerde();
private static final TaskId TASK_ID = new TaskId(new StageId("query", 0), 0, 0);
@BeforeClass
public void setUp()
{
    // Single-threaded executor dedicated to client callbacks.
    pageBufferClientCallbackExecutor = Executors.newSingleThreadExecutor();
    // Shared scheduler used by both the testing HTTP client and the page buffer clients.
    scheduler = newScheduledThreadPool(4, daemonThreadsNamed(getClass().getSimpleName() + "-%s"));
}
@AfterClass(alwaysRun = true)
public void tearDown()
{
    // Snapshot, clear, then shut down so the fields never point at stopped executors.
    ScheduledExecutorService schedulerToStop = scheduler;
    scheduler = null;
    if (schedulerToStop != null) {
        schedulerToStop.shutdownNow();
    }
    ExecutorService callbackExecutorToStop = pageBufferClientCallbackExecutor;
    pageBufferClientCallbackExecutor = null;
    if (callbackExecutorToStop != null) {
        callbackExecutorToStop.shutdownNow();
    }
}
// End-to-end exercise of HttpPageBufferClient against a mock exchange: fetch one page,
// poll with no data, fetch two pages, then observe buffer completion and the delete request.
// The requestComplete barrier (2 parties: test thread + callback) is awaited after every
// scheduleRequest() so assertions run only once the request has fully finished.
// NOTE(review): assertStatus/assertPageEquals are helpers defined elsewhere in this class;
// the numeric arguments appear to be cumulative request/page counters — verify against the helper.
@Test
public void testHappyPath()
throws Exception
{
Page expectedPage = new Page(100);
DataSize expectedMaxSize = DataSize.of(11, Unit.MEGABYTE);
MockExchangeRequestProcessor processor = new MockExchangeRequestProcessor(expectedMaxSize);
CyclicBarrier requestComplete = new CyclicBarrier(2);
TestingClientCallback callback = new TestingClientCallback(requestComplete);
URI location = URI.create("http://localhost:8080");
HttpPageBufferClient client = new HttpPageBufferClient(
"localhost",
new TestingHttpClient(processor, scheduler),
DataIntegrityVerification.ABORT,
expectedMaxSize,
new Duration(1, TimeUnit.MINUTES),
true,
TASK_ID,
location,
callback,
scheduler,
pageBufferClientCallbackExecutor);
assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled");
// fetch a page and verify
processor.addPage(location, expectedPage);
callback.resetStats();
client.scheduleRequest();
requestComplete.await(10, TimeUnit.SECONDS);
assertEquals(callback.getPages().size(), 1);
assertPageEquals(expectedPage, callback.getPages().get(0));
assertEquals(callback.getCompletedRequests(), 1);
assertEquals(callback.getFinishedBuffers(), 0);
assertStatus(client, location, "queued", 1, 1, 1, 0, "not scheduled");
// fetch no data and verify
callback.resetStats();
client.scheduleRequest();
requestComplete.await(10, TimeUnit.SECONDS);
assertEquals(callback.getPages().size(), 0);
assertEquals(callback.getCompletedRequests(), 1);
assertEquals(callback.getFinishedBuffers(), 0);
assertStatus(client, location, "queued", 1, 2, 2, 0, "not scheduled");
// fetch two more pages and verify
processor.addPage(location, expectedPage);
processor.addPage(location, expectedPage);
callback.resetStats();
client.scheduleRequest();
requestComplete.await(10, TimeUnit.SECONDS);
assertEquals(callback.getPages().size(), 2);
assertPageEquals(expectedPage, callback.getPages().get(0));
assertPageEquals(expectedPage, callback.getPages().get(1));
assertEquals(callback.getCompletedRequests(), 1);
assertEquals(callback.getFinishedBuffers(), 0);
assertEquals(callback.getFailedBuffers(), 0);
callback.resetStats();
assertStatus(client, location, "queued", 3, 3, 3, 0, "not scheduled");
// finish and verify
callback.resetStats();
processor.setComplete(location);
client.scheduleRequest();
requestComplete.await(10, TimeUnit.SECONDS);
// get the buffer complete signal
assertEquals(callback.getPages().size(), 0);
assertEquals(callback.getCompletedRequests(), 1);
// schedule the delete call to the buffer
callback.resetStats();
client.scheduleRequest();
requestComplete.await(10, TimeUnit.SECONDS);
assertEquals(callback.getFinishedBuffers(), 1);
assertEquals(callback.getPages().size(), 0);
assertEquals(callback.getCompletedRequests(), 0);
assertEquals(callback.getFailedBuffers(), 0);
assertStatus(client, location, "closed", 3, 5, 5, 0, "not scheduled");
}
// Verifies the client's status transitions (queued -> running -> queued -> closed) across
// a request lifecycle. StaticRequestProcessor blocks on beforeRequest/afterRequest barriers,
// letting the test observe the client mid-request ("running"/"PROCESSING_REQUEST") before
// releasing it; requestComplete is awaited so assertions see the fully-settled state.
// NOTE(review): assertStatus is a helper defined elsewhere in this class — the numeric
// arguments appear to be cumulative request counters; verify against the helper.
@Test
public void testLifecycle()
throws Exception
{
CyclicBarrier beforeRequest = new CyclicBarrier(2);
CyclicBarrier afterRequest = new CyclicBarrier(2);
StaticRequestProcessor processor = new StaticRequestProcessor(beforeRequest, afterRequest);
processor.setResponse(new TestingResponse(HttpStatus.NO_CONTENT, ImmutableListMultimap.of(), new byte[0]));
CyclicBarrier requestComplete = new CyclicBarrier(2);
TestingClientCallback callback = new TestingClientCallback(requestComplete);
URI location = URI.create("http://localhost:8080");
HttpPageBufferClient client = new HttpPageBufferClient(
"localhost",
new TestingHttpClient(processor, scheduler),
DataIntegrityVerification.ABORT,
DataSize.of(10, MEGABYTE),
new Duration(1, TimeUnit.MINUTES),
true,
TASK_ID,
location,
callback,
scheduler,
pageBufferClientCallbackExecutor);
assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled");
client.scheduleRequest();
beforeRequest.await(10, TimeUnit.SECONDS);
assertStatus(client, location, "running", 0, 1, 0, 0, "PROCESSING_REQUEST");
assertEquals(client.isRunning(), true);
afterRequest.await(10, TimeUnit.SECONDS);
requestComplete.await(10, TimeUnit.SECONDS);
assertStatus(client, location, "queued", 0, 1, 1, 1, "not scheduled");
// close() triggers one final request (the buffer delete) while the client reports "closed"
client.close();
beforeRequest.await(10, TimeUnit.SECONDS);
assertStatus(client, location, "closed", 0, 1, 1, 1, "PROCESSING_REQUEST");
afterRequest.await(10, TimeUnit.SECONDS);
requestComplete.await(10, TimeUnit.SECONDS);
assertStatus(client, location, "closed", 0, 1, 2, 1, "not scheduled");
}
@Test
public void testInvalidResponses()
throws Exception
{
// Single-party barriers trip immediately, so the processor answers without
// any handshake from the test thread.
CyclicBarrier beforeRequest = new CyclicBarrier(1);
CyclicBarrier afterRequest = new CyclicBarrier(1);
StaticRequestProcessor processor = new StaticRequestProcessor(beforeRequest, afterRequest);
// Released once per callback notification (two parties: callback thread + test thread).
CyclicBarrier requestComplete = new CyclicBarrier(2);
TestingClientCallback callback = new TestingClientCallback(requestComplete);
URI location = URI.create("http://localhost:8080");
HttpPageBufferClient client = new HttpPageBufferClient(
"localhost",
new TestingHttpClient(processor, scheduler),
DataIntegrityVerification.ABORT,
DataSize.of(10, MEGABYTE),
new Duration(1, TimeUnit.MINUTES),
true,
TASK_ID,
location,
callback,
scheduler,
pageBufferClientCallbackExecutor);
assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled");
// send not found response and verify response was ignored
// (no pages delivered, no buffer finished; failure recorded instead)
processor.setResponse(new TestingResponse(HttpStatus.NOT_FOUND, ImmutableListMultimap.of(CONTENT_TYPE, TRINO_PAGES), new byte[0]));
client.scheduleRequest();
requestComplete.await(10, TimeUnit.SECONDS);
assertEquals(callback.getPages().size(), 0);
assertEquals(callback.getCompletedRequests(), 1);
assertEquals(callback.getFinishedBuffers(), 0);
assertEquals(callback.getFailedBuffers(), 1);
assertInstanceOf(callback.getFailure(), PageTransportErrorException.class);
assertContains(callback.getFailure().getMessage(), "Expected response code to be 200, but was 404");
// status counts (scheduled/completed/failed) accumulate across attempts
assertStatus(client, location, "queued", 0, 1, 1, 1, "not scheduled");
// send invalid content type response and verify response was ignored
callback.resetStats();
processor.setResponse(new TestingResponse(HttpStatus.OK, ImmutableListMultimap.of(CONTENT_TYPE, "INVALID_TYPE"), new byte[0]));
client.scheduleRequest();
requestComplete.await(10, TimeUnit.SECONDS);
assertEquals(callback.getPages().size(), 0);
assertEquals(callback.getCompletedRequests(), 1);
assertEquals(callback.getFinishedBuffers(), 0);
assertEquals(callback.getFailedBuffers(), 1);
assertInstanceOf(callback.getFailure(), PageTransportErrorException.class);
assertContains(callback.getFailure().getMessage(), "Expected application/x-trino-pages response from server but got INVALID_TYPE");
assertStatus(client, location, "queued", 0, 2, 2, 2, "not scheduled");
// send unexpected content type response and verify response was ignored
callback.resetStats();
processor.setResponse(new TestingResponse(HttpStatus.OK, ImmutableListMultimap.of(CONTENT_TYPE, "text/plain"), new byte[0]));
client.scheduleRequest();
requestComplete.await(10, TimeUnit.SECONDS);
assertEquals(callback.getPages().size(), 0);
assertEquals(callback.getCompletedRequests(), 1);
assertEquals(callback.getFinishedBuffers(), 0);
assertEquals(callback.getFailedBuffers(), 1);
assertInstanceOf(callback.getFailure(), PageTransportErrorException.class);
assertContains(callback.getFailure().getMessage(), "Expected application/x-trino-pages response from server but got text/plain");
assertStatus(client, location, "queued", 0, 3, 3, 3, "not scheduled");
// close client and verify
// (close issues one more request, raising the completed count to 4)
processor.setResponse(new TestingResponse(HttpStatus.NO_CONTENT, ImmutableListMultimap.of(), new byte[0]));
client.close();
requestComplete.await(10, TimeUnit.SECONDS);
assertStatus(client, location, "closed", 0, 3, 4, 3, "not scheduled");
}
@Test
public void testCloseDuringPendingRequest()
        throws Exception
{
    // Two-party barriers let the test hold the processor mid-request so
    // close() can be invoked while a request is still pending.
    CyclicBarrier beforeRequest = new CyclicBarrier(2);
    CyclicBarrier afterRequest = new CyclicBarrier(2);
    StaticRequestProcessor processor = new StaticRequestProcessor(beforeRequest, afterRequest);
    processor.setResponse(new TestingResponse(HttpStatus.NO_CONTENT, ImmutableListMultimap.of(), new byte[0]));

    CyclicBarrier requestComplete = new CyclicBarrier(2);
    TestingClientCallback callback = new TestingClientCallback(requestComplete);

    URI location = URI.create("http://localhost:8080");
    HttpPageBufferClient client = new HttpPageBufferClient(
            "localhost",
            new TestingHttpClient(processor, scheduler),
            DataIntegrityVerification.ABORT,
            DataSize.of(10, MEGABYTE),
            new Duration(1, TimeUnit.MINUTES),
            true,
            TASK_ID,
            location,
            callback,
            scheduler,
            pageBufferClientCallbackExecutor);

    assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled");

    // send request
    client.scheduleRequest();
    beforeRequest.await(10, TimeUnit.SECONDS);
    assertStatus(client, location, "running", 0, 1, 0, 0, "PROCESSING_REQUEST");
    // assertTrue instead of assertEquals(..., true) for a boolean condition
    assertTrue(client.isRunning());

    // request is pending, now close it; the in-flight request may be aborted,
    // breaking the barriers, so BrokenBarrierException is tolerated here
    client.close();
    try {
        requestComplete.await(10, TimeUnit.SECONDS);
    }
    catch (BrokenBarrierException ignored) {
    }
    try {
        afterRequest.await(10, TimeUnit.SECONDS);
    }
    catch (BrokenBarrierException ignored) {
        afterRequest.reset();
    }

    // client.close() triggers a DELETE request, so wait for it to finish
    beforeRequest.await(10, TimeUnit.SECONDS);
    afterRequest.await(10, TimeUnit.SECONDS);
    requestComplete.await(10, TimeUnit.SECONDS);
    assertStatus(client, location, "closed", 0, 1, 2, 1, "not scheduled");
}
@Test
public void testExceptionFromResponseHandler()
throws Exception
{
// Manually-advanced ticker drives the client's error-duration stopwatch.
TestingTicker ticker = new TestingTicker();
AtomicReference<Duration> tickerIncrement = new AtomicReference<>(new Duration(0, TimeUnit.SECONDS));
// Every request advances the ticker by the configured increment, then fails.
TestingHttpClient.Processor processor = (input) -> {
Duration delta = tickerIncrement.get();
ticker.increment(delta.toMillis(), TimeUnit.MILLISECONDS);
throw new RuntimeException("Foo");
};
CyclicBarrier requestComplete = new CyclicBarrier(2);
TestingClientCallback callback = new TestingClientCallback(requestComplete);
URI location = URI.create("http://localhost:8080");
// NOTE: this constructor overload also takes the ticker, so the client's
// 30s minimum error duration is measured against the fake clock above.
HttpPageBufferClient client = new HttpPageBufferClient(
"localhost",
new TestingHttpClient(processor, scheduler),
DataIntegrityVerification.ABORT,
DataSize.of(10, MEGABYTE),
new Duration(30, TimeUnit.SECONDS),
true,
TASK_ID,
location,
callback,
scheduler,
ticker,
pageBufferClientCallbackExecutor);
assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled");
// request processor will throw exception, verify the request is marked a completed
// this starts the error stopwatch
client.scheduleRequest();
requestComplete.await(10, TimeUnit.SECONDS);
assertEquals(callback.getPages().size(), 0);
assertEquals(callback.getCompletedRequests(), 1);
assertEquals(callback.getFinishedBuffers(), 0);
assertEquals(callback.getFailedBuffers(), 0);
assertStatus(client, location, "queued", 0, 1, 1, 1, "not scheduled");
// advance time forward, but not enough to fail the client
// (30s elapsed == the 30s minimum, which does not exceed it)
tickerIncrement.set(new Duration(30, TimeUnit.SECONDS));
// verify that the client has not failed
client.scheduleRequest();
requestComplete.await(10, TimeUnit.SECONDS);
assertEquals(callback.getPages().size(), 0);
assertEquals(callback.getCompletedRequests(), 2);
assertEquals(callback.getFinishedBuffers(), 0);
assertEquals(callback.getFailedBuffers(), 0);
assertStatus(client, location, "queued", 0, 2, 2, 2, "not scheduled");
// advance time forward beyond the minimum error duration
tickerIncrement.set(new Duration(31, TimeUnit.SECONDS));
// verify that the client has failed
client.scheduleRequest();
requestComplete.await(10, TimeUnit.SECONDS);
assertEquals(callback.getPages().size(), 0);
assertEquals(callback.getCompletedRequests(), 3);
assertEquals(callback.getFinishedBuffers(), 0);
assertEquals(callback.getFailedBuffers(), 1);
assertInstanceOf(callback.getFailure(), PageTransportTimeoutException.class);
assertContains(callback.getFailure().getMessage(), WORKER_NODE_ERROR + " (http://localhost:8080/0 - 3 failures, failure duration 31.00s, total failed request time 31.00s)");
assertStatus(client, location, "queued", 0, 3, 3, 3, "not scheduled");
}
@Test
public void testErrorCodes()
{
    // Each page-transport exception type must report its dedicated error code.
    HostAddress host = HostAddress.fromParts("127.0.0.1", 8080);
    assertEquals(new PageTooLargeException().getErrorCode(), PAGE_TOO_LARGE.toErrorCode());
    assertEquals(new PageTransportErrorException(host, "").getErrorCode(), PAGE_TRANSPORT_ERROR.toErrorCode());
    assertEquals(new PageTransportTimeoutException(host, "", null).getErrorCode(), PAGE_TRANSPORT_TIMEOUT.toErrorCode());
}
@Test
public void testMemoryExceededInAddPages()
throws Exception
{
URI location = URI.create("http://localhost:8080");
Page page = new Page(RunLengthEncodedBlock.create(BIGINT, 1L, 100));
MockExchangeRequestProcessor processor = new MockExchangeRequestProcessor(DataSize.of(10, MEGABYTE));
CyclicBarrier requestComplete = new CyclicBarrier(2);
TrinoException expectedException = new TrinoException(EXCEEDED_LOCAL_MEMORY_LIMIT, "Memory limit exceeded");
AtomicBoolean addPagesCalled = new AtomicBoolean(false);
// Callback whose addPages() always throws, simulating the buffer hitting
// the local memory limit while accepting pages.
TestingClientCallback callback = new TestingClientCallback(requestComplete)
{
@Override
public boolean addPages(HttpPageBufferClient client, List<SerializedPage> pages)
{
addPagesCalled.set(true);
throw expectedException;
}
};
HttpPageBufferClient client = new HttpPageBufferClient(
"localhost",
new TestingHttpClient(processor, scheduler),
DataIntegrityVerification.ABORT,
DataSize.of(10, MEGABYTE),
new Duration(30, TimeUnit.SECONDS),
true,
TASK_ID,
location,
callback,
scheduler,
pageBufferClientCallbackExecutor);
// attempt to fetch a page
processor.addPage(location, page);
callback.resetStats();
client.scheduleRequest();
requestComplete.await(10, TimeUnit.SECONDS);
// addPages was called
assertTrue(addPagesCalled.get());
// Memory exceeded failure is reported
// (the exact exception instance thrown above must be propagated to clientFailed)
assertEquals(callback.getCompletedRequests(), 1);
assertEquals(callback.getFinishedBuffers(), 0);
assertEquals(callback.getFailedBuffers(), 1);
assertEquals(callback.getFailure(), expectedException);
}
/**
 * Takes a single status snapshot from the client and checks every reported
 * field against the expected values.
 */
private static void assertStatus(
        HttpPageBufferClient client,
        URI location, String status,
        int pagesReceived,
        int requestsScheduled,
        int requestsCompleted,
        int requestsFailed,
        String httpRequestState)
{
    PageBufferClientStatus snapshot = client.getStatus();
    assertEquals(snapshot.getUri(), location);
    assertEquals(snapshot.getState(), status, "status");
    assertEquals(snapshot.getPagesReceived(), pagesReceived, "pagesReceived");
    assertEquals(snapshot.getRequestsScheduled(), requestsScheduled, "requestsScheduled");
    assertEquals(snapshot.getRequestsCompleted(), requestsCompleted, "requestsCompleted");
    assertEquals(snapshot.getRequestsFailed(), requestsFailed, "requestsFailed");
    assertEquals(snapshot.getHttpRequestState(), httpRequestState, "httpRequestState");
}
/**
 * Asserts two pages have matching dimensions; cell contents are not compared.
 */
private static void assertPageEquals(Page expectedPage, Page actualPage)
{
    assertEquals(actualPage.getChannelCount(), expectedPage.getChannelCount());
    assertEquals(actualPage.getPositionCount(), expectedPage.getPositionCount());
}
/**
 * ClientCallback that records every notification and releases the supplied
 * barrier once per requestComplete()/clientFinished() call, so the test
 * thread can rendezvous with the client's callback thread.
 */
private static class TestingClientCallback
        implements ClientCallback
{
    private final CyclicBarrier done;
    private final List<SerializedPage> pages = Collections.synchronizedList(new ArrayList<>());
    private final AtomicInteger completedRequests = new AtomicInteger();
    private final AtomicInteger finishedBuffers = new AtomicInteger();
    private final AtomicInteger failedBuffers = new AtomicInteger();
    private final AtomicReference<Throwable> failure = new AtomicReference<>();

    public TestingClientCallback(CyclicBarrier done)
    {
        this.done = done;
    }

    /** Deserialized copies of every page delivered via {@link #addPages}. */
    public List<Page> getPages()
    {
        return pages.stream()
                .map(PAGES_SERDE::deserialize)
                .collect(Collectors.toList());
    }

    // These two accessors were private while their siblings were public;
    // made public for consistency (widening visibility is backward-compatible).
    public int getCompletedRequests()
    {
        return completedRequests.get();
    }

    public int getFinishedBuffers()
    {
        return finishedBuffers.get();
    }

    public int getFailedBuffers()
    {
        return failedBuffers.get();
    }

    /** First failure reported via {@link #clientFailed}, or null if none. */
    public Throwable getFailure()
    {
        return failure.get();
    }

    @Override
    public boolean addPages(HttpPageBufferClient client, List<SerializedPage> pages)
    {
        this.pages.addAll(pages);
        return true;
    }

    @Override
    public void requestComplete(HttpPageBufferClient client)
    {
        completedRequests.getAndIncrement();
        awaitDone();
    }

    @Override
    public void clientFinished(HttpPageBufferClient client)
    {
        finishedBuffers.getAndIncrement();
        awaitDone();
    }

    @Override
    public void clientFailed(HttpPageBufferClient client, Throwable cause)
    {
        failedBuffers.getAndIncrement();
        // Only the first failure is retained; later ones are ignored.
        failure.compareAndSet(null, cause);
        // requestComplete() will be called after this
    }

    /** Clears counters, captured pages, and the recorded failure. */
    public void resetStats()
    {
        pages.clear();
        completedRequests.set(0);
        finishedBuffers.set(0);
        failedBuffers.set(0);
        failure.set(null);
    }

    private void awaitDone()
    {
        try {
            done.await(10, TimeUnit.SECONDS);
        }
        catch (InterruptedException e) {
            // preserve interrupt status for the caller before failing
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
        catch (BrokenBarrierException | TimeoutException e) {
            throw new RuntimeException(e);
        }
    }
}
// Request processor that serves a fixed, swappable response and handshakes
// with the test thread via two barriers: it waits on beforeRequest when a
// request arrives and on afterRequest just before the response is returned.
private static class StaticRequestProcessor
implements TestingHttpClient.Processor
{
private final AtomicReference<Response> response = new AtomicReference<>();
private final CyclicBarrier beforeRequest;
private final CyclicBarrier afterRequest;
private StaticRequestProcessor(CyclicBarrier beforeRequest, CyclicBarrier afterRequest)
{
this.beforeRequest = beforeRequest;
this.afterRequest = afterRequest;
}
// Sets the response returned for every subsequent request.
private void setResponse(Response response)
{
this.response.set(response);
}
@SuppressWarnings({"ThrowFromFinallyBlock", "Finally"})
@Override
public Response handle(Request request)
throws Exception
{
beforeRequest.await(10, TimeUnit.SECONDS);
try {
return response.get();
}
finally {
// Always trip afterRequest, even if response.get() is observed mid-swap,
// so the test thread is never left waiting on the barrier.
afterRequest.await(10, TimeUnit.SECONDS);
}
}
}
}
| |
/*******************************************************************************
* Copyright 2014 United States Government as represented by the
* Administrator of the National Aeronautics and Space Administration.
* All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package gov.nasa.arc.spife.europa.clientside;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Vector;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.log4j.Logger;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Platform;
import org.osgi.framework.Bundle;
public abstract class EuropaServerProxyBase
implements EuropaServerProxy
{
private static final Logger logger = Logger.getLogger(EuropaServerProxyBase.class);
protected ServerLauncher serverLauncher_=null;
protected EuropaServerConfig serverConfig_=null;
protected String sessionId_=null;
public EuropaServerProxyBase(EuropaServerConfig config, boolean useRemoteServer)
{
serverConfig_=config;
serverLauncher_ = (
useRemoteServer
? new RemoteServerLauncher(config)
: new EmbeddedServerLauncher(config)
);
}
@Override
public void startServer(String sessionId)
{
serverLauncher_.startServer(sessionId);
int timeout=5; // in seconds
if (!serverResponds(serverConfig_, timeout))
throw new RuntimeException("Failed starting DynamicEuropa server");
}
@Override
public void stopServer()
{
try {
executeCommand(EuropaCommand.STOP_SERVER,new Vector<Object>());
serverLauncher_.waitForServerShutdown();
}
catch (Throwable t) {
logger.warn("Exception caught while stopping DynamicEuropa server");
}
}
@Override
public void startSession(
String sessionId,
String modelName)
{
try {
sessionId_ = sessionId;
StringBuffer info = new StringBuffer();
info.append("Session id: ").append(sessionId_).append("\n");
// Start a new session with DynamicEuropa
List<Object> params = new ArrayList<Object>();
params.add(modelName);
executeCommand(EuropaCommand.START_SESSION, params);
// Print server version if available
params.clear();
Object version = executeCommand(EuropaCommand.GET_SERVER_VERSION,params);
if (version != null)
info.append("Server Version: ").append(version).append("\n");
logger.info(info);
EuropaServerMonitor.setInfo(info.toString());
}
catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
public void stopSession()
{
try {
logger.info("Europa session [" + sessionId_ + "] stopping session...");
executeCommand(EuropaCommand.STOP_SESSION, new ArrayList<Object>());
logger.info("Europa session [" + sessionId_ + "] has been shutdown");
} catch (Exception e) {
logger.warn("disconnect failed: ", e);
}
}
@Override
public Object syncExecute(
EuropaCommand command,
List<? extends Object> parameters,
boolean reportErrors)
{
try {
autoFlushQueue();
Object result = executeCommand(command, parameters);
return result;
}
catch (Exception e) {
if (reportErrors) {
EuropaServerMonitor.getInstance().setException(e);
logger.error(command + " failed", e);
return e;
}
logger.warn(command + " failed", e);
return null;
}
}
@Override
public void queueExecute(
EuropaCommand command,
List<? extends Object> parameters)
{
queueCommand(command, getCommandParameters(command, parameters));
logger.debug(command + " queued.");
}
@Override
public List<?> flushQueue()
{
EuropaServerMonitor.getInstance().updateLastRequestTimeMillis();
List<?> results = flushCommandQueue();
EuropaServerMonitor.getInstance().updateLastResponseTimeMillis();
logger.debug("flushQueue results: " + formatExecuteResult(results));
return results;
}
protected Object executeCommand(EuropaCommand command,List<? extends Object> parameters)
{
try {
EuropaServerMonitor.getInstance().updateLastRequestTimeMillis();
Object result = executeCommand(command,getCommandParameters(command,parameters));
EuropaServerMonitor.getInstance().updateLastResponseTimeMillis();
logger.debug(command + " result: " + formatExecuteResult(result));
return result;
}
catch (Exception e) {
logger.error(e);
throw new RuntimeException(e);
}
}
protected abstract Object executeCommand(EuropaCommand command, Vector<Object> parameters);
protected abstract void queueCommand(EuropaCommand command, Vector<Object> parameters);
protected abstract List<?> flushCommandQueue();
protected void autoFlushQueue()
{
List<?> queuedResults = flushQueue();
if (!queuedResults.isEmpty()) {
logger.warn("queue auto-flushed, result count = " + queuedResults.size());
for (Object result : queuedResults) {
if (result instanceof Exception) {
Exception exception = (Exception) result;
logger.warn("exception during auto-flush", exception);
}
}
}
}
protected Vector<Object> getCommandParameters(EuropaCommand command, List<? extends Object> parameters) {
for (Object object : parameters) {
if (object == null) {
logger.warn("Found null parameter while invoking EuropaCommand "+command.getXmlrpcString());
return new Vector<Object>();
}
}
Vector<Object> commandParameters = new Vector<Object>();
if (needsSession(command))
commandParameters.add(sessionId_);
commandParameters.addAll(parameters);
return commandParameters;
}
protected String formatExecuteResult(Object result) {
String output = (result == null ? "<null>" : result.toString());
if (output.length() > 40) {
output = "\n " + result;
} else if (output.length() == 0) {
output = "<empty>";
}
return output;
}
protected abstract boolean serverResponds(EuropaServerConfig config, int timeout);
protected static boolean needsSession(EuropaCommand command)
{
return !s_sessionlessMethods.contains(command);
}
static Set<EuropaCommand> s_sessionlessMethods = new HashSet<EuropaCommand>();
static {
s_sessionlessMethods.add(EuropaCommand.GET_SERVER_VERSION);
s_sessionlessMethods.add(EuropaCommand.STOP_SERVER);
s_sessionlessMethods.add(EuropaCommand.GET_RESOURCE_IN_TIME_FORMAT);
}
protected static interface ServerLauncher
{
public void startServer(String sessionId);
public void waitForServerShutdown();
public EuropaServer getServer();
}
protected static class EmbeddedServerLauncher
implements ServerLauncher
{
protected EuropaServer embeddedServer_=null;
protected EuropaServerConfig serverConfig_=null;
protected Future<Object> embeddedServerThread_ = null;
public EmbeddedServerLauncher(EuropaServerConfig config)
{
serverConfig_=config;
embeddedServer_=new EuropaServer(config);
}
@Override
public void startServer(String sessionId)
{
ExecutorService executor = new ThreadPoolExecutor(
2,
Integer.MAX_VALUE,
Long.MAX_VALUE,
TimeUnit.MILLISECONDS,
new SynchronousQueue<Runnable>()
);
try {
embeddedServerThread_ = executor.submit(
new Callable<Object>()
{
@Override
public Object call() { return embeddedServer_.Start(); }
}
);
}
catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
public void waitForServerShutdown()
{
if (embeddedServerThread_ == null)
return;
int timeout=10; // seconds
try {
embeddedServer_.SetTimeout(0);
embeddedServerThread_.get(timeout,TimeUnit.SECONDS);
logger.info("Stopped Embedded DynamicEuropa Server running on port:"+serverConfig_.GetPort());
}
catch (TimeoutException e) {
logger.warn("DynamicEuropa server failed to stop after "+timeout+" seconds",e);
}
catch (InterruptedException e) {
logger.warn("Exception caught while shutting down DynamicEuropa server",e);
}
catch (ExecutionException e) {
logger.warn("Exception caught while shutting down DynamicEuropa server",e);
}
embeddedServer_ = null;
embeddedServerThread_ = null;
}
@Override
public EuropaServer getServer()
{
return embeddedServer_;
}
}
protected static class RemoteServerLauncher
implements ServerLauncher
{
protected EuropaServerConfig serverConfig_=null;
protected Process remoteServerProcess_=null;
public RemoteServerLauncher(EuropaServerConfig config)
{
serverConfig_=config;
}
@Override
public void startServer(String sessionId)
{
String exePath = getExePath();
String configPath = makeConfigFile(sessionId);
ProcessBuilder pb = new ProcessBuilder(exePath, configPath);
pb.directory(EuropaServerManager.localStatePath().toFile());
pb.redirectErrorStream(true); // merge stdout and stdin
try {
remoteServerProcess_ = pb.start();
new Thread(
new StreamDrain(remoteServerProcess_.getInputStream())
).start();
}
catch (Exception e) {
throw new RuntimeException("Failed starting remote server:",e);
}
}
protected String getExePath()
{
try {
String bundleName = "gov.nasa.arc.spife.europa.clientside";
String exeName = "DynamicEuropa_o"; // TODO: deal with debug version
if (Platform.getOS().equals(Platform.OS_WIN32)) {
exeName += ".exe";
}
Bundle b = Platform.getBundle(bundleName);
URL url = FileLocator.find(b, new Path(exeName), null /*override*/);
url = FileLocator.toFileURL(url);
return url.getFile();
}
catch (Exception e) {
throw new RuntimeException("Failed trying to get path to Europa executable",e);
}
}
protected String makeConfigFile(String sessionId)
{
String outputFileName = sessionId+".cfg";
File outputFile = EuropaServerManager.localStatePath().append(outputFileName).toFile();
try {
outputFile.createNewFile();
PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(outputFile)));
writer.println(toString(serverConfig_));
writer.close();
}
catch (IOException e) {
throw new RuntimeException("Failed creating configuration file for new Europa server",e);
}
return outputFile.getAbsolutePath();
}
protected String toString(EuropaServerConfig serverConfig)
{
StringBuffer buf = new StringBuffer();
/*
buf
.append("port").append(" ").append(serverConfig.GetPort()).append("\n")
.append("debug").append(" ").append(serverConfig.GetDebug()).append("\n")
.append("verbosity").append(" ").append(serverConfig.GetVerbosity()).append("\n")
.append("initial_state").append(" ").append(serverConfig.GetInitialStateFilename()).append("\n")
.append("model_paths").append(" ").append(serverConfig.GetModelPaths()).append("\n")
.append("server_host").append(" ").append(serverConfig.GetHost()).append("\n")
.append("server_host_local").append(" ").append(serverConfig.GetHostLocal()).append("\n")
.append("server_version").append(" ").append(serverConfig.GetVersion()).append("\n")
.append("planner_config").append(" ").append(serverConfig.GetPlannerConfigFilename()).append("\n")
.append("planner_elem").append(" ").append(serverConfig.GetPlannerConfigElement()).append("\n")
.append("server_timeout").append(" ").append(serverConfig.GetServerTimeout()).append("\n")
.append("log").append(" ").append(serverConfig.GetLogFile()).append("\n")
;
*/
buf.append(serverConfig.toString());
return buf.toString();
}
@Override
public void waitForServerShutdown()
{
if (remoteServerProcess_ == null)
return;
boolean serverExited=false;
int timeout=10; //seconds
long startTime=System.currentTimeMillis();
while (!serverExited && (elapsedTime(startTime) < (timeout*1000))) {
try {
remoteServerProcess_.exitValue();
serverExited=true;
logger.info("Stopped Remote DynamicEuropa Server running on port:"+serverConfig_.GetPort());
}
catch (IllegalThreadStateException e) {
Thread.yield();
}
catch (Exception e) {
logger.warn("Exception caught while shutting down DynamicEuropa server",e);
break;
}
}
if (!serverExited) {
logger.warn("DynamicEuropa server failed to stop after "+timeout+" seconds");
remoteServerProcess_.destroy();
}
remoteServerProcess_ = null;
}
protected long elapsedTime(long startInMillis)
{
return (System.currentTimeMillis()-startInMillis);
}
@Override
public EuropaServer getServer()
{
throw new RuntimeException("Can't get a handle on a remote server");
}
}
protected static class StreamDrain
implements Runnable
{
protected InputStream stream_;
private StreamDrain (InputStream s)
{
stream_ = s;
}
@Override
public void run()
{
try {
BufferedReader br = new BufferedReader(new InputStreamReader(stream_));
while ((br.readLine()) != null) {
// Do nothing
// we just want to drain the stream to prevent the child process from blocking
}
logger.debug("InputStream drainer finished");
} catch (Exception e) {
logger.error("Died while draining stderr & stdout for DynamicEuropa server process");
}
}
}
}
| |
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
public class UnbalancedBSTMap_baseTests {
@Test
// We can create UnbalancedBSTMaps with different key and value types!
public void testConstruction() {
    // Compilation alone is the point here; the variables are intentionally
    // unused, so ignore any "unused variable" warnings.
    Map<String, String> stringToString = new UnbalancedBSTMap<String, String>();
    Map<Integer, Object> intToObject = new UnbalancedBSTMap<Integer, Object>();
    Map<Date, int[]> dateToIntArray = new UnbalancedBSTMap<Date, int[]>();
}
// //////////////////////////////////////////////////////////////////
// *** Queries about the tree ***
// Methods: isEmpty, size, containsKey, containsValue, get
// //////////////////////////////////////////////////////////////////
/* *************** */
// isEmpty tests
/* *************** */
@Test
public void test_isEmpty(){
    // A freshly constructed map must report itself empty.
    UnbalancedBSTMap<Integer, String> map = new UnbalancedBSTMap<Integer, String>();
    assertTrue(map.isEmpty());
}
/* *************** */
// size tests
/* *************** */
@Test
public void test_size0(){
    // empty tree: size must be zero before any insertion
    UnbalancedBSTMap<Integer, String> map = new UnbalancedBSTMap<Integer, String>();
    assertEquals(0, map.size());
}
@Test
public void test_size1(){
    // ..... 42 .....
    // Autoboxing (Integer.valueOf) replaces the deprecated new Integer(...) constructor.
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    assertEquals(1, myMap.size());
}
@Test
public void test_size2right(){
    // Test tree:
    // ....42....
    // ........52
    // Autoboxing replaces the deprecated new Integer(...) constructor.
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(52, "Colleen");
    assertEquals(2, myMap.size());
}
@Test
public void test_size2left(){
    // Test tree:
    // ....42....
    // 26........
    // Autoboxing replaces the deprecated new Integer(...) constructor.
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    assertEquals(2, myMap.size());
}
@Test
public void test_size3(){
    // Test tree:
    // ....42....
    // 26......52
    // Autoboxing replaces the deprecated new Integer(...) constructor.
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    assertEquals(3, myMap.size());
}
@Test
public void test_size4a(){
    // Test tree (diagram fixed: the inserted key is 10, not 18 as previously drawn):
    // ......42......
    // ..26......52..
    // 10............
    // Autoboxing replaces the deprecated new Integer(...) constructor.
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(10, "Beth");
    assertEquals(4, myMap.size());
}
@Test
public void test_size4b(){
    // Test tree:
    // ......42......
    // ..26......52..
    // ....30........
    // Autoboxing replaces the deprecated new Integer(...) constructor.
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(30, "Stone");
    assertEquals(4, myMap.size());
}
@Test
public void test_size4c(){
    // Test tree:
    // ......42......
    // ..26......52..
    // ........45....
    // Autoboxing replaces the deprecated new Integer(...) constructor.
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(45, "Julie");
    assertEquals(4, myMap.size());
}
@Test
public void test_size4d(){
    // Test tree:
    // ......42......
    // ..26......52..
    // ............60
    // Autoboxing replaces the deprecated new Integer(...) constructor.
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(60, "Ran");
    assertEquals(4, myMap.size());
}
@Test
public void test_size7(){
    // Test tree:
    // ......42......
    // ..26......52..
    // 18..30..45..60
    // Autoboxing replaces the deprecated new Integer(...) constructor.
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(60, "Ran");
    myMap.put(18, "Beth");
    myMap.put(45, "Julie");
    myMap.put(30, "Stone");
    assertEquals(7, myMap.size());
}
/* *************** */
// containsKey tests
/* *************** */
@Test
public void test_containsKey0(){
    // empty tree: no key can be present.
    // Autoboxing replaces the deprecated new Integer(...) constructor.
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    assertFalse(myMap.containsKey(10));
    assertFalse(myMap.containsKey(18));
    assertFalse(myMap.containsKey(26));
    assertFalse(myMap.containsKey(30));
    assertFalse(myMap.containsKey(42));
    assertFalse(myMap.containsKey(45));
    assertFalse(myMap.containsKey(52));
    assertFalse(myMap.containsKey(60));
    assertFalse(myMap.containsKey(90));
}
@Test
public void test_containsKey1(){
    // ..... 42 .....
    // Autoboxing replaces the deprecated new Integer(...) constructor.
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    assertFalse(myMap.containsKey(10));
    assertFalse(myMap.containsKey(18));
    assertFalse(myMap.containsKey(26));
    assertFalse(myMap.containsKey(30));
    assertTrue(myMap.containsKey(42));
    assertFalse(myMap.containsKey(45));
    assertFalse(myMap.containsKey(52));
    assertFalse(myMap.containsKey(60));
    assertFalse(myMap.containsKey(90));
}
@Test
public void test_containsKey2right(){
    // Test tree:
    // ....42....
    // ........52
    // Autoboxing replaces the deprecated new Integer(...) constructor.
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(52, "Colleen");
    assertFalse(myMap.containsKey(10));
    assertFalse(myMap.containsKey(18));
    assertFalse(myMap.containsKey(26));
    assertFalse(myMap.containsKey(30));
    assertTrue(myMap.containsKey(42));
    assertFalse(myMap.containsKey(45));
    assertTrue(myMap.containsKey(52));
    assertFalse(myMap.containsKey(60));
    assertFalse(myMap.containsKey(90));
}
@Test
public void test_containsKey2left(){
// Test tree:
// ....42....
// 26........
UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
myMap.put(new Integer(42), "Dodds");
myMap.put(new Integer(26), "Ben");
assertFalse(myMap.containsKey(new Integer(10)));
assertFalse(myMap.containsKey(new Integer(18)));
assertTrue(myMap.containsKey(new Integer(26)));
assertFalse(myMap.containsKey(new Integer(30)));
assertTrue(myMap.containsKey(new Integer(42)));
assertFalse(myMap.containsKey(new Integer(45)));
assertFalse(myMap.containsKey(new Integer(52)));
assertFalse(myMap.containsKey(new Integer(60)));
assertFalse(myMap.containsKey(new Integer(90)));
}
@Test
public void test_containsKey3(){
// Test tree:
// ....42....
// 26......52
UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
myMap.put(new Integer(42), "Dodds");
myMap.put(new Integer(26), "Ben");
myMap.put(new Integer(52), "Colleen");
assertFalse(myMap.containsKey(new Integer(10)));
assertFalse(myMap.containsKey(new Integer(18)));
assertTrue(myMap.containsKey(new Integer(26)));
assertFalse(myMap.containsKey(new Integer(30)));
assertTrue(myMap.containsKey(new Integer(42)));
assertFalse(myMap.containsKey(new Integer(45)));
assertTrue(myMap.containsKey(new Integer(52)));
assertFalse(myMap.containsKey(new Integer(60)));
assertFalse(myMap.containsKey(new Integer(90)));
}
@Test
public void test_containsKey4a(){
// Test tree:
// ......42......
// ..26......52..
// 18............
UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
myMap.put(new Integer(42), "Dodds");
myMap.put(new Integer(26), "Ben");
myMap.put(new Integer(52), "Colleen");
myMap.put(new Integer(18), "Beth");
assertFalse(myMap.containsKey(new Integer(10)));
assertTrue(myMap.containsKey(new Integer(18)));
assertTrue(myMap.containsKey(new Integer(26)));
assertFalse(myMap.containsKey(new Integer(30)));
assertTrue(myMap.containsKey(new Integer(42)));
assertFalse(myMap.containsKey(new Integer(45)));
assertTrue(myMap.containsKey(new Integer(52)));
assertFalse(myMap.containsKey(new Integer(60)));
assertFalse(myMap.containsKey(new Integer(90)));
}
@Test
public void test_containsKey4b(){
// Test tree:
// ......42......
// ..26......52..
// ....30........
UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
myMap.put(new Integer(42), "Dodds");
myMap.put(new Integer(26), "Ben");
myMap.put(new Integer(52), "Colleen");
myMap.put(new Integer(30), "Stone");
assertFalse(myMap.containsKey(new Integer(10)));
assertFalse(myMap.containsKey(new Integer(18)));
assertTrue(myMap.containsKey(new Integer(26)));
assertTrue(myMap.containsKey(new Integer(30)));
assertTrue(myMap.containsKey(new Integer(42)));
assertFalse(myMap.containsKey(new Integer(45)));
assertTrue(myMap.containsKey(new Integer(52)));
assertFalse(myMap.containsKey(new Integer(60)));
assertFalse(myMap.containsKey(new Integer(90)));
}
@Test
public void test_containsKey4c(){
// Test tree:
// ......42......
// ..26......52..
// ........45....
UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
myMap.put(new Integer(42), "Dodds");
myMap.put(new Integer(26), "Ben");
myMap.put(new Integer(52), "Colleen");
myMap.put(new Integer(45), "Julie");
assertFalse(myMap.containsKey(new Integer(10)));
assertFalse(myMap.containsKey(new Integer(18)));
assertTrue(myMap.containsKey(new Integer(26)));
assertFalse(myMap.containsKey(new Integer(30)));
assertTrue(myMap.containsKey(new Integer(42)));
assertTrue(myMap.containsKey(new Integer(45)));
assertTrue(myMap.containsKey(new Integer(52)));
assertFalse(myMap.containsKey(new Integer(60)));
assertFalse(myMap.containsKey(new Integer(90)));
}
@Test
public void test_containsKey4d(){
// Test tree:
// ......42......
// ..26......52..
// ............60
UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
myMap.put(new Integer(42), "Dodds");
myMap.put(new Integer(26), "Ben");
myMap.put(new Integer(52), "Colleen");
myMap.put(new Integer(60), "Ran");
assertFalse(myMap.containsKey(new Integer(10)));
assertFalse(myMap.containsKey(new Integer(18)));
assertTrue(myMap.containsKey(new Integer(26)));
assertFalse(myMap.containsKey(new Integer(30)));
assertTrue(myMap.containsKey(new Integer(42)));
assertFalse(myMap.containsKey(new Integer(45)));
assertTrue(myMap.containsKey(new Integer(52)));
assertTrue(myMap.containsKey(new Integer(60)));
assertFalse(myMap.containsKey(new Integer(90)));
}
@Test
public void test_containsKey7(){
// Test tree:
// ......42......
// ..26......52..
// 18..30..45..60
UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
myMap.put(new Integer(42), "Dodds");
myMap.put(new Integer(26), "Ben");
myMap.put(new Integer(52), "Colleen");
myMap.put(new Integer(60), "Ran");
myMap.put(new Integer(18), "Beth");
myMap.put(new Integer(45), "Julie");
myMap.put(new Integer(30), "Stone");
assertFalse(myMap.containsKey(new Integer(10)));
assertTrue(myMap.containsKey(new Integer(18)));
assertTrue(myMap.containsKey(new Integer(26)));
assertTrue(myMap.containsKey(new Integer(30)));
assertTrue(myMap.containsKey(new Integer(42)));
assertTrue(myMap.containsKey(new Integer(45)));
assertTrue(myMap.containsKey(new Integer(52)));
assertTrue(myMap.containsKey(new Integer(60)));
assertFalse(myMap.containsKey(new Integer(90)));
}
/* *************** */
// containsValue tests
// Keys are inserted via autoboxing instead of the deprecated new Integer(...) constructor.
/* *************** */
@Test
public void test_containsValue0(){
    // Empty tree: no value is present.
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    assertFalse(myMap.containsValue("Dodds"));
    assertFalse(myMap.containsValue("Ben"));
    assertFalse(myMap.containsValue("Colleen"));
    assertFalse(myMap.containsValue("Ran"));
    assertFalse(myMap.containsValue("Beth"));
    assertFalse(myMap.containsValue("Julie"));
    assertFalse(myMap.containsValue("Stone"));
    assertFalse(myMap.containsValue("Geoff"));
    assertFalse(myMap.containsValue("Melissa"));
}
@Test
public void test_containsValue1(){
    // ..... 42 .....
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    assertTrue(myMap.containsValue("Dodds"));
    assertFalse(myMap.containsValue("Ben"));
    assertFalse(myMap.containsValue("Colleen"));
    assertFalse(myMap.containsValue("Ran"));
    assertFalse(myMap.containsValue("Beth"));
    assertFalse(myMap.containsValue("Julie"));
    assertFalse(myMap.containsValue("Stone"));
    assertFalse(myMap.containsValue("Geoff"));
    assertFalse(myMap.containsValue("Melissa"));
}
@Test
public void test_containsValue2right(){
    // Test tree:
    // ....42....
    // ........52
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(52, "Colleen");
    assertTrue(myMap.containsValue("Dodds"));
    assertFalse(myMap.containsValue("Ben"));
    assertTrue(myMap.containsValue("Colleen"));
    assertFalse(myMap.containsValue("Ran"));
    assertFalse(myMap.containsValue("Beth"));
    assertFalse(myMap.containsValue("Julie"));
    assertFalse(myMap.containsValue("Stone"));
    assertFalse(myMap.containsValue("Geoff"));
    assertFalse(myMap.containsValue("Melissa"));
}
@Test
public void test_containsValue2left(){
    // Test tree:
    // ....42....
    // 26........
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    assertTrue(myMap.containsValue("Dodds"));
    assertTrue(myMap.containsValue("Ben"));
    assertFalse(myMap.containsValue("Colleen"));
    assertFalse(myMap.containsValue("Ran"));
    assertFalse(myMap.containsValue("Beth"));
    assertFalse(myMap.containsValue("Julie"));
    assertFalse(myMap.containsValue("Stone"));
    assertFalse(myMap.containsValue("Geoff"));
    assertFalse(myMap.containsValue("Melissa"));
}
@Test
public void test_containsValue3(){
    // Test tree:
    // ....42....
    // 26......52
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    assertTrue(myMap.containsValue("Dodds"));
    assertTrue(myMap.containsValue("Ben"));
    assertTrue(myMap.containsValue("Colleen"));
    assertFalse(myMap.containsValue("Ran"));
    assertFalse(myMap.containsValue("Beth"));
    assertFalse(myMap.containsValue("Julie"));
    assertFalse(myMap.containsValue("Stone"));
    assertFalse(myMap.containsValue("Geoff"));
    assertFalse(myMap.containsValue("Melissa"));
}
@Test
public void test_containsValue4a(){
    // Test tree:
    // ......42......
    // ..26......52..
    // 18............
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(18, "Beth");
    assertTrue(myMap.containsValue("Dodds"));
    assertTrue(myMap.containsValue("Ben"));
    assertTrue(myMap.containsValue("Colleen"));
    assertFalse(myMap.containsValue("Ran"));
    assertTrue(myMap.containsValue("Beth"));
    assertFalse(myMap.containsValue("Julie"));
    assertFalse(myMap.containsValue("Stone"));
    assertFalse(myMap.containsValue("Geoff"));
    assertFalse(myMap.containsValue("Melissa"));
}
@Test
public void test_containsValue4b(){
    // Test tree:
    // ......42......
    // ..26......52..
    // ....30........
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(30, "Stone");
    assertTrue(myMap.containsValue("Dodds"));
    assertTrue(myMap.containsValue("Ben"));
    assertTrue(myMap.containsValue("Colleen"));
    assertFalse(myMap.containsValue("Ran"));
    assertFalse(myMap.containsValue("Beth"));
    assertFalse(myMap.containsValue("Julie"));
    assertTrue(myMap.containsValue("Stone"));
    assertFalse(myMap.containsValue("Geoff"));
    assertFalse(myMap.containsValue("Melissa"));
}
@Test
public void test_containsValue4c(){
    // Test tree:
    // ......42......
    // ..26......52..
    // ........45....
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(45, "Julie");
    assertTrue(myMap.containsValue("Dodds"));
    assertTrue(myMap.containsValue("Ben"));
    assertTrue(myMap.containsValue("Colleen"));
    assertFalse(myMap.containsValue("Ran"));
    assertFalse(myMap.containsValue("Beth"));
    assertTrue(myMap.containsValue("Julie"));
    assertFalse(myMap.containsValue("Stone"));
    assertFalse(myMap.containsValue("Geoff"));
    assertFalse(myMap.containsValue("Melissa"));
}
@Test
public void test_containsValue4d(){
    // Test tree:
    // ......42......
    // ..26......52..
    // ............60
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(60, "Ran");
    assertTrue(myMap.containsValue("Dodds"));
    assertTrue(myMap.containsValue("Ben"));
    assertTrue(myMap.containsValue("Colleen"));
    assertTrue(myMap.containsValue("Ran"));
    assertFalse(myMap.containsValue("Beth"));
    assertFalse(myMap.containsValue("Julie"));
    assertFalse(myMap.containsValue("Stone"));
    assertFalse(myMap.containsValue("Geoff"));
    assertFalse(myMap.containsValue("Melissa"));
}
@Test
public void test_containsValue7(){
    // Test tree:
    // ......42......
    // ..26......52..
    // 18..30..45..60
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(60, "Ran");
    myMap.put(18, "Beth");
    myMap.put(45, "Julie");
    myMap.put(30, "Stone");
    assertTrue(myMap.containsValue("Dodds"));
    assertTrue(myMap.containsValue("Ben"));
    assertTrue(myMap.containsValue("Colleen"));
    assertTrue(myMap.containsValue("Ran"));
    assertTrue(myMap.containsValue("Beth"));
    assertTrue(myMap.containsValue("Julie"));
    assertTrue(myMap.containsValue("Stone"));
    assertFalse(myMap.containsValue("Geoff"));
    assertFalse(myMap.containsValue("Melissa"));
}
/* *************** */
// get tests
// Keys autobox; the deprecated new Integer(...) constructor is gone.
/* *************** */
@Test
public void test_get0(){
    // Empty tree: get on an absent key returns null.
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    assertEquals(null, myMap.get(42));
}
@Test
public void test_get1(){
    // ..... 42 .....
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    assertEquals("Dodds", myMap.get(42));
}
@Test
public void test_get2right(){
    // Test tree:
    // ....42....
    // ........52
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(52, "Colleen");
    assertEquals("Dodds", myMap.get(42));
    assertEquals("Colleen", myMap.get(52));
}
@Test
public void test_get2left(){
    // Test tree:
    // ....42....
    // 26........
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    assertEquals("Ben", myMap.get(26));
    assertEquals("Dodds", myMap.get(42));
}
@Test
public void test_get3(){
    // Test tree:
    // ....42....
    // 26......52
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    assertEquals("Ben", myMap.get(26));
    assertEquals("Dodds", myMap.get(42));
    assertEquals("Colleen", myMap.get(52));
}
@Test
public void test_get4a(){
    // Test tree:
    // ......42......
    // ..26......52..
    // 18............
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(18, "Beth");
    assertEquals("Beth", myMap.get(18));
    assertEquals("Ben", myMap.get(26));
    assertEquals("Dodds", myMap.get(42));
    assertEquals("Colleen", myMap.get(52));
}
@Test
public void test_get4b(){
    // Test tree:
    // ......42......
    // ..26......52..
    // ....30........
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(30, "Stone");
    assertEquals("Ben", myMap.get(26));
    assertEquals("Stone", myMap.get(30));
    assertEquals("Dodds", myMap.get(42));
    assertEquals("Colleen", myMap.get(52));
}
@Test
public void test_get4c(){
    // Test tree:
    // ......42......
    // ..26......52..
    // ........45....
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(45, "Julie");
    assertEquals("Ben", myMap.get(26));
    assertEquals("Dodds", myMap.get(42));
    assertEquals("Julie", myMap.get(45));
    assertEquals("Colleen", myMap.get(52));
}
@Test
public void test_get4d(){
    // Test tree:
    // ......42......
    // ..26......52..
    // ............60
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(60, "Ran");
    assertEquals("Ben", myMap.get(26));
    assertEquals("Dodds", myMap.get(42));
    assertEquals("Colleen", myMap.get(52));
    assertEquals("Ran", myMap.get(60));
}
@Test
public void test_get7(){
    // Test tree:
    // ......42......
    // ..26......52..
    // 18..30..45..60
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(60, "Ran");
    myMap.put(18, "Beth");
    myMap.put(45, "Julie");
    myMap.put(30, "Stone");
    assertEquals("Beth", myMap.get(18));
    assertEquals("Ben", myMap.get(26));
    assertEquals("Stone", myMap.get(30));
    assertEquals("Dodds", myMap.get(42));
    assertEquals("Julie", myMap.get(45));
    assertEquals("Colleen", myMap.get(52));
    assertEquals("Ran", myMap.get(60));
}
// //////////////////////////////////////////////////////////////////
// *** Modifications to the tree ***
// Methods: clear, put, putAll
// //////////////////////////////////////////////////////////////////
/* *************** */
// clear tests
/* *************** */
@Test
public void test_clear0(){
    // Clearing an empty tree is a no-op.
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    assertEquals(0, myMap.size());
    myMap.clear();
    assertEquals(0, myMap.size());
}
@Test
public void test_clear1(){
    // ..... 42 .....
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    assertEquals(1, myMap.size());
    myMap.clear();
    assertEquals(0, myMap.size());
}
@Test
public void test_clear2right(){
    // Test tree:
    // ....42....
    // ........52
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(52, "Colleen");
    assertEquals(2, myMap.size());
    myMap.clear();
    assertEquals(0, myMap.size());
}
/* *************** */
// put tests
/* *************** */
// Was missing @Test, so this method was silently never executed.
// The old assertions also expected put() to return the value just inserted;
// the java.util.Map contract specifies that put() returns the value previously
// associated with the key, or null when the key was absent.
@Test
public void test_put_checkReturn(){
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    // Fresh keys: no previous mapping, so put() returns null.
    assertEquals(null, myMap.put(42, "Dodds"));
    assertEquals(null, myMap.put(26, "Ben"));
    assertEquals(null, myMap.put(52, "Colleen"));
    assertEquals(null, myMap.put(60, "Ran"));
    assertEquals(null, myMap.put(18, "Beth"));
    assertEquals(null, myMap.put(45, "Julie"));
    assertEquals(null, myMap.put(30, "Stone"));
    // Replacing an existing key returns the displaced value.
    assertEquals("Dodds", myMap.put(42, "Geoff"));
}
// Test put's replacement (i.e. adding a key that is already in the tree)
// Additionally, most tests rely on put
// Keys autobox; the deprecated new Integer(...) constructor is gone.
@Test
public void test_put_replacement1(){
    // Test tree:
    // ......42......
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    assertEquals("Dodds", myMap.get(42));
    myMap.put(42, "Beth");
    // Replace "Dodds" with "Beth" as the value for 42
    assertEquals("Beth", myMap.get(42));
    assertFalse(myMap.containsValue("Dodds"));
    assertTrue(myMap.containsValue("Beth"));
}
@Test
public void test_put_replacement2right(){
    // Test tree:
    // ....42....
    // ........52
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(52, "Colleen");
    assertEquals("Colleen", myMap.get(52));
    myMap.put(52, "Julie");
    // Replace "Colleen" with "Julie" as the value for 52
    assertEquals("Julie", myMap.get(52));
    assertFalse(myMap.containsValue("Colleen"));
    assertTrue(myMap.containsValue("Julie"));
}
@Test
public void test_put_replacement2left(){
    // Test tree:
    // ....42....
    // 26........
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    assertEquals("Ben", myMap.get(26));
    myMap.put(26, "Colleen");
    // Replace "Ben" with "Colleen" as the value for 26
    assertEquals("Colleen", myMap.get(26));
    assertFalse(myMap.containsValue("Ben"));
    assertTrue(myMap.containsValue("Colleen"));
}
@Test
public void test_put_replacement3(){
    // Test tree:
    // ....42....
    // 26......52
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Beth");
    assertEquals("Ben", myMap.get(26));
    myMap.put(26, "Colleen");
    // Replace "Ben" with "Colleen" as the value for 26
    assertEquals("Colleen", myMap.get(26));
    assertFalse(myMap.containsValue("Ben"));
    assertTrue(myMap.containsValue("Colleen"));
}
@Test
public void test_put_replacement4a(){
    // Test tree:
    // ......42......
    // ..26......52..
    // 18............
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(18, "Beth");
    assertEquals("Beth", myMap.get(18));
    myMap.put(18, "Julie");
    // Replace "Beth" with "Julie" as the value for 18
    assertEquals("Julie", myMap.get(18));
    assertFalse(myMap.containsValue("Beth"));
    assertTrue(myMap.containsValue("Julie"));
}
@Test
public void test_put_replacement4b(){
    // Test tree:
    // ......42......
    // ..26......52..
    // ....30........
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(30, "Stone");
    assertEquals("Stone", myMap.get(30));
    myMap.put(30, "Beth");
    // Replace "Stone" with "Beth" as the value for 30
    assertEquals("Beth", myMap.get(30));
    assertFalse(myMap.containsValue("Stone"));
    assertTrue(myMap.containsValue("Beth"));
}
@Test
public void test_put_replacement4c(){
    // Test tree:
    // ......42......
    // ..26......52..
    // ........45....
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.put(42, "Dodds");
    myMap.put(26, "Ben");
    myMap.put(52, "Colleen");
    myMap.put(45, "Julie");
    assertEquals("Colleen", myMap.get(52));
    myMap.put(52, "Julie");
    // Replace "Colleen" with "Julie" as the value for 52
    assertEquals("Julie", myMap.get(52));
    assertFalse(myMap.containsValue("Colleen"));
    assertTrue(myMap.containsValue("Julie"));
}
/* *************** */
// putAll tests
/* *************** */
@Test
public void test_putAll7(){
    // Resulting tree contains: 18, 26, 30, 42, 45, 52, 60
    // We don't have a guarantee of the structure when using putAll
    // (HashMap iteration order is unspecified).
    Map<Integer, String> inputMap = new HashMap<Integer, String>();
    inputMap.put(42, "Dodds");
    inputMap.put(26, "Ben");
    inputMap.put(52, "Colleen");
    inputMap.put(60, "Ran");
    inputMap.put(18, "Beth");
    inputMap.put(45, "Julie");
    inputMap.put(30, "Stone");
    UnbalancedBSTMap<Integer, String> myMap = new UnbalancedBSTMap<Integer, String>();
    myMap.putAll(inputMap);
    assertFalse(myMap.containsKey(10));
    assertTrue(myMap.containsKey(18));
    assertTrue(myMap.containsKey(26));
    assertTrue(myMap.containsKey(30));
    assertTrue(myMap.containsKey(42));
    assertTrue(myMap.containsKey(45));
    assertTrue(myMap.containsKey(52));
    assertTrue(myMap.containsKey(60));
    assertFalse(myMap.containsKey(90));
}
}
| |
package com.jamesg.forecastr.SpotFragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import com.google.android.gms.analytics.HitBuilders;
import com.google.android.gms.analytics.Tracker;
import com.jamesg.forecastr.ForecastrApplication;
import com.jamesg.forecastr.R;
import com.jamesg.forecastr.base.BaseSpotFragment;
import com.jamesg.forecastr.cards.HeaderCard;
import com.jamesg.forecastr.cards.LoadingCard;
import com.jamesg.forecastr.cards.MapCard;
import com.jamesg.forecastr.cards.SunCard;
import com.jamesg.forecastr.cards.SwellCard;
import com.jamesg.forecastr.cards.TideCard;
import com.jamesg.forecastr.cards.WeatherCard;
import com.jamesg.forecastr.cards.WindCard;
import com.jamesg.forecastr.data.Spot;
import com.jamesg.forecastr.data.SpotSearchedEvent;
import com.jamesg.forecastr.data.SpotUpdatedEvent;
import com.jamesg.forecastr.manager.SpotManager;
import com.jamesg.forecastr.utils.Logger;
import com.squareup.otto.Subscribe;
import javax.inject.Inject;
/**
 * Fragment showing the forecast for a single spot as a vertical stack of
 * "cards" (header, wind, weather, swell, tide, sun, map). Receives the spot
 * name and a searched-flag via its arguments Bundle and listens on the Otto
 * event bus for spot update/search events.
 */
public class SpotFragment extends BaseSpotFragment {
@Inject
SpotManager spotManager;
@Inject
Tracker tracker;
// Argument-bundle keys.
private static final String SPOT_NAME = "Spot_Name";
private static final String SPOT_SEARCHED = "Spot_Searched";
// TODO: Rename and change types of parameters
private String spotName;
// True when this spot came from a search (data must be fetched before cards render).
private boolean search;
// Card instances; any of these may be null depending on the spot's data.
WindCard windCard;
HeaderCard headerCard;
WeatherCard weatherCard;
SwellCard swellCard;
TideCard tideCard;
SunCard sunCard;
MapCard mapCard;
LoadingCard loadingCard;
/**
 * Use this factory method to create a new instance of
 * this fragment using the provided parameters.
 *
 * @param name Spot Name.
 * @param search whether the spot was reached via search (shows a loading card
 *               and triggers a data fetch instead of rendering cards directly).
 * @return A new instance of fragment SpotFragment.
 */
// TODO: Rename and change types and number of parameters
public static SpotFragment newInstance(String name, boolean search) {
SpotFragment fragment = new SpotFragment();
Bundle args = new Bundle();
args.putString(SPOT_NAME, name);
args.putBoolean(SPOT_SEARCHED, search);
fragment.setArguments(args);
return fragment;
}
public SpotFragment() {
// Required empty public constructor
}
@Override
public void onCreate(Bundle savedInstanceState) {
// Dagger injection must happen before any injected field is used below.
((ForecastrApplication) getActivity().getApplication()).inject(this);
super.onCreate(savedInstanceState);
if (getArguments() != null) {
spotName = getArguments().getString(SPOT_NAME);
search = getArguments().getBoolean(SPOT_SEARCHED);
}
// Send a screen view.
// NOTE(review): this actually sends an Event hit ("Viewed"/spotName), not a
// ScreenView hit, despite setScreenName above — confirm which was intended.
tracker.setScreenName("Spot");
tracker.send(new HitBuilders.EventBuilder()
.setCategory("Viewed")
.setAction(spotName)
.build());
int dateTab = 0;
if (this.mListener != null) {
dateTab = mListener.getDateTab();
}
Spot spot = spotManager.getSpot(spotName);
if (spot != null) {
getActivity().setTitle(spot.getName());
headerCard = new HeaderCard(getActivity(), spot, dateTab);
if (!search) {
// Known spot: build all cards the spot has data for.
windCard = new WindCard(getActivity(), spot, dateTab);
weatherCard = new WeatherCard(getActivity(), spot, dateTab);
if (spot.hasSwell()) {
swellCard = new SwellCard(getActivity(), spot, dateTab);
tideCard = new TideCard(getActivity(), spot, dateTab);
}
if(spot.hasSunData()) {
sunCard = new SunCard(getActivity(), spot, dateTab);
}
mapCard = new MapCard(getActivity(), spot, dateTab, getActivity().getSupportFragmentManager());
} else {
// Searched spot: show a loading card and fetch its data; cards are built
// later in spotUpdated()/updateCards() once the data arrives.
loadingCard = new LoadingCard(getActivity());
spotManager.getDataForSpot(spot);
}
}
}
/**
 * Bus handler for plain-string status messages ("Update Started"/"Update Finished").
 * Exceptions from the update callbacks are deliberately swallowed
 * (best-effort UI refresh; the fragment's view may already be gone).
 */
@Subscribe
public void getMessage(String s) {
Logger.d("BUS MESSAGE baseSpotFragment - " + s);
if (s.equals("Update Finished")) {
try {
updateFinished();
} catch (Exception e) {
//DO NOTHING
}
}else if(s.equals("Update Started")){
try {
updateStarted();
}catch(Exception e) {
//DO NOTHING
}
}
}
/** Bus handler: a search result arrived; only react if it is for this fragment's spot. */
@Subscribe
public void onSpotSearched(SpotSearchedEvent s) {
Logger.d("BUS SPOT SEARCHED baseSpotFragment - " + s.getName());
if (s.getName().equals(spotName)) {
spotUpdated(s.getSpot());
}
}
/** Bus handler: forecast data for a spot was refreshed; redraw if it is ours. */
@Subscribe
public void onSpotUpdated(SpotUpdatedEvent s) {
Logger.d("BUS SPOT UPDATED baseSpotFragment - " + s.getName());
if (s.getName().equals(spotName)) {
updateSpotData();
}
}
/** Registers the searched spot with the manager and rebuilds the card views. */
public void spotUpdated(Spot spot) {
spotManager.searchSpot(spot);
headerCard.setSearch(true);
updateCards(spot);
}
/**
 * Rebuilds all cards for the given spot and re-populates the content view.
 * NOTE(review): loadingCard is not cleared here, so a recreated view via
 * onCreateSpotsView would show it again — confirm that is intended.
 */
public void updateCards(Spot spot){
int dateTab = 0;
if(this.mListener != null){
dateTab = mListener.getDateTab();
}
windCard = new WindCard(getActivity(), spot, dateTab);
weatherCard = new WeatherCard(getActivity(), spot, dateTab);
if (spot.hasSwell()) {
swellCard = new SwellCard(getActivity(), spot, dateTab);
tideCard = new TideCard(getActivity(), spot, dateTab);
}
if(spot.hasSunData()) {
sunCard = new SunCard(getActivity(), spot, dateTab);
}
mapCard = new MapCard(getActivity(), spot, dateTab, getActivity().getSupportFragmentManager());
LinearLayout content_body = (LinearLayout) getActivity().findViewById(R.id.content_body);
content_body.removeAllViews();
LayoutInflater inflater = LayoutInflater.from(getActivity());
if(headerCard != null) content_body.addView(headerCard.getView(inflater));
if(windCard != null) content_body.addView(windCard.getView(inflater));
if(weatherCard != null) content_body.addView(weatherCard.getView(inflater));
if(swellCard != null) content_body.addView(swellCard.getView(inflater));
if(tideCard != null) content_body.addView(tideCard.getView(inflater));
if(sunCard != null) content_body.addView(sunCard.getView(inflater));
if(mapCard != null) content_body.addView(mapCard.getView(inflater));
}
/** Populates the freshly created view with whichever cards exist (nulls skipped). */
@Override
public void onCreateSpotsView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState, View view) {
LinearLayout content_body = (LinearLayout) view.findViewById(R.id.content_body);
if(headerCard != null) content_body.addView(headerCard.getView(inflater));
if(loadingCard != null) content_body.addView(loadingCard.getView(inflater));
if(windCard != null) content_body.addView(windCard.getView(inflater));
if(weatherCard != null) content_body.addView(weatherCard.getView(inflater));
if(swellCard != null) content_body.addView(swellCard.getView(inflater));
if(tideCard != null) content_body.addView(tideCard.getView(inflater));
if(sunCard != null) content_body.addView(sunCard.getView(inflater));
if(mapCard != null) content_body.addView(mapCard.getView(inflater));
}
/** Switches every existing card to the newly selected date tab. */
@Override
public void updateDateTab(int newDateTab){
if(headerCard != null) headerCard.updateView(newDateTab);
if(windCard != null) windCard.updateView(newDateTab);
if(weatherCard != null) weatherCard.updateView(newDateTab);
if(swellCard != null) swellCard.updateView(newDateTab);
if(tideCard != null) tideCard.updateView(newDateTab);
if(sunCard != null) sunCard.updateView(newDateTab);
if(mapCard != null) mapCard.updateView(newDateTab);
}
/** Refreshes every existing card after new forecast data arrived. */
@Override
public void updateSpotData(){
if(headerCard != null) headerCard.updateView();
if(windCard != null) windCard.updateView();
if(weatherCard != null) weatherCard.updateView();
if(swellCard != null) swellCard.updateView();
if(tideCard != null) tideCard.updateView();
if(sunCard != null) sunCard.updateView();
if(mapCard != null) mapCard.updateView();
}
}
| |
/**
*/
package substationStandard.LNNodes.LNGroupZ.impl;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import substationStandard.Dataclasses.ASG;
import substationStandard.Dataclasses.INS;
import substationStandard.LNNodes.LNGroupZ.LNGroupZPackage;
import substationStandard.LNNodes.LNGroupZ.ZTCF;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>ZTCF</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link substationStandard.LNNodes.LNGroupZ.impl.ZTCFImpl#getOpTmh <em>Op Tmh</em>}</li>
* <li>{@link substationStandard.LNNodes.LNGroupZ.impl.ZTCFImpl#getPwrFrq <em>Pwr Frq</em>}</li>
* </ul>
*
* @generated
*/
public class ZTCFImpl extends GroupZImpl implements ZTCF {
    /**
     * Cached value of the '{@link #getOpTmh() <em>Op Tmh</em>}' reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getOpTmh()
     * @generated
     * @ordered
     */
    protected INS opTmh;

    /**
     * Cached value of the '{@link #getPwrFrq() <em>Pwr Frq</em>}' reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getPwrFrq()
     * @generated
     * @ordered
     */
    protected ASG pwrFrq;

    /**
     * Creates a new ZTCF node instance.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected ZTCFImpl() {
        super();
    }

    /**
     * Returns the static EMF metaclass for this model object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return LNGroupZPackage.Literals.ZTCF;
    }

    /**
     * Returns the '<em>Op Tmh</em>' reference, resolving it first when the
     * cached value is still an EMF proxy. Fires a RESOLVE notification when
     * resolution replaced the cached value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public INS getOpTmh() {
        if (opTmh != null && opTmh.eIsProxy()) {
            InternalEObject proxy = (InternalEObject) opTmh;
            opTmh = (INS) eResolveProxy(proxy);
            if (opTmh != proxy && eNotificationRequired()) {
                eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupZPackage.ZTCF__OP_TMH, proxy, opTmh));
            }
        }
        return opTmh;
    }

    /**
     * Returns the cached '<em>Op Tmh</em>' reference without proxy resolution.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public INS basicGetOpTmh() {
        return opTmh;
    }

    /**
     * Sets the '<em>Op Tmh</em>' reference and fires a SET notification.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setOpTmh(INS newOpTmh) {
        INS previous = opTmh;
        opTmh = newOpTmh;
        if (eNotificationRequired()) {
            eNotify(new ENotificationImpl(this, Notification.SET, LNGroupZPackage.ZTCF__OP_TMH, previous, opTmh));
        }
    }

    /**
     * Returns the '<em>Pwr Frq</em>' reference, resolving it first when the
     * cached value is still an EMF proxy. Fires a RESOLVE notification when
     * resolution replaced the cached value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ASG getPwrFrq() {
        if (pwrFrq != null && pwrFrq.eIsProxy()) {
            InternalEObject proxy = (InternalEObject) pwrFrq;
            pwrFrq = (ASG) eResolveProxy(proxy);
            if (pwrFrq != proxy && eNotificationRequired()) {
                eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupZPackage.ZTCF__PWR_FRQ, proxy, pwrFrq));
            }
        }
        return pwrFrq;
    }

    /**
     * Returns the cached '<em>Pwr Frq</em>' reference without proxy resolution.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ASG basicGetPwrFrq() {
        return pwrFrq;
    }

    /**
     * Sets the '<em>Pwr Frq</em>' reference and fires a SET notification.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setPwrFrq(ASG newPwrFrq) {
        ASG previous = pwrFrq;
        pwrFrq = newPwrFrq;
        if (eNotificationRequired()) {
            eNotify(new ENotificationImpl(this, Notification.SET, LNGroupZPackage.ZTCF__PWR_FRQ, previous, pwrFrq));
        }
    }

    /**
     * Reflective feature getter used by the EMF runtime.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        if (featureID == LNGroupZPackage.ZTCF__OP_TMH) {
            return resolve ? getOpTmh() : basicGetOpTmh();
        }
        if (featureID == LNGroupZPackage.ZTCF__PWR_FRQ) {
            return resolve ? getPwrFrq() : basicGetPwrFrq();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * Reflective feature setter used by the EMF runtime.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        if (featureID == LNGroupZPackage.ZTCF__OP_TMH) {
            setOpTmh((INS) newValue);
        } else if (featureID == LNGroupZPackage.ZTCF__PWR_FRQ) {
            setPwrFrq((ASG) newValue);
        } else {
            super.eSet(featureID, newValue);
        }
    }

    /**
     * Reflective feature un-setter used by the EMF runtime.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        if (featureID == LNGroupZPackage.ZTCF__OP_TMH) {
            setOpTmh((INS) null);
        } else if (featureID == LNGroupZPackage.ZTCF__PWR_FRQ) {
            setPwrFrq((ASG) null);
        } else {
            super.eUnset(featureID);
        }
    }

    /**
     * Reflective "is the feature set?" check used by the EMF runtime.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        if (featureID == LNGroupZPackage.ZTCF__OP_TMH) {
            return opTmh != null;
        }
        if (featureID == LNGroupZPackage.ZTCF__PWR_FRQ) {
            return pwrFrq != null;
        }
        return super.eIsSet(featureID);
    }
} //ZTCFImpl
| |
/*
* Copyright (c) 2015 Layer. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.layer.atlas;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeSet;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.PorterDuff.Mode;
import android.graphics.PorterDuffXfermode;
import android.graphics.Typeface;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.GradientDrawable;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.AttributeSet;
import android.util.Log;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.BaseAdapter;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import com.layer.atlas.Atlas.Participant;
import com.layer.atlas.Atlas.ParticipantProvider;
import com.layer.atlas.Atlas.Tools;
/**
* @author Oleg Orlov
* @since 27 Apr 2015
*/
public class AtlasParticipantPicker extends FrameLayout {
    private static final String TAG = AtlasParticipantPicker.class.getSimpleName();
    private static final boolean debug = false;

    // participants picker views, bound in init()
    private View rootView;
    private EditText textFilter;
    private ListView participantsList;
    private ViewGroup selectedParticipantsContainer;

    private ParticipantProvider participantProvider;
    // user ids never offered by the picker (e.g. the authenticated user)
    private TreeSet<String> skipUserIds = new TreeSet<String>();
    // scratch buffer refilled by participantProvider on every filter change
    private final Map<String, Participant> filteredParticipants = new HashMap<String, Participant>();
    private ArrayList<String> selectedParticipantIds = new ArrayList<String>();
    // rows currently displayed by participantsAdapter (filtered minus selected/skipped)
    private final ArrayList<ParticipantEntry> participantsForAdapter = new ArrayList<ParticipantEntry>();
    private BaseAdapter participantsAdapter;

    // styles, parsed from XML attributes in parseStyle()
    private int inputTextColor;
    private Typeface inputTextTypeface;
    private int inputTextStyle;
    private int listTextColor;
    private Typeface listTextTypeface;
    private int listTextStyle;
    private int chipBackgroundColor;
    private int chipTextColor;
    private Typeface chipTextTypeface;
    private int chipTextStyle;

    // circular alpha masks for avatars, applied with a DST_IN xfermode (see setupPaints())
    private Bitmap maskSingleBmp = Bitmap.createBitmap((int)Tools.getPxFromDp(32, getContext()), (int)Tools.getPxFromDp(32, getContext()), Config.ARGB_8888);
    private Paint avatarPaint = new Paint();
    private Paint maskPaint = new Paint();
    private int avatarBackgroundColor;
    private Bitmap maskSmallBmp = Bitmap.createBitmap((int)Tools.getPxFromDp(24, getContext()), (int)Tools.getPxFromDp(24, getContext()), Config.ARGB_8888);

    public AtlasParticipantPicker(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        parseStyle(context, attrs, defStyle);
        setupPaints();
    }

    public AtlasParticipantPicker(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public AtlasParticipantPicker(Context context) {
        super(context);
        // NOTE(review): this constructor never calls parseStyle(), so all style
        // fields keep their zero defaults when the view is built from code —
        // confirm this is intended before relying on programmatic construction.
        setupPaints();
    }

    /**
     * Inflates the picker layout and wires up filtering, selection and key handling.
     * Must be called exactly once per instance.
     *
     * @param userIdToSkip        ids to exclude from the pickable list; may be null
     * @param participantProvider source of participants; must not be null
     * @throws IllegalArgumentException if participantProvider is null
     * @throws IllegalStateException    if init() was already called
     */
    public void init(String[] userIdToSkip, ParticipantProvider participantProvider) {
        if (participantProvider == null) throw new IllegalArgumentException("ParticipantProvider cannot be null");
        if (participantsList != null) throw new IllegalStateException("AtlasParticipantPicker is already initialized!");
        LayoutInflater.from(getContext()).inflate(R.layout.atlas_participants_picker, this);
        this.participantProvider = participantProvider;
        if (userIdToSkip != null) skipUserIds.addAll(Arrays.asList(userIdToSkip));
        // START OF -------------------- Participant Picker ----------------------------------------
        this.rootView = this;
        textFilter = (EditText) rootView.findViewById(R.id.atlas_participants_picker_text);
        participantsList = (ListView) rootView.findViewById(R.id.atlas_participants_picker_list);
        selectedParticipantsContainer = (ViewGroup) rootView.findViewById(R.id.atlas_participants_picker_names);
        if (rootView.getVisibility() == View.VISIBLE) {
            textFilter.requestFocus();
        }
        // log focuses
        final View scroller = rootView.findViewById(R.id.atlas_participants_picker_scroll);
        scroller.setOnFocusChangeListener(new OnFocusChangeListener() {
            @Override
            public void onFocusChange(View v, boolean hasFocus) {
                if (debug) Log.w(TAG, "scroller.onFocusChange() hasFocus: " + hasFocus);
            }
        });
        selectedParticipantsContainer.setOnFocusChangeListener(new OnFocusChangeListener() {
            public void onFocusChange(View v, boolean hasFocus) {
                if (debug) Log.w(TAG, "names.onFocusChange() hasFocus: " + hasFocus);
            }
        });
        // If filter.requestFocus is called from .onClickListener - filter receives focus, but
        // NamesLayout receives it immediately after that. So filter lose it.
        // XXX: scroller also receives focus
        selectedParticipantsContainer.setOnTouchListener(new OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                if (debug) Log.w(TAG, "names.onTouch() event: " + event);
                if (event.getAction() == MotionEvent.ACTION_DOWN) // ACTION_UP never comes if
                    textFilter.requestFocus();                    // there is no .onClickListener
                return false;
            }
        });
        textFilter.setOnFocusChangeListener(new OnFocusChangeListener() {
            public void onFocusChange(View v, boolean hasFocus) {
                View focused = selectedParticipantsContainer.hasFocus() ? selectedParticipantsContainer : selectedParticipantsContainer.findFocus();
                if (debug) Log.w(TAG, "filter.onFocusChange() hasFocus: " + hasFocus + ", focused: " + focused);
                if (hasFocus) {
                    participantsList.setVisibility(View.VISIBLE);
                }
                v.post(new Runnable() { // check focus runnable
                    @Override
                    public void run() {
                        if (debug) Log.w(TAG, "filter.onFocusChange.run() filter.focus: " + textFilter.hasFocus());
                        if (debug) Log.w(TAG, "filter.onFocusChange.run() names.focus: " + selectedParticipantsContainer.hasFocus());
                        if (debug) Log.w(TAG, "filter.onFocusChange.run() scroller.focus: " + scroller.hasFocus());
                        // check focus is on any descendants and hide list otherwise
                        View focused = selectedParticipantsContainer.hasFocus() ? selectedParticipantsContainer : selectedParticipantsContainer.findFocus();
                        if (focused == null) {
                            participantsList.setVisibility(View.GONE);
                            textFilter.setText("");
                        }
                    }
                });
            }
        });
        participantsList.setAdapter(participantsAdapter = new BaseAdapter() {
            public View getView(int position, View convertView, ViewGroup parent) {
                if (convertView == null) {
                    convertView = LayoutInflater.from(parent.getContext()).inflate(R.layout.atlas_view_participants_picker_convert, parent, false);
                }
                TextView name = (TextView) convertView.findViewById(R.id.atlas_view_participants_picker_convert_name);
                TextView avatarText = (TextView) convertView.findViewById(R.id.atlas_view_participants_picker_convert_ava);
                ImageView avatarImgView = (ImageView) convertView.findViewById(R.id.atlas_view_participants_picker_convert_avatar_img);
                // Reuse the recycled view's bitmap when possible to avoid reallocating per row.
                Bitmap avatarBmp;
                if (avatarImgView.getDrawable() instanceof BitmapDrawable) {
                    avatarBmp = ((BitmapDrawable) avatarImgView.getDrawable()).getBitmap();
                } else {
                    avatarBmp = Bitmap.createBitmap(maskSingleBmp.getWidth(), maskSingleBmp.getHeight(), Config.ARGB_8888);
                }
                Canvas avatarCanvas = new Canvas(avatarBmp);
                avatarCanvas.drawColor(avatarBackgroundColor, Mode.CLEAR);
                avatarCanvas.drawColor(avatarBackgroundColor);
                ParticipantEntry entry = participantsForAdapter.get(position);
                if (entry != null) {
                    // ParticipantEntry's constructor rejects null participants, so the
                    // dereference here is safe. (The original checked participant != null
                    // only AFTER calling entry.participant.getAvatarDrawable() — fixed.)
                    name.setText(Atlas.getFullName(entry.participant));
                    Drawable avatarDrawable = entry.participant.getAvatarDrawable();
                    if (avatarDrawable != null) {
                        avatarDrawable.setBounds(0, 0, avatarBmp.getWidth(), avatarBmp.getHeight());
                        avatarDrawable.draw(avatarCanvas);
                        avatarText.setVisibility(View.INVISIBLE);
                    } else {
                        avatarText.setVisibility(View.VISIBLE);
                        avatarText.setText(Atlas.getInitials(entry.participant));
                    }
                } else {
                    name.setText("Unknown user");
                    avatarText.setText("?");
                }
                avatarCanvas.drawBitmap(maskSingleBmp, 0, 0, maskPaint);
                avatarImgView.setImageBitmap(avatarBmp);
                // apply styles
                name.setTextColor(listTextColor);
                name.setTypeface(listTextTypeface, listTextStyle);
                avatarText.setTextColor(listTextColor);
                avatarText.setTypeface(listTextTypeface, listTextStyle);
                return convertView;
            }
            public long getItemId(int position) {
                return participantsForAdapter.get(position).id.hashCode();
            }
            public Object getItem(int position) {
                return participantsForAdapter.get(position);
            }
            public int getCount() {
                return participantsForAdapter.size();
            }
        });
        participantsList.setOnItemClickListener(new OnItemClickListener() {
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                ParticipantEntry entry = participantsForAdapter.get(position);
                selectedParticipantIds.add(entry.id);
                refreshParticipants(selectedParticipantIds);
                textFilter.setText("");
                textFilter.requestFocus();
                filterParticipants(""); // refresh participantList
            }
        });
        // track text and filter participant list
        textFilter.addTextChangedListener(new TextWatcher() {
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
                if (debug) Log.w(TAG, "beforeTextChanged() s: " + s + " start: " + start + " count: " + count + " after: " + after);
            }
            public void onTextChanged(CharSequence s, int start, int before, int count) {
                if (debug) Log.w(TAG, "onTextChanged() s: " + s + " start: " + start + " before: " + before + " count: " + count);
                final String filter = s.toString().toLowerCase();
                filterParticipants(filter);
            }
            public void afterTextChanged(Editable s) {
                if (debug) Log.w(TAG, "afterTextChanged() s: " + s);
            }
        });
        // select last added participant when press "Backspace/Del"
        textFilter.setOnKeyListener(new OnKeyListener() {
            public boolean onKey(View v, int keyCode, KeyEvent event) {
                if (debug) Log.w(TAG, "onKey() keyCode: " + keyCode + ", event: " + event);
                if (keyCode == KeyEvent.KEYCODE_DEL && event.getAction() == KeyEvent.ACTION_DOWN && textFilter.getText().length() == 0 && selectedParticipantIds.size() > 0) {
                    selectedParticipantIds.remove(selectedParticipantIds.size() - 1);
                    refreshParticipants(selectedParticipantIds);
                    filterParticipants("");
                    textFilter.requestFocus();
                }
                return false;
            }
        });
        // END OF ---------------------- Participant Picker ----------------------------------------
        filterParticipants("");
        applyStyle();
    }

    /**
     * Rebuilds the chip views for the given selection, keeping the EditText in
     * place (removing/re-adding it would make the soft keyboard blink).
     */
    private void refreshParticipants(final ArrayList<String> selectedParticipantIds) {
        // remove name_converts first. Better to keep editText in place rather than add/remove that force keyboard to blink
        for (int i = selectedParticipantsContainer.getChildCount() - 1; i >= 0; i--) {
            View child = selectedParticipantsContainer.getChildAt(i);
            if (child != textFilter) {
                selectedParticipantsContainer.removeView(child);
            }
        }
        if (debug) Log.w(TAG, "refreshParticipants() childs left: " + selectedParticipantsContainer.getChildCount());
        for (String id : selectedParticipantIds) {
            Participant participant = participantProvider.getParticipant(id);
            View participantView = LayoutInflater.from(selectedParticipantsContainer.getContext()).inflate(R.layout.atlas_view_participants_picker_name_convert, selectedParticipantsContainer, false);
            TextView avaText = (TextView) participantView.findViewById(R.id.atlas_view_participants_picker_name_convert_ava);
            // Chip avatars are text-only (initials); drawable rendering was abandoned here.
            avaText.setText(Atlas.getInitials(participant));
            TextView nameText = (TextView) participantView.findViewById(R.id.atlas_view_participants_picker_name_convert_name);
            nameText.setText(Atlas.getFullName(participant));
            participantView.setTag(participant);
            // insert before the trailing EditText so the filter stays last
            selectedParticipantsContainer.addView(participantView, selectedParticipantsContainer.getChildCount() - 1);
            if (debug) Log.w(TAG, "refreshParticipants() child added: " + participantView + ", for: " + participant);
            // apply styles
            avaText.setTextColor(chipTextColor);
            avaText.setTypeface(chipTextTypeface, chipTextStyle);
            nameText.setTextColor(chipTextColor);
            nameText.setTypeface(chipTextTypeface, chipTextStyle);
            View container = participantView.findViewById(R.id.atlas_view_participants_picker_name_convert);
            GradientDrawable drawable = (GradientDrawable) container.getBackground();
            drawable.setColor(chipBackgroundColor);
        }
        if (selectedParticipantIds.size() == 0) {
            // Let the input span the full row when no chips are shown.
            // FIX: the original built these params but never applied them.
            LayoutParams params = new LayoutParams(textFilter.getLayoutParams());
            params.width = LayoutParams.MATCH_PARENT;
            textFilter.setLayoutParams(params);
        }
        selectedParticipantsContainer.requestLayout();
    }

    /** Refills the adapter with participants matching {@code filter}, skipping selected/excluded ids. */
    private void filterParticipants(final String filter) {
        filteredParticipants.clear();
        participantProvider.getParticipants(filter, filteredParticipants);
        if (debug) Log.w(TAG, "filterParticipants() filtered: " + filteredParticipants.size() + ", filter: " + filter);
        participantsForAdapter.clear();
        for (Map.Entry<String, Participant> entry : filteredParticipants.entrySet()) {
            if (selectedParticipantIds.contains(entry.getKey())) continue;
            if (skipUserIds.contains(entry.getKey())) continue;
            participantsForAdapter.add(new ParticipantEntry(entry.getValue(), entry.getKey()));
        }
        Collections.sort(participantsForAdapter, new ParticipantEntryFilteringComparator(filter));
        if (debug) Log.w(TAG, "filterParticipants() participants to show: " + participantsForAdapter.size());
        participantsAdapter.notifyDataSetChanged();
    }

    /** Prepares the paints and the circular mask used to round avatar bitmaps. */
    private void setupPaints() {
        avatarPaint.setAntiAlias(true);
        avatarPaint.setDither(true);
        maskPaint.setAntiAlias(true);
        maskPaint.setDither(true);
        // DST_IN keeps avatar pixels only where the mask circle is opaque
        maskPaint.setXfermode(new PorterDuffXfermode(Mode.DST_IN));
        Paint paintCircle = new Paint();
        paintCircle.setStyle(Style.FILL_AND_STROKE);
        paintCircle.setColor(Color.CYAN);
        paintCircle.setAntiAlias(true);
        Canvas maskSingleCanvas = new Canvas(maskSingleBmp);
        maskSingleCanvas.drawCircle(0.5f * maskSingleBmp.getWidth(), 0.5f * maskSingleBmp.getHeight(), 0.5f * maskSingleBmp.getWidth(), paintCircle);
        avatarBackgroundColor = getResources().getColor(R.color.atlas_shape_avatar_gray);
    }

    /** Reads the styleable attributes into the style fields. */
    private void parseStyle(Context context, AttributeSet attrs, int defStyle) {
        TypedArray ta = context.getTheme().obtainStyledAttributes(attrs, R.styleable.AtlasParticipantPicker, R.attr.AtlasParticipantPicker, defStyle);
        this.inputTextColor = ta.getColor(R.styleable.AtlasParticipantPicker_inputTextColor, context.getResources().getColor(R.color.atlas_text_black));
        this.inputTextStyle = ta.getInt(R.styleable.AtlasParticipantPicker_inputTextStyle, Typeface.NORMAL);
        String inputTextTypefaceName = ta.getString(R.styleable.AtlasParticipantPicker_inputTextTypeface);
        this.inputTextTypeface = inputTextTypefaceName != null ? Typeface.create(inputTextTypefaceName, inputTextStyle) : null;
        this.listTextColor = ta.getColor(R.styleable.AtlasParticipantPicker_listTextColor, context.getResources().getColor(R.color.atlas_text_black));
        this.listTextStyle = ta.getInt(R.styleable.AtlasParticipantPicker_listTextStyle, Typeface.NORMAL);
        String listTextTypefaceName = ta.getString(R.styleable.AtlasParticipantPicker_listTextTypeface);
        // FIX: was created with inputTextStyle (copy-paste error); now uses listTextStyle
        this.listTextTypeface = listTextTypefaceName != null ? Typeface.create(listTextTypefaceName, listTextStyle) : null;
        this.chipBackgroundColor = ta.getColor(R.styleable.AtlasParticipantPicker_chipBackgroundColor, context.getResources().getColor(R.color.atlas_background_gray));
        this.chipTextColor = ta.getColor(R.styleable.AtlasParticipantPicker_chipTextColor, context.getResources().getColor(R.color.atlas_text_black));
        this.chipTextStyle = ta.getInt(R.styleable.AtlasParticipantPicker_chipTextStyle, Typeface.NORMAL);
        String chipTextTypefaceName = ta.getString(R.styleable.AtlasParticipantPicker_chipTextTypeface);
        // FIX: was created with inputTextStyle (copy-paste error); now uses chipTextStyle
        this.chipTextTypeface = chipTextTypefaceName != null ? Typeface.create(chipTextTypefaceName, chipTextStyle) : null;
        ta.recycle();
    }

    /** Applies the parsed styles to the chips, list rows and input field. */
    private void applyStyle() {
        refreshParticipants(selectedParticipantIds);
        participantsAdapter.notifyDataSetChanged();
        textFilter.setTextColor(inputTextColor);
        textFilter.setTypeface(inputTextTypeface, inputTextStyle);
    }

    /** @return the ids of the currently selected participants, in selection order */
    public String[] getSelectedUserIds() {
        return selectedParticipantIds.toArray(new String[selectedParticipantIds.size()]);
    }

    @Override
    public void setVisibility(int visibility) {
        super.setVisibility(visibility);
        if (visibility == View.VISIBLE) {
            textFilter.requestFocus();
        }
    }

    /** Orders participants by where {@code filter} matches in their names (earlier match first). */
    private static final class FilteringComparator implements Comparator<Participant> {
        private final String filter;
        /**
         * @param filter - the less indexOf(filter) the less order of participant
         */
        public FilteringComparator(String filter) {
            this.filter = filter;
        }
        @Override
        public int compare(Participant lhs, Participant rhs) {
            int result = subCompareCaseInsensitive(lhs.getFirstName(), rhs.getFirstName());
            if (result != 0) return result;
            return subCompareCaseInsensitive(lhs.getLastName(), rhs.getLastName());
        }
        private int subCompareCaseInsensitive(String lhs, String rhs) {
            int left = lhs != null ? lhs.toLowerCase().indexOf(filter) : -1;
            int right = rhs != null ? rhs.toLowerCase().indexOf(filter) : -1;
            if (left == -1 && right == -1) return 0;
            if (left != -1 && right == -1) return -1;
            if (left == -1 && right != -1) return 1;
            if (left != right) return Integer.compare(left, right);
            return String.CASE_INSENSITIVE_ORDER.compare(lhs, rhs);
        }
    }

    /** Adapts {@link FilteringComparator} to {@link ParticipantEntry} rows. */
    private static final class ParticipantEntryFilteringComparator implements Comparator<ParticipantEntry> {
        FilteringComparator comparator;
        public ParticipantEntryFilteringComparator(String filter) {
            this.comparator = new FilteringComparator(filter);
        }
        @Override
        public int compare(ParticipantEntry lhs, ParticipantEntry rhs) {
            return comparator.compare(lhs.participant, rhs.participant);
        }
    }

    /** Immutable (participant, id) pair shown as one row of the picker list. */
    private static class ParticipantEntry {
        final Participant participant;
        final String id;
        public ParticipantEntry(Participant participant, String id) {
            if (participant == null) throw new IllegalArgumentException("Participant cannot be null");
            if (id == null) throw new IllegalArgumentException("ID cannot be null");
            this.participant = participant;
            this.id = id;
        }
    }
}
| |
package com.caterpillar.launcher;
/* @author kaivnlp@gmail.com
* */
import java.awt.Color;
import java.awt.EventQueue;
import java.awt.Font;
import java.awt.Image;
import java.awt.SystemColor;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.File;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.AbstractAction;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JProgressBar;
import javax.swing.JScrollPane;
import javax.swing.JSeparator;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.SwingConstants;
import javax.swing.UIManager;
import javax.swing.border.EtchedBorder;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import com.caterpillar.dataobject.FileData;
import com.caterpillar.parsers.FileTypeChecker;
import com.caterpillar.ui.About;
import com.caterpillar.ui.CaterpillarTableModel;
public class CaterPillar implements ActionListener, PropertyChangeListener {
private JFrame frmChronolog;
private JTextField inputFolder;
private JTextField outputFolder;
private JTable table;
private Object[][] tbldata;
private CaterpillarTableModel tblmodel;
private JProgressBar progressBar;
private static Logger logger = Logger
.getLogger("com.caterpillar.launcher.CaterPillar");
/**
* Launch the application.
*/
public static void main(String[] args) {
try {
UIManager.setLookAndFeel("javax.swing.plaf.metal.MetalLookAndFeel");
LoggerCat.initLogger();
logger.setLevel(Level.ALL);
logger.addHandler(LoggerCat.HANDLER);
} catch (ClassNotFoundException e1) {
// TODO Auto-generated catch block
logger.log(Level.SEVERE, e1.getMessage());
} catch (Exception e1) {
logger.log(Level.SEVERE, e1.getMessage());
}
EventQueue.invokeLater(new Runnable() {
public void run() {
try {
CaterPillar window = new CaterPillar();
window.frmChronolog.setVisible(true);
window.frmChronolog.setLocationRelativeTo(null);
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
/**
* Create the application.
*/
public CaterPillar() {
initialize();
}
/**
* Initialise the contents of the frame.
*/
private void initialize() {
logger.log(Level.INFO, "Initializing UI");
frmChronolog = new JFrame();
frmChronolog.setTitle("Chrono-Log");
frmChronolog.getContentPane().setBackground(new Color(192, 192, 192));
frmChronolog.setResizable(false);
URL url = CaterPillar.class.getResource("/caterpillar.jpg");
Image image = Toolkit.getDefaultToolkit().getImage(url);
frmChronolog.setIconImage(image);
frmChronolog.setBounds(100, 100, 950, 433);
frmChronolog.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frmChronolog.getContentPane().setLayout(null);
/*
* Menubar constructions
*/
JMenuBar menuBar = new JMenuBar();
menuBar.setBackground(new Color(192, 192, 192));
menuBar.setBounds(0, 0, 932, 21);
frmChronolog.getContentPane().add(menuBar);
JMenu mnNewMenu = new JMenu("File");
menuBar.add(mnNewMenu);
JMenuItem mntmNewMenuItem = new JMenuItem("Close");
mntmNewMenuItem.setBackground(new Color(192, 192, 192));
mntmNewMenuItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
frmChronolog.dispose();
}
});
mnNewMenu.add(mntmNewMenuItem);
JMenu mnAbout = new JMenu("About");
mnAbout.setBackground(new Color(192, 192, 192));
menuBar.add(mnAbout);
JMenuItem mntmAboutCaterpillar = new JMenuItem("About caterpillar");
mntmAboutCaterpillar.setBackground(new Color(192, 192, 192));
mntmAboutCaterpillar.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
About awindow = new About();
awindow.setLocationRelativeTo(frmChronolog);
awindow.setVisible(true);
// frmChronolog.dispose();
}
});
mnAbout.add(mntmAboutCaterpillar);
final JPanel panel = new JPanel();
panel.setBackground(new Color(192, 192, 192));
panel.setBounds(0, 21, 942, 355);
frmChronolog.getContentPane().add(panel);
panel.setLayout(null);
final JLabel lblChooseFilesFrom = new JLabel("Choose Application Logs");
lblChooseFilesFrom.setBounds(10, 11, 145, 14);
panel.add(lblChooseFilesFrom);
final JButton browseBtn = new JButton("Browse");
browseBtn.setHorizontalTextPosition(SwingConstants.LEFT);
browseBtn.setFont(new Font("Tahoma", Font.PLAIN, 9));
browseBtn.setBounds(385, 9, 65, 19);
panel.add(browseBtn);
inputFolder = new JTextField();
inputFolder.setBackground(new Color(250, 250, 210));
inputFolder.setBounds(180, 8, 205, 20);
panel.add(inputFolder);
inputFolder.setColumns(10);
JLabel lblSelectOutputFile = new JLabel("Select Output File Name");
lblSelectOutputFile.setBounds(10, 281, 159, 14);
panel.add(lblSelectOutputFile);
outputFolder = new JTextField();
outputFolder.setBounds(10, 307, 321, 20);
panel.add(outputFolder);
outputFolder.setColumns(10);
outputFolder.setText("chrono-log" + "_" + System.currentTimeMillis()
+ ".txt");
final JButton btnNewButton_3 = new JButton("Go!!");
btnNewButton_3.setBackground(new Color(248, 248, 255));
btnNewButton_3.setForeground(Color.BLACK);
btnNewButton_3.setBounds(385, 308, 65, 19);
panel.add(btnNewButton_3);
JScrollPane scrollPane = new JScrollPane();
scrollPane.setViewportBorder(null);
scrollPane.setBounds(10, 55, 866, 198);
panel.add(scrollPane);
table = new JTable();
table.setFillsViewportHeight(true);
table.setFont(new Font("Tahoma", Font.PLAIN, 12));
table.setBorder(null);
tblmodel = new CaterpillarTableModel();// DefaultTableModel();
table.setModel(tblmodel);
tblmodel.setColumns(table);
scrollPane.setViewportView(table);
JSeparator separator = new JSeparator();
scrollPane.setColumnHeaderView(separator);
final JButton btnNewButton_1 = new JButton("");
btnNewButton_1.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
tblmodel.setData(new Object[0][0]);
tblmodel.fireTableDataChanged();
}
});
btnNewButton_1.setBackground(new Color(192, 192, 192));
btnNewButton_1.setToolTipText("Clear Table");
URL url2 = CaterPillar.class.getResource("/trash.jpg");
Image image2 = Toolkit.getDefaultToolkit().getImage(url2);
btnNewButton_1.setIcon(new ImageIcon(image2));
btnNewButton_1.setBounds(879, 55, 51, 40);
panel.add(btnNewButton_1);
JSeparator separator_1 = new JSeparator();
separator_1.setBounds(0, 353, 942, 2);
panel.add(separator_1);
progressBar = new JProgressBar();
progressBar.setStringPainted(true);
progressBar.setBorder(new EtchedBorder(EtchedBorder.LOWERED, new Color(
176, 224, 230), null));
progressBar.setFont(new Font("Tahoma", Font.PLAIN, 11));
progressBar.addChangeListener(new ChangeListener() {
public void stateChanged(ChangeEvent arg0) {
}
});
progressBar.setBackground(new Color(192, 192, 192));
progressBar.setBounds(461, 378, 481, 16);
progressBar.setForeground(SystemColor.desktop);
frmChronolog.getContentPane().add(progressBar);
final JLabel statusText = new JLabel("");
statusText.setBorder(new EtchedBorder(EtchedBorder.LOWERED, new Color(
173, 216, 230), null));
statusText.setFont(new Font("Tahoma", Font.PLAIN, 12));
statusText.setBounds(0, 378, 458, 16);
frmChronolog.getContentPane().add(statusText);
browseBtn.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
JFileChooser fc = new JFileChooser(".");
fc.setMultiSelectionEnabled(true);
if (inputFolder.getText() != null
&& !inputFolder.getText().trim().equals(""))
fc.setCurrentDirectory(new File(inputFolder.getText()));
int returnVal = fc.showOpenDialog(frmChronolog);
// Here it is closed. What is chosen?
if (returnVal == JFileChooser.APPROVE_OPTION) {
inputFolder.setText(fc.getCurrentDirectory()
.getAbsolutePath());
inputFolder.setToolTipText(fc.getCurrentDirectory()
.getAbsolutePath());
File[] selFiles = fc.getSelectedFiles();
tbldata = new Object[selFiles.length][10];
for (int i = 0; i <= selFiles.length - 1; i++) {
FileTypeChecker fsChecker = new FileTypeChecker(
selFiles[i].getAbsolutePath());
ArrayList<?> fattrib = fsChecker.getFileType();
if (fattrib.size() > 0) {
tbldata[i][0] = selFiles[i].getName();
tbldata[i][1] = selFiles[i].getAbsolutePath();
tbldata[i][2] = fattrib.get(1);
tbldata[i][3] = fattrib.get(0);
tbldata[i][4] = fattrib.get(2);
tbldata[i][5] = fattrib.get(3);
tbldata[i][6] = fattrib.get(4);
tbldata[i][7] = fattrib.get(5);
tbldata[i][8] = fattrib.get(6);
tbldata[i][9] = "yes";
} else {
tbldata[i][0] = selFiles[i].getName();
tbldata[i][1] = selFiles[i].getAbsolutePath();
tbldata[i][2] = "";
tbldata[i][3] = "";
tbldata[i][4] = "";
tbldata[i][5] = "";
tbldata[i][6] = "";
tbldata[i][7] = "";
tbldata[i][8] = "";
tbldata[i][9] = "no";
}
}
}
tblmodel.setData(tbldata);
tblmodel.fireTableDataChanged();
}
});
btnNewButton_3.addActionListener(new ActionListener() {
	/**
	 * Collects every table row into a FileData list and hands the list to a
	 * Sequencer worker, wiring the worker's progress into the progress bar.
	 */
	public void actionPerformed(ActionEvent arg0) {
		int rowCount = table.getModel().getRowCount();
		int colCount = table.getModel().getColumnCount();
		List<FileData> flist = new ArrayList<FileData>();
		// Disabled while the worker runs; the Sequencer is handed the button
		// so it can re-enable it when done.
		btnNewButton_1.setEnabled(false);
		for (int i = 0; i < rowCount; i++) {
			// Join the row's cells with commas. Fix: was O(n^2) String
			// concatenation (plus an unnecessary String cast on cell 0);
			// StringBuilder.append(Object) produces the same text.
			StringBuilder row = new StringBuilder();
			for (int j = 0; j < colCount; j++) {
				if (j > 0) {
					row.append(',');
				}
				row.append(table.getModel().getValueAt(i, j));
			}
			String rowString = row.toString();
			logger.log(Level.FINE, "File Row on UI " + rowString);
			flist.add(new FileData(rowString));
		}
		logger.log(Level.INFO, "Calling Sequencer with File Data List "
				+ flist);
		Sequencer chrono = new Sequencer(flist, outputFolder.getText(),
				statusText, btnNewButton_1);
		chrono.addPropertyChangeListener(new PropertyChangeListener() {
			@Override
			public void propertyChange(PropertyChangeEvent evt) {
				if ("progress".equals(evt.getPropertyName())) {
					progressBar.setValue((Integer) evt.getNewValue());
				}
			}
		});
		// SwingWorker-style execute: runs off the event dispatch thread.
		chrono.execute();
	}
});
}
@SuppressWarnings("serial")
private class SwingAction extends AbstractAction {

	/** Configures the action's short description and display name. */
	public SwingAction() {
		putValue(SHORT_DESCRIPTION, "Some short description");
		putValue(NAME, "SwingAction");
	}

	/** No-op: this placeholder action performs no work when triggered. */
	public void actionPerformed(ActionEvent e) {
	}
}
@Override
public void actionPerformed(ActionEvent arg0) {
	// Intentionally empty: the enclosing class implements ActionListener, but
	// all real button handlers are registered as anonymous listeners above.
}
@Override
public void propertyChange(PropertyChangeEvent arg0) {
	// Intentionally empty: progress updates are handled by the anonymous
	// PropertyChangeListener attached to the Sequencer worker above.
}
}
| |
package Windows;
import Entities.Personaje;
import java.awt.Color;
import java.awt.Font;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.imageio.ImageIO;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JFrame;
import static javax.swing.JFrame.EXIT_ON_CLOSE;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JWindow;
import javax.swing.Timer;
/**
 * Esta clase es el controlador general y el menu de inicio y carga.
 *
 * Determina cuando esta cargando y que escena iniciar o pausar;
 * incluye todos los KeyListener del personaje principal.
 *
 * @author: Mario Josue Grieco Villamizar Universidad Nacional Experimental del
 * Tachira
 * @version: 0.1
 */
public class Make extends JFrame implements Screem {
/**
* el controllador principal
*/
public LevelsController controller;
/**
* no usado
*/
public Toolkit tols;
/**
* Se carga el menu secundario
*/
public MenuSecundario menu;
/**
* una clave para los cheats aun no se usa
*/
public String password;
/**
* carga el game view ( la vida el pantalla los coins etc..)
*/
public GameView gameView;
/**
* carga el panel para cuando muera
*/
public GameOver DeadPanel;
/**
* Menu de inicio
*/
MainMenu menus;
/**
* IMAGEN DE CARGA
*/
ImageIcon CARGA;
/**
* Contenedor de la imagen de carga
*/
JLabel fondo;
/**
* Controllador de timer del KeyListerner
*/
public Timer main;
/**
* Icon para el gif de carga
*/
Icon icon;
/**
* url para el gif de carga
*/
URL url;
int contador;
int contadorI = 0;
/**
 * Loads a saved game into the given scene (as opposed to a fresh start).
 *
 * @param x which scene/level to go to
 */
public void cargar(int x){
    // Order matters: initGame installs the key listener and starts the
    // timers before the controller is pointed at the requested level.
    initGame(x);
    controller.setInitLevel(x);
}
/**
 * Starts a game session: restarts the controller timers, installs the main
 * keyboard handler for the player character, and swaps the loading/menu
 * content for the game panels.
 *
 * @param level the starting level (0 by default). NOTE(review): the parameter
 *              is never referenced inside this method — callers pass the level
 *              separately via {@code controller.setInitLevel}; confirm it is
 *              intentionally unused here.
 */
public void initGame(int level){
    controller.controllerTimer.start();
    controller.main.timer.start();
    super.addKeyListener(new KeyListener() {
        @Override
        public void keyTyped(KeyEvent e) {
            // Unused: movement is handled on press/release.
        }

        @Override
        public void keyPressed(KeyEvent e) {
            // blockEntry gates all keyboard input.
            if (controller.blockEntry == false) {
                // Hanging from a liana: any key press converts the hang
                // into a liana-jump.
                if (controller.main.lianing) {
                    controller.main.lianing = false;
                    controller.main.jumpingLianing = true;
                    controller.main.ground = false;
                }
                // Facing direction (-1 left, 1 right); ignored while dying.
                if (e.getKeyCode() == KeyEvent.VK_LEFT && controller.main.deading == false) {
                    controller.main.orientation = -1;
                }
                if (e.getKeyCode() == KeyEvent.VK_RIGHT && controller.main.deading == false) {
                    controller.main.orientation = 1;
                }
                // C: slide, only while already running (not gliding/jumping).
                if (e.getKeyCode() == KeyEvent.VK_C) {
                    if (controller.main.runing == true && controller.main.gliding == false && controller.main.jumping == false) {
                        controller.main.sliding = true;
                    }
                }
                // Left: start running when grounded, or steer while gliding;
                // a wall-climb is released first.
                if ( /*controller.Make.climbing == false && */ e.getKeyCode() == KeyEvent.VK_LEFT && controller.main.ground && controller.main.deading == false || (controller.main.gliding && e.getKeyCode() == KeyEvent.VK_LEFT) && controller.main.deading == false) {
                    if ( controller.main.climbing) {
                        controller.main.climbing = false;
                        controller.main.ground = false;
                    }
                    controller.main.idling = false;
                    controller.main.runing = true;
                }
                // Right: mirror of the left-key handling above.
                if ((/*controller.Make.climbing == false && */e.getKeyCode() == KeyEvent.VK_RIGHT && controller.main.ground && controller.main.deading == false) || (controller.main.gliding && e.getKeyCode() == KeyEvent.VK_RIGHT) && controller.main.deading == false) {
                    if ( controller.main.climbing) {
                        controller.main.climbing = false;
                        controller.main.ground = false;
                    }
                    controller.main.runing = true;
                    controller.main.idling = false;
                }
                // Space: jump from the ground (not while dying or throwing).
                if (e.getKeyCode() == KeyEvent.VK_SPACE && controller.main.ground && controller.main.deading == false && controller.main.throwing == false) {
                    controller.main.jumping = true;
                    controller.main.climbing = false;
                    controller.main.idling = false;
                }
                // K while grounded: enter the throwing state.
                // setKani(1) — presumably updates the HUD kunai count; confirm.
                if (gameView.kunakeLength > 0 && e.getKeyCode() == KeyEvent.VK_K && controller.main.jumping == false && controller.main.gliding == false && controller.main.deading == false && controller.main.throwing == false && controller.main.instantiate == false) {
                    controller.main.runing = false;
                    controller.main.idling = false;
                    controller.main.gliding = false;
                    controller.main.throwing = true;
                    gameView.panel.repaint();
                    gameView.setKani(1);
                }
                // K while airborne: jump-throw variant of the above.
                if (gameView.kunakeLength > 0 && e.getKeyCode() == KeyEvent.VK_K && controller.main.jumping == true && controller.main.gliding == false && controller.main.deading == false && controller.main.jumpingThrowing == false && controller.main.instantiate == false) {
                    controller.main.idling = false;
                    controller.main.gliding = false;
                    controller.main.throwing = false;
                    controller.main.jumpingThrowing = true;
                    gameView.panel.repaint();
                    gameView.setKani(1);
                }
                // M: toggle the pause menu, stopping/restarting every game
                // timer in lockstep with the menu's visibility.
                if (e.getKeyCode() == KeyEvent.VK_M && controller.main.gameOver == false) {
                    controller.main.runing = false;
                    if ( menu.panel.isVisible() == false) {
                        controller.main.timer.stop();
                        controller.controllerTimer.stop();
                        controller.wordlPaint.stop();
                        controller.tiempo.stop();
                        menu.panel.setVisible(true);
                    } else {
                        menu.panel.setVisible(false);
                        controller.main.timer.start();
                        controller.controllerTimer.start();
                        controller.tiempo.start();
                        controller.wordlPaint.start();
                    }
                }
                // Up on a liana (scene 2 only): start climbing up.
                if ( controller.sceneNumber == 2 && e.getKeyCode() == KeyEvent.VK_UP && controller.main.onClian && /**
                 * controller.main.gliding == false &&*
                 */
                controller.main.runing == false && controller.main.sliding == false) {
                    controller.main.subiendo = true;
                    controller.main.climbing = true;
                    controller.main.gliding = false;
                    controller.main.ground = true;
                    controller.main.runing = false;
                }
                // else if (controller.Make.climbing && controller.Make.onTop == false) {
                // controller.Make.subiendo = false;
                // controller.Make.climbing = false;
                // controller.Make.ground = false;
                // }
                // Down on a liana (scene 2 only): climb down.
                if ( controller.sceneNumber == 2 && e.getKeyCode() == KeyEvent.VK_DOWN && controller.main.onClian2 ) {
                    controller.main.bajando = true;
                    controller.main.climbing = true;
                    controller.main.onTop = false;
                    controller.main.gliding = false;
                    controller.main.ground = true;
                }
            }
        }

        @Override
        public void keyReleased(KeyEvent e) {
            // Releasing a horizontal key returns the character to idle and
            // resets the run animation frame.
            if (e.getKeyCode() == KeyEvent.VK_LEFT && controller.main.deading == false) {
                if (controller.main.idling == false) {
                    controller.main.runing = false;
                    controller.main.idling = true;
                    controller.main.run = 0;
                }
            }
            if (e.getKeyCode() == KeyEvent.VK_RIGHT && controller.main.deading == false) {
                if (controller.main.idling == false) {
                    controller.main.runing = false;
                    controller.main.idling = true;
                    controller.main.run = 0;
                }
            }
            if (e.getKeyCode() == KeyEvent.VK_UP) {
                controller.main.subiendo = false;
            }
            if (e.getKeyCode() == KeyEvent.VK_DOWN ) {
                controller.main.bajando = false;
            }
            // jumping false from controller personaje
        }
    });
    // Swap the start menu for the game: the controller becomes the content
    // pane and the overlay panels (pause menu, HUD, game-over) sit on top.
    getContentPane().remove(menus);
    super.setContentPane(controller);
    super.add(menu.panel);
    super.add(gameView.panel);
    super.add(DeadPanel.panel);
}
/**
 * Constructor (not to be confused with Java's {@code main} method): shows the
 * credit dialogs, builds the undecorated loading window with its animated gif,
 * constructs the game subsystems (HUD, game-over panel, level controller,
 * pause menu) and arms the intro timer that later swaps in the main menu.
 */
public Make() {
    JOptionPane.showMessageDialog(null, "Juego Creado por Mario Josue Grieco Villamizar ci 26205981");
    JOptionPane.showMessageDialog(null, "El codigo en github licencia Apache 2.0");
    JOptionPane.showMessageDialog(null, "mariojosuexz@gmail.com");
    JOptionPane.showMessageDialog(null, "UNET - segundo semestre");
    JOptionPane.showMessageDialog(null, "7/12/216");
    try {
        // Loading-screen gif loaded from the working directory; if it is
        // missing, icon stays null and the label simply shows nothing.
        icon = new ImageIcon((new File("src/inicio/carga.gif")).toURI().toURL());
    } catch (MalformedURLException ex) {
        Logger.getLogger(Make.class.getName()).log(Level.SEVERE, null, ex);
    }
    menus = new MainMenu(this);
    fondo = new JLabel(icon);
    // fondo.setIcon(CARGA);
    fondo.setName("f");
    fondo.setForeground(Color.red);
    fondo.setLocation(0,0);
    fondo.setSize(Screem.width,Screem.heigth);
    fondo.setFocusable(false);
    fondo.setVisible(true);
    fondo.setFont(new Font("tipe 2", Font.BOLD, 16));
    super.getContentPane().setBackground(new Color(21,25,31));
    super.setUndecorated(true);
    super.setSize(width, heigth);
    // super.setSize(icon.getIconWidth(),icon.getIconHeight());
    super.setLayout(null);
    super.setLocationRelativeTo(null);
    // menus = new MainMenu(this);
    // super.add(menus);
    // super.setVisible(true);
    super.getContentPane().add(fondo);
    super.setVisible(true);
    // Make.start();
    // Intro timer: after contadorI exceeds 4 ticks (50 ms each) the loading
    // gif is replaced by the main menu and the timer stops itself.
    main = new Timer(50,new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
            if ( contadorI > 4 ) {
                System.out.println(".actionPerformed()");
                // setLocationRelativeTo(menus);
                setSize(width, heigth);
                getContentPane().remove(fondo);
                getContentPane().add(menus);
                repaint();
                contadorI =0;
                main.stop();
            }
            contadorI++;
        }
    });
    // Game subsystems are created up front but their timers are kept
    // stopped until initGame()/cargar() is invoked.
    gameView = new GameView();
    DeadPanel = new GameOver();
    controller = new LevelsController(width, heigth, gameView, DeadPanel,1);
    controller.controllerTimer.stop();
    controller.main.timer.stop();
    menu = new MenuSecundario(controller, gameView,this);
}
/**
 * Java entry point: constructs the game window and starts the intro timer.
 *
 * @param args command-line arguments (unused)
 */
public static void main(String[] args) {
    Make APP = null;
    try{
        APP = new Make();
    }catch(OutOfMemoryError err){
        JOptionPane.showMessageDialog(null, "HEAD INSUFICIENTE INTENTA CORRER CON EL COMANDO -Xmx1024M");
        JOptionPane.showMessageDialog(null, err.getMessage());
    }
    // APP.initGame(WIDTH);
    // Fix: when construction failed with OutOfMemoryError, APP stayed null
    // and the original unconditionally dereferenced it, masking the real
    // failure with a NullPointerException.
    if (APP != null) {
        APP.main.start();
    }
}
/**
 * Restarts the game by dropping the current controller and re-entering
 * {@link #main}. NOTE(review): the old frame is not disposed here — confirm
 * the relaunch is expected to leave the previous window alive.
 */
void callMain() {
    this.controller = null; // release the old level controller for GC
    main(null);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.util;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.Beta;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.CharMatcher;
import com.google.common.base.Predicate;
import com.google.common.base.Splitter;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.MultimapBuilder;
import com.google.common.collect.SetMultimap;
import com.google.common.collect.Sets;
import com.google.common.io.ByteSource;
import com.google.common.io.CharSource;
import com.google.common.io.Resources;
import com.google.common.reflect.Reflection;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.charset.Charset;
import java.util.Enumeration;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import javax.annotation.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Scans the source of a {@link ClassLoader} and finds all loadable classes and resources.
*
* <p><b>Warning:</b> Currently only {@link URLClassLoader} and only {@code file://} urls are
* supported.
* </p>
*
* <p>Based on Ben Yu's implementation in
* <a href="https://github.com/google/guava/blob/896c51abd32e136621c13d56b6130d0a72f4957a/guava/src/com/google/common/reflect/ClassPath.java">Guava</a>.
* </p>
*
* <p><b>Note:</b> Internalised here to avoid a forced upgrade to
* <a href="https://github.com/google/guava/releases/tag/v21.0">Guava 21.0 which requires
* Java 8.</a>
* </p>
*/
@Beta
final class ClassPath {
private static final Logger LOG = LoggerFactory.getLogger(ClassPath.class.getName());
private static final Predicate<ClassInfo> IS_TOP_LEVEL =
new Predicate<ClassInfo>() {
@Override
public boolean apply(ClassInfo info) {
return info != null && info.className.indexOf('$') == -1;
}
};
/** Separator for the Class-Path manifest attribute value in jar files. */
private static final Splitter CLASS_PATH_ATTRIBUTE_SEPARATOR =
Splitter.on(" ").omitEmptyStrings();
private static final String CLASS_FILE_NAME_EXTENSION = ".class";
private final ImmutableSet<ResourceInfo> resources;
private ClassPath(ImmutableSet<ResourceInfo> resources) {
this.resources = resources;
}
/**
 * Returns a {@code ClassPath} representing all classes and resources loadable from {@code
 * classloader} and its parent class loaders.
 *
 * <p><b>Warning:</b> Currently only {@link URLClassLoader} and only {@code file://} urls are
 * supported.
 *
 * @throws IOException if the attempt to read class path resources (jar files or directories)
 *     failed.
 */
public static ClassPath from(ClassLoader classloader) throws IOException {
  final DefaultScanner defaultScanner = new DefaultScanner();
  defaultScanner.scan(classloader);
  return new ClassPath(defaultScanner.getResources());
}
/**
 * Returns all resources loadable from the current class path, including the class files of all
 * loadable classes but excluding the "META-INF/MANIFEST.MF" file.
 */
public ImmutableSet<ResourceInfo> getResources() {
  // Immutable snapshot built once by the scanner; safe to hand out directly.
  return resources;
}
/**
 * Returns all classes loadable from the current class path.
 *
 * @since 16.0
 */
public ImmutableSet<ClassInfo> getAllClasses() {
  FluentIterable<ClassInfo> classInfos = FluentIterable.from(resources).filter(ClassInfo.class);
  return classInfos.toSet();
}
/** Returns all top level classes loadable from the current class path. */
public ImmutableSet<ClassInfo> getTopLevelClasses() {
  FluentIterable<ClassInfo> classInfos = FluentIterable.from(resources).filter(ClassInfo.class);
  return classInfos.filter(IS_TOP_LEVEL).toSet();
}
/** Returns all top level classes whose package name is {@code packageName}. */
public ImmutableSet<ClassInfo> getTopLevelClasses(String packageName) {
  checkNotNull(packageName);
  ImmutableSet.Builder<ClassInfo> matching = ImmutableSet.builder();
  for (ClassInfo candidate : getTopLevelClasses()) {
    if (packageName.equals(candidate.getPackageName())) {
      matching.add(candidate);
    }
  }
  return matching.build();
}
/**
 * Returns all top level classes whose package name is {@code packageName} or starts with
 * {@code packageName} followed by a '.'.
 */
public ImmutableSet<ClassInfo> getTopLevelClassesRecursive(String packageName) {
  checkNotNull(packageName);
  // A class in packageName itself is named "packageName.Simple", so the
  // prefix test covers both the exact package and its sub-packages.
  final String packagePrefix = packageName + '.';
  ImmutableSet.Builder<ClassInfo> matching = ImmutableSet.builder();
  for (ClassInfo candidate : getTopLevelClasses()) {
    if (candidate.getName().startsWith(packagePrefix)) {
      matching.add(candidate);
    }
  }
  return matching.build();
}
/**
 * Represents a class path resource that can be either a class file or any other resource file
 * loadable from the class path.
 *
 * @since 14.0
 */
@Beta
public static class ResourceInfo {
  // Path of the resource relative to the class path root, e.g. "com/foo/Bar.class".
  private final String resourceName;
  final ClassLoader loader;

  // Factory: picks the ClassInfo subtype for ".class" entries, plain ResourceInfo otherwise.
  static ResourceInfo of(String resourceName, ClassLoader loader) {
    if (resourceName.endsWith(CLASS_FILE_NAME_EXTENSION)) {
      return new ClassInfo(resourceName, loader);
    } else {
      return new ResourceInfo(resourceName, loader);
    }
  }

  ResourceInfo(String resourceName, ClassLoader loader) {
    this.resourceName = checkNotNull(resourceName);
    this.loader = checkNotNull(loader);
  }

  /**
   * Returns the url identifying the resource.
   *
   * <p>See {@link ClassLoader#getResource}
   *
   * @throws NoSuchElementException if the resource cannot be loaded through the class loader,
   *     despite physically existing in the class path.
   */
  public final URL url() {
    URL url = loader.getResource(resourceName);
    if (url == null) {
      throw new NoSuchElementException(resourceName);
    }
    return url;
  }

  /**
   * Returns a {@link ByteSource} view of the resource from which its bytes can be read.
   *
   * @throws NoSuchElementException if the resource cannot be loaded through the class loader,
   *     despite physically existing in the class path.
   * @since 20.0
   */
  public final ByteSource asByteSource() {
    return Resources.asByteSource(url());
  }

  /**
   * Returns a {@link CharSource} view of the resource from which its bytes can be read as
   * characters decoded with the given {@code charset}.
   *
   * @throws NoSuchElementException if the resource cannot be loaded through the class loader,
   *     despite physically existing in the class path.
   * @since 20.0
   */
  public final CharSource asCharSource(Charset charset) {
    return Resources.asCharSource(url(), charset);
  }

  /** Returns the fully qualified name of the resource. Such as "com/mycomp/foo/bar.txt". */
  public final String getResourceName() {
    return resourceName;
  }

  @Override
  public int hashCode() {
    return resourceName.hashCode();
  }

  // NOTE: loader is compared by identity, so the same resource name seen
  // under different class loaders yields distinct (non-equal) entries.
  @Override
  public boolean equals(Object obj) {
    if (obj instanceof ResourceInfo) {
      ResourceInfo that = (ResourceInfo) obj;
      return resourceName.equals(that.resourceName) && loader == that.loader;
    }
    return false;
  }

  // Do not change this arbitrarily. We rely on it for sorting ResourceInfo.
  @Override
  public String toString() {
    return resourceName;
  }
}
/**
 * Represents a class that can be loaded through {@link #load}.
 *
 * @since 14.0
 */
@Beta
static final class ClassInfo extends ResourceInfo {
  // Binary class name derived from the resource path, e.g. "com.foo.Bar".
  private final String className;

  ClassInfo(String resourceName, ClassLoader loader) {
    super(resourceName, loader);
    this.className = getClassName(resourceName);
  }

  /**
   * Returns the package name of the class, without attempting to load the class.
   *
   * <p>Behaves identically to {@link Package#getName()} but does not require the class (or
   * package) to be loaded.
   */
  public String getPackageName() {
    return Reflection.getPackageName(className);
  }

  /**
   * Returns the simple name of the underlying class as given in the source code.
   *
   * <p>Behaves identically to {@link Class#getSimpleName()} but does not require the class
   * to be
   * loaded.
   */
  public String getSimpleName() {
    int lastDollarSign = className.lastIndexOf('$');
    if (lastDollarSign != -1) {
      String innerClassName = className.substring(lastDollarSign + 1);
      // local and anonymous classes are prefixed with number (1,2,3...), anonymous classes are
      // entirely numeric whereas local classes have the user supplied name as a suffix
      return CharMatcher.digit().trimLeadingFrom(innerClassName);
    }
    String packageName = getPackageName();
    if (packageName.isEmpty()) {
      return className;
    }
    // Since this is a top level class, its simple name is always the part after package name.
    return className.substring(packageName.length() + 1);
  }

  /**
   * Returns the fully qualified name of the class.
   *
   * <p>Behaves identically to {@link Class#getName()} but does not require the class to be
   * loaded.
   */
  public String getName() {
    return className;
  }

  /**
   * Loads (but doesn't link or initialize) the class.
   *
   * @throws LinkageError when there were errors in loading classes that this class depends on.
   *     For example, {@link NoClassDefFoundError}.
   */
  public Class<?> load() {
    try {
      return loader.loadClass(className);
    } catch (ClassNotFoundException e) {
      // Shouldn't happen, since the class name is read from the class path.
      throw new IllegalStateException(e);
    }
  }

  @Override
  public String toString() {
    return className;
  }
}
/**
 * Abstract class that scans through the class path represented by a {@link ClassLoader} and
 * calls
 * {@link #scanDirectory} and {@link #scanJarFile} for directories and jar files on the class
 * path
 * respectively.
 */
abstract static class Scanner {

  // We only scan each file once independent of the classloader that resource might be
  // associated with.
  private final Set<File> scannedUris = Sets.newHashSet();

  /** Scans every class path entry reachable from {@code classloader} and its parents. */
  public final void scan(ClassLoader classloader) throws IOException {
    for (Map.Entry<File, ClassLoader> entry : getClassPathEntries(classloader).entrySet()) {
      scan(entry.getKey(), entry.getValue());
    }
  }

  /** Called when a directory is scanned for resource files. */
  protected abstract void scanDirectory(ClassLoader loader, File directory) throws IOException;

  /** Called when a jar file is scanned for resource entries. */
  protected abstract void scanJarFile(ClassLoader loader, JarFile file) throws IOException;

  @VisibleForTesting
  final void scan(File file, ClassLoader classloader) throws IOException {
    // getCanonicalFile() dedupes relative/symlinked paths pointing at the
    // same physical file; only the first sighting is scanned.
    if (scannedUris.add(file.getCanonicalFile())) {
      scanFrom(file, classloader);
    }
  }

  // Dispatches an existing entry to directory or jar scanning; silently
  // skips entries that do not exist or cannot be stat'ed.
  private void scanFrom(File file, ClassLoader classloader) throws IOException {
    try {
      if (!file.exists()) {
        return;
      }
    } catch (SecurityException e) {
      LOG.warn("Cannot access " + file + ": " + e);
      return;
    }
    if (file.isDirectory()) {
      scanDirectory(classloader, file);
    } else {
      scanJar(file, classloader);
    }
  }

  private void scanJar(File file, ClassLoader classloader) throws IOException {
    JarFile jarFile;
    try {
      jarFile = new JarFile(file);
    } catch (IOException e) {
      // Not a jar file
      return;
    }
    try {
      // A jar's manifest may reference further Class-Path entries;
      // recursively scan those before the jar's own entries.
      for (File path : getClassPathFromManifest(file, jarFile.getManifest())) {
        scan(path, classloader);
      }
      scanJarFile(classloader, jarFile);
    } finally {
      try {
        jarFile.close();
      } catch (IOException ignored) {
        // best-effort close; nothing useful to do on failure
      }
    }
  }

  /**
   * Returns the class path URIs specified by the {@code Class-Path} manifest attribute,
   * according
   * to
   * <a href="http://docs.oracle.com/javase/8/docs/technotes/guides/jar/jar.html#Main_Attributes">
   * JAR File Specification</a>. If {@code manifest} is null, it means the jar file has no
   * manifest, and an empty set will be returned.
   */
  @VisibleForTesting
  static ImmutableSet<File> getClassPathFromManifest(File jarFile,
      @Nullable Manifest manifest) {
    if (manifest == null) {
      return ImmutableSet.of();
    }
    ImmutableSet.Builder<File> builder = ImmutableSet.builder();
    String classpathAttribute =
        manifest.getMainAttributes().getValue(Attributes.Name.CLASS_PATH.toString());
    if (classpathAttribute != null) {
      for (String path : CLASS_PATH_ATTRIBUTE_SEPARATOR.split(classpathAttribute)) {
        URL url;
        try {
          url = getClassPathEntry(jarFile, path);
        } catch (MalformedURLException e) {
          // Ignore bad entry
          LOG.warn("Invalid Class-Path entry: " + path);
          continue;
        }
        // Only local files are supported; non-file URLs are dropped.
        if (url.getProtocol().equals("file")) {
          builder.add(toFile(url));
        }
      }
    }
    return builder.build();
  }

  @VisibleForTesting
  static ImmutableMap<File, ClassLoader> getClassPathEntries(ClassLoader classloader) {
    LinkedHashMap<File, ClassLoader> entries = Maps.newLinkedHashMap();
    // Search parent first, since it's the order ClassLoader#loadClass() uses.
    ClassLoader parent = classloader.getParent();
    if (parent != null) {
      entries.putAll(getClassPathEntries(parent));
    }
    if (classloader instanceof URLClassLoader) {
      URLClassLoader urlClassLoader = (URLClassLoader) classloader;
      for (URL entry : urlClassLoader.getURLs()) {
        if (entry.getProtocol().equals("file")) {
          File file = toFile(entry);
          // First loader to claim a file wins (parent-first ordering).
          if (!entries.containsKey(file)) {
            entries.put(file, classloader);
          }
        }
      }
    }
    return ImmutableMap.copyOf(entries);
  }

  /**
   * Returns the absolute uri of the Class-Path entry value as specified in
   * <a href="http://docs.oracle.com/javase/8/docs/technotes/guides/jar/jar.html#Main_Attributes">
   * JAR File Specification</a>. Even though the specification only talks about relative urls,
   * absolute urls are actually supported too (for example, in Maven surefire plugin).
   */
  @VisibleForTesting
  static URL getClassPathEntry(File jarFile, String path) throws MalformedURLException {
    return new URL(jarFile.toURI().toURL(), path);
  }
}
@VisibleForTesting
static final class DefaultScanner extends Scanner {
  // Resource names keyed by the class loader they were found under;
  // linked-hash values preserve discovery order per loader.
  private final SetMultimap<ClassLoader, String> resources =
      MultimapBuilder.hashKeys().linkedHashSetValues().build();

  /** Materializes the scanned (loader, name) pairs into ResourceInfo objects. */
  ImmutableSet<ResourceInfo> getResources() {
    ImmutableSet.Builder<ResourceInfo> builder = ImmutableSet.builder();
    for (Map.Entry<ClassLoader, String> entry : resources.entries()) {
      builder.add(ResourceInfo.of(entry.getValue(), entry.getKey()));
    }
    return builder.build();
  }

  @Override
  protected void scanJarFile(ClassLoader classloader, JarFile file) {
    Enumeration<JarEntry> entries = file.entries();
    while (entries.hasMoreElements()) {
      JarEntry entry = entries.nextElement();
      // Skip directories and the jar manifest itself.
      if (entry.isDirectory() || entry.getName().equals(JarFile.MANIFEST_NAME)) {
        continue;
      }
      resources.get(classloader).add(entry.getName());
    }
  }

  @Override
  protected void scanDirectory(ClassLoader classloader, File directory) throws IOException {
    scanDirectory(directory, classloader, "");
  }

  // Recursively walks a directory tree, accumulating resource names
  // relative to the class path root ("" at the top level).
  private void scanDirectory(File directory, ClassLoader classloader, String packagePrefix)
      throws IOException {
    File[] files = directory.listFiles();
    if (files == null) {
      LOG.warn("Cannot read directory " + directory);
      // IO error, just skip the directory
      return;
    }
    for (File f : files) {
      String name = f.getName();
      if (f.isDirectory()) {
        scanDirectory(f, classloader, packagePrefix + name + "/");
      } else {
        String resourceName = packagePrefix + name;
        if (!resourceName.equals(JarFile.MANIFEST_NAME)) {
          resources.get(classloader).add(resourceName);
        }
      }
    }
  }
}
/** Converts a class-file resource name (e.g. "com/foo/Bar.class") into a binary class name. */
@VisibleForTesting
static String getClassName(String filename) {
  String withoutExtension =
      filename.substring(0, filename.length() - CLASS_FILE_NAME_EXTENSION.length());
  return withoutExtension.replace('/', '.');
}
/**
 * Converts a {@code file:} URL into a {@link File}, tolerating both escaped
 * and unescaped characters in the path.
 */
@VisibleForTesting
static File toFile(URL url) {
  checkArgument(url.getProtocol().equals("file"));
  try {
    return new File(url.toURI()); // Accepts escaped characters like %20.
  } catch (URISyntaxException e) { // URL.toURI() doesn't escape chars.
    return new File(url.getPath()); // Accepts non-escaped chars like space.
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: TransformerFactoryImpl.java 468640 2006-10-28 06:53:53Z minchau $
*/
package org.apache.xalan.processor;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Enumeration;
import java.util.Properties;
import javax.xml.XMLConstants;
import javax.xml.transform.ErrorListener;
import javax.xml.transform.Source;
import javax.xml.transform.Templates;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.URIResolver;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.sax.SAXResult;
import javax.xml.transform.sax.SAXSource;
import javax.xml.transform.sax.SAXTransformerFactory;
import javax.xml.transform.sax.TemplatesHandler;
import javax.xml.transform.sax.TransformerHandler;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import org.apache.xalan.res.XSLMessages;
import org.apache.xalan.res.XSLTErrorResources;
import org.apache.xalan.transformer.TrAXFilter;
import org.apache.xalan.transformer.TransformerIdentityImpl;
import org.apache.xalan.transformer.TransformerImpl;
import org.apache.xalan.transformer.XalanProperties;
import org.apache.xml.dtm.ref.sax2dtm.SAX2DTM;
import org.apache.xml.utils.DefaultErrorHandler;
import org.apache.xml.utils.SystemIDResolver;
import org.apache.xml.utils.TreeWalker;
import org.apache.xml.utils.StylesheetPIHandler;
import org.apache.xml.utils.StopParseException;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import org.xml.sax.XMLFilter;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.XMLReaderFactory;
/**
* The TransformerFactoryImpl, which implements the TRaX TransformerFactory
* interface, processes XSLT stylesheets into a Templates object
* (a StylesheetRoot).
*/
public class TransformerFactoryImpl extends SAXTransformerFactory
{
/**
* The path/filename of the property file: XSLTInfo.properties
* Maintenance note: see also
* <code>org.apache.xpath.functions.FuncSystemProperty.XSLT_PROPERTIES</code>
*/
public static final String XSLT_PROPERTIES =
"org/apache/xalan/res/XSLTInfo.properties";
/**
* <p>State of secure processing feature.</p>
*/
private boolean m_isSecureProcessing = false;
/**
 * Constructor TransformerFactoryImpl
 *
 * <p>No-arg constructor required by the TRaX {@code TransformerFactory}
 * discovery mechanism; all state is initialized at the field declarations.</p>
 */
public TransformerFactoryImpl()
{
}
/** Static string to be used for incremental feature */
public static final String FEATURE_INCREMENTAL =
"http://xml.apache.org/xalan/features/incremental";
/** Static string to be used for optimize feature */
public static final String FEATURE_OPTIMIZE =
"http://xml.apache.org/xalan/features/optimize";
/** Static string to be used for source_location feature */
public static final String FEATURE_SOURCE_LOCATION =
XalanProperties.SOURCE_LOCATION;
/**
 * Processes a stylesheet held in a DOM tree into a {@link Templates} object by
 * walking the tree into a {@link TemplatesHandler}.
 *
 * <p>Errors are routed through {@code m_errorListener} when one is set
 * (returning null if the listener chooses not to throw); otherwise they are
 * wrapped in a {@link TransformerConfigurationException}.</p>
 *
 * @param node a DOM tree containing valid transform instructions
 * @return the compiled Templates, or null if a non-throwing error listener
 *         consumed the error
 * @throws TransformerConfigurationException if processing fails and no
 *         listener swallows the error
 */
public javax.xml.transform.Templates processFromNode(Node node)
        throws TransformerConfigurationException
{
  try
  {
    TemplatesHandler builder = newTemplatesHandler();
    TreeWalker walker = new TreeWalker(builder,
                                       new org.apache.xml.utils.DOM2Helper(),
                                       builder.getSystemId());

    walker.traverse(node);

    return builder.getTemplates();
  }
  catch (org.xml.sax.SAXException se)
  {
    if (m_errorListener != null)
    {
      try
      {
        // Give the listener first crack; it may rethrow or swallow.
        m_errorListener.fatalError(new TransformerException(se));
      }
      catch (TransformerConfigurationException ex)
      {
        throw ex;
      }
      catch (TransformerException ex)
      {
        throw new TransformerConfigurationException(ex);
      }

      // Listener swallowed the error: signal failure with null.
      return null;
    }
    else
    {
      // Should remove this later... but right now diagnostics from
      // TransformerConfigurationException are not good.
      // se.printStackTrace();
      throw new TransformerConfigurationException(XSLMessages.createMessage(XSLTErrorResources.ER_PROCESSFROMNODE_FAILED, null), se);
      //"processFromNode failed", se);
    }
  }
  catch (TransformerConfigurationException tce)
  {
    // Assume it's already been reported to the error listener.
    throw tce;
  }
  /* catch (TransformerException tce)
  {
    // Assume it's already been reported to the error listener.
    throw new TransformerConfigurationException(tce.getMessage(), tce);
  }*/
  catch (Exception e)
  {
    if (m_errorListener != null)
    {
      try
      {
        // Same listener-first handling as the SAX path above.
        m_errorListener.fatalError(new TransformerException(e));
      }
      catch (TransformerConfigurationException ex)
      {
        throw ex;
      }
      catch (TransformerException ex)
      {
        throw new TransformerConfigurationException(ex);
      }

      return null;
    }
    else
    {
      // Should remove this later... but right now diagnostics from
      // TransformerConfigurationException are not good.
      // se.printStackTrace();
      throw new TransformerConfigurationException(XSLMessages.createMessage(XSLTErrorResources.ER_PROCESSFROMNODE_FAILED, null), e); //"processFromNode failed",
      //e);
    }
  }
}
/**
 * The systemID that was specified in
 * processFromNode(Node node, String systemID).
 * NOTE(review): instance state shared between calls — see the
 * thread-safety note on processFromNode(Node, String).
 */
private String m_DOMsystemID = null;

/**
 * The systemID that was specified in
 * processFromNode(Node node, String systemID).
 *
 * @return The systemID, or null.
 */
String getDOMsystemID()
{
  return m_DOMsystemID;
}
/**
 * Process the stylesheet from a DOM tree, if the
 * processor supports the "http://xml.org/trax/features/dom/input"
 * feature.
 *
 * @param node A DOM tree which must contain
 * valid transform instructions that this processor understands.
 * @param systemID The systemID from where xsl:includes and xsl:imports
 * should be resolved from.
 *
 * @return A Templates object capable of being used for transformation purposes.
 *
 * @throws TransformerConfigurationException
 */
javax.xml.transform.Templates processFromNode(Node node, String systemID)
        throws TransformerConfigurationException
{
  // NOTE(review): the systemID is stashed in an instance field before
  // delegating, so concurrent calls on a shared factory could interleave —
  // presumably factories are not shared across threads; confirm.
  m_DOMsystemID = systemID;
  return processFromNode(node);
}
/**
 * Get InputSource specification(s) that are associated with the
 * given document specified in the source param,
 * via the xml-stylesheet processing instruction
 * (see http://www.w3.org/TR/xml-stylesheet/), and that matches
 * the given criteria. Note that it is possible to return several stylesheets
 * that match the criteria, in which case they are applied as if they were
 * a list of imports or cascades.
 *
 * <p>Note that DOM2 has it's own mechanism for discovering stylesheets.
 * Therefore, there isn't a DOM version of this method.</p>
 *
 * @param source The XML source that is to be searched.
 * @param media The media attribute to be matched. May be null, in which
 * case the prefered templates will be used (i.e. alternate = no).
 * @param title The value of the title attribute to match. May be null.
 * @param charset The value of the charset attribute to match. May be null.
 *
 * @return A Source object capable of being used to create a Templates object.
 *
 * @throws TransformerConfigurationException
 */
public Source getAssociatedStylesheet(
        Source source, String media, String title, String charset)
          throws TransformerConfigurationException
{
  String baseID;
  InputSource isource = null;
  Node node = null;
  XMLReader reader = null;

  if (source instanceof DOMSource)
  {
    DOMSource dsource = (DOMSource) source;
    node = dsource.getNode();
    baseID = dsource.getSystemId();
  }
  else
  {
    // NOTE(review): sourceToInputSource can return null for exotic Source
    // implementations, which would NPE on the next line — presumably only
    // stream/SAX sources reach here; confirm.
    isource = SAXSource.sourceToInputSource(source);
    baseID = isource.getSystemId();
  }

  // What I try to do here is parse until the first startElement
  // is found, then throw a special exception in order to terminate
  // the parse.
  StylesheetPIHandler handler = new StylesheetPIHandler(baseID, media,
          title, charset);

  // Use URIResolver. Patch from Dmitri Ilyin
  if (m_uriResolver != null)
  {
    handler.setURIResolver(m_uriResolver);
  }

  try
  {
    if (null != node)
    {
      TreeWalker walker = new TreeWalker(handler, new org.apache.xml.utils.DOM2Helper(), baseID);
      walker.traverse(node);
    }
    else
    {
      // Use JAXP1.1 ( if possible )
      try
      {
        javax.xml.parsers.SAXParserFactory factory =
          javax.xml.parsers.SAXParserFactory.newInstance();
        factory.setNamespaceAware(true);

        if (m_isSecureProcessing)
        {
          try
          {
            factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
          }
          catch (org.xml.sax.SAXException e) {} // parser lacks the feature; best-effort
        }

        javax.xml.parsers.SAXParser jaxpParser = factory.newSAXParser();
        reader = jaxpParser.getXMLReader();
      }
      catch (javax.xml.parsers.ParserConfigurationException ex)
      {
        throw new org.xml.sax.SAXException(ex);
      }
      catch (javax.xml.parsers.FactoryConfigurationError ex1)
      {
        throw new org.xml.sax.SAXException(ex1.toString());
      }
      catch (NoSuchMethodError ex2){}   // pre-JAXP-1.1 runtime; fall back below
      catch (AbstractMethodError ame){} // pre-JAXP-1.1 runtime; fall back below

      if (null == reader)
      {
        reader = XMLReaderFactory.createXMLReader();
      }

      // Need to set options!
      reader.setContentHandler(handler);
      reader.parse(isource);
    }
  }
  catch (StopParseException spe)
  {
    // OK, good: the PI handler saw the first startElement and aborted early.
  }
  catch (org.xml.sax.SAXException se)
  {
    throw new TransformerConfigurationException(
      "getAssociatedStylesheets failed", se);
  }
  catch (IOException ioe)
  {
    throw new TransformerConfigurationException(
      "getAssociatedStylesheets failed", ioe);
  }

  return handler.getAssociatedStylesheet();
}
/**
 * Get a TemplatesHandler object that can process SAX ContentHandler
 * events into a Templates object.
 * (Previous javadoc here wrongly described newTransformer()'s identity
 * copy — it was a copy-paste error.)
 *
 * @return A non-null TemplatesHandler instance.
 *
 * @throws TransformerConfigurationException May throw this during
 *         the parse when it is constructing the
 *         Templates object and fails.
 */
public TemplatesHandler newTemplatesHandler()
        throws TransformerConfigurationException
{
  return new StylesheetHandler(this);
}
/**
 * Set a feature for this <code>TransformerFactory</code> and the
 * <code>Transformer</code>s or <code>Templates</code> it creates.
 *
 * <p>Feature names are fully qualified {@link java.net.URI}s. The only
 * mutable feature this implementation supports is
 * {@link javax.xml.XMLConstants#FEATURE_SECURE_PROCESSING}; any other
 * name is rejected.</p>
 *
 * @param name  feature name; must not be null.
 * @param value desired feature state.
 *
 * @throws TransformerConfigurationException if the feature is not
 *         supported by this implementation.
 * @throws NullPointerException if <code>name</code> is null.
 */
public void setFeature(String name, boolean value)
        throws TransformerConfigurationException {

  // Guard: a null feature name is a caller error.
  if (name == null) {
    throw new NullPointerException(
        XSLMessages.createMessage(
            XSLTErrorResources.ER_SET_FEATURE_NULL_NAME, null));
  }

  // Secure processing is the sole settable feature; everything else fails.
  if (!name.equals(XMLConstants.FEATURE_SECURE_PROCESSING)) {
    throw new TransformerConfigurationException(
        XSLMessages.createMessage(
            XSLTErrorResources.ER_UNSUPPORTED_FEATURE,
            new Object[] {name}));
  }

  m_isSecureProcessing = value;
}
/**
 * Look up the value of a feature.
 *
 * <p>The feature name is any fully-qualified URI. All of the standard
 * TrAX source/result features are reported as supported; the secure
 * processing feature reports the factory's current state; anything else
 * is unknown and reported as <code>false</code>.</p>
 *
 * @param name The feature name, which is a fully-qualified URI; not null.
 * @return The current state of the feature (true or false).
 * @throws NullPointerException if <code>name</code> is null.
 */
public boolean getFeature(String name) {

  // Guard: a null feature name is a caller error.
  if (name == null) {
    throw new NullPointerException(
        XSLMessages.createMessage(
            XSLTErrorResources.ER_GET_FEATURE_NULL_NAME, null));
  }

  // All standard TrAX input/output features are always supported.
  boolean standardFeature =
      DOMResult.FEATURE.equals(name)
      || DOMSource.FEATURE.equals(name)
      || SAXResult.FEATURE.equals(name)
      || SAXSource.FEATURE.equals(name)
      || StreamResult.FEATURE.equals(name)
      || StreamSource.FEATURE.equals(name)
      || SAXTransformerFactory.FEATURE.equals(name)
      || SAXTransformerFactory.FEATURE_XMLFILTER.equals(name);
  if (standardFeature) {
    return true;
  }

  // Secure processing reflects the factory's current configuration.
  if (name.equals(XMLConstants.FEATURE_SECURE_PROCESSING)) {
    return m_isSecureProcessing;
  }

  // Unknown feature.
  return false;
}
/**
 * Flag set by FEATURE_OPTIMIZE.
 * This feature specifies whether to Optimize stylesheet processing. By
 * default it is set to true.
 */
private boolean m_optimize = true;

/** Flag set by FEATURE_SOURCE_LOCATION.
 * This feature specifies whether the transformation phase should
 * keep track of line and column numbers for the input source
 * document. Note that this works only when that
 * information is available from the source -- in other words, if you
 * pass in a DOM, there's little we can do for you.
 *
 * The default is false. Setting it true may significantly
 * increase storage cost per node.
 */
private boolean m_source_location = false;

/**
 * Flag set by FEATURE_INCREMENTAL.
 * This feature specifies whether to produce output incrementally, rather than
 * waiting to finish parsing the input before generating any output. By
 * default this attribute is set to false.
 */
private boolean m_incremental = false;
/**
 * Allows the user to set specific attributes on the underlying
 * implementation.
 *
 * <p>Three boolean attributes are recognized: FEATURE_INCREMENTAL,
 * FEATURE_OPTIMIZE and FEATURE_SOURCE_LOCATION. The value may be a
 * Boolean or a String ("true"/"false").</p>
 *
 * @param name The name of the attribute.
 * @param value The value of the attribute; Boolean or String="true"|"false"
 *
 * @throws IllegalArgumentException thrown if the underlying
 * implementation doesn't recognize the attribute, or if the value is
 * neither a Boolean nor a String.
 */
public void setAttribute(String name, Object value)
        throws IllegalArgumentException
{
  // The three supported attributes share identical Boolean/String
  // coercion rules, so the conversion lives in one helper.
  if (name.equals(FEATURE_INCREMENTAL))
  {
    m_incremental = booleanAttributeValue(name, value);
  }
  else if (name.equals(FEATURE_OPTIMIZE))
  {
    m_optimize = booleanAttributeValue(name, value);
  }
  // Custom Xalan feature: annotate DTM with SAX source locator fields.
  // This gets used during SAX2DTM instantiation.
  //
  // %REVIEW% Should the name of this field really be in XalanProperties?
  // %REVIEW% I hate that it's a global static, but didn't want to change APIs yet.
  else if (name.equals(FEATURE_SOURCE_LOCATION))
  {
    m_source_location = booleanAttributeValue(name, value);
  }
  else
  {
    throw new IllegalArgumentException(XSLMessages.createMessage(XSLTErrorResources.ER_NOT_SUPPORTED, new Object[]{name})); //name + "not supported");
  }
}

/**
 * Coerce an attribute value to boolean, accepting a Boolean object or a
 * String ("true"/"false", as interpreted by Boolean.valueOf).
 *
 * @param name attribute name, used only for the error message.
 * @param value the raw value supplied by the caller.
 * @return the boolean interpretation of <code>value</code>.
 * @throws IllegalArgumentException if value is neither Boolean nor String.
 */
private static boolean booleanAttributeValue(String name, Object value)
        throws IllegalArgumentException
{
  if (value instanceof Boolean)
  {
    // Accept a Boolean object..
    return ((Boolean) value).booleanValue();
  }
  if (value instanceof String)
  {
    // .. or a String object (Boolean.valueOf replaces the deprecated
    // `new Boolean(String)` constructor; same semantics, no allocation).
    return Boolean.valueOf((String) value).booleanValue();
  }
  // Give a more meaningful error message
  throw new IllegalArgumentException(XSLMessages.createMessage(XSLTErrorResources.ER_BAD_VALUE, new Object[]{name, value})); //name + " bad value " + value);
}
/**
 * Allows the user to retrieve specific attributes on the underlying
 * implementation.
 *
 * @param name The name of the attribute.
 * @return value The value of the attribute, as a Boolean.
 *
 * @throws IllegalArgumentException thrown if the underlying
 * implementation doesn't recognize the attribute.
 */
public Object getAttribute(String name) throws IllegalArgumentException
{
  // Boolean.valueOf replaces the deprecated `new Boolean(boolean)`
  // constructor: returns the cached TRUE/FALSE instances.
  if (name.equals(FEATURE_INCREMENTAL))
  {
    return Boolean.valueOf(m_incremental);
  }
  else if (name.equals(FEATURE_OPTIMIZE))
  {
    return Boolean.valueOf(m_optimize);
  }
  else if (name.equals(FEATURE_SOURCE_LOCATION))
  {
    return Boolean.valueOf(m_source_location);
  }
  else
  {
    throw new IllegalArgumentException(XSLMessages.createMessage(XSLTErrorResources.ER_ATTRIB_VALUE_NOT_RECOGNIZED, new Object[]{name})); //name + " attribute not recognized");
  }
}
/**
 * Create an XMLFilter that uses the given source as the
 * transformation instructions.
 *
 * @param src The source of the transformation instructions.
 *
 * @return An XMLFilter object, or null if the templates could not be
 *         built (an ErrorListener absorbed the failure).
 *
 * @throws TransformerConfigurationException
 */
public XMLFilter newXMLFilter(Source src)
        throws TransformerConfigurationException
{
  final Templates compiled = newTemplates(src);
  // newTemplates() returns null when an ErrorListener handled the
  // fatal error without throwing; propagate that as null here.
  return (compiled == null) ? null : newXMLFilter(compiled);
}
/**
 * Create an XMLFilter that uses the given Templates as the
 * transformation instructions.
 *
 * @param templates non-null reference to Templates object.
 *
 * @return An XMLFilter object, or null if a registered ErrorListener
 *         absorbed the configuration failure.
 *
 * @throws TransformerConfigurationException
 */
public XMLFilter newXMLFilter(Templates templates)
        throws TransformerConfigurationException
{
  try
  {
    return new TrAXFilter(templates);
  }
  catch (TransformerConfigurationException ex)
  {
    // Without a listener the failure propagates directly.
    if (m_errorListener == null)
    {
      throw ex;
    }
    try
    {
      // Per JAXP, give the ErrorListener first crack at the failure.
      m_errorListener.fatalError(ex);
      return null;
    }
    catch (TransformerConfigurationException rethrown)
    {
      throw rethrown;
    }
    catch (TransformerException wrapped)
    {
      throw new TransformerConfigurationException(wrapped);
    }
  }
}
/**
 * Get a TransformerHandler object that can process SAX
 * ContentHandler events into a Result, based on the transformation
 * instructions specified by the argument.
 *
 * @param src The source of the transformation instructions.
 *
 * @return TransformerHandler ready to transform SAX events, or null if
 *         the templates could not be built (ErrorListener absorbed it).
 *
 * @throws TransformerConfigurationException
 */
public TransformerHandler newTransformerHandler(Source src)
        throws TransformerConfigurationException
{
  final Templates compiled = newTemplates(src);
  // Propagate the listener-absorbed-failure null from newTemplates().
  return (compiled == null) ? null : newTransformerHandler(compiled);
}
/**
 * Get a TransformerHandler object that can process SAX
 * ContentHandler events into a Result, based on the Templates argument.
 *
 * @param templates The source of the transformation instructions.
 *
 * @return TransformerHandler ready to transform SAX events, or null if
 *         a registered ErrorListener absorbed the configuration failure.
 * @throws TransformerConfigurationException
 */
public TransformerHandler newTransformerHandler(Templates templates)
        throws TransformerConfigurationException
{
  try
  {
    // The transformer doubles as a SAX input handler; wire in the
    // factory-level URIResolver before handing it out.
    TransformerImpl impl = (TransformerImpl) templates.newTransformer();
    impl.setURIResolver(m_uriResolver);
    return (TransformerHandler) impl.getInputContentHandler(true);
  }
  catch (TransformerConfigurationException ex)
  {
    // Without a listener the failure propagates directly.
    if (m_errorListener == null)
    {
      throw ex;
    }
    try
    {
      m_errorListener.fatalError(ex);
      return null;
    }
    catch (TransformerConfigurationException rethrown)
    {
      throw rethrown;
    }
    catch (TransformerException wrapped)
    {
      throw new TransformerConfigurationException(wrapped);
    }
  }
}
// /** The identity transform string, for support of newTransformerHandler()
//  * and newTransformer(). */
// private static final String identityTransform =
//   "<xsl:stylesheet " + "xmlns:xsl='http://www.w3.org/1999/XSL/Transform' "
//   + "version='1.0'>" + "<xsl:template match='/|node()'>"
//   + "<xsl:copy-of select='.'/>" + "</xsl:template>" + "</xsl:stylesheet>";
//
// /** The identity transform Templates, built from identityTransform,
//  * for support of newTransformerHandler() and newTransformer(). */
// private static Templates m_identityTemplate = null;

/**
 * Get a TransformerHandler object that can process SAX
 * ContentHandler events into a Result (identity transform).
 *
 * @return TransformerHandler ready to transform SAX events.
 *
 * @throws TransformerConfigurationException
 */
public TransformerHandler newTransformerHandler()
        throws TransformerConfigurationException
{
  // Identity transform; honors the current secure-processing setting.
  return new TransformerIdentityImpl(m_isSecureProcessing);
}
/**
 * Process the source into a Transformer object. Care must
 * be given to know that this object can not be used concurrently
 * in multiple threads.
 *
 * @param source An object that holds a URL, input stream, etc.
 *
 * @return A Transformer object capable of
 * being used for transformation purposes in a single thread, or null if
 * a registered ErrorListener absorbed the configuration failure.
 *
 * @throws TransformerConfigurationException May throw this during the parse when it
 * is constructing the Templates object and fails.
 */
public Transformer newTransformer(Source source)
        throws TransformerConfigurationException
{
  try
  {
    Templates compiled = newTemplates(source);
    /* this can happen if an ErrorListener is present and it doesn't
       throw any exception in fatalError.
       The spec says: "a Transformer must use this interface
       instead of throwing an exception" - the newTemplates() does
       that, and returns null.
    */
    if (compiled == null)
    {
      return null;
    }
    Transformer result = compiled.newTransformer();
    result.setURIResolver(m_uriResolver);
    return result;
  }
  catch (TransformerConfigurationException ex)
  {
    // Without a listener the failure propagates directly.
    if (m_errorListener == null)
    {
      throw ex;
    }
    try
    {
      m_errorListener.fatalError(ex);
      return null; // TODO: but the API promises to never return null...
    }
    catch (TransformerConfigurationException rethrown)
    {
      throw rethrown;
    }
    catch (TransformerException wrapped)
    {
      throw new TransformerConfigurationException(wrapped);
    }
  }
}
/**
 * Create a new Transformer object that performs a copy
 * of the source to the result (identity transform).
 *
 * @return A Transformer object capable of
 * being used for transformation purposes in a single thread.
 *
 * @throws TransformerConfigurationException May throw this during
 * the parse when it is constructing the
 * Templates object and it fails.
 */
public Transformer newTransformer() throws TransformerConfigurationException
{
  // Identity transform; honors the current secure-processing setting.
  return new TransformerIdentityImpl(m_isSecureProcessing);
}
/**
 * Process the source into a Templates object, which is likely
 * a compiled representation of the source. This Templates object
 * may then be used concurrently across multiple threads. Creating
 * a Templates object allows the TransformerFactory to do detailed
 * performance optimization of transformation instructions, without
 * penalizing runtime transformation.
 *
 * @param source An object that holds a URL, input stream, etc.
 * @return A Templates object capable of being used for transformation
 *         purposes; may be null if an ErrorListener absorbed the failure.
 *
 * @throws TransformerConfigurationException May throw this during the parse when it
 * is constructing the Templates object and fails.
 */
public Templates newTemplates(Source source)
        throws TransformerConfigurationException
{
  // Resolve the base URI up front so xsl:include/xsl:import work.
  String baseID = source.getSystemId();

  if (null != baseID) {
    baseID = SystemIDResolver.getAbsoluteURI(baseID);
  }

  if (source instanceof DOMSource)
  {
    // DOM input takes the tree-walking path instead of a SAX parse.
    DOMSource dsource = (DOMSource) source;
    Node node = dsource.getNode();

    if (null != node)
      return processFromNode(node, baseID);
    else
    {
      String messageStr = XSLMessages.createMessage(
        XSLTErrorResources.ER_ILLEGAL_DOMSOURCE_INPUT, null);

      throw new IllegalArgumentException(messageStr);
    }
  }

  TemplatesHandler builder = newTemplatesHandler();
  builder.setSystemId(baseID);

  try
  {
    InputSource isource = SAXSource.sourceToInputSource(source);
    isource.setSystemId(baseID);
    XMLReader reader = null;

    // Reuse a caller-supplied reader when the Source carries one.
    if (source instanceof SAXSource)
      reader = ((SAXSource) source).getXMLReader();

    if (null == reader)
    {
      // Use JAXP1.1 ( if possible )
      try
      {
        javax.xml.parsers.SAXParserFactory factory =
          javax.xml.parsers.SAXParserFactory.newInstance();
        factory.setNamespaceAware(true);

        if (m_isSecureProcessing)
        {
          try
          {
            factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
          }
          catch (org.xml.sax.SAXException se) {} // parser lacks the feature; best-effort
        }

        javax.xml.parsers.SAXParser jaxpParser = factory.newSAXParser();
        reader = jaxpParser.getXMLReader();
      }
      catch (javax.xml.parsers.ParserConfigurationException ex)
      {
        throw new org.xml.sax.SAXException(ex);
      }
      catch (javax.xml.parsers.FactoryConfigurationError ex1)
      {
        throw new org.xml.sax.SAXException(ex1.toString());
      }
      catch (NoSuchMethodError ex2){}   // pre-JAXP-1.1 runtime; fall back below
      catch (AbstractMethodError ame){} // pre-JAXP-1.1 runtime; fall back below
    }

    if (null == reader)
      reader = XMLReaderFactory.createXMLReader();

    // If you set the namespaces to true, we'll end up getting double
    // xmlns attributes. Needs to be fixed. -sb
    // reader.setFeature("http://xml.org/sax/features/namespace-prefixes", true);
    reader.setContentHandler(builder);
    reader.parse(isource);
  }
  catch (org.xml.sax.SAXException se)
  {
    if (m_errorListener != null)
    {
      try
      {
        m_errorListener.fatalError(new TransformerException(se));
      }
      catch (TransformerConfigurationException ex1)
      {
        throw ex1;
      }
      catch (TransformerException ex1)
      {
        throw new TransformerConfigurationException(ex1);
      }
      // NOTE(review): unlike the generic Exception branch below, this
      // branch does NOT return null after the listener absorbs the error;
      // control falls through to builder.getTemplates(). Confirm whether
      // that asymmetry is intentional before changing it.
    }
    else
    {
      throw new TransformerConfigurationException(se.getMessage(), se);
    }
  }
  catch (Exception e)
  {
    if (m_errorListener != null)
    {
      try
      {
        m_errorListener.fatalError(new TransformerException(e));
        return null;
      }
      catch (TransformerConfigurationException ex1)
      {
        throw ex1;
      }
      catch (TransformerException ex1)
      {
        throw new TransformerConfigurationException(ex1);
      }
    }
    else
    {
      throw new TransformerConfigurationException(e.getMessage(), e);
    }
  }

  return builder.getTemplates();
}
/**
 * The object that implements the URIResolver interface,
 * or null.
 */
URIResolver m_uriResolver;

/**
 * Set an object that will be used to resolve URIs used in
 * xsl:import, etc. This will be used as the default for the
 * transformation.
 * @param resolver An object that implements the URIResolver interface,
 * or null.
 */
public void setURIResolver(URIResolver resolver)
{
  m_uriResolver = resolver;
}

/**
 * Get the object that will be used to resolve URIs used in
 * xsl:import, etc. This will be used as the default for the
 * transformation.
 *
 * @return The URIResolver that was set with setURIResolver.
 */
public URIResolver getURIResolver()
{
  return m_uriResolver;
}
/** The error listener. Defaults to a non-throwing DefaultErrorHandler,
 *  so it is never null unless setErrorListener is bypassed. */
private ErrorListener m_errorListener = new org.apache.xml.utils.DefaultErrorHandler(false);

/**
 * Get the error listener in effect for the TransformerFactory.
 *
 * @return A non-null reference to an error listener.
 */
public ErrorListener getErrorListener()
{
  return m_errorListener;
}
/**
 * Set an error listener for the TransformerFactory.
 *
 * @param listener Must be a non-null reference to an ErrorListener.
 *
 * @throws IllegalArgumentException if the listener argument is null.
 */
public void setErrorListener(ErrorListener listener)
        throws IllegalArgumentException
{
  // Guard: the JAXP contract forbids a null listener.
  if (listener == null)
  {
    throw new IllegalArgumentException(XSLMessages.createMessage(XSLTErrorResources.ER_ERRORLISTENER, null));
  }
  m_errorListener = listener;
}
/**
 * Return the state of the secure processing feature.
 *
 * @return state of the secure processing feature.
 */
public boolean isSecureProcessing()
{
  return m_isSecureProcessing;
}
}
| |
package io.hummer.prefetch.sim;
import io.hummer.prefetch.PrefetchingService.ServiceInvocation;
import io.hummer.prefetch.TestConstants;
import io.hummer.prefetch.context.Context;
import io.hummer.prefetch.context.ContextPredictor;
import io.hummer.prefetch.context.Location;
import io.hummer.prefetch.context.NetworkQuality;
import io.hummer.prefetch.context.Time;
import io.hummer.prefetch.context.Path.PathPoint;
import io.hummer.prefetch.impl.InvocationPredictor;
import io.hummer.prefetch.impl.UsagePattern;
import io.hummer.prefetch.sim.VehicleSimulation.MovingEntities;
import io.hummer.prefetch.sim.VehicleSimulation.MovingEntity;
import io.hummer.prefetch.sim.swisscom.CellularCoverage;
import io.hummer.prefetch.sim.util.Util;
import io.hummer.prefetch.sim.ws.VehicleInfoService;
import io.hummer.prefetch.sim.ws.VehicleInfoService.VehicleInfoServiceImpl;
import io.hummer.prefetch.ws.W3CEndpointReferenceUtils;
import io.hummer.prefetch.ws.WSClient;
import io.hummer.util.log.LogUtil;
import io.hummer.util.xml.XMLUtil;
import java.io.File;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.xml.ws.Endpoint;
import javax.xml.ws.wsaddressing.W3CEndpointReference;
import org.apache.log4j.Logger;
import org.w3c.dom.Element;
public class SimulationTestData {
/** Movement trace file; presumably an ns-2 style trace — TODO confirm format. */
public static final String TRACE_FILE = System.getProperty("user.home")
    + "/Desktop/traces.txt";
/** Lazily-populated cache: node id -> the trace lines mentioning that node. */
private static Map<String, List<String>> file = new HashMap<>();
private static XMLUtil xmlUtil = new XMLUtil();
private static final Logger LOG = LogUtil.getLogger();
/** A service usage pattern paired with the predictor that drives it. */
public static class ServiceUsage {
  public UsagePattern pattern;
  public InvocationPredictor invocationPredictor;

  /**
   * Merge the patterns of the given usages into a single combined pattern.
   *
   * @param usages the usages whose patterns should be combined.
   * @return the combined usage pattern.
   */
  public static UsagePattern combine(final ServiceUsage ... usages) {
    final UsagePattern[] patterns = new UsagePattern[usages.length];
    int idx = 0;
    for (ServiceUsage usage : usages) {
      patterns[idx++] = usage.pattern;
    }
    return UsagePattern.combine(patterns);
  }
}
/**
 * Build a MovingEntity for one vehicle from its trace lines.
 * Each matching line is split around the "setdest" keyword; the token
 * after it carries x/y/z coordinates and the token before it carries the
 * timestamp (3rd whitespace-separated field) — presumably an ns-2
 * movement trace, TODO confirm the format against an actual trace file.
 *
 * @param id the node/vehicle id to look up in the trace.
 * @return a MovingEntity with one path point per "setdest" line.
 */
public static MovingEntity getData(String id) {
  List<String> lines = getLines(id);
  MovingEntity result = new MovingEntity();
  result.id = id;
  for (String s : lines) {
    String[] parts = s.split("setdest");
    if (parts.length > 1) {
      String[] coords = parts[1].trim().split(" ");
      String time = parts[0].trim().split(" ")[2];
      // Details are skipped here for speed; they are filled in later.
      boolean loadDetails = false;
      result.addPathPoint(Double.parseDouble(time),
          Double.parseDouble(coords[0]),
          Double.parseDouble(coords[1]),
          // The z coordinate may carry a trailing quote in the trace.
          Double.parseDouble(coords[2].replace("\"", "")),
          loadDetails);
    }
  }
  return result;
}
/**
 * Return the trace lines that mention the given node id, loading and
 * indexing the whole trace file into the static cache on first use.
 *
 * @param id the node id to look up.
 * @return the cached lines for that id, or an empty list if none.
 */
private static List<String> getLines(String id) {
  if (file.isEmpty()) {
    // Group every line of the trace by the node id embedded in it.
    final String regex = ".*\\$node_\\(([A-Za-z0-9]+)\\).*";
    for (String line : Util.readFile(TRACE_FILE)) {
      if (!line.matches(regex)) {
        continue;
      }
      String nodeId = line.replaceAll(regex, "$1");
      List<String> bucket = file.get(nodeId);
      if (bucket == null) {
        bucket = new LinkedList<String>();
        file.put(nodeId, bucket);
      }
      bucket.add(line);
    }
  }
  List<String> cached = file.get(id);
  return (cached == null) ? Collections.<String>emptyList() : cached;
}
/**
 * Register no-coverage overrides for locations inside the Gotthard
 * tunnel, where cellular coverage data would otherwise be wrong.
 */
static void setTunnelOverrides() {
  // All tunnel points share a single "no coverage" quality instance.
  NetworkQuality noCoverage = new NetworkQuality(false);
  /* Gotthard Tunnel coordinates (lat, lon): */
  double[][] tunnelPoints = {
    {46.58232725478691, 8.56863682841651},
    {46.61630210986323, 8.578053356802688},
    {46.62514880956553, 8.580507305472487},
    {46.63688960569387, 8.58376530161051},
    {46.649437172968, 8.587248769273087},
    {46.65707885240678, 8.58937106896852},
    {46.5434865755487, 8.596293438544723},
    {46.53798057183448, 8.600210400527107},
    {46.53485189855097, 8.602435735412291}
  };
  for (double[] point : tunnelPoints) {
    CellularCoverage.OVERRIDES.put(new Location(point[0], point[1]), noCoverage);
  }
}
/**
 * Load (or incrementally build and cache) simulation data for the cars
 * in the inclusive range [fromCar, toCar]. Results are cached in a
 * gzipped XML file; missing cars are parsed from the trace and appended
 * to the cache. Paths are then sanity-checked (chronological order),
 * cut at large time gaps or long offline stretches, and trimmed to the
 * requested car range.
 *
 * @param fromCar first car id (inclusive).
 * @param toCar   last car id (inclusive).
 * @return the moving entities for the requested cars.
 */
public static MovingEntities getData(int fromCar, int toCar) {
  setTunnelOverrides();
  // NOTE(review): this local shadows the static `file` cache field.
  String file = Constants.TMP_DIR + "/traces.xml.gz";
  MovingEntities result = new MovingEntities();
  // Start from the gzipped XML cache if one exists.
  if(new File(file).exists()) {
    String content = Util.loadStringFromGzip(file);
    try {
      result = xmlUtil.toJaxbObject(MovingEntities.class,
          xmlUtil.toElement(content));
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  long t1, t2;
  double timeBetweenPointsTotal = 0;
  double totalNumPoints = 0;
  for (int i = fromCar; i <= toCar; i++) {
    String id = "" + i;
    // Parse and cache any car that is not yet in the result set.
    if(!result.containsID(id)) {
      System.out.println("Retrieving data for car " + id);
      t1 = System.currentTimeMillis();
      MovingEntity ent = getData(id);
      result.entities.add(ent);
      t2 = System.currentTimeMillis();
      LOG.info("adding entity " + i + ": " +
          ent.path.size() + " points - " + Math.abs(t2 - t1) + "ms");
      ent.getNetworkOutages();
      // Persist the (grown) cache after each new car.
      try {
        String xml = xmlUtil.toString(result);
        Util.storeStringGzipped(file, xml);
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
    MovingEntity ent = result.getEntity(id);
    /* assert that path order is correct */
    PathPoint ptNoNetFirst = null;
    PathPoint ptNoNetLast = null;
    // Maximum allowed gap (seconds) between points / offline duration.
    double maxDistance = 60*10;
    for(int j = 0; j < ent.path.size(); j ++) {
      PathPoint p1 = ent.path.points.get(j);
      CellularCoverage.setOverrideIfExists(p1);
      // if(p1.cellNetworkCoverage._2g_gsm && !p1.cellNetworkCoverage._3g_hspa && !
      //   p1.cellNetworkCoverage._3g_umts && !p1.cellNetworkCoverage._4g_lte)
      //   System.out.println("ONLY 2G!!");
      if(j < ent.path.size() - 1) {
        PathPoint p2 = ent.path.points.get(j + 1);
        double timeBetweenPoints = p2.time.time - p1.time.time;
        timeBetweenPointsTotal += timeBetweenPoints;
        if(p1.time.time > p2.time.time) {
          throw new RuntimeException("non-chronological path order");
        }
        /* if the distance between two points is too big, end the path here. */
        if(timeBetweenPoints > maxDistance) {
          LOG.info("Cutting path of vehicle " + id + " at time " +
              p1.time + ", distance: " + timeBetweenPoints);
          ent.path.points = ent.path.points.subList(0, j + 1);
        }
      }
      /* check for connectivity */
      if(!p1.cellNetworkCoverage.hasSufficientCoverage()) {
        if(j <= 1) {
          /* the first point (and one before that) should always
           * have connectivity (for simulation purposes) */
          NetworkQuality goodNetworkCoverage = new NetworkQuality(true);
          if(j == 0) {
            p1.cellNetworkCoverage = goodNetworkCoverage;
            LOG.info("Setting net coverage to true: " + p1.time + " - " + p1.coordinates);
          } else if(j == 1) {
            // Prepend a synthetic connected point 10s before this one.
            PathPoint pNew = new PathPoint(p1.time.add(-10.0), p1.coordinates, goodNetworkCoverage);
            ent.path.points.add(0, pNew);
            j ++;
          }
        }
        // Track the first/last point of the current outage window.
        if(ptNoNetFirst == null) {
          if(j > 0) {
            LOG.debug(ent.path.points.get(j - 1).coordinates + ": " +
                CellularCoverage.getCoverage(46.552472293321905,8.589899093722023));
          }
          ptNoNetFirst = p1;
        }
        ptNoNetLast = p1;
      }
      // End of an outage window (coverage resumed, or path end reached).
      if(p1.cellNetworkCoverage.hasSufficientCoverage() || j >= ent.path.size() - 1){
        if(ptNoNetLast != null) {
          double time = ptNoNetLast.time.time - ptNoNetFirst.time.time;
          LOG.info("Car " + id + ": net down for " + time + "secs at " +
              ptNoNetFirst.time.time + ": " + ptNoNetFirst.coordinates +
              " to " + ptNoNetLast.coordinates);
          // Cut the path where an outage lasted longer than the limit.
          if(time > maxDistance) {
            int index = ent.path.points.indexOf(ptNoNetFirst);
            LOG.info("Offline for too long. Cutting path of vehicle " + id +
                " at time " + ptNoNetFirst.time + ", index " + index);
            ent.path.points = ent.path.points.subList(0, index);
            break;
          }
          //LOG.debug(ent.path.points.get(j + 1).coordinates);
        }
        ptNoNetFirst = null;
        ptNoNetLast = null;
      }
    }
    if(fromCar == toCar) { // TODO
      System.out.println(ent.path.points);
    }
    totalNumPoints += ent.path.size();
  }
  /* trim to number of cars */
  for(MovingEntity ent : new LinkedList<>(result.entities)) {
    if(Double.parseDouble(ent.id) < fromCar ||
        Double.parseDouble(ent.id) > toCar)
      result.entities.remove(ent);
  }
  //System.out.println(result.entities.get(0).path);
  LOG.info("Average time between time points: " + (timeBetweenPointsTotal / totalNumPoints));
  return result;
}
/** Vicinity-info service: request template parameterized by location. */
static ServiceUsage getServiceUsage1() throws Exception {
  final String template = "<tns:getVicinityInfo "
      + "xmlns:tns=\"" + VehicleInfoService.NAMESPACE + "\">"
      + "<lat>{{" + Context.ATTR_LOCATION_LAT + "}}</lat>"
      + "<lon>{{" + Context.ATTR_LOCATION_LON + "}}</lon>"
      + "</tns:getVicinityInfo>";
  final ContextPredictor<Object> predictor = new ContextPredictor.DefaultPredictor();
  return constructServiceUsage(template, false, predictor, 100);
}
/** Traffic-info service: location-parameterized, refreshed every 60s. */
static ServiceUsage getServiceUsage2() throws Exception {
  final String template = "<tns:getTrafficInfo "
      + "xmlns:tns=\"" + VehicleInfoService.NAMESPACE + "\">"
      + "<lat>{{" + Context.ATTR_LOCATION_LAT + "}}</lat>"
      + "<lon>{{" + Context.ATTR_LOCATION_LON + "}}</lon>"
      + "</tns:getTrafficInfo>";
  final double updateSeconds = 60;
  final double timeInterval = 10;
  final ContextPredictor<Object> predictor = new ContextPredictor.
      DefaultPredictorWithUpdateInterval(updateSeconds, timeInterval);
  return constructServiceUsage(template, false, predictor, 50);
}
/**
 * Media-streaming service: the predictor derives which song and which
 * chunk will be needed at a future time from the song length and the
 * per-chunk update interval.
 */
static ServiceUsage getServiceUsage3() throws Exception {
  String template =
      "<tns:streamMedia " +
      "xmlns:tns=\"" + VehicleInfoService.NAMESPACE + "\">" +
      "<mediaID>{{" + TestConstants.ATTR_MEDIA_ID + "}}</mediaID>" +
      "<chunkID>{{" + TestConstants.ATTR_MEDIA_NEXT_CHUNK + "}}</chunkID>" +
      "</tns:streamMedia>";
  final double updateSeconds = 20;
  final double songLength = 180;
  double timeInterval = 10;
  ContextPredictor<Object> ctxPredict = new ContextPredictor.
      DefaultPredictorWithUpdateInterval(updateSeconds, timeInterval) {
    public Context<Object> predict(Context<Object> currentContext, Time t) {
      Context<Object> context = super.predict(currentContext, t);
      // Song index: one song per songLength seconds of absolute time.
      context.setContextAttribute(TestConstants.ATTR_MEDIA_ID, "song" + (int)(t.time / songLength));
      // Chunk index within the current song, one chunk per updateSeconds.
      context.setContextAttribute(TestConstants.ATTR_MEDIA_NEXT_CHUNK,
          "chunk" + (int)(((double)(t.time % songLength))/updateSeconds));
      return context;
    }
  };
  return constructServiceUsage(template, true, ctxPredict, 150);
}
static ServiceUsage getServiceUsage4() throws Exception {
String template =
"<tns:reroute " +
"xmlns:tns=\"" + VehicleInfoService.NAMESPACE + "\">" +
"<lat>{{" + Context.ATTR_LOCATION_LAT + "}}</lat>" +
"<lon>{{" + Context.ATTR_LOCATION_LON + "}}</lon>" +
"</tns:reroute>";
final double updateSeconds = 300;
double timeInterval = 10;
ContextPredictor<Object> ctxPredict = new ContextPredictor.
DefaultPredictorWithUpdateInterval(updateSeconds, timeInterval);
return constructServiceUsage(template, false, ctxPredict, 75);
}
static ServiceUsage getServiceUsage5() throws Exception {
String template =
"<tns:getMail " +
"xmlns:tns=\"" + VehicleInfoService.NAMESPACE + "\">" +
"</tns:getMail>";
final double updateSeconds = 180;
double timeInterval = 10;
ContextPredictor<Object> ctxPredict = new ContextPredictor.
DefaultPredictorWithUpdateInterval(updateSeconds, timeInterval);
return constructServiceUsage(template, false, ctxPredict, 50);
}
static ServiceUsage getServiceUsage6() throws Exception {
String template =
"<tns:syncUpdates " +
"xmlns:tns=\"" + VehicleInfoService.NAMESPACE + "\">" +
"</tns:syncUpdates>";
final double updateSeconds = 600;
double timeInterval = 10;
ContextPredictor<Object> ctxPredict = new ContextPredictor.
DefaultPredictorWithUpdateInterval(updateSeconds, timeInterval);
return constructServiceUsage(template, false, ctxPredict, 100);
}
static ServiceUsage constructServiceUsage(String template,
boolean prefetchPossible,
//UsagePattern usagePattern,
ContextPredictor<Object> ctxPredictor, double invocationKbps) {
try {
Element body = WSClient.toElement(template);
ServiceInvocation tmp = new ServiceInvocation();
tmp.serviceCall = WSClient.createEnvelopeFromBody(body);
// tmp.prefetchPossible = prefetchPossible;
tmp.serviceEPR = eprTrafficService;
double stepSize = 10;
InvocationPredictor invPred = new InvocationPredictor.
TemplateBasedInvocationPredictor(xmlUtil.toString(tmp),
ctxPredictor, stepSize);
UsagePattern usagePattern = UsagePattern.predictionBased(invPred, null, invocationKbps);
// ServiceInvocationBuilder b = new ServiceInvocationBuilder.
// TemplateBasedInvocationBuilder(xmlUtil.toString(tmp));
// b.prefetchPossible = tmp.prefetchPossible;
// b.serviceEPR = tmp.serviceEPR;
ServiceUsage use = new ServiceUsage();
// use.invocation = b;
use.pattern = usagePattern;
use.invocationPredictor = invPred;
return use;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private static String urlTrafficService;
private static W3CEndpointReference eprTrafficService;
public static void setServiceURL(String serviceURL) {
urlTrafficService = serviceURL;
eprTrafficService = W3CEndpointReferenceUtils.
createEndpointReference(urlTrafficService);
}
static void deployTestServices(String url) throws Exception {
setServiceURL(url);
VehicleInfoService s = new VehicleInfoServiceImpl();
Endpoint.publish(urlTrafficService, s);
WSClient.cachedResponseObject = WSClient.createEnvelopeFromBody(
new XMLUtil().toElement("<result/>"));
}
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.http.websocketx.extensions.compression;
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.websocketx.extensions.WebSocketExtensionData;
import io.netty.handler.codec.http.websocketx.extensions.WebSocketExtensionUtil;
import io.netty.handler.codec.http.websocketx.extensions.WebSocketServerExtensionHandler;
import java.util.List;
import org.junit.jupiter.api.Test;
import static io.netty.handler.codec.http.websocketx.extensions.compression.
PerMessageDeflateServerExtensionHandshaker.*;
import static io.netty.handler.codec.http.websocketx.extensions.WebSocketExtensionTestUtil.*;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class WebSocketServerCompressionHandlerTest {
@Test
public void testNormalSuccess() {
EmbeddedChannel ch = new EmbeddedChannel(new WebSocketServerCompressionHandler());
HttpRequest req = newUpgradeRequest(PERMESSAGE_DEFLATE_EXTENSION);
ch.writeInbound(req);
HttpResponse res = newUpgradeResponse(null);
ch.writeOutbound(res);
HttpResponse res2 = ch.readOutbound();
List<WebSocketExtensionData> exts = WebSocketExtensionUtil.extractExtensions(
res2.headers().get(HttpHeaderNames.SEC_WEBSOCKET_EXTENSIONS));
assertEquals(PERMESSAGE_DEFLATE_EXTENSION, exts.get(0).name());
assertTrue(exts.get(0).parameters().isEmpty());
assertNotNull(ch.pipeline().get(PerMessageDeflateDecoder.class));
assertNotNull(ch.pipeline().get(PerMessageDeflateEncoder.class));
}
@Test
public void testClientWindowSizeSuccess() {
EmbeddedChannel ch = new EmbeddedChannel(new WebSocketServerExtensionHandler(
new PerMessageDeflateServerExtensionHandshaker(6, false, 10, false, false)));
HttpRequest req = newUpgradeRequest(PERMESSAGE_DEFLATE_EXTENSION + "; " + CLIENT_MAX_WINDOW);
ch.writeInbound(req);
HttpResponse res = newUpgradeResponse(null);
ch.writeOutbound(res);
HttpResponse res2 = ch.readOutbound();
List<WebSocketExtensionData> exts = WebSocketExtensionUtil.extractExtensions(
res2.headers().get(HttpHeaderNames.SEC_WEBSOCKET_EXTENSIONS));
assertEquals(PERMESSAGE_DEFLATE_EXTENSION, exts.get(0).name());
assertEquals("10", exts.get(0).parameters().get(CLIENT_MAX_WINDOW));
assertNotNull(ch.pipeline().get(PerMessageDeflateDecoder.class));
assertNotNull(ch.pipeline().get(PerMessageDeflateEncoder.class));
}
@Test
public void testClientWindowSizeUnavailable() {
EmbeddedChannel ch = new EmbeddedChannel(new WebSocketServerExtensionHandler(
new PerMessageDeflateServerExtensionHandshaker(6, false, 10, false, false)));
HttpRequest req = newUpgradeRequest(PERMESSAGE_DEFLATE_EXTENSION);
ch.writeInbound(req);
HttpResponse res = newUpgradeResponse(null);
ch.writeOutbound(res);
HttpResponse res2 = ch.readOutbound();
List<WebSocketExtensionData> exts = WebSocketExtensionUtil.extractExtensions(
res2.headers().get(HttpHeaderNames.SEC_WEBSOCKET_EXTENSIONS));
assertEquals(PERMESSAGE_DEFLATE_EXTENSION, exts.get(0).name());
assertTrue(exts.get(0).parameters().isEmpty());
assertNotNull(ch.pipeline().get(PerMessageDeflateDecoder.class));
assertNotNull(ch.pipeline().get(PerMessageDeflateEncoder.class));
}
@Test
public void testServerWindowSizeSuccess() {
EmbeddedChannel ch = new EmbeddedChannel(new WebSocketServerExtensionHandler(
new PerMessageDeflateServerExtensionHandshaker(6, true, 15, false, false)));
HttpRequest req = newUpgradeRequest(PERMESSAGE_DEFLATE_EXTENSION + "; " + SERVER_MAX_WINDOW + "=10");
ch.writeInbound(req);
HttpResponse res = newUpgradeResponse(null);
ch.writeOutbound(res);
HttpResponse res2 = ch.readOutbound();
List<WebSocketExtensionData> exts = WebSocketExtensionUtil.extractExtensions(
res2.headers().get(HttpHeaderNames.SEC_WEBSOCKET_EXTENSIONS));
assertEquals(PERMESSAGE_DEFLATE_EXTENSION, exts.get(0).name());
assertEquals("10", exts.get(0).parameters().get(SERVER_MAX_WINDOW));
assertNotNull(ch.pipeline().get(PerMessageDeflateDecoder.class));
assertNotNull(ch.pipeline().get(PerMessageDeflateEncoder.class));
}
@Test
public void testServerWindowSizeDisable() {
EmbeddedChannel ch = new EmbeddedChannel(new WebSocketServerExtensionHandler(
new PerMessageDeflateServerExtensionHandshaker(6, false, 15, false, false)));
HttpRequest req = newUpgradeRequest(PERMESSAGE_DEFLATE_EXTENSION + "; " + SERVER_MAX_WINDOW + "=10");
ch.writeInbound(req);
HttpResponse res = newUpgradeResponse(null);
ch.writeOutbound(res);
HttpResponse res2 = ch.readOutbound();
assertFalse(res2.headers().contains(HttpHeaderNames.SEC_WEBSOCKET_EXTENSIONS));
assertNull(ch.pipeline().get(PerMessageDeflateDecoder.class));
assertNull(ch.pipeline().get(PerMessageDeflateEncoder.class));
}
@Test
public void testServerNoContext() {
EmbeddedChannel ch = new EmbeddedChannel(new WebSocketServerCompressionHandler());
HttpRequest req = newUpgradeRequest(PERMESSAGE_DEFLATE_EXTENSION + "; " + SERVER_NO_CONTEXT);
ch.writeInbound(req);
HttpResponse res = newUpgradeResponse(null);
ch.writeOutbound(res);
HttpResponse res2 = ch.readOutbound();
assertFalse(res2.headers().contains(HttpHeaderNames.SEC_WEBSOCKET_EXTENSIONS));
assertNull(ch.pipeline().get(PerMessageDeflateDecoder.class));
assertNull(ch.pipeline().get(PerMessageDeflateEncoder.class));
}
@Test
public void testClientNoContext() {
EmbeddedChannel ch = new EmbeddedChannel(new WebSocketServerCompressionHandler());
HttpRequest req = newUpgradeRequest(PERMESSAGE_DEFLATE_EXTENSION + "; " + CLIENT_NO_CONTEXT);
ch.writeInbound(req);
HttpResponse res = newUpgradeResponse(null);
ch.writeOutbound(res);
HttpResponse res2 = ch.readOutbound();
List<WebSocketExtensionData> exts = WebSocketExtensionUtil.extractExtensions(
res2.headers().get(HttpHeaderNames.SEC_WEBSOCKET_EXTENSIONS));
assertEquals(PERMESSAGE_DEFLATE_EXTENSION, exts.get(0).name());
assertTrue(exts.get(0).parameters().isEmpty());
assertNotNull(ch.pipeline().get(PerMessageDeflateDecoder.class));
assertNotNull(ch.pipeline().get(PerMessageDeflateEncoder.class));
}
@Test
public void testServerWindowSizeDisableThenFallback() {
EmbeddedChannel ch = new EmbeddedChannel(new WebSocketServerExtensionHandler(
new PerMessageDeflateServerExtensionHandshaker(6, false, 15, false, false)));
HttpRequest req = newUpgradeRequest(PERMESSAGE_DEFLATE_EXTENSION + "; " + SERVER_MAX_WINDOW + "=10, " +
PERMESSAGE_DEFLATE_EXTENSION);
ch.writeInbound(req);
HttpResponse res = newUpgradeResponse(null);
ch.writeOutbound(res);
HttpResponse res2 = ch.readOutbound();
List<WebSocketExtensionData> exts = WebSocketExtensionUtil.extractExtensions(
res2.headers().get(HttpHeaderNames.SEC_WEBSOCKET_EXTENSIONS));
assertEquals(PERMESSAGE_DEFLATE_EXTENSION, exts.get(0).name());
assertTrue(exts.get(0).parameters().isEmpty());
assertNotNull(ch.pipeline().get(PerMessageDeflateDecoder.class));
assertNotNull(ch.pipeline().get(PerMessageDeflateEncoder.class));
}
}
| |
package edu.miami.ccs.goma;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import java.util.Set;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.cfg.Configuration;
import org.hibernate.exception.ConstraintViolationException;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.service.ServiceRegistryBuilder;
import org.jboss.logging.Logger;
import edu.miami.ccs.goma.pojos.ApprovalRequest;
import edu.miami.ccs.goma.pojos.ApprovalRequestType;
import edu.miami.ccs.goma.pojos.DictionaryTerm;
import edu.miami.ccs.goma.pojos.ObservationParameter;
import edu.miami.ccs.goma.pojos.ObservationTuple;
import edu.miami.ccs.goma.pojos.Station;
import edu.miami.ccs.goma.pojos.StatusValue;
import edu.miami.ccs.goma.pojos.User;
/**
 * Servlet exposing CRUD operations on {@link ObservationParameter} records.
 * The operation is selected by the {@code mode} request parameter
 * (create / list / delete / fetch / update) and results are written as JSON.
 *
 * <p>Fixes over the previous revision:
 * <ul>
 *   <li>{@code logger} was declared but never initialized, so every use threw
 *       a NullPointerException; it is now created eagerly.</li>
 *   <li>The request, response and writer were kept in <em>static</em> fields.
 *       Servlets are invoked concurrently, so parallel requests clobbered each
 *       other's state; they are now passed as method parameters.</li>
 *   <li>{@link SimpleDateFormat} is not thread-safe; a fresh instance is
 *       created per use instead of a shared static one.</li>
 *   <li>Hibernate sessions are now closed on every exit path (they leaked on
 *       each error return).</li>
 * </ul>
 */
public class ObservationParameterOperations extends HttpServlet {

    // Eagerly initialized: the previous revision never assigned this field,
    // so any logger.debug(...) call threw a NullPointerException.
    private static final Logger logger = Logger.getLogger(ObservationParameterOperations.class);

    // Pattern used for all start_date / end_date request and response values.
    private static final String DATE_PATTERN = "MM/dd/yyyy";

    private static ServiceRegistry serviceRegistry;
    private static SessionFactory sf;

    @Override
    public void init(ServletConfig config) throws ServletException
    {
        super.init(config); // keep the config available via getServletConfig()
        sf = connect();
    }

    /**
     * Returns a fresh formatter for {@link #DATE_PATTERN}. SimpleDateFormat is
     * not thread-safe, so instances must not be shared across requests.
     */
    private static SimpleDateFormat dateFormat()
    {
        return new SimpleDateFormat(DATE_PATTERN);
    }

    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        PrintWriter out;
        try
        {
            out = resp.getWriter();
        }
        catch(IOException e)
        {
            logger.debug("Error opening output stream using PrintWriter");
            return; // nothing can be written; previously execution fell through
        }
        // Dispatch on the requested mode. Using "x".equals(mode) also avoids
        // the NPE the previous chain threw when no mode parameter was sent.
        String mode = req.getParameter("mode");
        if("create".equals(mode))
            create(req, out);
        else if("list".equals(mode))
            list(req, resp, out);
        else if("delete".equals(mode))
            delete(req, out);
        else if("fetch".equals(mode))
            fetch(req, resp, out);
        else if("update".equals(mode))
            update(req, out);
    }

    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        // POST is handled identically to GET.
        this.doGet(req, resp);
    }

    /**
     * Called by the init(...) method to build the Hibernate session factory.
     *
     * @return Hibernate SessionFactory object
     */
    public static SessionFactory connect()
    {
        Configuration cfg = new Configuration();
        cfg.configure();
        serviceRegistry = new ServiceRegistryBuilder().applySettings(cfg.getProperties()).buildServiceRegistry();
        return cfg.buildSessionFactory(serviceRegistry);
    }

    /**
     * Rolls the transaction back, logging (rather than propagating) failures
     * of the rollback itself.
     */
    private static void rollbackQuietly(Transaction tx)
    {
        try
        {
            tx.rollback();
        }
        catch (HibernateException e1)
        {
            logger.debug("Error rolling back transaction");
        }
    }

    /**
     * Copies the optional start_date / end_date request parameters onto the
     * observation parameter. A ParseException on start_date skips end_date as
     * well (same behavior as the original single try block).
     */
    private static void applyDateRange(ObservationParameter op, HttpServletRequest request)
    {
        SimpleDateFormat sdf = dateFormat();
        try
        {
            if(request.getParameter("start_date") != null)
                op.setStartDate(sdf.parse(request.getParameter("start_date")));
            if(request.getParameter("end_date") != null)
                op.setEndDate(sdf.parse(request.getParameter("end_date")));
        }
        catch (ParseException pe)
        {
            pe.printStackTrace();
        }
    }

    /** Writes a single observation parameter, identified by obs_param_id, as JSON. */
    private static void fetch(HttpServletRequest request, HttpServletResponse response, PrintWriter out)
    {
        Session session = sf.openSession();
        try
        {
            SimpleDateFormat sdf = dateFormat();
            ObservationParameter op = (ObservationParameter) session.load(ObservationParameter.class, Long.valueOf(request.getParameter("obs_param_id")));
            response.setContentType("application/json");
            out.print("{ \"data\": [ {");
            out.print("\"obs_param_id\": \""+op.getObservationParamId()+
                    "\", \"sampling_freq\": \""+op.getDictionaryTermBySamplingFreqId().getTerm());
            // sampling_depth is optional.
            if(op.getDictionaryTermBySamplingDepthId() != null)
                out.print("\", \"sampling_depth\": \""+op.getDictionaryTermBySamplingDepthId().getTerm());
            out.print("\", \"medium\": \""+op.getObservationTuple().getDictionaryTermByMediumId().getTerm()+
                    "\", \"category\": \""+op.getObservationTuple().getDictionaryTermByParamCatId().getTerm()+
                    "\", \"type\": \""+op.getObservationTuple().getDictionaryTermByParamTypeId().getTerm()+
                    "\", \"method\": \""+op.getObservationTuple().getDictionaryTermByAnalysisMethodId().getTerm()+
                    "\", \"organization\": \""+op.getStation().getProgram().getOrganization().getName()+
                    "\", \"program\": \""+op.getStation().getProgram().getName()+
                    "\", \"project\": \""+op.getStation().getProject().getName()+
                    "\", \"station\": \""+op.getStation().getName()+
                    "\", \"approval_status\": \""+op.getStatusValue().getStatusValue()+"\"");
            // NOTE(review): assumes end_date is always set when start_date is —
            // sdf.format(null) would throw otherwise; preserved from the original.
            if(op.getStartDate() != null)
                out.print(",\"start_date\": \""+sdf.format(op.getStartDate())+"\", \"end_date\": \""+sdf.format(op.getEndDate())+"\"");
            out.print("} ] }");
        }
        finally
        {
            session.close();
        }
    }

    /** Lists all observation parameters of the station given by station_id as JSON. */
    private static void list(HttpServletRequest request, HttpServletResponse response, PrintWriter out)
    {
        Session session = sf.openSession();
        try
        {
            session.clear();
            Station s = (Station) session.load(Station.class, Long.valueOf(request.getParameter("station_id")));
            Set<ObservationParameter> obsParamList = s.getObservationParameters();
            //Set up the data type for the JSON
            response.setContentType("application/json");
            out.print("{ \"hits\":"+obsParamList.size()+", \"data\": [");
            for (Iterator<ObservationParameter> iter = obsParamList.iterator(); iter.hasNext();)
            {
                ObservationParameter op = iter.next();
                out.print("{ \"obs_param_id\": \""+op.getObservationParamId()+
                        "\", \"medium\": \""+op.getObservationTuple().getDictionaryTermByMediumId().getTerm()+
                        "\", \"category\": \""+op.getObservationTuple().getDictionaryTermByParamCatId().getTerm()+
                        "\", \"type\": \""+op.getObservationTuple().getDictionaryTermByParamTypeId().getTerm()+
                        "\", \"method\": \""+op.getObservationTuple().getDictionaryTermByAnalysisMethodId().getTerm()+
                        "\", \"sampling_freq\": \""+op.getDictionaryTermBySamplingFreqId().getTerm()+
                        "\", \"approval_status\": \""+op.getStatusValue().getStatusValue()+"\"}");
                if(iter.hasNext())
                    out.print(",");
            }
            out.print("] }");
        }
        finally
        {
            session.close();
        }
    }

    /**
     * Creates a new observation parameter from the request parameters, files a
     * pending approval request for it, and reports the result as JSON.
     */
    private static void create(HttpServletRequest request, PrintWriter out)
    {
        Session session = sf.openSession();
        try
        {
            Date date = new Date();
            Transaction tx = null;
            HttpSession hs = request.getSession(true);
            User curr_user = (User) hs.getAttribute("curr_user");
            StatusValue approval_stat = (StatusValue) session.load(StatusValue.class, Long.valueOf(Statics.PENDING));
            User u = (User) session.load(User.class, curr_user.getUserId());
            DictionaryTerm sampling_freq = (DictionaryTerm) session.load(DictionaryTerm.class, Long.valueOf(request.getParameter("sampling_frequency_id")));
            Station s = (Station) session.load(Station.class, Long.valueOf(request.getParameter("station_id")));
            ObservationTuple ot = (ObservationTuple) session.load(ObservationTuple.class, Long.valueOf(request.getParameter("tuple_id")));
            ObservationParameter op = new ObservationParameter(u, sampling_freq, ot, s, approval_stat, date);
            if(request.getParameter("sampling_depth_id") != null)
                op.setDictionaryTermBySamplingDepthId((DictionaryTerm) session.load(DictionaryTerm.class, Long.valueOf(request.getParameter("sampling_depth_id"))));
            applyDateRange(op, request);
            try
            {
                tx = session.beginTransaction();
                session.save(op);
                session.flush();
                ApprovalRequestType art = (ApprovalRequestType) session.load(ApprovalRequestType.class, Long.valueOf(Statics.OBSERVATION_PARAMETER));
                ApprovalRequest ar = new ApprovalRequest(curr_user, art, curr_user, approval_stat, date, op.getObservationParamId());
                ar.setComment("New Record Added");
                session.save(ar);
                tx.commit();
            }
            catch (RuntimeException e)
            {
                if (tx != null && tx.isActive())
                {
                    rollbackQuietly(tx);
                    // throw the first exception again
                    throw e;
                }
                // NOTE(review): if the transaction is not active the original
                // code fell through and reported success; preserved as-is.
            }
            catch (Exception e)
            {
                out.print("{ \"code\": \"failure\", \"message\": \""+e.getMessage()+"\", \"data\": [] }");
                tx.rollback();
                return;
            }
            out.print("{ \"code\": \"success\", \"message\": \"Save Successful\", \"obs_param_id\": \""+op.getObservationParamId()+"\", \"data\": [] }");
        }
        finally
        {
            // Close on every exit path; the previous revision leaked the
            // session whenever an error caused an early return or rethrow.
            session.close();
        }
    }

    /**
     * Updates an existing observation parameter, resets its approval status to
     * pending, files an approval request, and reports the result as JSON.
     */
    private static void update(HttpServletRequest request, PrintWriter out)
    {
        Session session = sf.openSession();
        try
        {
            Date date = new Date();
            Transaction tx = null;
            HttpSession hs = request.getSession(true);
            User curr_user = (User) hs.getAttribute("curr_user");
            User u = (User) session.load(User.class, curr_user.getUserId());
            StatusValue approval_stat = (StatusValue) session.load(StatusValue.class, Long.valueOf(Statics.PENDING));
            ObservationParameter op = (ObservationParameter) session.load(ObservationParameter.class, Long.valueOf(request.getParameter("obs_param_id")));
            op.setUserByUpdatedBy(u);
            op.setDateUpdated(date);
            op.setStatusValue(approval_stat);
            applyDateRange(op, request);
            if(request.getParameter("sampling_depth_id") != null)
                op.setDictionaryTermBySamplingDepthId((DictionaryTerm) session.load(DictionaryTerm.class, Long.valueOf(request.getParameter("sampling_depth_id"))));
            try
            {
                tx = session.beginTransaction();
                session.update(op);
                session.flush();
                ApprovalRequestType art = (ApprovalRequestType) session.load(ApprovalRequestType.class, Long.valueOf(Statics.OBSERVATION_PARAMETER));
                ApprovalRequest ar = new ApprovalRequest(curr_user, art, curr_user, approval_stat, date, op.getObservationParamId());
                // Fixed copy-pasted "New Record Added" comment from create().
                ar.setComment("Record Updated");
                session.save(ar);
                tx.commit();
            }
            catch (RuntimeException e)
            {
                if (tx != null && tx.isActive())
                {
                    rollbackQuietly(tx);
                    // throw the first exception again
                    throw e;
                }
            }
            catch (Exception e)
            {
                out.print("{ \"code\": \"failure\", \"message\": \""+e.getMessage()+"\", \"data\": [] }");
                tx.rollback();
                return;
            }
            out.print("{ \"code\": \"success\", \"message\": \"Save Successful\", \"obs_param_id\": \""+op.getObservationParamId()+"\", \"data\": [] }");
        }
        finally
        {
            session.close();
        }
    }

    /**
     * Deletes every observation parameter listed in the obs_param_id request
     * parameter(s), one transaction each, and reports the result as JSON.
     */
    private static void delete(HttpServletRequest request, PrintWriter out)
    {
        Session session = sf.openSession();
        try
        {
            Transaction tx = null;
            String[] uList = request.getParameterValues("obs_param_id");
            for(String item : uList)
            {
                ObservationParameter op = (ObservationParameter) session.load(ObservationParameter.class, Long.valueOf(item));
                try
                {
                    tx = session.beginTransaction();
                    session.delete(op);
                    tx.commit();
                }
                catch (ConstraintViolationException cve)
                {
                    // Referential integrity: the parameter still has data rows.
                    out.print("{ \"code\": \"failure\", \"message\": \"Delete Failed: Cannot delete "+ op.getObservationParamId() +" while it has data assigned to it.\", \"data\": [] }");
                    tx.rollback();
                    return;
                }
                catch (RuntimeException e)
                {
                    if (tx != null && tx.isActive())
                    {
                        rollbackQuietly(tx);
                        // throw the first exception again
                        throw e;
                    }
                }
                catch (Exception e)
                {
                    out.print("{ \"code\": \"failure\", \"message\": \""+e.getMessage()+"\", \"data\": [] }");
                    tx.rollback();
                    return;
                }
            }
            out.print("{ \"code\": \"success\", \"message\": \"Delete Successful\", \"data\": [] }");
        }
        finally
        {
            session.close();
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.lookoutmetrics.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * Details about feedback submitted for an anomalous metric.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/lookoutmetrics-2017-07-25/TimeSeriesFeedback" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class TimeSeriesFeedback implements Serializable, Cloneable, StructuredPojo {

    /** The ID of the metric. */
    private String timeSeriesId;

    /** Feedback on whether the metric is a legitimate anomaly. */
    private Boolean isAnomaly;

    /**
     * Sets the ID of the metric.
     *
     * @param timeSeriesId
     *        The ID of the metric.
     */
    public void setTimeSeriesId(String timeSeriesId) {
        this.timeSeriesId = timeSeriesId;
    }

    /**
     * Returns the ID of the metric.
     *
     * @return The ID of the metric.
     */
    public String getTimeSeriesId() {
        return this.timeSeriesId;
    }

    /**
     * Fluent setter for the ID of the metric.
     *
     * @param timeSeriesId
     *        The ID of the metric.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TimeSeriesFeedback withTimeSeriesId(String timeSeriesId) {
        setTimeSeriesId(timeSeriesId);
        return this;
    }

    /**
     * Sets the feedback on whether the metric is a legitimate anomaly.
     *
     * @param isAnomaly
     *        Feedback on whether the metric is a legitimate anomaly.
     */
    public void setIsAnomaly(Boolean isAnomaly) {
        this.isAnomaly = isAnomaly;
    }

    /**
     * Returns the feedback on whether the metric is a legitimate anomaly.
     *
     * @return Feedback on whether the metric is a legitimate anomaly.
     */
    public Boolean getIsAnomaly() {
        return this.isAnomaly;
    }

    /**
     * Fluent setter for the feedback on whether the metric is a legitimate anomaly.
     *
     * @param isAnomaly
     *        Feedback on whether the metric is a legitimate anomaly.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TimeSeriesFeedback withIsAnomaly(Boolean isAnomaly) {
        setIsAnomaly(isAnomaly);
        return this;
    }

    /**
     * Convenience accessor for the feedback on whether the metric is a legitimate anomaly.
     *
     * @return Feedback on whether the metric is a legitimate anomaly.
     */
    public Boolean isAnomaly() {
        return this.isAnomaly;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("{");
        if (getTimeSeriesId() != null) {
            text.append("TimeSeriesId: ").append(getTimeSeriesId()).append(",");
        }
        if (getIsAnomaly() != null) {
            text.append("IsAnomaly: ").append(getIsAnomaly());
        }
        return text.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof TimeSeriesFeedback)) {
            // also covers obj == null
            return false;
        }
        TimeSeriesFeedback that = (TimeSeriesFeedback) obj;
        // Objects.equals reproduces the null-safe field comparison exactly.
        return java.util.Objects.equals(getTimeSeriesId(), that.getTimeSeriesId())
                && java.util.Objects.equals(getIsAnomaly(), that.getIsAnomaly());
    }

    @Override
    public int hashCode() {
        // Objects.hash computes the same 31-based rolling hash (null -> 0)
        // as the previous hand-written loop, so values are unchanged.
        return java.util.Objects.hash(getTimeSeriesId(), getIsAnomaly());
    }

    @Override
    public TimeSeriesFeedback clone() {
        try {
            return (TimeSeriesFeedback) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.lookoutmetrics.model.transform.TimeSeriesFeedbackMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uribeacon.widget;
import android.bluetooth.BluetoothDevice;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.view.LayoutInflater;
import android.widget.BaseAdapter;
import org.uribeacon.scan.compat.ScanResult;
import org.uribeacon.scan.util.RegionResolver;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
* Adapter for holding and sorting devices found through scanning that can be used in a ListView.
*/
public abstract class ScanResultAdapter extends BaseAdapter {
private final Map<String /* device address */, DeviceSighting> mLeScanResults;
private List<DeviceSighting> mSortedSightings;
public final LayoutInflater mInflater;
public final RegionResolver mRegionResolver;
private final Handler mHandler;
// Timeout to remove a ScanResult after lifetime expires.
private class Timeout implements Runnable {
final String mAddress;
Timeout(String address) {
mAddress = address;
}
public void run() {
remove(mAddress);
}
}
private final HashMap<String, Timeout> mTimeouts = new HashMap<>();
public ScanResultAdapter(LayoutInflater inflater) {
mLeScanResults = new HashMap<>();
mRegionResolver = new RegionResolver();
mSortedSightings = null;
mInflater = inflater;
mHandler = new Handler();
}
public void setSmoothFactor(double smoothFactor) {
mRegionResolver.setSmoothFactor(smoothFactor);
}
/**
* Adds the ScanResult to the adapter with an auto-removal after lifetimeSeconds.
* <p/>
* Note: The lifetime param is useful when a device stops matching a filter
* (e.g. advertising the config service) but is not reported lost by the scanner
* because it is still advertising a UriBeacon.
*
* @param scanResult The ScanResult to add to the adapter.
* @param calibratedTxPower The Transmit Power used for the ScanResult.
* @param lifetimeSeconds The number of seconds before the ScanResult is removed.
*/
public void add(ScanResult scanResult, int calibratedTxPower, int lifetimeSeconds) {
final String address = scanResult.getDevice().getAddress();
// Create or replace the timeout runnable
Timeout timer = mTimeouts.get(address);
if (timer != null) {
mHandler.removeCallbacks(timer);
} else {
timer = new Timeout(address);
mTimeouts.put(address, timer);
}
mHandler.postDelayed(timer, TimeUnit.SECONDS.toMillis(lifetimeSeconds));
add(scanResult, calibratedTxPower);
}
/**
* Adds the scanResult to the adapter.
*
* @param scanResult The ScanResult to add to the adapter.
* @param txPower The Transmit Power used for the ScanResult.
*/
public void add(ScanResult scanResult, int txPower) {
final String address = scanResult.getDevice().getAddress();
mRegionResolver.onUpdate(address, scanResult.getRssi(), txPower);
double distance = mRegionResolver.getDistance(address);
DeviceSighting sightings = mLeScanResults.get(address);
if (sightings == null) {
mLeScanResults.put(scanResult.getDevice().getAddress(),
new DeviceSighting(scanResult, distance));
} else {
sightings.updateSighting(scanResult, distance);
}
notifyDataSetChanged();
}
/**
* Remove ScanResult associated with the device from the adapter.
*
* @param device The device to remove from the adapter.
*/
public void remove(BluetoothDevice device) {
String address = device.getAddress();
remove(address);
}
private void remove(String address) {
mRegionResolver.onLost(address);
mLeScanResults.remove(address);
// Clean out the timeout runnable
Timeout timer = mTimeouts.get(address);
if (timer != null) {
mHandler.removeCallbacks(timer);
mTimeouts.remove(address);
}
notifyDataSetChanged();
}
/**
* Remove all elements from the collection.
*/
public void clear() {
mLeScanResults.clear();
notifyDataSetChanged();
}
@Override
public void notifyDataSetChanged() {
mSortedSightings = null;
super.notifyDataSetChanged();
}
@Override
public int getCount() {
return mLeScanResults.size();
}
@Override
public DeviceSighting getItem(int i) {
    // Lazily build and cache a sorted snapshot of the current sightings.
    // The cache is invalidated whenever the data changes
    // (see notifyDataSetChanged()).
    if (mSortedSightings == null) {
        mSortedSightings = new ArrayList<>(mLeScanResults.values());
        Collections.sort(mSortedSightings);
    }
    return mSortedSightings.get(i);
}
@Override
public long getItemId(int i) {
    // Items have no stable ids; fall back to the list position.
    return i;
}
/**
 * Holds the most recent ScanResult for a device together with its
 * estimated distance and the averaged interval between sightings.
 */
public class DeviceSighting implements Comparable<DeviceSighting> {
    public ScanResult scanResult;
    public double latestDistance;
    // Running average of the time between consecutive sightings, in ms.
    public long period;

    public DeviceSighting(ScanResult scanResult, double distance) {
        this.scanResult = scanResult;
        this.latestDistance = distance;
    }

    public void updateSighting(ScanResult scanResult, double distance) {
        long currentPeriod = TimeUnit.NANOSECONDS.toMillis(scanResult.getTimestampNanos()
                - this.scanResult.getTimestampNanos());
        // Fold the new interval into the running average (first sighting
        // just takes the interval as-is).
        this.period = this.period != 0 ?
                (this.period + currentPeriod) / 2
                : currentPeriod;
        this.scanResult = scanResult;
        this.latestDistance = distance;
    }

    @Override
    public int compareTo(@NonNull DeviceSighting other) {
        final String address = scanResult.getDevice().getAddress();
        final String otherAddress = other.scanResult.getDevice().getAddress();
        // FIX: equal addresses must compare as equal. Previously, comparing
        // a sighting against itself (or another sighting of the same device)
        // returned -1 when that address was the nearest, violating the
        // Comparable contract and risking "Comparison method violates its
        // general contract" from TimSort.
        if (address.equals(otherAddress)) {
            return 0;
        }
        // The device currently resolved as nearest always sorts first.
        final String nearest = mRegionResolver.getNearestAddress();
        if (address.equals(nearest)) {
            return -1;
        }
        if (otherAddress.equals(nearest)) {
            return 1;
        }
        // Otherwise sort by the stabilized region of the device.
        int r1 = mRegionResolver.getRegion(address);
        int r2 = mRegionResolver.getRegion(otherAddress);
        if (r1 != r2) {
            return Integer.compare(r1, r2);
        }
        // Same region: fall back to a stable order by device address.
        return address.compareTo(otherAddress);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.filter;
import java.io.IOException;
import java.util.List;
import org.apache.drill.common.expression.ErrorCollector;
import org.apache.drill.common.expression.ErrorCollectorImpl;
import org.apache.drill.common.expression.LogicalExpression;
import org.apache.drill.exec.exception.ClassTransformationException;
import org.apache.drill.exec.exception.OutOfMemoryException;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.expr.ClassGenerator;
import org.apache.drill.exec.expr.CodeGenerator;
import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.physical.config.Filter;
import org.apache.drill.exec.record.AbstractSingleRecordBatch;
import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
import org.apache.drill.exec.record.RecordBatch;
import org.apache.drill.exec.record.TransferPair;
import org.apache.drill.exec.record.VectorWrapper;
import org.apache.drill.exec.record.selection.SelectionVector2;
import org.apache.drill.exec.record.selection.SelectionVector4;
import org.apache.drill.exec.vector.ValueVector;
import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
/**
 * Record batch that evaluates the Filter operator's boolean expression over
 * the incoming batch and exposes the matching rows through a selection
 * vector, transferring the incoming value vectors instead of copying them.
 */
public class FilterRecordBatch extends AbstractSingleRecordBatch<Filter> {
  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FilterRecordBatch.class);

  // Outgoing selection vectors. In practice only sv2 is populated: the
  // FOUR_BYTE path is disabled in setupNewSchema() (filter is always pushed
  // beyond sort in FilterPrel), so sv4 remains null.
  private SelectionVector2 sv2;
  private SelectionVector4 sv4;

  // Code-generated filter implementation; rebuilt on each schema change.
  private Filterer filter;

  public FilterRecordBatch(Filter pop, RecordBatch incoming, FragmentContext context) throws OutOfMemoryException {
    super(pop, context, incoming);
  }

  @Override
  public FragmentContext getContext() {
    return context;
  }

  @Override
  public int getRecordCount() {
    // The record count is the number of selected rows, read from whichever
    // selection vector is active.
    return sv2 != null ? sv2.getCount() : sv4.getCount();
  }

  @Override
  public SelectionVector2 getSelectionVector2() {
    return sv2;
  }

  @Override
  public SelectionVector4 getSelectionVector4() {
    return sv4;
  }

  /**
   * Runs the generated filter over the incoming batch, populating the
   * selection vector with the rows that satisfy the predicate.
   */
  @Override
  protected IterOutcome doWork() {
    container.zeroVectors();
    int recordCount = incoming.getRecordCount();
    try {
      filter.filterBatch(recordCount);
    } catch (SchemaChangeException e) {
      // Schema changes are handled in setupNewSchema(); one surfacing here
      // is unexpected, so convert to an unchecked failure.
      throw new UnsupportedOperationException(e);
    }
    return getFinalOutcome(false);
  }

  @Override
  public void close() {
    // Release selection-vector buffers before the base class releases the
    // container and operator context.
    if (sv2 != null) {
      sv2.clear();
    }
    if (sv4 != null) {
      sv4.clear();
    }
    super.close();
  }

  /**
   * Regenerates the filter implementation for the new incoming schema and
   * (re)allocates the outgoing selection vector.
   *
   * @return true if the output schema changed.
   * @throws SchemaChangeException if expression materialization fails.
   */
  @Override
  protected boolean setupNewSchema() throws SchemaChangeException {
    if (sv2 != null) {
      sv2.clear();
    }
    switch (incoming.getSchema().getSelectionVectorMode()) {
      case NONE:
        // Lazily create the outgoing SV2 the first time through.
        if (sv2 == null) {
          sv2 = new SelectionVector2(oContext.getAllocator());
        }
        this.filter = generateSV2Filterer();
        break;
      case TWO_BYTE:
        sv2 = new SelectionVector2(oContext.getAllocator());
        this.filter = generateSV2Filterer();
        break;
      case FOUR_BYTE:
        /*
         * Filter does not support SV4 handling. There are couple of minor issues in the
         * logic that handles SV4 + filter should always be pushed beyond sort so disabling
         * it in FilterPrel.
         *
         */
      default:
        throw new UnsupportedOperationException();
    }
    if (container.isSchemaChanged()) {
      container.buildSchema(SelectionVectorMode.TWO_BYTE);
      return true;
    }
    return false;
  }

  /**
   * Builds a filter over SV4 input. Currently unreachable from
   * setupNewSchema() (see the FOUR_BYTE case above); kept for the template.
   *
   * @throws SchemaChangeException if the filter expression cannot be
   *         materialized against the incoming schema or codegen fails.
   */
  protected Filterer generateSV4Filterer() throws SchemaChangeException {
    final ErrorCollector collector = new ErrorCollectorImpl();
    final List<TransferPair> transfers = Lists.newArrayList();
    final ClassGenerator<Filterer> cg = CodeGenerator.getRoot(Filterer.TEMPLATE_DEFINITION4, context.getOptions());
    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(popConfig.getExpr(), incoming, collector, context.getFunctionRegistry());
    if (collector.hasErrors()) {
      throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
    }
    cg.addExpr(new ReturnValueExpression(expr), ClassGenerator.BlkCreateMode.FALSE);
    // Transfer every incoming vector into the output container; the filter
    // only produces a selection vector, never copies data.
    for (final VectorWrapper<?> vw : incoming) {
      for (final ValueVector vv : vw.getValueVectors()) {
        final TransferPair pair = vv.getTransferPair(oContext.getAllocator());
        container.add(pair.getTo());
        transfers.add(pair);
      }
    }
    // allocate outgoing sv4
    container.buildSchema(SelectionVectorMode.FOUR_BYTE);
    try {
      final TransferPair[] tx = transfers.toArray(new TransferPair[transfers.size()]);
      final Filterer filter = context.getImplementationClass(cg);
      filter.setup(context, incoming, this, tx);
      return filter;
    } catch (ClassTransformationException | IOException e) {
      throw new SchemaChangeException("Failure while attempting to load generated class", e);
    }
  }

  /**
   * Builds a filter producing a two-byte selection vector over the incoming
   * batch (used for both NONE and TWO_BYTE incoming modes).
   *
   * @throws SchemaChangeException if the filter expression cannot be
   *         materialized against the incoming schema or codegen fails.
   */
  protected Filterer generateSV2Filterer() throws SchemaChangeException {
    final ErrorCollector collector = new ErrorCollectorImpl();
    final List<TransferPair> transfers = Lists.newArrayList();
    final ClassGenerator<Filterer> cg = CodeGenerator.getRoot(Filterer.TEMPLATE_DEFINITION2, context.getOptions());
    // Uncomment below lines to enable saving generated code file for debugging
    // cg.getCodeGenerator().plainJavaCapable(true);
    // cg.getCodeGenerator().saveCodeForDebugging(true);
    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(popConfig.getExpr(), incoming, collector,
        context.getFunctionRegistry(), false, unionTypeEnabled);
    if (collector.hasErrors()) {
      throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
    }
    cg.addExpr(new ReturnValueExpression(expr), ClassGenerator.BlkCreateMode.FALSE);
    // Wire each incoming vector to the output container via a transfer pair;
    // data buffers move rather than being copied.
    for (final VectorWrapper<?> v : incoming) {
      final TransferPair pair = v.getValueVector().makeTransferPair(container.addOrGet(v.getField(), callBack));
      transfers.add(pair);
    }
    try {
      final TransferPair[] tx = transfers.toArray(new TransferPair[transfers.size()]);
      CodeGenerator<Filterer> codeGen = cg.getCodeGenerator();
      codeGen.plainJavaCapable(true);
      final Filterer filter = context.getImplementationClass(codeGen);
      filter.setup(context, incoming, this, tx);
      return filter;
    } catch (ClassTransformationException | IOException e) {
      throw new SchemaChangeException("Failure while attempting to load generated class", e);
    }
  }

  @Override
  public void dump() {
    logger.error("FilterRecordBatch[container={}, selectionVector2={}, filter={}, popConfig={}]", container, sv2, filter, popConfig);
  }
}
| |
package com.hsj.egameserver.server;
import org.slf4j.LoggerFactory;
import java.sql.*;
/**
 * Singleton helper for running account-related queries against the
 * configured "dinamic" database connection.
 */
public class DatabaseUtils {
    // Lazily created singletons. Declared volatile so the unsynchronized
    // reads in the getters observe a fully constructed instance.
    private static volatile DatabaseUtils _dinamicInstance = null;
    private static volatile DatabaseUtils _staticInstance = null;

    private synchronized static void createDinamicInstance() {
        if (_dinamicInstance == null) {
            _dinamicInstance = new DatabaseUtils();
        }
    }

    /** Returns the shared instance intended for the dynamic connection. */
    public static DatabaseUtils getDinamicInstance() {
        if (_dinamicInstance == null) {
            createDinamicInstance();
        }
        return _dinamicInstance;
    }

    private synchronized static void createStaticInstance() {
        if (_staticInstance == null) {
            _staticInstance = new DatabaseUtils();
        }
    }

    /** Returns the shared instance intended for the static connection. */
    public static DatabaseUtils getStaticInstance() {
        if (_staticInstance == null) {
            createStaticInstance();
        }
        return _staticInstance;
    }

    // Database handle; null until setDinamicDatabase is called.
    private Database dinamicDatabase;

    private DatabaseUtils() {
        dinamicDatabase = null;
    }

    /** @return true when a database has been attached via setDinamicDatabase. */
    public boolean checkDinamicDatabase() {
        return dinamicDatabase != null;
    }

    public void setDinamicDatabase(Database dinamicDatabase) {
        this.dinamicDatabase = dinamicDatabase;
    }

    /** Quietly closes a result set and its statement, logging any failure. */
    private void releaseResultAndStatement(ResultSet rs, Statement st) {
        try {
            if (rs != null) {
                rs.close();
            }
            if (st != null) {
                st.close();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /**
     * Authenticates an account by username/password.
     *
     * @return the account id on success, -1 on failure or database error.
     */
    public int auth(String username, String password) {
        if (!checkDinamicDatabase()) {
            return -1;
        }
        Connection dinamicConn = dinamicDatabase.getDinamicConn();
        if (dinamicConn == null) {
            return -1;
        }
        int accountId = -1;
        PreparedStatement preparedStatement = null;
        ResultSet rs = null;
        String sql = "SELECT id FROM accounts WHERE username=? and password=?";
        try {
            preparedStatement = dinamicConn.prepareStatement(sql);
            preparedStatement.setObject(1, username);
            preparedStatement.setObject(2, password);
            rs = preparedStatement.executeQuery();
            if (rs.next()) {
                accountId = Integer.parseInt(rs.getString("id"));
            }
        } catch (SQLException e) {
            e.printStackTrace();
            return -1;
        } finally {
            releaseResultAndStatement(rs, preparedStatement);
        }
        return accountId;
    }

    /**
     * Authenticates an admin account by username/password.
     *
     * @return the account level on success, -1 on failure or database error.
     */
    public int authAdmin(String username, String password) {
        if (!checkDinamicDatabase()) {
            return -1;
        }
        PreparedStatement preparedStatement = null;
        ResultSet rs = null;
        int level = -1;
        String sql = "SELECT level FROM accounts WHERE username=? and password=?";
        try {
            Connection dinamicConn = dinamicDatabase.getDinamicConn();
            if (dinamicConn == null) {
                return -1;
            }
            preparedStatement = dinamicConn.prepareStatement(sql);
            preparedStatement.setObject(1, username);
            preparedStatement.setObject(2, password);
            rs = preparedStatement.executeQuery();
            if (rs.next()) {
                // BUG FIX: the query selects "level"; reading column "id" here
                // raised an SQLException and made this method always return -1.
                level = Integer.parseInt(rs.getString("level"));
            }
        } catch (SQLException e) {
            e.printStackTrace();
            return -1;
        } finally {
            releaseResultAndStatement(rs, preparedStatement);
        }
        return level;
    }

    /**
     * Builds the character-list response for the given client.
     * NOTE(review): the character query is still commented out below, so this
     * currently only emits the terminating "chars_end" line.
     */
    public String getCharList(Client client) {
        if (!checkDinamicDatabase()) {
            return null;
        }
        long accountId = client.getAccountId();
        String charlist = "";
        int chars = 0;
        // try {
        // Statement stmt = dinamicDatabase.getDinamicConn().createStatement();
        //
        // ResultSet rs = stmt.executeQuery("SELECT * FROM `characters`,`slots` WHERE `characters`.`accountid`=" + accountId + " AND `characters`.`id`=`slots`.`charid` ORDER BY `slot` ASC");
        // while (rs.next()) {
        // int slot = rs.getInt("slot");
        // boolean alreadyLogged = false;
        // java.util.Map<SocketChannel, Client> clients = Server.getInstance().getWorld().getClients();
        // synchronized (clients) {
        // for (Client cl : clients.values()) {
        // if (cl.equals(client))
        // continue;
        // if (cl.getAccountId() == client.getAccountId()) {
        // Player player = cl.getPlayer();
        //
        // if (player != null && player.getSlot() == slot) {
        //
        // alreadyLogged = true;
        // }
        // }
        // }
        // }
        // if (alreadyLogged)
        // continue;
        //
        // Equipment eq = loadEquipment(new Equipment(null), rs.getInt("id"));
        //
        // charlist += "chars_exist " + slot + " "
        // + (client.getVersion() >= 2000 ? rs.getString("id") + " " : "") // nga client have this extra value in the packet
        // + rs.getString("name") + " "
        // + rs.getString("race") + " "
        // + rs.getString("sex") + " "
        // + rs.getString("hair") + " "
        // + rs.getString("level") + " "
        // + 1 + " " //hp
        // + 1 + " " //hp max
        // + 1 + " " //mana
        // + 1 + " " //mana max
        // + 1 + " " //stamina
        // + 1 + " " //stamina max
        // + 1 + " " //electricity
        // + 1 + " " //electricity max
        // + rs.getString("strength") + " "
        // + rs.getString("wisdom") + " "
        // + rs.getString("dexterity") + " "
        // + rs.getString("constitution") + " "
        // + rs.getString("leadership") + " "
        // + "0" + " " // unknown value
        // + eq.getTypeId(Slot.HELMET) + " "
        // + eq.getTypeId(Slot.CHEST) + " "
        // + eq.getTypeId(Slot.PANTS) + " "
        // + eq.getTypeId(Slot.SHOULDER) + " "
        // + eq.getTypeId(Slot.BOOTS) + " "
        // + eq.getTypeId(Slot.OFFHAND)
        // + " 0\n"; //unknown value
        //
        // //chars_exist 3 12341234 0 0 0 2 90 12 15 15 90 90 15 15 30 5 5 30 10 309 -1 -1 -1 -1 -1 1
        // // chars_exist [SlotNumber] [Name] [Race] [Sex] [HairStyle]
        // // [Level] [Vitality] [Stamina] [Magic] [Energy] [Vitality]
        // // [Stamina] [Magic] [Energy] [Strength] [Wisdom]
        // // [Dexterity] [Constitution] [Leadership] [HeadGear]
        // // [Chest] [Pants] [SoulderMount] [Feet] [Shield] 0
        // chars++;
        // }
        //
        // } catch (SQLException e) {
        // LoggerFactory.getLogger(this.getClass()).warn("Exception", e);
        // return null;
        // }
        LoggerFactory.getLogger(DatabaseUtils.class).info("found " + chars
                + " char(s) for Account(" + accountId + ")");
        charlist += "chars_end 0 " + accountId + "\n";
        return charlist;
    }
}
| |
package org.hisp.dhis.dataset;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonView;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlElementWrapper;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement;
import com.google.common.collect.Sets;
import org.hisp.dhis.common.BaseDimensionalItemObject;
import org.hisp.dhis.common.BaseIdentifiableObject;
import org.hisp.dhis.common.DimensionItemType;
import org.hisp.dhis.common.DxfNamespaces;
import org.hisp.dhis.common.IdentifiableObject;
import org.hisp.dhis.common.MergeMode;
import org.hisp.dhis.common.VersionedObject;
import org.hisp.dhis.common.adapter.JacksonPeriodTypeDeserializer;
import org.hisp.dhis.common.adapter.JacksonPeriodTypeSerializer;
import org.hisp.dhis.common.annotation.Scanned;
import org.hisp.dhis.common.view.DetailedView;
import org.hisp.dhis.common.view.ExportView;
import org.hisp.dhis.dataapproval.DataApprovalWorkflow;
import org.hisp.dhis.dataelement.CategoryOptionGroupSet;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategory;
import org.hisp.dhis.dataelement.DataElementCategoryCombo;
import org.hisp.dhis.dataelement.DataElementCategoryOption;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.dataelement.DataElementOperand;
import org.hisp.dhis.dataentryform.DataEntryForm;
import org.hisp.dhis.indicator.Indicator;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.schema.PropertyType;
import org.hisp.dhis.schema.annotation.Property;
import org.hisp.dhis.user.UserGroup;
import java.util.HashSet;
import java.util.Set;
/**
* This class is used for defining the standardized DataSets. A DataSet consists
* of a collection of DataElements.
*
* @author Kristian Nordal
*/
@JacksonXmlRootElement( localName = "dataSet", namespace = DxfNamespaces.DXF_2_0 )
public class DataSet
extends BaseDimensionalItemObject
implements VersionedObject
{
    /** Sentinel value for {@link #getExpiryDays()} meaning data entry never expires. */
    public static final int NO_EXPIRY = 0;

    /**
     * The PeriodType indicating the frequency that this DataSet should be used
     */
    private PeriodType periodType;

    /**
     * All DataElements associated with this DataSet.
     */
    @Scanned
    private Set<DataElement> dataElements = new HashSet<>();

    /**
     * Indicators associated with this data set. Indicators are used for view
     * and output purposes, such as calculated fields in forms and reports.
     */
    @Scanned
    private Set<Indicator> indicators = new HashSet<>();

    /**
     * The DataElementOperands for which data must be entered in order for the
     * DataSet to be considered as complete.
     */
    private Set<DataElementOperand> compulsoryDataElementOperands = new HashSet<>();

    /**
     * All Sources (organisation units) that register data with this DataSet.
     */
    @Scanned
    private Set<OrganisationUnit> sources = new HashSet<>();

    /**
     * The Sections associated with the DataSet.
     */
    private Set<Section> sections = new HashSet<>();

    /**
     * The CategoryCombo used for data attributes.
     */
    private DataElementCategoryCombo categoryCombo;

    /**
     * Property indicating if the dataset could be collected using mobile data
     * entry.
     */
    private boolean mobile;

    /**
     * Indicating custom data entry form, if any.
     */
    private DataEntryForm dataEntryForm;

    /**
     * Indicating version number; incremented through increaseVersion().
     */
    private int version;

    /**
     * How many days after period is over will this dataSet auto-lock
     */
    private int expiryDays;

    /**
     * Days after period end to qualify for timely data submission
     */
    private int timelyDays;

    /**
     * User group which will receive notifications when data set is marked
     * complete.
     */
    private UserGroup notificationRecipients;

    /**
     * Indicating whether the user completing this data set should be sent a
     * notification.
     */
    private boolean notifyCompletingUser;

    /**
     * The approval workflow (if any) for this data set. Non-null means data
     * must be approved (see isApproveData()).
     */
    private DataApprovalWorkflow workflow;

    // -------------------------------------------------------------------------
    // Form properties
    // -------------------------------------------------------------------------

    /**
     * Number of periods in the future to open for data capture, 0 means capture
     * not allowed for current period.
     */
    private int openFuturePeriods;

    /**
     * Property indicating that all fields for a data element must be filled.
     */
    private boolean fieldCombinationRequired;

    /**
     * Property indicating that all validation rules must pass before the form
     * can be completed.
     */
    private boolean validCompleteOnly;

    /**
     * Property indicating whether a comment is required for all fields in a form
     * which are not entered, including false for boolean values.
     */
    private boolean noValueRequiresComment;

    /**
     * Property indicating whether offline storage is enabled for this dataSet
     * or not
     */
    private boolean skipOffline;

    /**
     * Property indicating whether it should enable data elements decoration in forms.
     */
    private boolean dataElementDecoration;

    /**
     * Render default and section forms with tabs instead of multiple sections in one page
     */
    private boolean renderAsTabs;

    /**
     * Render multi-organisationUnit forms either with OU vertically or horizontally.
     */
    private boolean renderHorizontally;
// -------------------------------------------------------------------------
// Constructors
// -------------------------------------------------------------------------
    /** Default constructor; required by persistence/serialization frameworks. */
    public DataSet()
    {
    }

    /** @param name the name of the data set. */
    public DataSet( String name )
    {
        this.name = name;
    }

    /**
     * @param name the name of the data set.
     * @param periodType the data collection frequency.
     */
    public DataSet( String name, PeriodType periodType )
    {
        this( name );
        this.periodType = periodType;
    }

    /**
     * @param name the name of the data set.
     * @param shortName the short name of the data set.
     * @param periodType the data collection frequency.
     */
    public DataSet( String name, String shortName, PeriodType periodType )
    {
        this( name, periodType );
        this.shortName = shortName;
    }

    /**
     * @param name the name of the data set.
     * @param shortName the short name of the data set.
     * @param code the code of the data set.
     * @param periodType the data collection frequency.
     */
    public DataSet( String name, String shortName, String code, PeriodType periodType )
    {
        this( name, shortName, periodType );
        this.code = code;
    }
// -------------------------------------------------------------------------
// Logic
// -------------------------------------------------------------------------
public void addOrganisationUnit( OrganisationUnit organisationUnit )
{
sources.add( organisationUnit );
organisationUnit.getDataSets().add( this );
}
public boolean removeOrganisationUnit( OrganisationUnit organisationUnit )
{
sources.remove( organisationUnit );
return organisationUnit.getDataSets().remove( this );
}
public void removeAllOrganisationUnits()
{
for ( OrganisationUnit unit : sources )
{
unit.getDataSets().remove( this );
}
sources.clear();
}
public void updateOrganisationUnits( Set<OrganisationUnit> updates )
{
Set<OrganisationUnit> toRemove = Sets.difference( sources, updates );
Set<OrganisationUnit> toAdd = Sets.difference( updates, sources );
toRemove.stream().forEach( u -> u.getDataSets().remove( this ) );
toAdd.stream().forEach( u -> u.getDataSets().add( this ) );
sources.clear();
sources.addAll( updates );
}
public void addDataElement( DataElement dataElement )
{
dataElements.add( dataElement );
dataElement.getDataSets().add( this );
}
public boolean removeDataElement( DataElement dataElement )
{
dataElements.remove( dataElement );
return dataElement.getDataSets().remove( dataElement );
}
public void updateDataElements( Set<DataElement> updates )
{
Set<DataElement> toRemove = Sets.difference( dataElements, updates );
Set<DataElement> toAdd = Sets.difference( updates, dataElements );
toRemove.stream().forEach( d -> d.getDataSets().remove( this ) );
toAdd.stream().forEach( d -> d.getDataSets().add( this ) );
dataElements.clear();
dataElements.addAll( updates );
}
public void addIndicator( Indicator indicator )
{
indicators.add( indicator );
indicator.getDataSets().add( this );
}
public boolean removeIndicator( Indicator indicator )
{
indicators.remove( indicator );
return indicator.getDataSets().remove( this );
}
    /** Adds an operand required for this data set to be considered complete. */
    public void addCompulsoryDataElementOperand( DataElementOperand dataElementOperand )
    {
        // One-directional association; no inverse side to maintain.
        compulsoryDataElementOperands.add( dataElementOperand );
    }
    /** Removes an operand from the compulsory set. */
    public void removeCompulsoryDataElementOperand( DataElementOperand dataElementOperand )
    {
        compulsoryDataElementOperands.remove( dataElementOperand );
    }
    /** Indicates whether a custom entry form with actual content is attached. */
    public boolean hasDataEntryForm()
    {
        return dataEntryForm != null && dataEntryForm.hasForm();
    }
public boolean hasSections()
{
return sections != null && sections.size() > 0;
}
    /**
     * Indicates whether data should be approved for this data set, i.e. whether
     * this data set is part of a data approval workflow.
     */
    public boolean isApproveData()
    {
        // A non-null workflow is what marks the data set as requiring approval.
        return workflow != null;
    }
@JsonProperty
@JsonView( { DetailedView.class } )
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public FormType getFormType()
{
if ( hasDataEntryForm() )
{
return FormType.CUSTOM;
}
if ( hasSections() )
{
return FormType.SECTION;
}
return FormType.DEFAULT;
}
public Set<DataElement> getDataElementsInSections()
{
Set<DataElement> dataElements = new HashSet<>();
for ( Section section : sections )
{
dataElements.addAll( section.getDataElements() );
}
return dataElements;
}
public Set<DataElementCategoryOptionCombo> getDataElementOptionCombos()
{
Set<DataElementCategoryOptionCombo> optionCombos = new HashSet<>();
for ( DataElement element : dataElements )
{
if ( element.hasCategoryCombo() )
{
optionCombos.addAll( element.getCategoryCombo().getOptionCombos() );
}
}
return optionCombos;
}
    @Override
    public int increaseVersion()
    {
        // Pre-increment so the returned value is the new version number.
        return ++version;
    }
/**
* Returns a set of category option group sets which are linked to this data
* set through its category combination.
*/
public Set<CategoryOptionGroupSet> getCategoryOptionGroupSets()
{
Set<CategoryOptionGroupSet> groupSets = new HashSet<>();
if ( categoryCombo != null )
{
for ( DataElementCategory category : categoryCombo.getCategories() )
{
for ( DataElementCategoryOption categoryOption : category.getCategoryOptions() )
{
groupSets.addAll( categoryOption.getGroupSets() );
}
}
}
return groupSets;
}
    /**
     * Indicates whether this data set has a category combination which is different
     * from the default category combination (compared by name).
     */
    public boolean hasCategoryCombo()
    {
        return categoryCombo != null && !DataElementCategoryCombo.DEFAULT_CATEGORY_COMBO_NAME.equals( categoryCombo.getName() );
    }
// -------------------------------------------------------------------------
// DimensionalItemObject
// -------------------------------------------------------------------------
    @Override
    public DimensionItemType getDimensionItemType()
    {
        // As a dimensional item, a data set represents its reporting rate.
        return DimensionItemType.REPORTING_RATE;
    }
// -------------------------------------------------------------------------
// Getters and setters
// -------------------------------------------------------------------------
    @Override
    public boolean haveUniqueNames()
    {
        // Data set names are not required to be unique.
        return false;
    }
    // -- periodType: data collection frequency --------------------------------

    @JsonProperty
    @JsonSerialize( using = JacksonPeriodTypeSerializer.class )
    @JsonDeserialize( using = JacksonPeriodTypeDeserializer.class )
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    @Property( PropertyType.TEXT )
    public PeriodType getPeriodType()
    {
        return periodType;
    }

    public void setPeriodType( PeriodType periodType )
    {
        this.periodType = periodType;
    }

    // -- dataEntryForm: optional custom entry form ----------------------------

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public DataEntryForm getDataEntryForm()
    {
        return dataEntryForm;
    }

    public void setDataEntryForm( DataEntryForm dataEntryForm )
    {
        this.dataEntryForm = dataEntryForm;
    }

    // -- dataElements: members of this data set -------------------------------

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlElementWrapper( localName = "dataElements", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "dataElement", namespace = DxfNamespaces.DXF_2_0 )
    public Set<DataElement> getDataElements()
    {
        return dataElements;
    }

    public void setDataElements( Set<DataElement> dataElements )
    {
        this.dataElements = dataElements;
    }

    // -- indicators: view/output indicators associated with the set -----------

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlElementWrapper( localName = "indicators", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "indicator", namespace = DxfNamespaces.DXF_2_0 )
    public Set<Indicator> getIndicators()
    {
        return indicators;
    }

    public void setIndicators( Set<Indicator> indicators )
    {
        this.indicators = indicators;
    }
    // -- compulsoryDataElementOperands: required for completeness -------------

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlElementWrapper( localName = "compulsoryDataElementOperands", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "compulsoryDataElementOperand", namespace = DxfNamespaces.DXF_2_0 )
    public Set<DataElementOperand> getCompulsoryDataElementOperands()
    {
        return compulsoryDataElementOperands;
    }

    public void setCompulsoryDataElementOperands( Set<DataElementOperand> compulsoryDataElementOperands )
    {
        this.compulsoryDataElementOperands = compulsoryDataElementOperands;
    }

    // -- sources: organisation units registering data (serialized as
    //    "organisationUnits") --------------------------------------------------

    @JsonProperty( value = "organisationUnits" )
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlElementWrapper( localName = "organisationUnits", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "organisationUnit", namespace = DxfNamespaces.DXF_2_0 )
    public Set<OrganisationUnit> getSources()
    {
        return sources;
    }

    public void setSources( Set<OrganisationUnit> sources )
    {
        this.sources = sources;
    }

    // -- sections -------------------------------------------------------------

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JsonView( { DetailedView.class } )
    @JacksonXmlElementWrapper( localName = "sections", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "section", namespace = DxfNamespaces.DXF_2_0 )
    public Set<Section> getSections()
    {
        return sections;
    }

    public void setSections( Set<Section> sections )
    {
        this.sections = sections;
    }

    // -- categoryCombo: category combination for data attributes --------------

    @JsonProperty
    @JsonSerialize( as = BaseIdentifiableObject.class )
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public DataElementCategoryCombo getCategoryCombo()
    {
        return categoryCombo;
    }

    public void setCategoryCombo( DataElementCategoryCombo categoryCombo )
    {
        this.categoryCombo = categoryCombo;
    }
    // -- mobile: collectable via mobile data entry ----------------------------

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public boolean isMobile()
    {
        return mobile;
    }

    public void setMobile( boolean mobile )
    {
        this.mobile = mobile;
    }

    // -- version (see VersionedObject) ----------------------------------------

    @Override
    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public int getVersion()
    {
        return version;
    }

    @Override
    public void setVersion( int version )
    {
        this.version = version;
    }

    // -- expiryDays: days after period end before auto-lock -------------------

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public int getExpiryDays()
    {
        return expiryDays;
    }

    public void setExpiryDays( int expiryDays )
    {
        this.expiryDays = expiryDays;
    }

    // -- timelyDays: days after period end for timely submission --------------

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public int getTimelyDays()
    {
        return timelyDays;
    }

    public void setTimelyDays( int timelyDays )
    {
        this.timelyDays = timelyDays;
    }

    // -- notificationRecipients: notified on completion -----------------------

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public UserGroup getNotificationRecipients()
    {
        return notificationRecipients;
    }

    public void setNotificationRecipients( UserGroup notificationRecipients )
    {
        this.notificationRecipients = notificationRecipients;
    }
    // -- notifyCompletingUser -------------------------------------------------

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public boolean isNotifyCompletingUser()
    {
        return notifyCompletingUser;
    }

    public void setNotifyCompletingUser( boolean notifyCompletingUser )
    {
        this.notifyCompletingUser = notifyCompletingUser;
    }

    // -- workflow: data approval workflow, null when no approval required -----

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public DataApprovalWorkflow getWorkflow()
    {
        return workflow;
    }

    public void setWorkflow( DataApprovalWorkflow workflow )
    {
        this.workflow = workflow;
    }

    // -- openFuturePeriods: future periods open for capture -------------------

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public int getOpenFuturePeriods()
    {
        return openFuturePeriods;
    }

    public void setOpenFuturePeriods( int openFuturePeriods )
    {
        this.openFuturePeriods = openFuturePeriods;
    }

    // -- fieldCombinationRequired ---------------------------------------------

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public boolean isFieldCombinationRequired()
    {
        return fieldCombinationRequired;
    }

    public void setFieldCombinationRequired( boolean fieldCombinationRequired )
    {
        this.fieldCombinationRequired = fieldCombinationRequired;
    }

    // -- validCompleteOnly: validation must pass before completion ------------

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public boolean isValidCompleteOnly()
    {
        return validCompleteOnly;
    }

    public void setValidCompleteOnly( boolean validCompleteOnly )
    {
        this.validCompleteOnly = validCompleteOnly;
    }
@JsonProperty
@JsonView( { DetailedView.class, ExportView.class } )
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public boolean isNoValueRequiresComment()
{
return noValueRequiresComment;
}
public void setNoValueRequiresComment( boolean noValueRequiresComment )
{
this.noValueRequiresComment = noValueRequiresComment;
}
@JsonProperty
@JsonView( { DetailedView.class, ExportView.class } )
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public boolean isSkipOffline()
{
return skipOffline;
}
public void setSkipOffline( boolean skipOffline )
{
this.skipOffline = skipOffline;
}
@JsonProperty
@JsonView( { DetailedView.class, ExportView.class } )
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public boolean isRenderAsTabs()
{
return renderAsTabs;
}
public void setRenderAsTabs( boolean renderAsTabs )
{
this.renderAsTabs = renderAsTabs;
}
@JsonProperty
@JsonView( { DetailedView.class, ExportView.class } )
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public boolean isRenderHorizontally()
{
return renderHorizontally;
}
public void setRenderHorizontally( boolean renderHorizontally )
{
this.renderHorizontally = renderHorizontally;
}
@JsonProperty
@JsonView( { DetailedView.class, ExportView.class } )
@JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
public boolean isDataElementDecoration()
{
return dataElementDecoration;
}
public void setDataElementDecoration( boolean dataElementDecoration )
{
this.dataElementDecoration = dataElementDecoration;
}
@Override
public void mergeWith( IdentifiableObject other, MergeMode mergeMode )
{
// Purpose: copy state from `other` into this DataSet. Scalar properties are
// always overwritten; nullable reference properties honour the merge mode
// (REPLACE takes the other value unconditionally, MERGE keeps the current
// value when the incoming one is null); collection associations are cleared
// and rebuilt from `other` via the add* helpers.
super.mergeWith( other, mergeMode );
// NOTE(review): this tests that *this* is an instance of other's runtime
// class — the reverse of the usual `getClass().isInstance(other)` guard.
// If `other` is a non-DataSet superclass instance the check can pass while
// the (DataSet) cast below throws ClassCastException — confirm intended.
if ( other.getClass().isInstance( this ) )
{
DataSet dataSet = (DataSet) other;
// Scalars: copied unconditionally, regardless of merge mode.
dataElementDecoration = dataSet.isDataElementDecoration();
skipOffline = dataSet.isSkipOffline();
renderAsTabs = dataSet.isRenderAsTabs();
renderHorizontally = dataSet.isRenderHorizontally();
expiryDays = dataSet.getExpiryDays();
openFuturePeriods = dataSet.getOpenFuturePeriods();
fieldCombinationRequired = dataSet.isFieldCombinationRequired();
mobile = dataSet.isMobile();
validCompleteOnly = dataSet.isValidCompleteOnly();
version = dataSet.getVersion();
if ( mergeMode.isReplace() )
{
periodType = dataSet.getPeriodType();
dataEntryForm = dataSet.getDataEntryForm();
notificationRecipients = dataSet.getNotificationRecipients();
}
else if ( mergeMode.isMerge() )
{
// MERGE: only take the incoming value when it is non-null.
periodType = dataSet.getPeriodType() == null ? periodType : dataSet.getPeriodType();
dataEntryForm = dataSet.getDataEntryForm() == null ? dataEntryForm : dataSet.getDataEntryForm();
notificationRecipients = dataSet.getNotificationRecipients() == null ? notificationRecipients : dataSet.getNotificationRecipients();
}
// Collections: clear then re-add through the helper methods — presumably so
// both sides of each bidirectional association stay consistent; confirm the
// add* helpers maintain the inverse side.
dataElements.clear();
dataSet.getDataElements().forEach( this::addDataElement );
indicators.clear();
dataSet.getIndicators().forEach( this::addIndicator );
compulsoryDataElementOperands.clear();
dataSet.getCompulsoryDataElementOperands().forEach( this::addCompulsoryDataElementOperand );
removeAllOrganisationUnits();
dataSet.getSources().forEach( this::addOrganisationUnit );
}
}
}
| |
/*
Copyright 2006 Jerry Huxtable
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.jhlabs.image;
import java.awt.Color;
import java.util.Random;
/**
* Some more useful math functions for image processing.
* These are becoming obsolete as we move to Java2D. Use MiscComposite instead.
*/
/**
 * Some more useful math functions for image processing.
 * These are becoming obsolete as we move to Java2D. Use MiscComposite instead.
 *
 * <p>All methods are static and stateless except {@link #combinePixels(int, int, int)}
 * with {@code DISSOLVE}, which draws from a shared random generator. The class
 * is now safe for concurrent use: the former shared static HSB scratch arrays
 * (old FIXME: not thread safe) have been replaced with method-local buffers.</p>
 */
public class PixelUtils {

    // Operation codes accepted by combinePixels().
    public final static int REPLACE = 0;
    public final static int NORMAL = 1;
    public final static int MIN = 2;
    public final static int MAX = 3;
    public final static int ADD = 4;
    public final static int SUBTRACT = 5;
    public final static int DIFFERENCE = 6;
    public final static int MULTIPLY = 7;
    public final static int HUE = 8;
    public final static int SATURATION = 9;
    public final static int VALUE = 10;
    public final static int COLOR = 11;
    public final static int SCREEN = 12;
    public final static int AVERAGE = 13;
    public final static int OVERLAY = 14;
    public final static int CLEAR = 15;
    public final static int EXCHANGE = 16;
    public final static int DISSOLVE = 17;
    public final static int DST_IN = 18;
    public final static int ALPHA = 19;
    public final static int ALPHA_TO_GRAY = 20;

    // Shared RNG for DISSOLVE; java.util.Random is itself thread-safe.
    private static Random randomGenerator = new Random();

    /**
     * Clamp a value to the range 0..255.
     *
     * @param c the value to clamp
     * @return {@code c} limited to [0, 255]
     */
    public static int clamp(int c) {
        if (c < 0)
            return 0;
        if (c > 255)
            return 255;
        return c;
    }

    /**
     * Linearly interpolate between two channel values and clamp the result to 0..255.
     *
     * @param v1 the value at f == 0
     * @param v2 the value at f == 1
     * @param f  the interpolation fraction
     * @return the clamped interpolated value
     */
    public static int interpolate(int v1, int v2, float f) {
        return clamp((int)(v1+f*(v2-v1)));
    }

    /**
     * Return the brightness of an RGB pixel as the integer mean of its
     * red, green and blue channels (alpha is ignored).
     *
     * @param rgb a packed ARGB/RGB pixel
     * @return (r + g + b) / 3 in the range 0..255
     */
    public static int brightness(int rgb) {
        int r = (rgb >> 16) & 0xff;
        int g = (rgb >> 8) & 0xff;
        int b = rgb & 0xff;
        return (r+g+b)/3;
    }

    /**
     * Return true if each of the R, G and B channels of the two pixels differ
     * by no more than {@code tolerance} (alpha is ignored).
     *
     * @param rgb1      first packed pixel
     * @param rgb2      second packed pixel
     * @param tolerance maximum allowed per-channel difference
     * @return whether the colors are within tolerance of each other
     */
    public static boolean nearColors(int rgb1, int rgb2, int tolerance) {
        int r1 = (rgb1 >> 16) & 0xff;
        int g1 = (rgb1 >> 8) & 0xff;
        int b1 = rgb1 & 0xff;
        int r2 = (rgb2 >> 16) & 0xff;
        int g2 = (rgb2 >> 8) & 0xff;
        int b2 = rgb2 & 0xff;
        return Math.abs(r1-r2) <= tolerance && Math.abs(g1-g2) <= tolerance && Math.abs(b1-b2) <= tolerance;
    }

    /**
     * Paint rgb1 onto rgb2 with full extra alpha.
     *
     * @param rgb1 the source (top) pixel
     * @param rgb2 the destination (bottom) pixel
     * @param op   one of the operation constants of this class
     * @return the combined packed ARGB pixel
     */
    public static int combinePixels(int rgb1, int rgb2, int op) {
        return combinePixels(rgb1, rgb2, op, 0xff);
    }

    /**
     * Paint rgb1 onto rgb2, restricting the operation to the channels selected
     * by {@code channelMask}; masked-out destination channels pass through.
     *
     * @param channelMask bit mask of the channels to combine (e.g. 0x00ff0000 for red)
     */
    public static int combinePixels(int rgb1, int rgb2, int op, int extraAlpha, int channelMask) {
        return (rgb2 & ~channelMask) | combinePixels(rgb1 & channelMask, rgb2, op, extraAlpha);
    }

    /**
     * Paint rgb1 onto rgb2 using the given operation and an extra alpha factor.
     *
     * <p>For most operations the modified source channels are finally
     * alpha-composited over the destination; DST_IN, ALPHA and ALPHA_TO_GRAY
     * return directly without that final blend, as does REPLACE.</p>
     *
     * @param rgb1       the source (top) packed ARGB pixel
     * @param rgb2       the destination (bottom) packed ARGB pixel
     * @param op         one of the operation constants of this class
     * @param extraAlpha additional alpha (0..255) applied to the source
     * @return the combined packed ARGB pixel
     */
    public static int combinePixels(int rgb1, int rgb2, int op, int extraAlpha) {
        if (op == REPLACE)
            return rgb1;
        int a1 = (rgb1 >> 24) & 0xff;
        int r1 = (rgb1 >> 16) & 0xff;
        int g1 = (rgb1 >> 8) & 0xff;
        int b1 = rgb1 & 0xff;
        int a2 = (rgb2 >> 24) & 0xff;
        int r2 = (rgb2 >> 16) & 0xff;
        int g2 = (rgb2 >> 8) & 0xff;
        int b2 = rgb2 & 0xff;
        switch (op) {
        case NORMAL:
            break;
        case MIN:
            r1 = Math.min(r1, r2);
            g1 = Math.min(g1, g2);
            b1 = Math.min(b1, b2);
            break;
        case MAX:
            r1 = Math.max(r1, r2);
            g1 = Math.max(g1, g2);
            b1 = Math.max(b1, b2);
            break;
        case ADD:
            r1 = clamp(r1+r2);
            g1 = clamp(g1+g2);
            b1 = clamp(b1+b2);
            break;
        case SUBTRACT:
            // Note: reverse subtract (destination minus source), kept as-is.
            r1 = clamp(r2-r1);
            g1 = clamp(g2-g1);
            b1 = clamp(b2-b1);
            break;
        case DIFFERENCE:
            r1 = clamp(Math.abs(r1-r2));
            g1 = clamp(Math.abs(g1-g2));
            b1 = clamp(Math.abs(b1-b2));
            break;
        case MULTIPLY:
            r1 = clamp(r1*r2/255);
            g1 = clamp(g1*g2/255);
            b1 = clamp(b1*b2/255);
            break;
        case DISSOLVE:
            // Randomly take the destination with probability ~a1/255.
            if ((randomGenerator.nextInt() & 0xff) <= a1) {
                r1 = r2;
                g1 = g2;
                b1 = b2;
            }
            break;
        case AVERAGE:
            r1 = (r1+r2)/2;
            g1 = (g1+g2)/2;
            b1 = (b1+b2)/2;
            break;
        case HUE:
        case SATURATION:
        case VALUE:
        case COLOR: {
            // Method-local scratch buffers: the previous shared static arrays
            // made this method non-thread-safe (old FIXME).
            float[] hsb1 = new float[3];
            float[] hsb2 = new float[3];
            Color.RGBtoHSB(r1, g1, b1, hsb1);
            Color.RGBtoHSB(r2, g2, b2, hsb2);
            switch (op) {
            case HUE:
                hsb2[0] = hsb1[0];
                break;
            case SATURATION:
                hsb2[1] = hsb1[1];
                break;
            case VALUE:
                hsb2[2] = hsb1[2];
                break;
            case COLOR:
                hsb2[0] = hsb1[0];
                hsb2[1] = hsb1[1];
                break;
            }
            rgb1 = Color.HSBtoRGB(hsb2[0], hsb2[1], hsb2[2]);
            r1 = (rgb1 >> 16) & 0xff;
            g1 = (rgb1 >> 8) & 0xff;
            b1 = rgb1 & 0xff;
            break;
        }
        case SCREEN:
            r1 = 255 - ((255 - r1) * (255 - r2)) / 255;
            g1 = 255 - ((255 - g1) * (255 - g2)) / 255;
            b1 = 255 - ((255 - b1) * (255 - b2)) / 255;
            break;
        case OVERLAY:
            int m, s;
            s = 255 - ((255 - r1) * (255 - r2)) / 255;
            m = r1 * r2 / 255;
            r1 = (s * r1 + m * (255 - r1)) / 255;
            s = 255 - ((255 - g1) * (255 - g2)) / 255;
            m = g1 * g2 / 255;
            g1 = (s * g1 + m * (255 - g1)) / 255;
            s = 255 - ((255 - b1) * (255 - b2)) / 255;
            m = b1 * b2 / 255;
            b1 = (s * b1 + m * (255 - b1)) / 255;
            break;
        case CLEAR:
            // Note: "clear" here means white (0xff per channel), kept as-is.
            r1 = g1 = b1 = 0xff;
            break;
        case DST_IN:
            r1 = clamp((r2*a1)/255);
            g1 = clamp((g2*a1)/255);
            b1 = clamp((b2*a1)/255);
            a1 = clamp((a2*a1)/255);
            return (a1 << 24) | (r1 << 16) | (g1 << 8) | b1;
        case ALPHA:
            a1 = a1*a2/255;
            return (a1 << 24) | (r2 << 16) | (g2 << 8) | b2;
        case ALPHA_TO_GRAY:
            int na = 255-a1;
            return (a1 << 24) | (na << 16) | (na << 8) | na;
        }
        // Final source-over composite when either alpha is not fully opaque.
        if (extraAlpha != 0xff || a1 != 0xff) {
            a1 = a1*extraAlpha/255;
            int a3 = (255-a1)*a2/255;
            r1 = clamp((r1*a1+r2*a3)/255);
            g1 = clamp((g1*a1+g2*a3)/255);
            b1 = clamp((b1*a1+b2*a3)/255);
            a1 = clamp(a1+a3);
        }
        return (a1 << 24) | (r1 << 16) | (g1 << 8) | b1;
    }
}
| |
/**
* The MIT License
* Copyright (c) 2014 Benoit Lacelle
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package blasd.apex.shared.logging;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.collect.ImmutableMap;
import blasd.apex.core.logging.ApexLogHelper;
/**
 * Unit tests for {@code ApexLogHelper}: lazy toString wrappers, "nice"
 * percentage/rate/time/memory formatting, collection and string limiting,
 * and new-line removal/escaping.
 */
public class TestApexLogHelper {
@Test
public void lazyToString() {
// Not the String
Assert.assertNotEquals("Youpi", ApexLogHelper.lazyToString(() -> "Youpi"));
// But same .toString
Assert.assertEquals("Youpi", ApexLogHelper.lazyToString(() -> "Youpi").toString());
}
// NOTE(review): exact duplicate of lazyToString() above; candidate for removal.
@Test
public void testLazyToString() {
// The lazyToString should not be a String
Assert.assertNotEquals("Youpi", ApexLogHelper.lazyToString(() -> "Youpi"));
Assert.assertEquals("Youpi", ApexLogHelper.lazyToString(() -> "Youpi").toString());
}
@Test
public void getPercentage() {
Assert.assertEquals("10%", ApexLogHelper.getNicePercentage(100, 1000).toString());
}
// Division by zero renders as a placeholder rather than throwing.
@Test
public void getPercentageDivideBy0() {
Assert.assertEquals("-%", ApexLogHelper.getNicePercentage(100, 0).toString());
}
@Test
public void getSmallPercentage() {
Assert.assertEquals("0.3%", ApexLogHelper.getNicePercentage(3, 1000).toString());
}
@Test
public void getVerySmallPercentage() {
Assert.assertEquals("0.03%", ApexLogHelper.getNicePercentage(3, 10000).toString());
}
// Progress above the max is allowed and rendered as >100%.
@Test
public void getProgressAboveMax() {
Assert.assertEquals("1000%", ApexLogHelper.getNicePercentage(1000, 100).toString());
}
// --- getNiceRate: the unit is rescaled so the rendered rate stays readable ---
@Test
public void testBigTimeLowRate() {
Assert.assertEquals("1#/days", ApexLogHelper.getNiceRate(10, 10, TimeUnit.DAYS).toString());
}
@Test
public void testBigTimeVeryLowRate() {
Assert.assertEquals("10#/sec", ApexLogHelper.getNiceRate(1, 100, TimeUnit.MILLISECONDS).toString());
}
@Test
public void testBigTimeVeryLowRate1() {
Assert.assertEquals("30#/min", ApexLogHelper.getNiceRate(5, 10 * 1000, TimeUnit.MILLISECONDS).toString());
}
@Test
public void testBigTimeHIghRate() {
Assert.assertEquals("2#/ms", ApexLogHelper.getNiceRate(Integer.MAX_VALUE, 10, TimeUnit.DAYS).toString());
}
@Test
public void testLowTimeLowRate() {
Assert.assertEquals("1#/ms", ApexLogHelper.getNiceRate(10, 10, TimeUnit.MILLISECONDS).toString());
}
@Test
public void testLowTimeHighRate() {
Assert.assertEquals("214#/ns",
ApexLogHelper.getNiceRate(Integer.MAX_VALUE, 10, TimeUnit.MILLISECONDS).toString());
}
@Test
public void testRightUnderRatePerSecond() {
Assert.assertEquals("999#/sec", ApexLogHelper.getNiceRate(999, 1000, TimeUnit.MILLISECONDS).toString());
}
// Zero elapsed time falls back to printing the raw "0<unit>" denominator.
@Test
public void testZeroTime() {
Assert.assertEquals("999#/0SECONDS", ApexLogHelper.getNiceRate(999, 0, TimeUnit.SECONDS).toString());
}
// Large percentages drop the decimal part.
@Test
public void testPercentageNoDecimals() {
Assert.assertEquals("100370%", ApexLogHelper.getNicePercentage(123456, 123).toString());
}
@Test
public void testPercentage() {
Assert.assertEquals("100370%", ApexLogHelper.getNicePercentage(123456, 123).toString());
Assert.assertEquals("0.09%", ApexLogHelper.getNicePercentage(123, 123456).toString());
}
@Test
public void testPercentage2() {
Assert.assertEquals("9.8%", ApexLogHelper.getNicePercentage(98, 1000).toString());
}
// --- getNiceTime: renders a duration with at most two adjacent units ---
@Test
public void testGetNiceTimeMillis() {
Assert.assertEquals("912ms", ApexLogHelper.getNiceTime(912).toString());
}
@Test
public void testGetNiceTimeSecondsAndMillis() {
Assert.assertEquals("9sec 600ms", ApexLogHelper.getNiceTime(9600).toString());
}
@Test
public void testGetNiceTimeSecondsAndMillis_NoHundredsInMillis() {
Assert.assertEquals("9sec 60ms", ApexLogHelper.getNiceTime(9060).toString());
}
@Test
public void testGetNiceTimeMinAndSeconds() {
Assert.assertEquals("2min 11sec", ApexLogHelper.getNiceTime(131, TimeUnit.SECONDS).toString());
}
// An exact multiple of the larger unit drops the smaller unit entirely.
@Test
public void testGetNiceTimeRoundMinutes() {
Assert.assertEquals("2min", ApexLogHelper.getNiceTime(120, TimeUnit.SECONDS).toString());
}
@Test
public void testGetNiceTimeHoursAndMinutes() {
Assert.assertEquals("2hours 11min", ApexLogHelper.getNiceTime(131, TimeUnit.MINUTES).toString());
}
@Test
public void testGetNiceDays() {
Assert.assertEquals("5days", ApexLogHelper.getNiceTime(5, TimeUnit.DAYS).toString());
}
@Test
public void testGetNiceDaysAndHours() {
Assert.assertEquals("4days 4hours", ApexLogHelper.getNiceTime(100, TimeUnit.HOURS).toString());
}
@Test
public void testGetNiceTimeFromNanos() {
Assert.assertEquals("1sec",
ApexLogHelper.getNiceTime(TimeUnit.SECONDS.toNanos(1), TimeUnit.NANOSECONDS).toString());
}
// --- getToStringWithLimit: truncates collections, reporting the overflow ---
@Test
public void testCollectionLimit_under() {
Assert.assertEquals("[0, 1]", ApexLogHelper.getToStringWithLimit(Arrays.asList(0, 1), 3).toString());
}
@Test
public void testCollectionLimit_same() {
Assert.assertEquals("[0, 1, 2]", ApexLogHelper.getToStringWithLimit(Arrays.asList(0, 1, 2), 3).toString());
}
@Test
public void testCollectionLimit_above() {
Assert.assertEquals("[0, 1, (3 more elements)]",
ApexLogHelper.getToStringWithLimit(Arrays.asList(0, 1, 2, 3, 4), 2).toString());
}
// --- getFirstChars: truncates long strings, reporting the overflow ---
@Test
public void testLimitChars() {
Assert.assertEquals("'12345...(4 more chars)'", ApexLogHelper.getFirstChars("123456789", 5).toString());
}
@Test
public void testLimitChars_underlimit() {
Assert.assertEquals("123456789", ApexLogHelper.getFirstChars("123456789", 15).toString());
}
@Test
public void testLimitChars_null() {
Assert.assertEquals("null", ApexLogHelper.getFirstChars(null, 5).toString());
}
@Test
public void testRemoveNewLines() {
Assert.assertEquals("a b", ApexLogHelper.removeNewLines("a\rb").toString());
Assert.assertEquals("a b", ApexLogHelper.removeNewLines("a\nb").toString());
Assert.assertEquals("a b", ApexLogHelper.removeNewLines("a\r\nb").toString());
// \n\r leads to 2 whitespaces
Assert.assertEquals("a  b", ApexLogHelper.removeNewLines("a\n\rb").toString());
Assert.assertEquals(" a b c ", ApexLogHelper.removeNewLines("\na\rb\r\nc\r").toString());
}
@Test
public void testEscapeNewLines() {
Assert.assertEquals("a\\rb", ApexLogHelper.escapeNewLines("a\rb").toString());
Assert.assertEquals("a\\nb", ApexLogHelper.escapeNewLines("a\nb").toString());
Assert.assertEquals("a\\r\\nb", ApexLogHelper.escapeNewLines("a\r\nb").toString());
}
@Test
public void testObjectAndClass() {
Assert.assertEquals("{k=v(java.lang.String), k2=2(java.lang.Long)}",
ApexLogHelper.getObjectAndClass(ImmutableMap.of("k", "v", "k2", 2L)).toString());
}
@Test
public void testObjectAndClass_recursive() {
Map<Object, Object> map = new LinkedHashMap<>();
Assert.assertEquals("{}", ApexLogHelper.getObjectAndClass(map).toString());
// Add itself as value
map.put("k", map);
// Legitimate use-case as handled by AbstractMap.toString()
Assert.assertEquals("{k=(this Map)}", map.toString());
Assert.assertEquals("{k=(this Map)}", ApexLogHelper.getObjectAndClass(map).toString());
// Add another value
map.put("k2", "v2");
Assert.assertEquals("{k=(this Map), k2=v2(java.lang.String)}", ApexLogHelper.getObjectAndClass(map).toString());
}
// Memory sizes use successive 1024-based units with rounded-down integers.
@Test
public void testGetNiceMemory() {
Assert.assertEquals("789B", ApexLogHelper.getNiceMemory(789L).toString());
Assert.assertEquals("607KB", ApexLogHelper.getNiceMemory(789L * 789).toString());
Assert.assertEquals("468MB", ApexLogHelper.getNiceMemory(789L * 789 * 789).toString());
Assert.assertEquals("360GB", ApexLogHelper.getNiceMemory(789L * 789 * 789 * 789).toString());
Assert.assertEquals("278TB", ApexLogHelper.getNiceMemory(789L * 789 * 789 * 789 * 789).toString());
Assert.assertEquals("214PB", ApexLogHelper.getNiceMemory(789L * 789 * 789 * 789 * 789 * 789).toString());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gemstone.gemfire.internal.cache;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import com.gemstone.gemfire.cache.AttributesFactory;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.CacheException;
import com.gemstone.gemfire.cache.DataPolicy;
import com.gemstone.gemfire.cache.PartitionAttributes;
import com.gemstone.gemfire.cache.PartitionAttributesFactory;
import com.gemstone.gemfire.cache.PartitionedRegionStorageException;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.RegionAttributes;
import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.internal.cache.control.InternalResourceManager;
import com.gemstone.gemfire.internal.cache.control.InternalResourceManager.ResourceObserver;
import com.gemstone.gemfire.internal.cache.control.InternalResourceManager.ResourceObserverAdapter;
import dunit.AsyncInvocation;
import dunit.DistributedTestCase;
import dunit.Host;
import dunit.SerializableCallable;
import dunit.SerializableRunnable;
import dunit.VM;
/**
* @author tapshank, Created on Jan 17, 2006
*
*/
public class PartitionedRegionHADUnitTest extends PartitionedRegionDUnitTestCase
{
//////constructor //////////
public PartitionedRegionHADUnitTest(String name) {
super(name);
}//end of constructor
public static final String PR_PREFIX = "PR";
Properties props = new Properties();
volatile static int regionCnt = 0;
final static int MAX_REGIONS = 1;
final int totalNumBuckets = 5;
/**
* Test to ensure that we have proper bucket failover, with no data loss, in the face
* of sequential cache.close() events.
* @throws Exception
*/
public void testBucketFailOverDuringCacheClose() throws Exception {
final String regionName = getUniqueName();
final Boolean value = new Boolean(true);
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
CacheSerializableRunnable createPR = new CacheSerializableRunnable(
"createRegion") {
public void run2() throws CacheException
{
Cache cache = getCache();
final CountDownLatch rebalancingFinished = new CountDownLatch(1);
InternalResourceManager.setResourceObserver(new ResourceObserverAdapter(){
@Override
public void rebalancingOrRecoveryFinished(Region region) {
rebalancingFinished.countDown();
}
});
try {
Region partitionedregion = cache.createRegion(regionName,
createRegionAttributesForPR(1, 20));
if(!rebalancingFinished.await(60000, TimeUnit.MILLISECONDS)) {
fail("Redundancy recovery did not happen within 60 seconds");
}
assertNotNull(partitionedregion);
} catch (InterruptedException e) {
fail("interrupted",e);
} finally {
InternalResourceManager.setResourceObserver(null);
}
}
};
vm2.invoke(createPR);
vm3.invoke(createPR);
vm3.invoke(
new CacheSerializableRunnable(
"createPRBuckets") {
public void run2() throws CacheException
{
Cache cache = getCache();
PartitionedRegion pr = (PartitionedRegion) cache.getRegion(regionName);
assertTrue(pr.isEmpty());
Integer k;
// Create keys such that all buckets are created, Integer works well
// assuming buckets are allocated on the mod of the key hashCode, x 2 just to be safe
final int numEntries=pr.getTotalNumberOfBuckets()*2;
for(int i=numEntries; i>=0; --i) {
k = new Integer(i);
pr.put(k, value);
}
assertEquals(numEntries+1,pr.size());
assertEquals(pr.getRegionAdvisor().getBucketSet().size(), pr.getTotalNumberOfBuckets());
}
}
);
CacheSerializableRunnable existsEntryCheck = new CacheSerializableRunnable(
"PRExistsEntryCheck") {
public void run2() throws CacheException
{
Cache cache = getCache();
PartitionedRegion pr = (PartitionedRegion) cache.getRegion(regionName);
Integer k;
for(int i=pr.getTotalNumberOfBuckets()*2; i>=0; --i) {
k=new Integer(i);
assertTrue("containsKey for key="+k, pr.containsKey(k));
assertEquals("get for key="+k, value, pr.get(k));
}
}
};
vm3.invoke(existsEntryCheck);
vm2.invoke(existsEntryCheck);
CacheSerializableRunnable closeCache = new CacheSerializableRunnable(
"PRCloseCache") {
public void run2() throws CacheException
{
Cache cache = getCache();
cache.close();
}
};
// origin VM down!
vm2.invoke(closeCache);
// origin down, but no data loss
vm3.invoke(existsEntryCheck);
// get back to the desired redundancy
vm0.invoke(createPR);
// verify no data loss
vm0.invoke(existsEntryCheck);
// 2nd oldest VM down!
vm3.invoke(closeCache);
// 2nd down, but no data loss
vm0.invoke(existsEntryCheck);
// get back (for 2nd time) to desired redundancy
vm1.invoke(createPR);
// verify no data loss
vm1.invoke(existsEntryCheck);
vm0.invoke(existsEntryCheck);
}
//////////test methods ////////////////
public void testGrabBackupBuckets() throws Throwable
{
Host host = Host.getHost(0);
VM dataStore0 = host.getVM(0);
// VM dataStore1 = host.getVM(1);
VM dataStore2 = host.getVM(2);
VM accessor = host.getVM(3);
final int redundantCopies = 1;
// Create PRs On 2 VMs
CacheSerializableRunnable createPRs = new CacheSerializableRunnable(
"createPrRegions") {
public void run2() throws CacheException
{
final CountDownLatch recoveryDone = new CountDownLatch(MAX_REGIONS);
ResourceObserver waitForRecovery = new ResourceObserverAdapter() {
@Override
public void rebalancingOrRecoveryFinished(Region region) {
recoveryDone.countDown();
}
};
InternalResourceManager.setResourceObserver(waitForRecovery);
try {
Cache cache = getCache();
System.setProperty(PartitionedRegion.RETRY_TIMEOUT_PROPERTY, "20000");
for (int i = 0; i < MAX_REGIONS; i++) {
cache.createRegion(PR_PREFIX + i,
createRegionAttributesForPR(redundantCopies, 200));
}
System.setProperty(PartitionedRegion.RETRY_TIMEOUT_PROPERTY,
Integer.toString(PartitionedRegionHelper.DEFAULT_TOTAL_WAIT_RETRY_ITERATION));
if(!recoveryDone.await(60, TimeUnit.SECONDS)) {
fail("recovery didn't happen in 60 seconds");
}
} catch (InterruptedException e) {
fail("recovery wait interrupted", e);
} finally {
InternalResourceManager.setResourceObserver(null);
}
}
};
CacheSerializableRunnable createAccessor = new CacheSerializableRunnable(
"createAccessor") {
public void run2() throws CacheException
{
Cache cache = getCache();
for (int i = 0; i < MAX_REGIONS; i++) {
cache.createRegion(PR_PREFIX + i,
createRegionAttributesForPR(redundantCopies, 0));
}
}
};
// Create PRs on only 2 VMs
dataStore0.invoke(createPRs);
// dataStore1.invoke(createPRs);
final String expectedExceptions = PartitionedRegionStorageException.class.getName();
SerializableRunnable addExpectedExceptions =
new CacheSerializableRunnable("addExpectedExceptions") {
public void run2() throws CacheException {
getCache().getLogger().info("<ExpectedException action=add>" +
expectedExceptions + "</ExpectedException>");
getLogWriter().info("<ExpectedException action=add>" +
expectedExceptions + "</ExpectedException>");
}
};
SerializableRunnable removeExpectedExceptions =
new CacheSerializableRunnable("removeExpectedExceptions") {
public void run2() throws CacheException {
getLogWriter().info("<ExpectedException action=remove>" +
expectedExceptions + "</ExpectedException>");
getCache().getLogger().info("<ExpectedException action=remove>" +
expectedExceptions + "</ExpectedException>");
}
};
// Do put operations on these 2 PRs asynchronosly.
CacheSerializableRunnable dataStore0Puts = new CacheSerializableRunnable("dataStore0PutOperations") {
public void run2()
{
Cache cache = getCache();
for (int j = 0; j < MAX_REGIONS; j++) {
Region pr = cache.getRegion(Region.SEPARATOR + PR_PREFIX + j);
assertNotNull(pr);
for (int k = 0; k < 10; k++) {
pr.put(j + PR_PREFIX + k, PR_PREFIX + k);
}
getLogWriter().info("VM0 Done put successfully for PR = " + PR_PREFIX
+ j);
}
}
};
CacheSerializableRunnable dataStore1Puts = new CacheSerializableRunnable("dataStore1PutOperations") { // TODO bug36296
public void run2()
{
Cache cache = getCache();
for (int j = 0; j < MAX_REGIONS; j++) {
Region pr = cache.getRegion(Region.SEPARATOR + PR_PREFIX + (j));
assertNotNull(pr);
for (int k = 10; k < 20; k++) {
pr.put(j + PR_PREFIX + k, PR_PREFIX + k);
}
getLogWriter().info("VM1 Done put successfully for PR = " + PR_PREFIX
+ j);
}
}
};
dataStore0.invoke(addExpectedExceptions);
// dataStore1.invoke(addExpectedExceptions);
AsyncInvocation async0 = dataStore0.invokeAsync(dataStore0Puts);
// AsyncInvocation async1 = dataStore1.invokeAsync(dataStore1Puts);
DistributedTestCase.join(async0, 30 * 1000, getLogWriter());
// async1.join();
dataStore0.invoke(removeExpectedExceptions);
// dataStore1.invoke(removeExpectedExceptions);
// Verify that buckets can not be created if there are not enough Nodes to support
// the redundancy Configuration
assertFalse(async0.exceptionOccurred());
// assertTrue(async0.getException() instanceof PartitionedRegionStorageException);
// assertTrue(async1.exceptionOccurred());
// assertTrue(async1.getException() instanceof PartitionedRegionStorageException);
// At this point redundancy criterion is not meet.
// now if we create PRs on more VMs, it should create those "supposed to
// be redundant" buckets on these nodes, if it can accommodate the data
// (localMaxMemory>0).
dataStore2.invoke(createPRs);
async0 = dataStore0.invokeAsync(dataStore0Puts);
// async1 = dataStore1.invokeAsync(dataStore1Puts);
DistributedTestCase.join(async0, 30 * 1000, getLogWriter());
// async1.join();
if (async0.exceptionOccurred()) {
fail("async0 failed", async0.getException());
}
// assertFalse(async1.exceptionOccurred());
accessor.invoke(createAccessor);
for (int c = 0; c < MAX_REGIONS; c++) {
final Integer ri = new Integer(c);
final SerializableCallable validateLocalBucket2RegionMapSize =
new SerializableCallable("validateLocalBucket2RegionMapSize") {
public Object call() throws Exception {
int size = 0;
Cache cache = getCache();
PartitionedRegion pr = (PartitionedRegion)cache
.getRegion(Region.SEPARATOR + PR_PREFIX + ri.intValue());
if (pr.getDataStore() != null) {
size = pr.getDataStore().getBucketsManaged();
}
return new Integer(size);
}
};
final SerializableCallable validateBucketsOnNode =
new SerializableCallable("validateBucketOnNode") {
public Object call() throws Exception {
int containsNode = 0;
Cache cache = getCache();
PartitionedRegion pr = (PartitionedRegion)cache
.getRegion(Region.SEPARATOR + PR_PREFIX + ri.intValue());
Iterator it = pr.getRegionAdvisor().getBucketSet().iterator();
Set nodeList;
try {
while (it.hasNext()) {
Integer bucketId = (Integer) it.next();
nodeList = pr.getRegionAdvisor().getBucketOwners(bucketId.intValue());
if ((nodeList != null) && (nodeList.contains(pr.getMyId()))) {
containsNode++;
}
else {
getCache().getLogger().fine("I don't contain member " + pr.getMyId());
}
}
} catch (NoSuchElementException done) {
}
return new Integer(containsNode);
}
};
// int vm0LBRsize = ((Integer)dataStore0.invoke(validateLocalBucket2RegionMapSize)).intValue();
int vm2LBRsize = ((Integer)dataStore2.invoke(validateLocalBucket2RegionMapSize)).intValue();
int vm3LBRsize = ((Integer)accessor.invoke(validateLocalBucket2RegionMapSize)).intValue();
// This would mean that up coming node didn't pick up any buckets
assertFalse(vm2LBRsize == 0);
// This accessor should NOT have picked up any buckets.
assertFalse(vm3LBRsize != 0);
int vm2B2Nsize = ((Integer)dataStore2.invoke(validateBucketsOnNode)).intValue();
getLogWriter().info("vm2B2Nsize = " + vm2B2Nsize);
assertEquals(vm2B2Nsize, vm2LBRsize);
}
}
/**
 * Verifies the attributes of the bucket regions that back a partitioned
 * region, for both a zero-redundancy PR (buckets are distributed-ack
 * scoped) and a single-redundancy PR (buckets carry a REPLICATE data
 * policy).
 * @see PartitionedRegionSingleNodeOperationsJUnitTest#testBucketScope()
 * @throws Exception
 */
public void testBucketsScope() throws Exception
{
  Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);

  // Region names; must be final so the anonymous runnables can capture them.
  final String PR_ZeroRedundancy = "PR_ZeroRedundancy";
  final String PR_SingleRedundancy = "PR_SingleRedundancy";

  // Creates both partitioned regions in whichever VM executes this runnable.
  CacheSerializableRunnable createPRs = new CacheSerializableRunnable(
      "createPrRegionWithZeroRed") {
    public void run2() throws CacheException
    {
      Cache cache = getCache();
      // redundantCopies = 0, localMaxMemory = 200, Scope = DISTRIBUTED_ACK
      cache.createRegion(PR_ZeroRedundancy,
          createRegionAttributesForPR(0, 200));
      // redundantCopies = 1, localMaxMemory = 200, Scope = DISTRIBUTED_ACK
      cache.createRegion(PR_SingleRedundancy,
          createRegionAttributesForPR(1, 200));
    }
  };
  // Both PRs are hosted on exactly these two VMs.
  vm0.invoke(createPRs);
  vm1.invoke(createPRs);

  // Populate both PRs from vm0 so that buckets actually get created.
  vm0.invoke(new CacheSerializableRunnable("doPutOperations") {
    public void run2()
    {
      Cache cache = getCache();
      String[] regionNames = { PR_ZeroRedundancy, PR_SingleRedundancy };
      for (int r = 0; r < regionNames.length; r++) {
        String regionName = regionNames[r];
        Region pr = cache.getRegion(Region.SEPARATOR + regionName);
        assertNotNull(pr);
        for (int k = 0; k < 10; k++) {
          pr.put(k + "", k + "");
        }
        cache.getLogger().fine(
            "VM0 Done put successfully for PR = " + regionName);
      }
    }
  });

  // Inspect every locally hosted bucket region on the executing VM.
  CacheSerializableRunnable validateBucketScope = new CacheSerializableRunnable(
      "validateBucketScope") {
    public void run2()
    {
      Cache cache = getCache();
      // Zero-redundancy PR: each bucket must use distributed-ack scope.
      PartitionedRegion zeroRedPr = (PartitionedRegion)cache
          .getRegion(Region.SEPARATOR + PR_ZeroRedundancy);
      for (java.util.Iterator it = zeroRedPr.getDataStore().localBucket2RegionMap
          .values().iterator(); it.hasNext();) {
        BucketRegion bucket = (BucketRegion)it.next();
        assertTrue(bucket.getAttributes().getScope().isDistributedAck());
      }
      // Single-redundancy PR: each bucket must carry a REPLICATE data policy.
      PartitionedRegion singleRedPr = (PartitionedRegion)cache
          .getRegion(Region.SEPARATOR + PR_SingleRedundancy);
      for (java.util.Iterator it = singleRedPr.getDataStore().localBucket2RegionMap
          .values().iterator(); it.hasNext();) {
        Region bucket = (Region)it.next();
        assertEquals(DataPolicy.REPLICATE, bucket.getAttributes().getDataPolicy());
      }
    }
  };
  vm0.invoke(validateBucketScope);
  vm1.invoke(validateBucketScope);
}
/**
 * Builds the {@link RegionAttributes} used to create a partitioned region
 * with the given redundancy and per-VM memory limit. The total bucket
 * count is taken from the test-wide {@code totalNumBuckets} field.
 *
 * @param redundancy the number of redundant copies for the PR
 * @param localMaxMem the local max memory setting for the PR
 * @return region attributes carrying the configured partition attributes
 */
protected RegionAttributes createRegionAttributesForPR(int redundancy,
    int localMaxMem)
{
  PartitionAttributesFactory paf = new PartitionAttributesFactory();
  paf.setRedundantCopies(redundancy);
  paf.setLocalMaxMemory(localMaxMem);
  paf.setTotalNumBuckets(totalNumBuckets);
  AttributesFactory attr = new AttributesFactory();
  attr.setPartitionAttributes(paf.create());
  return attr.create();
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
package fixtures.bodycomplex.implementation;
import retrofit2.Retrofit;
import fixtures.bodycomplex.Primitives;
import com.google.common.reflect.TypeToken;
import com.microsoft.rest.ServiceCall;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.ServiceResponseBuilder;
import com.microsoft.rest.Validator;
import fixtures.bodycomplex.models.BooleanWrapper;
import fixtures.bodycomplex.models.ByteWrapper;
import fixtures.bodycomplex.models.Datetimerfc1123Wrapper;
import fixtures.bodycomplex.models.DatetimeWrapper;
import fixtures.bodycomplex.models.DateWrapper;
import fixtures.bodycomplex.models.DoubleWrapper;
import fixtures.bodycomplex.models.DurationWrapper;
import fixtures.bodycomplex.models.ErrorException;
import fixtures.bodycomplex.models.FloatWrapper;
import fixtures.bodycomplex.models.IntWrapper;
import fixtures.bodycomplex.models.LongWrapper;
import fixtures.bodycomplex.models.StringWrapper;
import java.io.IOException;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.Headers;
import retrofit2.http.PUT;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in Primitives.
*/
public final class PrimitivesImpl implements Primitives {
/** The Retrofit service used to perform the REST calls; built once in the constructor. */
private PrimitivesService service;
/** The service client that owns this operation group (supplies the serializer adapter). */
private AutoRestComplexTestServiceImpl client;
/**
 * Initializes an instance of Primitives, binding the generated
 * {@link PrimitivesService} proxy to the supplied Retrofit instance.
 *
 * @param retrofit the Retrofit instance built from a Retrofit Builder.
 * @param client the instance of the service client containing this operation class.
 */
public PrimitivesImpl(Retrofit retrofit, AutoRestComplexTestServiceImpl client) {
    this.client = client;
    this.service = retrofit.create(PrimitivesService.class);
}
/**
 * The interface defining all the services for Primitives to be
 * used by Retrofit to perform the actual REST calls.
 * Each primitive wrapper type gets one GET/PUT pair; responses are
 * returned as raw {@code ResponseBody} and deserialized by the
 * delegate methods below.
 */
interface PrimitivesService {
@Headers("Content-Type: application/json; charset=utf-8")
@GET("complex/primitive/integer")
Observable<Response<ResponseBody>> getInt();
@Headers("Content-Type: application/json; charset=utf-8")
@PUT("complex/primitive/integer")
Observable<Response<ResponseBody>> putInt(@Body IntWrapper complexBody);
@Headers("Content-Type: application/json; charset=utf-8")
@GET("complex/primitive/long")
Observable<Response<ResponseBody>> getLong();
@Headers("Content-Type: application/json; charset=utf-8")
@PUT("complex/primitive/long")
Observable<Response<ResponseBody>> putLong(@Body LongWrapper complexBody);
@Headers("Content-Type: application/json; charset=utf-8")
@GET("complex/primitive/float")
Observable<Response<ResponseBody>> getFloat();
@Headers("Content-Type: application/json; charset=utf-8")
@PUT("complex/primitive/float")
Observable<Response<ResponseBody>> putFloat(@Body FloatWrapper complexBody);
@Headers("Content-Type: application/json; charset=utf-8")
@GET("complex/primitive/double")
Observable<Response<ResponseBody>> getDouble();
@Headers("Content-Type: application/json; charset=utf-8")
@PUT("complex/primitive/double")
Observable<Response<ResponseBody>> putDouble(@Body DoubleWrapper complexBody);
@Headers("Content-Type: application/json; charset=utf-8")
@GET("complex/primitive/bool")
Observable<Response<ResponseBody>> getBool();
@Headers("Content-Type: application/json; charset=utf-8")
@PUT("complex/primitive/bool")
Observable<Response<ResponseBody>> putBool(@Body BooleanWrapper complexBody);
@Headers("Content-Type: application/json; charset=utf-8")
@GET("complex/primitive/string")
Observable<Response<ResponseBody>> getString();
@Headers("Content-Type: application/json; charset=utf-8")
@PUT("complex/primitive/string")
Observable<Response<ResponseBody>> putString(@Body StringWrapper complexBody);
@Headers("Content-Type: application/json; charset=utf-8")
@GET("complex/primitive/date")
Observable<Response<ResponseBody>> getDate();
@Headers("Content-Type: application/json; charset=utf-8")
@PUT("complex/primitive/date")
Observable<Response<ResponseBody>> putDate(@Body DateWrapper complexBody);
@Headers("Content-Type: application/json; charset=utf-8")
@GET("complex/primitive/datetime")
Observable<Response<ResponseBody>> getDateTime();
@Headers("Content-Type: application/json; charset=utf-8")
@PUT("complex/primitive/datetime")
Observable<Response<ResponseBody>> putDateTime(@Body DatetimeWrapper complexBody);
@Headers("Content-Type: application/json; charset=utf-8")
@GET("complex/primitive/datetimerfc1123")
Observable<Response<ResponseBody>> getDateTimeRfc1123();
@Headers("Content-Type: application/json; charset=utf-8")
@PUT("complex/primitive/datetimerfc1123")
Observable<Response<ResponseBody>> putDateTimeRfc1123(@Body Datetimerfc1123Wrapper complexBody);
@Headers("Content-Type: application/json; charset=utf-8")
@GET("complex/primitive/duration")
Observable<Response<ResponseBody>> getDuration();
@Headers("Content-Type: application/json; charset=utf-8")
@PUT("complex/primitive/duration")
Observable<Response<ResponseBody>> putDuration(@Body DurationWrapper complexBody);
@Headers("Content-Type: application/json; charset=utf-8")
@GET("complex/primitive/byte")
Observable<Response<ResponseBody>> getByte();
@Headers("Content-Type: application/json; charset=utf-8")
@PUT("complex/primitive/byte")
Observable<Response<ResponseBody>> putByte(@Body ByteWrapper complexBody);
}
/**
 * Get complex types with integer properties.
 *
 * @return the IntWrapper object if successful.
 */
public IntWrapper getInt() {
// Blocks the calling thread until the service call completes.
return getIntWithServiceResponseAsync().toBlocking().single().getBody();
}
/**
 * Get complex types with integer properties.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<IntWrapper> getIntAsync(final ServiceCallback<IntWrapper> serviceCallback) {
return ServiceCall.create(getIntWithServiceResponseAsync(), serviceCallback);
}
/**
 * Get complex types with integer properties.
 *
 * @return the observable to the IntWrapper object
 */
public Observable<IntWrapper> getIntAsync() {
return getIntWithServiceResponseAsync().map(new Func1<ServiceResponse<IntWrapper>, IntWrapper>() {
@Override
public IntWrapper call(ServiceResponse<IntWrapper> response) {
return response.getBody();
}
});
}
/**
 * Get complex types with integer properties.
 *
 * @return the observable to the IntWrapper object
 */
public Observable<ServiceResponse<IntWrapper>> getIntWithServiceResponseAsync() {
return service.getInt()
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<IntWrapper>>>() {
@Override
public Observable<ServiceResponse<IntWrapper>> call(Response<ResponseBody> response) {
// Any failure (including deserialization) is routed to the Rx error channel.
try {
ServiceResponse<IntWrapper> clientResponse = getIntDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
// Treats HTTP 200 as an IntWrapper body; any other status raises ErrorException.
private ServiceResponse<IntWrapper> getIntDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
return new ServiceResponseBuilder<IntWrapper, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<IntWrapper>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Put complex types with integer properties.
 *
 * @param complexBody Please put -1 and 2
 */
public void putInt(IntWrapper complexBody) {
// Blocks the calling thread until the service call completes.
putIntWithServiceResponseAsync(complexBody).toBlocking().single().getBody();
}
/**
 * Put complex types with integer properties.
 *
 * @param complexBody Please put -1 and 2
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<Void> putIntAsync(IntWrapper complexBody, final ServiceCallback<Void> serviceCallback) {
return ServiceCall.create(putIntWithServiceResponseAsync(complexBody), serviceCallback);
}
/**
 * Put complex types with integer properties.
 *
 * @param complexBody Please put -1 and 2
 * @return the observable to the response
 */
public Observable<Void> putIntAsync(IntWrapper complexBody) {
return putIntWithServiceResponseAsync(complexBody).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.getBody();
}
});
}
/**
 * Put complex types with integer properties.
 *
 * @param complexBody Please put -1 and 2
 * @throws IllegalArgumentException thrown if {@code complexBody} is null
 * @return the observable to the {@link ServiceResponse} object
 */
public Observable<ServiceResponse<Void>> putIntWithServiceResponseAsync(IntWrapper complexBody) {
if (complexBody == null) {
throw new IllegalArgumentException("Parameter complexBody is required and cannot be null.");
}
Validator.validate(complexBody);
return service.putInt(complexBody)
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = putIntDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
// Treats HTTP 200 as an empty (Void) body; any other status raises ErrorException.
private ServiceResponse<Void> putIntDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
return new ServiceResponseBuilder<Void, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Get complex types with long properties.
 *
 * @return the LongWrapper object if successful.
 */
public LongWrapper getLong() {
// Blocks the calling thread until the service call completes.
return getLongWithServiceResponseAsync().toBlocking().single().getBody();
}
/**
 * Get complex types with long properties.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<LongWrapper> getLongAsync(final ServiceCallback<LongWrapper> serviceCallback) {
return ServiceCall.create(getLongWithServiceResponseAsync(), serviceCallback);
}
/**
 * Get complex types with long properties.
 *
 * @return the observable to the LongWrapper object
 */
public Observable<LongWrapper> getLongAsync() {
return getLongWithServiceResponseAsync().map(new Func1<ServiceResponse<LongWrapper>, LongWrapper>() {
@Override
public LongWrapper call(ServiceResponse<LongWrapper> response) {
return response.getBody();
}
});
}
/**
 * Get complex types with long properties.
 *
 * @return the observable to the LongWrapper object
 */
public Observable<ServiceResponse<LongWrapper>> getLongWithServiceResponseAsync() {
return service.getLong()
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<LongWrapper>>>() {
@Override
public Observable<ServiceResponse<LongWrapper>> call(Response<ResponseBody> response) {
// Any failure (including deserialization) is routed to the Rx error channel.
try {
ServiceResponse<LongWrapper> clientResponse = getLongDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
// Treats HTTP 200 as a LongWrapper body; any other status raises ErrorException.
private ServiceResponse<LongWrapper> getLongDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
return new ServiceResponseBuilder<LongWrapper, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<LongWrapper>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Put complex types with long properties.
 *
 * @param complexBody Please put 1099511627775 and -999511627788
 */
public void putLong(LongWrapper complexBody) {
// Blocks the calling thread until the service call completes.
putLongWithServiceResponseAsync(complexBody).toBlocking().single().getBody();
}
/**
 * Put complex types with long properties.
 *
 * @param complexBody Please put 1099511627775 and -999511627788
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<Void> putLongAsync(LongWrapper complexBody, final ServiceCallback<Void> serviceCallback) {
return ServiceCall.create(putLongWithServiceResponseAsync(complexBody), serviceCallback);
}
/**
 * Put complex types with long properties.
 *
 * @param complexBody Please put 1099511627775 and -999511627788
 * @return the observable to the response
 */
public Observable<Void> putLongAsync(LongWrapper complexBody) {
return putLongWithServiceResponseAsync(complexBody).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.getBody();
}
});
}
/**
 * Put complex types with long properties.
 *
 * @param complexBody Please put 1099511627775 and -999511627788
 * @throws IllegalArgumentException thrown if {@code complexBody} is null
 * @return the observable to the {@link ServiceResponse} object
 */
public Observable<ServiceResponse<Void>> putLongWithServiceResponseAsync(LongWrapper complexBody) {
if (complexBody == null) {
throw new IllegalArgumentException("Parameter complexBody is required and cannot be null.");
}
Validator.validate(complexBody);
return service.putLong(complexBody)
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = putLongDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
// Treats HTTP 200 as an empty (Void) body; any other status raises ErrorException.
private ServiceResponse<Void> putLongDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
return new ServiceResponseBuilder<Void, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Get complex types with float properties.
 *
 * @return the FloatWrapper object if successful.
 */
public FloatWrapper getFloat() {
// Blocks the calling thread until the service call completes.
return getFloatWithServiceResponseAsync().toBlocking().single().getBody();
}
/**
 * Get complex types with float properties.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<FloatWrapper> getFloatAsync(final ServiceCallback<FloatWrapper> serviceCallback) {
return ServiceCall.create(getFloatWithServiceResponseAsync(), serviceCallback);
}
/**
 * Get complex types with float properties.
 *
 * @return the observable to the FloatWrapper object
 */
public Observable<FloatWrapper> getFloatAsync() {
return getFloatWithServiceResponseAsync().map(new Func1<ServiceResponse<FloatWrapper>, FloatWrapper>() {
@Override
public FloatWrapper call(ServiceResponse<FloatWrapper> response) {
return response.getBody();
}
});
}
/**
 * Get complex types with float properties.
 *
 * @return the observable to the FloatWrapper object
 */
public Observable<ServiceResponse<FloatWrapper>> getFloatWithServiceResponseAsync() {
return service.getFloat()
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<FloatWrapper>>>() {
@Override
public Observable<ServiceResponse<FloatWrapper>> call(Response<ResponseBody> response) {
// Any failure (including deserialization) is routed to the Rx error channel.
try {
ServiceResponse<FloatWrapper> clientResponse = getFloatDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
// Treats HTTP 200 as a FloatWrapper body; any other status raises ErrorException.
private ServiceResponse<FloatWrapper> getFloatDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
return new ServiceResponseBuilder<FloatWrapper, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<FloatWrapper>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Put complex types with float properties.
 *
 * @param complexBody Please put 1.05 and -0.003
 */
public void putFloat(FloatWrapper complexBody) {
// Blocks the calling thread until the service call completes.
putFloatWithServiceResponseAsync(complexBody).toBlocking().single().getBody();
}
/**
 * Put complex types with float properties.
 *
 * @param complexBody Please put 1.05 and -0.003
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<Void> putFloatAsync(FloatWrapper complexBody, final ServiceCallback<Void> serviceCallback) {
return ServiceCall.create(putFloatWithServiceResponseAsync(complexBody), serviceCallback);
}
/**
 * Put complex types with float properties.
 *
 * @param complexBody Please put 1.05 and -0.003
 * @return the observable to the response
 */
public Observable<Void> putFloatAsync(FloatWrapper complexBody) {
return putFloatWithServiceResponseAsync(complexBody).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.getBody();
}
});
}
/**
 * Put complex types with float properties.
 *
 * @param complexBody Please put 1.05 and -0.003
 * @throws IllegalArgumentException thrown if {@code complexBody} is null
 * @return the observable to the {@link ServiceResponse} object
 */
public Observable<ServiceResponse<Void>> putFloatWithServiceResponseAsync(FloatWrapper complexBody) {
if (complexBody == null) {
throw new IllegalArgumentException("Parameter complexBody is required and cannot be null.");
}
Validator.validate(complexBody);
return service.putFloat(complexBody)
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = putFloatDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
// Treats HTTP 200 as an empty (Void) body; any other status raises ErrorException.
private ServiceResponse<Void> putFloatDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
return new ServiceResponseBuilder<Void, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Get complex types with double properties.
 *
 * @return the DoubleWrapper object if successful.
 */
public DoubleWrapper getDouble() {
// Blocks the calling thread until the service call completes.
return getDoubleWithServiceResponseAsync().toBlocking().single().getBody();
}
/**
 * Get complex types with double properties.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<DoubleWrapper> getDoubleAsync(final ServiceCallback<DoubleWrapper> serviceCallback) {
return ServiceCall.create(getDoubleWithServiceResponseAsync(), serviceCallback);
}
/**
 * Get complex types with double properties.
 *
 * @return the observable to the DoubleWrapper object
 */
public Observable<DoubleWrapper> getDoubleAsync() {
return getDoubleWithServiceResponseAsync().map(new Func1<ServiceResponse<DoubleWrapper>, DoubleWrapper>() {
@Override
public DoubleWrapper call(ServiceResponse<DoubleWrapper> response) {
return response.getBody();
}
});
}
/**
 * Get complex types with double properties.
 *
 * @return the observable to the DoubleWrapper object
 */
public Observable<ServiceResponse<DoubleWrapper>> getDoubleWithServiceResponseAsync() {
return service.getDouble()
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<DoubleWrapper>>>() {
@Override
public Observable<ServiceResponse<DoubleWrapper>> call(Response<ResponseBody> response) {
// Any failure (including deserialization) is routed to the Rx error channel.
try {
ServiceResponse<DoubleWrapper> clientResponse = getDoubleDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
// Treats HTTP 200 as a DoubleWrapper body; any other status raises ErrorException.
private ServiceResponse<DoubleWrapper> getDoubleDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
return new ServiceResponseBuilder<DoubleWrapper, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<DoubleWrapper>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Put complex types with double properties.
 *
 * @param complexBody Please put 3e-100 and -0.000000000000000000000000000000000000000000000000000000005
 */
public void putDouble(DoubleWrapper complexBody) {
// Blocks the calling thread until the service call completes.
putDoubleWithServiceResponseAsync(complexBody).toBlocking().single().getBody();
}
/**
 * Put complex types with double properties.
 *
 * @param complexBody Please put 3e-100 and -0.000000000000000000000000000000000000000000000000000000005
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<Void> putDoubleAsync(DoubleWrapper complexBody, final ServiceCallback<Void> serviceCallback) {
return ServiceCall.create(putDoubleWithServiceResponseAsync(complexBody), serviceCallback);
}
/**
 * Put complex types with double properties.
 *
 * @param complexBody Please put 3e-100 and -0.000000000000000000000000000000000000000000000000000000005
 * @return the observable to the response
 */
public Observable<Void> putDoubleAsync(DoubleWrapper complexBody) {
return putDoubleWithServiceResponseAsync(complexBody).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.getBody();
}
});
}
/**
 * Put complex types with double properties.
 *
 * @param complexBody Please put 3e-100 and -0.000000000000000000000000000000000000000000000000000000005
 * @throws IllegalArgumentException thrown if {@code complexBody} is null
 * @return the observable to the {@link ServiceResponse} object
 */
public Observable<ServiceResponse<Void>> putDoubleWithServiceResponseAsync(DoubleWrapper complexBody) {
if (complexBody == null) {
throw new IllegalArgumentException("Parameter complexBody is required and cannot be null.");
}
Validator.validate(complexBody);
return service.putDouble(complexBody)
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = putDoubleDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
// Treats HTTP 200 as an empty (Void) body; any other status raises ErrorException.
private ServiceResponse<Void> putDoubleDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
return new ServiceResponseBuilder<Void, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Get complex types with bool properties.
 *
 * @return the BooleanWrapper object if successful.
 */
public BooleanWrapper getBool() {
// Blocks the calling thread until the service call completes.
return getBoolWithServiceResponseAsync().toBlocking().single().getBody();
}
/**
 * Get complex types with bool properties.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<BooleanWrapper> getBoolAsync(final ServiceCallback<BooleanWrapper> serviceCallback) {
return ServiceCall.create(getBoolWithServiceResponseAsync(), serviceCallback);
}
/**
 * Get complex types with bool properties.
 *
 * @return the observable to the BooleanWrapper object
 */
public Observable<BooleanWrapper> getBoolAsync() {
return getBoolWithServiceResponseAsync().map(new Func1<ServiceResponse<BooleanWrapper>, BooleanWrapper>() {
@Override
public BooleanWrapper call(ServiceResponse<BooleanWrapper> response) {
return response.getBody();
}
});
}
/**
 * Get complex types with bool properties.
 *
 * @return the observable to the BooleanWrapper object
 */
public Observable<ServiceResponse<BooleanWrapper>> getBoolWithServiceResponseAsync() {
return service.getBool()
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<BooleanWrapper>>>() {
@Override
public Observable<ServiceResponse<BooleanWrapper>> call(Response<ResponseBody> response) {
// Any failure (including deserialization) is routed to the Rx error channel.
try {
ServiceResponse<BooleanWrapper> clientResponse = getBoolDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
// Treats HTTP 200 as a BooleanWrapper body; any other status raises ErrorException.
private ServiceResponse<BooleanWrapper> getBoolDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
return new ServiceResponseBuilder<BooleanWrapper, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<BooleanWrapper>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Put complex types with bool properties.
 *
 * @param complexBody Please put true and false
 */
public void putBool(BooleanWrapper complexBody) {
// Blocks the calling thread until the service call completes.
putBoolWithServiceResponseAsync(complexBody).toBlocking().single().getBody();
}
/**
 * Put complex types with bool properties.
 *
 * @param complexBody Please put true and false
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<Void> putBoolAsync(BooleanWrapper complexBody, final ServiceCallback<Void> serviceCallback) {
return ServiceCall.create(putBoolWithServiceResponseAsync(complexBody), serviceCallback);
}
/**
 * Put complex types with bool properties.
 *
 * @param complexBody Please put true and false
 * @return the observable to the response
 */
public Observable<Void> putBoolAsync(BooleanWrapper complexBody) {
return putBoolWithServiceResponseAsync(complexBody).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.getBody();
}
});
}
/**
 * Put complex types with bool properties.
 *
 * @param complexBody Please put true and false
 * @throws IllegalArgumentException thrown if {@code complexBody} is null
 * @return the observable to the {@link ServiceResponse} object
 */
public Observable<ServiceResponse<Void>> putBoolWithServiceResponseAsync(BooleanWrapper complexBody) {
if (complexBody == null) {
throw new IllegalArgumentException("Parameter complexBody is required and cannot be null.");
}
Validator.validate(complexBody);
return service.putBool(complexBody)
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = putBoolDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
// Treats HTTP 200 as an empty (Void) body; any other status raises ErrorException.
private ServiceResponse<Void> putBoolDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
return new ServiceResponseBuilder<Void, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Get complex types with string properties.
 *
 * @return the StringWrapper object if successful.
 */
public StringWrapper getString() {
// Blocks the calling thread until the service call completes.
return getStringWithServiceResponseAsync().toBlocking().single().getBody();
}
/**
 * Get complex types with string properties.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<StringWrapper> getStringAsync(final ServiceCallback<StringWrapper> serviceCallback) {
return ServiceCall.create(getStringWithServiceResponseAsync(), serviceCallback);
}
/**
 * Get complex types with string properties.
 *
 * @return the observable to the StringWrapper object
 */
public Observable<StringWrapper> getStringAsync() {
return getStringWithServiceResponseAsync().map(new Func1<ServiceResponse<StringWrapper>, StringWrapper>() {
@Override
public StringWrapper call(ServiceResponse<StringWrapper> response) {
return response.getBody();
}
});
}
/**
 * Get complex types with string properties.
 *
 * @return the observable to the StringWrapper object
 */
public Observable<ServiceResponse<StringWrapper>> getStringWithServiceResponseAsync() {
return service.getString()
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<StringWrapper>>>() {
@Override
public Observable<ServiceResponse<StringWrapper>> call(Response<ResponseBody> response) {
// Any failure (including deserialization) is routed to the Rx error channel.
try {
ServiceResponse<StringWrapper> clientResponse = getStringDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
// Treats HTTP 200 as a StringWrapper body; any other status raises ErrorException.
private ServiceResponse<StringWrapper> getStringDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
return new ServiceResponseBuilder<StringWrapper, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<StringWrapper>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Put complex types with string properties.
 *
 * @param complexBody Please put 'goodrequest', '', and null
 */
public void putString(StringWrapper complexBody) {
// Blocks the calling thread until the service call completes.
putStringWithServiceResponseAsync(complexBody).toBlocking().single().getBody();
}
/**
 * Put complex types with string properties.
 *
 * @param complexBody Please put 'goodrequest', '', and null
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<Void> putStringAsync(StringWrapper complexBody, final ServiceCallback<Void> serviceCallback) {
return ServiceCall.create(putStringWithServiceResponseAsync(complexBody), serviceCallback);
}
/**
 * Put complex types with string properties.
 *
 * @param complexBody Please put 'goodrequest', '', and null
 * @return the observable to the response
 */
public Observable<Void> putStringAsync(StringWrapper complexBody) {
return putStringWithServiceResponseAsync(complexBody).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.getBody();
}
});
}
/**
 * Put complex types with string properties.
 *
 * @param complexBody Please put 'goodrequest', '', and null
 * @throws IllegalArgumentException thrown if {@code complexBody} is null
 * @return the observable to the {@link ServiceResponse} object
 */
public Observable<ServiceResponse<Void>> putStringWithServiceResponseAsync(StringWrapper complexBody) {
if (complexBody == null) {
throw new IllegalArgumentException("Parameter complexBody is required and cannot be null.");
}
Validator.validate(complexBody);
return service.putString(complexBody)
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = putStringDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
// Treats HTTP 200 as an empty (Void) body; any other status raises ErrorException.
private ServiceResponse<Void> putStringDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
return new ServiceResponseBuilder<Void, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Get complex types with date properties.
 *
 * @return the DateWrapper object if successful.
 */
public DateWrapper getDate() {
// Synchronous variant: blocks the calling thread until the single response is emitted.
return getDateWithServiceResponseAsync().toBlocking().single().getBody();
}
/**
 * Get complex types with date properties.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<DateWrapper> getDateAsync(final ServiceCallback<DateWrapper> serviceCallback) {
return ServiceCall.create(getDateWithServiceResponseAsync(), serviceCallback);
}
/**
 * Get complex types with date properties.
 *
 * @return the observable to the DateWrapper object
 */
public Observable<DateWrapper> getDateAsync() {
return getDateWithServiceResponseAsync().map(new Func1<ServiceResponse<DateWrapper>, DateWrapper>() {
@Override
public DateWrapper call(ServiceResponse<DateWrapper> response) {
// Unwrap the ServiceResponse to expose just the deserialized body.
return response.getBody();
}
});
}
/**
 * Get complex types with date properties.
 *
 * @return the observable to the {@link ServiceResponse} wrapping the DateWrapper object
 */
public Observable<ServiceResponse<DateWrapper>> getDateWithServiceResponseAsync() {
return service.getDate()
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<DateWrapper>>>() {
@Override
public Observable<ServiceResponse<DateWrapper>> call(Response<ResponseBody> response) {
try {
ServiceResponse<DateWrapper> clientResponse = getDateDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
// Surface deserialization/status errors through the Rx error channel.
return Observable.error(t);
}
}
});
}
/**
 * Maps the raw HTTP response for getDate: status 200 is deserialized as
 * DateWrapper, any other status into an {@link ErrorException}.
 */
private ServiceResponse<DateWrapper> getDateDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
return new ServiceResponseBuilder<DateWrapper, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<DateWrapper>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Put complex types with date properties.
 *
 * @param complexBody Please put '0001-01-01' and '2016-02-29'
 */
public void putDate(DateWrapper complexBody) {
// Synchronous variant: blocks the calling thread until the single response is emitted.
putDateWithServiceResponseAsync(complexBody).toBlocking().single().getBody();
}
/**
 * Put complex types with date properties.
 *
 * @param complexBody Please put '0001-01-01' and '2016-02-29'
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<Void> putDateAsync(DateWrapper complexBody, final ServiceCallback<Void> serviceCallback) {
return ServiceCall.create(putDateWithServiceResponseAsync(complexBody), serviceCallback);
}
/**
 * Put complex types with date properties.
 *
 * @param complexBody Please put '0001-01-01' and '2016-02-29'
 * @return the observable to the response body ({@code Void})
 */
public Observable<Void> putDateAsync(DateWrapper complexBody) {
return putDateWithServiceResponseAsync(complexBody).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
// Unwrap the ServiceResponse; a successful PUT carries no body.
return response.getBody();
}
});
}
/**
 * Put complex types with date properties.
 *
 * @param complexBody Please put '0001-01-01' and '2016-02-29'
 * @return the observable to the {@link ServiceResponse} object
 * @throws IllegalArgumentException thrown if {@code complexBody} is null or fails validation
 */
public Observable<ServiceResponse<Void>> putDateWithServiceResponseAsync(DateWrapper complexBody) {
if (complexBody == null) {
throw new IllegalArgumentException("Parameter complexBody is required and cannot be null.");
}
Validator.validate(complexBody);
return service.putDate(complexBody)
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = putDateDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
// Surface deserialization/status errors through the Rx error channel.
return Observable.error(t);
}
}
});
}
/**
 * Maps the raw HTTP response for putDate: status 200 is success, any
 * other status is deserialized into an {@link ErrorException}.
 */
private ServiceResponse<Void> putDateDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
return new ServiceResponseBuilder<Void, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Get complex types with datetime properties.
 *
 * @return the DatetimeWrapper object if successful.
 */
public DatetimeWrapper getDateTime() {
// Synchronous variant: blocks the calling thread until the single response is emitted.
return getDateTimeWithServiceResponseAsync().toBlocking().single().getBody();
}
/**
 * Get complex types with datetime properties.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<DatetimeWrapper> getDateTimeAsync(final ServiceCallback<DatetimeWrapper> serviceCallback) {
return ServiceCall.create(getDateTimeWithServiceResponseAsync(), serviceCallback);
}
/**
 * Get complex types with datetime properties.
 *
 * @return the observable to the DatetimeWrapper object
 */
public Observable<DatetimeWrapper> getDateTimeAsync() {
return getDateTimeWithServiceResponseAsync().map(new Func1<ServiceResponse<DatetimeWrapper>, DatetimeWrapper>() {
@Override
public DatetimeWrapper call(ServiceResponse<DatetimeWrapper> response) {
// Unwrap the ServiceResponse to expose just the deserialized body.
return response.getBody();
}
});
}
/**
 * Get complex types with datetime properties.
 *
 * @return the observable to the {@link ServiceResponse} wrapping the DatetimeWrapper object
 */
public Observable<ServiceResponse<DatetimeWrapper>> getDateTimeWithServiceResponseAsync() {
return service.getDateTime()
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<DatetimeWrapper>>>() {
@Override
public Observable<ServiceResponse<DatetimeWrapper>> call(Response<ResponseBody> response) {
try {
ServiceResponse<DatetimeWrapper> clientResponse = getDateTimeDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
// Surface deserialization/status errors through the Rx error channel.
return Observable.error(t);
}
}
});
}
/**
 * Maps the raw HTTP response for getDateTime: status 200 is deserialized
 * as DatetimeWrapper, any other status into an {@link ErrorException}.
 */
private ServiceResponse<DatetimeWrapper> getDateTimeDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
return new ServiceResponseBuilder<DatetimeWrapper, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<DatetimeWrapper>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Put complex types with datetime properties.
 *
 * @param complexBody Please put '0001-01-01T12:00:00-04:00' and '2015-05-18T11:38:00-08:00'
 */
public void putDateTime(DatetimeWrapper complexBody) {
// Synchronous variant: blocks the calling thread until the single response is emitted.
putDateTimeWithServiceResponseAsync(complexBody).toBlocking().single().getBody();
}
/**
 * Put complex types with datetime properties.
 *
 * @param complexBody Please put '0001-01-01T12:00:00-04:00' and '2015-05-18T11:38:00-08:00'
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<Void> putDateTimeAsync(DatetimeWrapper complexBody, final ServiceCallback<Void> serviceCallback) {
return ServiceCall.create(putDateTimeWithServiceResponseAsync(complexBody), serviceCallback);
}
/**
 * Put complex types with datetime properties.
 *
 * @param complexBody Please put '0001-01-01T12:00:00-04:00' and '2015-05-18T11:38:00-08:00'
 * @return the observable to the response body ({@code Void})
 */
public Observable<Void> putDateTimeAsync(DatetimeWrapper complexBody) {
return putDateTimeWithServiceResponseAsync(complexBody).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
// Unwrap the ServiceResponse; a successful PUT carries no body.
return response.getBody();
}
});
}
/**
 * Put complex types with datetime properties.
 *
 * @param complexBody Please put '0001-01-01T12:00:00-04:00' and '2015-05-18T11:38:00-08:00'
 * @return the observable to the {@link ServiceResponse} object
 * @throws IllegalArgumentException thrown if {@code complexBody} is null or fails validation
 */
public Observable<ServiceResponse<Void>> putDateTimeWithServiceResponseAsync(DatetimeWrapper complexBody) {
if (complexBody == null) {
throw new IllegalArgumentException("Parameter complexBody is required and cannot be null.");
}
Validator.validate(complexBody);
return service.putDateTime(complexBody)
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = putDateTimeDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
// Surface deserialization/status errors through the Rx error channel.
return Observable.error(t);
}
}
});
}
/**
 * Maps the raw HTTP response for putDateTime: status 200 is success, any
 * other status is deserialized into an {@link ErrorException}.
 */
private ServiceResponse<Void> putDateTimeDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
return new ServiceResponseBuilder<Void, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Get complex types with datetimeRfc1123 properties.
 *
 * @return the Datetimerfc1123Wrapper object if successful.
 */
public Datetimerfc1123Wrapper getDateTimeRfc1123() {
// Synchronous variant: blocks the calling thread until the single response is emitted.
return getDateTimeRfc1123WithServiceResponseAsync().toBlocking().single().getBody();
}
/**
 * Get complex types with datetimeRfc1123 properties.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<Datetimerfc1123Wrapper> getDateTimeRfc1123Async(final ServiceCallback<Datetimerfc1123Wrapper> serviceCallback) {
return ServiceCall.create(getDateTimeRfc1123WithServiceResponseAsync(), serviceCallback);
}
/**
 * Get complex types with datetimeRfc1123 properties.
 *
 * @return the observable to the Datetimerfc1123Wrapper object
 */
public Observable<Datetimerfc1123Wrapper> getDateTimeRfc1123Async() {
return getDateTimeRfc1123WithServiceResponseAsync().map(new Func1<ServiceResponse<Datetimerfc1123Wrapper>, Datetimerfc1123Wrapper>() {
@Override
public Datetimerfc1123Wrapper call(ServiceResponse<Datetimerfc1123Wrapper> response) {
// Unwrap the ServiceResponse to expose just the deserialized body.
return response.getBody();
}
});
}
/**
 * Get complex types with datetimeRfc1123 properties.
 *
 * @return the observable to the {@link ServiceResponse} wrapping the Datetimerfc1123Wrapper object
 */
public Observable<ServiceResponse<Datetimerfc1123Wrapper>> getDateTimeRfc1123WithServiceResponseAsync() {
return service.getDateTimeRfc1123()
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Datetimerfc1123Wrapper>>>() {
@Override
public Observable<ServiceResponse<Datetimerfc1123Wrapper>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Datetimerfc1123Wrapper> clientResponse = getDateTimeRfc1123Delegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
// Surface deserialization/status errors through the Rx error channel.
return Observable.error(t);
}
}
});
}
/**
 * Maps the raw HTTP response for getDateTimeRfc1123: status 200 is
 * deserialized as Datetimerfc1123Wrapper, any other status into an
 * {@link ErrorException}.
 */
private ServiceResponse<Datetimerfc1123Wrapper> getDateTimeRfc1123Delegate(Response<ResponseBody> response) throws ErrorException, IOException {
return new ServiceResponseBuilder<Datetimerfc1123Wrapper, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<Datetimerfc1123Wrapper>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Put complex types with datetimeRfc1123 properties.
 *
 * @param complexBody Please put 'Mon, 01 Jan 0001 12:00:00 GMT' and 'Mon, 18 May 2015 11:38:00 GMT'
 */
public void putDateTimeRfc1123(Datetimerfc1123Wrapper complexBody) {
// Synchronous variant: blocks the calling thread until the single response is emitted.
putDateTimeRfc1123WithServiceResponseAsync(complexBody).toBlocking().single().getBody();
}
/**
 * Put complex types with datetimeRfc1123 properties.
 *
 * @param complexBody Please put 'Mon, 01 Jan 0001 12:00:00 GMT' and 'Mon, 18 May 2015 11:38:00 GMT'
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<Void> putDateTimeRfc1123Async(Datetimerfc1123Wrapper complexBody, final ServiceCallback<Void> serviceCallback) {
return ServiceCall.create(putDateTimeRfc1123WithServiceResponseAsync(complexBody), serviceCallback);
}
/**
 * Put complex types with datetimeRfc1123 properties.
 *
 * @param complexBody Please put 'Mon, 01 Jan 0001 12:00:00 GMT' and 'Mon, 18 May 2015 11:38:00 GMT'
 * @return the observable to the response body ({@code Void})
 */
public Observable<Void> putDateTimeRfc1123Async(Datetimerfc1123Wrapper complexBody) {
return putDateTimeRfc1123WithServiceResponseAsync(complexBody).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
// Unwrap the ServiceResponse; a successful PUT carries no body.
return response.getBody();
}
});
}
/**
 * Put complex types with datetimeRfc1123 properties.
 *
 * @param complexBody Please put 'Mon, 01 Jan 0001 12:00:00 GMT' and 'Mon, 18 May 2015 11:38:00 GMT'
 * @return the observable to the {@link ServiceResponse} object
 * @throws IllegalArgumentException thrown if {@code complexBody} is null or fails validation
 */
public Observable<ServiceResponse<Void>> putDateTimeRfc1123WithServiceResponseAsync(Datetimerfc1123Wrapper complexBody) {
if (complexBody == null) {
throw new IllegalArgumentException("Parameter complexBody is required and cannot be null.");
}
Validator.validate(complexBody);
return service.putDateTimeRfc1123(complexBody)
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = putDateTimeRfc1123Delegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
// Surface deserialization/status errors through the Rx error channel.
return Observable.error(t);
}
}
});
}
/**
 * Maps the raw HTTP response for putDateTimeRfc1123: status 200 is
 * success, any other status is deserialized into an {@link ErrorException}.
 */
private ServiceResponse<Void> putDateTimeRfc1123Delegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
return new ServiceResponseBuilder<Void, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Get complex types with duration properties.
 *
 * @return the DurationWrapper object if successful.
 */
public DurationWrapper getDuration() {
// Synchronous variant: blocks the calling thread until the single response is emitted.
return getDurationWithServiceResponseAsync().toBlocking().single().getBody();
}
/**
 * Get complex types with duration properties.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<DurationWrapper> getDurationAsync(final ServiceCallback<DurationWrapper> serviceCallback) {
return ServiceCall.create(getDurationWithServiceResponseAsync(), serviceCallback);
}
/**
 * Get complex types with duration properties.
 *
 * @return the observable to the DurationWrapper object
 */
public Observable<DurationWrapper> getDurationAsync() {
return getDurationWithServiceResponseAsync().map(new Func1<ServiceResponse<DurationWrapper>, DurationWrapper>() {
@Override
public DurationWrapper call(ServiceResponse<DurationWrapper> response) {
// Unwrap the ServiceResponse to expose just the deserialized body.
return response.getBody();
}
});
}
/**
 * Get complex types with duration properties.
 *
 * @return the observable to the {@link ServiceResponse} wrapping the DurationWrapper object
 */
public Observable<ServiceResponse<DurationWrapper>> getDurationWithServiceResponseAsync() {
return service.getDuration()
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<DurationWrapper>>>() {
@Override
public Observable<ServiceResponse<DurationWrapper>> call(Response<ResponseBody> response) {
try {
ServiceResponse<DurationWrapper> clientResponse = getDurationDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
// Surface deserialization/status errors through the Rx error channel.
return Observable.error(t);
}
}
});
}
/**
 * Maps the raw HTTP response for getDuration: status 200 is deserialized
 * as DurationWrapper, any other status into an {@link ErrorException}.
 */
private ServiceResponse<DurationWrapper> getDurationDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
return new ServiceResponseBuilder<DurationWrapper, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<DurationWrapper>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Put complex types with duration properties.
 *
 * @param complexBody Please put 'P123DT22H14M12.011S'
 */
public void putDuration(DurationWrapper complexBody) {
// Synchronous variant: blocks the calling thread until the single response is emitted.
putDurationWithServiceResponseAsync(complexBody).toBlocking().single().getBody();
}
/**
 * Put complex types with duration properties.
 *
 * @param complexBody Please put 'P123DT22H14M12.011S'
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<Void> putDurationAsync(DurationWrapper complexBody, final ServiceCallback<Void> serviceCallback) {
return ServiceCall.create(putDurationWithServiceResponseAsync(complexBody), serviceCallback);
}
/**
 * Put complex types with duration properties.
 *
 * @param complexBody Please put 'P123DT22H14M12.011S'
 * @return the observable to the response body ({@code Void})
 */
public Observable<Void> putDurationAsync(DurationWrapper complexBody) {
return putDurationWithServiceResponseAsync(complexBody).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
// Unwrap the ServiceResponse; a successful PUT carries no body.
return response.getBody();
}
});
}
/**
 * Put complex types with duration properties.
 *
 * @param complexBody Please put 'P123DT22H14M12.011S'
 * @return the observable to the {@link ServiceResponse} object
 * @throws IllegalArgumentException thrown if {@code complexBody} is null or fails validation
 */
public Observable<ServiceResponse<Void>> putDurationWithServiceResponseAsync(DurationWrapper complexBody) {
if (complexBody == null) {
throw new IllegalArgumentException("Parameter complexBody is required and cannot be null.");
}
Validator.validate(complexBody);
return service.putDuration(complexBody)
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = putDurationDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
// Surface deserialization/status errors through the Rx error channel.
return Observable.error(t);
}
}
});
}
/**
 * Maps the raw HTTP response for putDuration: status 200 is success, any
 * other status is deserialized into an {@link ErrorException}.
 */
private ServiceResponse<Void> putDurationDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
return new ServiceResponseBuilder<Void, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Get complex types with byte properties.
 *
 * @return the ByteWrapper object if successful.
 */
public ByteWrapper getByte() {
// Synchronous variant: blocks the calling thread until the single response is emitted.
return getByteWithServiceResponseAsync().toBlocking().single().getBody();
}
/**
 * Get complex types with byte properties.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<ByteWrapper> getByteAsync(final ServiceCallback<ByteWrapper> serviceCallback) {
return ServiceCall.create(getByteWithServiceResponseAsync(), serviceCallback);
}
/**
 * Get complex types with byte properties.
 *
 * @return the observable to the ByteWrapper object
 */
public Observable<ByteWrapper> getByteAsync() {
return getByteWithServiceResponseAsync().map(new Func1<ServiceResponse<ByteWrapper>, ByteWrapper>() {
@Override
public ByteWrapper call(ServiceResponse<ByteWrapper> response) {
// Unwrap the ServiceResponse to expose just the deserialized body.
return response.getBody();
}
});
}
/**
 * Get complex types with byte properties.
 *
 * @return the observable to the {@link ServiceResponse} wrapping the ByteWrapper object
 */
public Observable<ServiceResponse<ByteWrapper>> getByteWithServiceResponseAsync() {
return service.getByte()
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ByteWrapper>>>() {
@Override
public Observable<ServiceResponse<ByteWrapper>> call(Response<ResponseBody> response) {
try {
ServiceResponse<ByteWrapper> clientResponse = getByteDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
// Surface deserialization/status errors through the Rx error channel.
return Observable.error(t);
}
}
});
}
/**
 * Maps the raw HTTP response for getByte: status 200 is deserialized as
 * ByteWrapper, any other status into an {@link ErrorException}.
 */
private ServiceResponse<ByteWrapper> getByteDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
return new ServiceResponseBuilder<ByteWrapper, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<ByteWrapper>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
/**
 * Put complex types with byte properties.
 *
 * @param complexBody Please put non-ascii byte string hex(FF FE FD FC 00 FA F9 F8 F7 F6)
 */
public void putByte(ByteWrapper complexBody) {
// Synchronous variant: blocks the calling thread until the single response is emitted.
putByteWithServiceResponseAsync(complexBody).toBlocking().single().getBody();
}
/**
 * Put complex types with byte properties.
 *
 * @param complexBody Please put non-ascii byte string hex(FF FE FD FC 00 FA F9 F8 F7 F6)
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @return the {@link ServiceCall} object
 */
public ServiceCall<Void> putByteAsync(ByteWrapper complexBody, final ServiceCallback<Void> serviceCallback) {
return ServiceCall.create(putByteWithServiceResponseAsync(complexBody), serviceCallback);
}
/**
 * Put complex types with byte properties.
 *
 * @param complexBody Please put non-ascii byte string hex(FF FE FD FC 00 FA F9 F8 F7 F6)
 * @return the observable to the response body ({@code Void})
 */
public Observable<Void> putByteAsync(ByteWrapper complexBody) {
return putByteWithServiceResponseAsync(complexBody).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
// Unwrap the ServiceResponse; a successful PUT carries no body.
return response.getBody();
}
});
}
/**
 * Put complex types with byte properties.
 *
 * @param complexBody Please put non-ascii byte string hex(FF FE FD FC 00 FA F9 F8 F7 F6)
 * @return the observable to the {@link ServiceResponse} object
 * @throws IllegalArgumentException thrown if {@code complexBody} is null or fails validation
 */
public Observable<ServiceResponse<Void>> putByteWithServiceResponseAsync(ByteWrapper complexBody) {
if (complexBody == null) {
throw new IllegalArgumentException("Parameter complexBody is required and cannot be null.");
}
Validator.validate(complexBody);
return service.putByte(complexBody)
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = putByteDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
// Surface deserialization/status errors through the Rx error channel.
return Observable.error(t);
}
}
});
}
/**
 * Maps the raw HTTP response for putByte: status 200 is success, any
 * other status is deserialized into an {@link ErrorException}.
 */
private ServiceResponse<Void> putByteDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
return new ServiceResponseBuilder<Void, ErrorException>(this.client.mapperAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.registerError(ErrorException.class)
.build(response);
}
}
| |
/*
* Copyright 2016 andryr
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.andryr.musicplayer.fragments;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import android.content.Context;
import android.os.Bundle;
import android.support.design.widget.TabLayout;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.graphics.drawable.DrawerArrowDrawable;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.andryr.musicplayer.MainActivity;
import com.andryr.musicplayer.R;
import com.andryr.musicplayer.utils.ToolbarDrawerToggle;
/**
* A simple {@link Fragment} subclass. Use the {@link LibraryFragment#newInstance}
* factory method to create an instance of this fragment.
*/
public class LibraryFragment extends BaseFragment {
/**
* The {@link ViewPager} that will host the section contents.
*/
ViewPager mViewPager;
/**
* The {@link android.support.v4.view.PagerAdapter} that will provide
* fragments for each of the sections. We use a {@link FragmentPagerAdapter}
* derivative, which will keep every loaded fragment in memory. If this
* becomes too memory intensive, it may be best to switch to a
* {@link android.support.v4.app.FragmentStatePagerAdapter}.
*/
SectionsPagerAdapter mSectionsPagerAdapter;
public static LibraryFragment newInstance() {
LibraryFragment fragment = new LibraryFragment();
return fragment;
}
public LibraryFragment() {
// Required empty public constructor
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_main, container,
false);
// Create the adapter that will return a fragment for each of the three
// primary sections of the activity.
mSectionsPagerAdapter = new SectionsPagerAdapter(
getChildFragmentManager());
// Set up the ViewPager with the sections adapter.
mViewPager = (ViewPager) rootView.findViewById(R.id.pager);
mViewPager.setAdapter(mSectionsPagerAdapter);
TabLayout tabLayout = (TabLayout) rootView.findViewById(R.id.tab_layout);
tabLayout.setupWithViewPager(mViewPager);
MainActivity activity = (MainActivity) getActivity();
Toolbar toolbar = (Toolbar) rootView.findViewById(R.id.toolbar);
DrawerLayout drawerLayout = activity.getDrawerLayout();
activity.setSupportActionBar(toolbar);
ToolbarDrawerToggle drawerToggle = new ToolbarDrawerToggle(activity,drawerLayout,toolbar, new int[]{Gravity.START});
drawerLayout.setDrawerListener(drawerToggle);
return rootView;
}
@Override
public void load() {
int fragmentCount = mSectionsPagerAdapter.getCount();
for(int pos = 0; pos < fragmentCount; pos++)
{
BaseFragment fragment = (BaseFragment) mSectionsPagerAdapter.getFragment(pos);
if(fragment != null)
{
Log.d("frag1", fragment.getClass().getCanonicalName());
fragment.load();
}
}
}
/**
* A {@link FragmentPagerAdapter} that returns a fragment corresponding to
* one of the sections/tabs/pages.
*/
public class SectionsPagerAdapter extends FragmentPagerAdapter {
private Map<Integer, String> mFragmentTags;
public SectionsPagerAdapter(FragmentManager fm) {
super(fm);
mFragmentTags = new HashMap<Integer, String>();
}
@Override
public Fragment getItem(int position) {
switch (position) {
case 0:
return SongListFragment.newInstance();
case 1:
return AlbumListFragment.newInstance(null);
case 2:
return ArtistListFragment.newInstance();
case 3:
return GenreListFragment.newInstance();
case 4:
return PlaylistListFragment.newInstance();
}
return null;
}
@Override
public Object instantiateItem(ViewGroup container, int position) {
Object obj = super.instantiateItem(container, position);
if (obj instanceof Fragment) {
Fragment f = (Fragment) obj;
String tag = f.getTag();
mFragmentTags.put(position, tag);
Log.d("fragtag", tag);
}
return obj;
}
public Fragment getFragment(int position) {
String tag = mFragmentTags.get(position);
if (tag == null)
return null;
return getChildFragmentManager().findFragmentByTag(tag);
}
@Override
public int getCount() {
return 5;
}
@Override
public CharSequence getPageTitle(int position) {
Locale l = Locale.getDefault();
switch (position) {
case 0:
return getString(R.string.titles).toUpperCase(l);
case 1:
return getString(R.string.albums).toUpperCase(l);
case 2:
return getString(R.string.artists).toUpperCase(l);
case 3:
return getString(R.string.genres).toUpperCase(l);
case 4:
return getString(R.string.playlists).toUpperCase(l);
}
return null;
}
}
}
| |
package es.tid.swagger.api;
import com.sun.jersey.multipart.FormDataParam;
import es.tid.swagger.api.*;
import es.tid.swagger.model.*;
import java.util.*;
import java.util.List;
import java.io.InputStream;
import com.sun.jersey.core.header.FormDataContentDisposition;
import com.sun.jersey.multipart.FormDataParam;
import javax.ws.rs.core.Response;
@javax.annotation.Generated(value = "class io.swagger.codegen.languages.JaxRSServerCodegen", date = "2015-10-29T10:48:30.233+01:00")
public abstract class ConfigApiService {
public abstract Response retrieveCalls()
throws NotFoundException;
public abstract Response updateCallsById(List<Call> calls)
throws NotFoundException;
public abstract Response createCallsById(List<Call> calls)
throws NotFoundException;
public abstract Response deleteCallsById()
throws NotFoundException;
public abstract Response retrieveCallsCallCallById(String callId)
throws NotFoundException;
public abstract Response updateCallsCallCallById(String callId,Call call)
throws NotFoundException;
public abstract Response createCallsCallCallById(String callId,Call call)
throws NotFoundException;
public abstract Response deleteCallsCallCallById(String callId)
throws NotFoundException;
public abstract Response retrieveCallsCallAEndAEndById(String callId)
throws NotFoundException;
public abstract Response updateCallsCallAEndAEndById(String callId,Endpoint aEnd)
throws NotFoundException;
public abstract Response createCallsCallAEndAEndById(String callId,Endpoint aEnd)
throws NotFoundException;
public abstract Response deleteCallsCallAEndAEndById(String callId)
throws NotFoundException;
public abstract Response retrieveCallsCallConnectionsConnectionsById(String callId,String connectionId)
throws NotFoundException;
public abstract Response retrieveCallsCallConnectionsAEndAEndById(String callId,String connectionId)
throws NotFoundException;
public abstract Response updateCallsCallConnectionsAEndAEndById(String callId,String connectionId,Endpoint aEnd)
throws NotFoundException;
public abstract Response createCallsCallConnectionsAEndAEndById(String callId,String connectionId,Endpoint aEnd)
throws NotFoundException;
public abstract Response deleteCallsCallConnectionsAEndAEndById(String callId,String connectionId)
throws NotFoundException;
// NOTE(review): generated-style abstract operations; each sub-resource gets a uniform
// retrieve/update/create/delete quartet. Implementations signal a missing resource by
// throwing NotFoundException. Method names suggest REST paths of the form
// calls/{callId}/connections/{connectionId}/... — confirm against the JAX-RS annotations
// in the concrete implementation (not in view here).

// Match rules of a connection that belongs to a call.
public abstract Response retrieveCallsCallConnectionsMatchMatchById(String callId,String connectionId)
throws NotFoundException;
public abstract Response updateCallsCallConnectionsMatchMatchById(String callId,String connectionId,MatchRules match)
throws NotFoundException;
public abstract Response createCallsCallConnectionsMatchMatchById(String callId,String connectionId,MatchRules match)
throws NotFoundException;
public abstract Response deleteCallsCallConnectionsMatchMatchById(String callId,String connectionId)
throws NotFoundException;
// Path of a connection that belongs to a call.
public abstract Response retrieveCallsCallConnectionsPathPathById(String callId,String connectionId)
throws NotFoundException;
public abstract Response updateCallsCallConnectionsPathPathById(String callId,String connectionId,PathType path)
throws NotFoundException;
public abstract Response createCallsCallConnectionsPathPathById(String callId,String connectionId,PathType path)
throws NotFoundException;
public abstract Response deleteCallsCallConnectionsPathPathById(String callId,String connectionId)
throws NotFoundException;
// Label on the path of a connection that belongs to a call.
public abstract Response retrieveCallsCallConnectionsPathLabelLabelById(String callId,String connectionId)
throws NotFoundException;
public abstract Response updateCallsCallConnectionsPathLabelLabelById(String callId,String connectionId,Label label)
throws NotFoundException;
public abstract Response createCallsCallConnectionsPathLabelLabelById(String callId,String connectionId,Label label)
throws NotFoundException;
public abstract Response deleteCallsCallConnectionsPathLabelLabelById(String callId,String connectionId)
throws NotFoundException;
// Topology components (Endpoint, keyed by endpointId) on the path of a connection in a call.
public abstract Response retrieveCallsCallConnectionsPathTopoComponentsTopoComponentsById(String callId,String connectionId,String endpointId)
throws NotFoundException;
public abstract Response updateCallsCallConnectionsPathTopoComponentsTopoComponentsById(String callId,String connectionId,String endpointId,Endpoint topoComponents)
throws NotFoundException;
public abstract Response createCallsCallConnectionsPathTopoComponentsTopoComponentsById(String callId,String connectionId,String endpointId,Endpoint topoComponents)
throws NotFoundException;
public abstract Response deleteCallsCallConnectionsPathTopoComponentsTopoComponentsById(String callId,String connectionId,String endpointId)
throws NotFoundException;
// Traffic parameters of a connection that belongs to a call.
public abstract Response retrieveCallsCallConnectionsTrafficParamsTrafficParamsById(String callId,String connectionId)
throws NotFoundException;
public abstract Response updateCallsCallConnectionsTrafficParamsTrafficParamsById(String callId,String connectionId,TrafficParams trafficParams)
throws NotFoundException;
public abstract Response createCallsCallConnectionsTrafficParamsTrafficParamsById(String callId,String connectionId,TrafficParams trafficParams)
throws NotFoundException;
public abstract Response deleteCallsCallConnectionsTrafficParamsTrafficParamsById(String callId,String connectionId)
throws NotFoundException;
// Transport layer of a connection that belongs to a call.
public abstract Response retrieveCallsCallConnectionsTransportLayerTransportLayerById(String callId,String connectionId)
throws NotFoundException;
public abstract Response updateCallsCallConnectionsTransportLayerTransportLayerById(String callId,String connectionId,TransportLayerType transportLayer)
throws NotFoundException;
public abstract Response createCallsCallConnectionsTransportLayerTransportLayerById(String callId,String connectionId,TransportLayerType transportLayer)
throws NotFoundException;
public abstract Response deleteCallsCallConnectionsTransportLayerTransportLayerById(String callId,String connectionId)
throws NotFoundException;
// Z-end endpoint of a connection that belongs to a call.
public abstract Response retrieveCallsCallConnectionsZEndZEndById(String callId,String connectionId)
throws NotFoundException;
public abstract Response updateCallsCallConnectionsZEndZEndById(String callId,String connectionId,Endpoint zEnd)
throws NotFoundException;
public abstract Response createCallsCallConnectionsZEndZEndById(String callId,String connectionId,Endpoint zEnd)
throws NotFoundException;
public abstract Response deleteCallsCallConnectionsZEndZEndById(String callId,String connectionId)
throws NotFoundException;
// Sub-resources directly under a call (addressed by callId only).

// Match rules of a call.
public abstract Response retrieveCallsCallMatchMatchById(String callId)
throws NotFoundException;
public abstract Response updateCallsCallMatchMatchById(String callId,MatchRules match)
throws NotFoundException;
public abstract Response createCallsCallMatchMatchById(String callId,MatchRules match)
throws NotFoundException;
public abstract Response deleteCallsCallMatchMatchById(String callId)
throws NotFoundException;
// Traffic parameters of a call.
public abstract Response retrieveCallsCallTrafficParamsTrafficParamsById(String callId)
throws NotFoundException;
public abstract Response updateCallsCallTrafficParamsTrafficParamsById(String callId,TrafficParams trafficParams)
throws NotFoundException;
public abstract Response createCallsCallTrafficParamsTrafficParamsById(String callId,TrafficParams trafficParams)
throws NotFoundException;
public abstract Response deleteCallsCallTrafficParamsTrafficParamsById(String callId)
throws NotFoundException;
// Transport layer of a call.
public abstract Response retrieveCallsCallTransportLayerTransportLayerById(String callId)
throws NotFoundException;
public abstract Response updateCallsCallTransportLayerTransportLayerById(String callId,TransportLayerType transportLayer)
throws NotFoundException;
public abstract Response createCallsCallTransportLayerTransportLayerById(String callId,TransportLayerType transportLayer)
throws NotFoundException;
public abstract Response deleteCallsCallTransportLayerTransportLayerById(String callId)
throws NotFoundException;
// Z-end endpoint of a call.
public abstract Response retrieveCallsCallZEndZEndById(String callId)
throws NotFoundException;
public abstract Response updateCallsCallZEndZEndById(String callId,Endpoint zEnd)
throws NotFoundException;
public abstract Response createCallsCallZEndZEndById(String callId,Endpoint zEnd)
throws NotFoundException;
public abstract Response deleteCallsCallZEndZEndById(String callId)
throws NotFoundException;
// Top-level connections collection and its per-connection sub-resources
// (addressed by connectionId, without a parent call).

// Whole connections collection.
public abstract Response retrieveConnections()
throws NotFoundException;
public abstract Response updateConnectionsById(List<Connection> connections)
throws NotFoundException;
public abstract Response createConnectionsById(List<Connection> connections)
throws NotFoundException;
public abstract Response deleteConnectionsById()
throws NotFoundException;
// A single connection.
public abstract Response retrieveConnectionsConnectionConnectionById(String connectionId)
throws NotFoundException;
public abstract Response updateConnectionsConnectionConnectionById(String connectionId,Connection connection)
throws NotFoundException;
public abstract Response createConnectionsConnectionConnectionById(String connectionId,Connection connection)
throws NotFoundException;
public abstract Response deleteConnectionsConnectionConnectionById(String connectionId)
throws NotFoundException;
// A-end endpoint of a connection.
public abstract Response retrieveConnectionsConnectionAEndAEndById(String connectionId)
throws NotFoundException;
public abstract Response updateConnectionsConnectionAEndAEndById(String connectionId,Endpoint aEnd)
throws NotFoundException;
public abstract Response createConnectionsConnectionAEndAEndById(String connectionId,Endpoint aEnd)
throws NotFoundException;
public abstract Response deleteConnectionsConnectionAEndAEndById(String connectionId)
throws NotFoundException;
// Match rules of a connection.
public abstract Response retrieveConnectionsConnectionMatchMatchById(String connectionId)
throws NotFoundException;
public abstract Response updateConnectionsConnectionMatchMatchById(String connectionId,MatchRules match)
throws NotFoundException;
public abstract Response createConnectionsConnectionMatchMatchById(String connectionId,MatchRules match)
throws NotFoundException;
public abstract Response deleteConnectionsConnectionMatchMatchById(String connectionId)
throws NotFoundException;
// Path of a connection.
public abstract Response retrieveConnectionsConnectionPathPathById(String connectionId)
throws NotFoundException;
public abstract Response updateConnectionsConnectionPathPathById(String connectionId,PathType path)
throws NotFoundException;
public abstract Response createConnectionsConnectionPathPathById(String connectionId,PathType path)
throws NotFoundException;
public abstract Response deleteConnectionsConnectionPathPathById(String connectionId)
throws NotFoundException;
// Label on the path of a connection.
public abstract Response retrieveConnectionsConnectionPathLabelLabelById(String connectionId)
throws NotFoundException;
public abstract Response updateConnectionsConnectionPathLabelLabelById(String connectionId,Label label)
throws NotFoundException;
public abstract Response createConnectionsConnectionPathLabelLabelById(String connectionId,Label label)
throws NotFoundException;
public abstract Response deleteConnectionsConnectionPathLabelLabelById(String connectionId)
throws NotFoundException;
// Topology components (Endpoint, keyed by endpointId) on the path of a connection.
public abstract Response retrieveConnectionsConnectionPathTopoComponentsTopoComponentsById(String connectionId,String endpointId)
throws NotFoundException;
public abstract Response updateConnectionsConnectionPathTopoComponentsTopoComponentsById(String connectionId,String endpointId,Endpoint topoComponents)
throws NotFoundException;
public abstract Response createConnectionsConnectionPathTopoComponentsTopoComponentsById(String connectionId,String endpointId,Endpoint topoComponents)
throws NotFoundException;
public abstract Response deleteConnectionsConnectionPathTopoComponentsTopoComponentsById(String connectionId,String endpointId)
throws NotFoundException;
// Traffic parameters of a connection.
public abstract Response retrieveConnectionsConnectionTrafficParamsTrafficParamsById(String connectionId)
throws NotFoundException;
public abstract Response updateConnectionsConnectionTrafficParamsTrafficParamsById(String connectionId,TrafficParams trafficParams)
throws NotFoundException;
public abstract Response createConnectionsConnectionTrafficParamsTrafficParamsById(String connectionId,TrafficParams trafficParams)
throws NotFoundException;
public abstract Response deleteConnectionsConnectionTrafficParamsTrafficParamsById(String connectionId)
throws NotFoundException;
// Transport layer of a connection.
public abstract Response retrieveConnectionsConnectionTransportLayerTransportLayerById(String connectionId)
throws NotFoundException;
public abstract Response updateConnectionsConnectionTransportLayerTransportLayerById(String connectionId,TransportLayerType transportLayer)
throws NotFoundException;
public abstract Response createConnectionsConnectionTransportLayerTransportLayerById(String connectionId,TransportLayerType transportLayer)
throws NotFoundException;
public abstract Response deleteConnectionsConnectionTransportLayerTransportLayerById(String connectionId)
throws NotFoundException;
// Z-end endpoint of a connection.
public abstract Response retrieveConnectionsConnectionZEndZEndById(String connectionId)
throws NotFoundException;
public abstract Response updateConnectionsConnectionZEndZEndById(String connectionId,Endpoint zEnd)
throws NotFoundException;
public abstract Response createConnectionsConnectionZEndZEndById(String connectionId,Endpoint zEnd)
throws NotFoundException;
public abstract Response deleteConnectionsConnectionZEndZEndById(String connectionId)
throws NotFoundException;
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static io.airlift.units.DataSize.Unit.BYTE;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
/**
 * Immutable snapshot of execution statistics for a single operator: call counts,
 * wall/CPU/user time per lifecycle phase (addInput, getOutput, finish), data sizes,
 * row ("position") counts, blocked time and memory reservation.
 * <p>
 * Instances for the same {@code operatorId} (e.g. from different drivers) can be
 * aggregated with {@link #add}. Thread-safe by immutability.
 */
@Immutable
public class OperatorStats
{
    private final int operatorId;
    private final String operatorType;

    private final long addInputCalls;
    private final Duration addInputWall;
    private final Duration addInputCpu;
    private final Duration addInputUser;
    private final DataSize inputDataSize;
    private final long inputPositions;

    private final long getOutputCalls;
    private final Duration getOutputWall;
    private final Duration getOutputCpu;
    private final Duration getOutputUser;
    private final DataSize outputDataSize;
    private final long outputPositions;

    private final Duration blockedWall;

    private final long finishCalls;
    private final Duration finishWall;
    private final Duration finishCpu;
    private final Duration finishUser;

    private final DataSize memoryReservation;

    // Operator-specific details; merged across instances in add() when it implements Mergeable.
    private final Object info;

    /**
     * @throws IllegalArgumentException if {@code operatorId}, {@code inputPositions}
     *         or {@code outputPositions} is negative
     * @throws NullPointerException if any required non-primitive argument is null
     *         ({@code info} may be null)
     */
    @JsonCreator
    public OperatorStats(
            @JsonProperty("operatorId") int operatorId,
            @JsonProperty("operatorType") String operatorType,
            @JsonProperty("addInputCalls") long addInputCalls,
            @JsonProperty("addInputWall") Duration addInputWall,
            @JsonProperty("addInputCpu") Duration addInputCpu,
            @JsonProperty("addInputUser") Duration addInputUser,
            @JsonProperty("inputDataSize") DataSize inputDataSize,
            @JsonProperty("inputPositions") long inputPositions,
            @JsonProperty("getOutputCalls") long getOutputCalls,
            @JsonProperty("getOutputWall") Duration getOutputWall,
            @JsonProperty("getOutputCpu") Duration getOutputCpu,
            @JsonProperty("getOutputUser") Duration getOutputUser,
            @JsonProperty("outputDataSize") DataSize outputDataSize,
            @JsonProperty("outputPositions") long outputPositions,
            @JsonProperty("blockedWall") Duration blockedWall,
            @JsonProperty("finishCalls") long finishCalls,
            @JsonProperty("finishWall") Duration finishWall,
            @JsonProperty("finishCpu") Duration finishCpu,
            @JsonProperty("finishUser") Duration finishUser,
            @JsonProperty("memoryReservation") DataSize memoryReservation,
            @JsonProperty("info") Object info)
    {
        checkArgument(operatorId >= 0, "operatorId is negative");
        this.operatorId = operatorId;
        this.operatorType = checkNotNull(operatorType, "operatorType is null");
        this.addInputCalls = addInputCalls;
        this.addInputWall = checkNotNull(addInputWall, "addInputWall is null");
        this.addInputCpu = checkNotNull(addInputCpu, "addInputCpu is null");
        this.addInputUser = checkNotNull(addInputUser, "addInputUser is null");
        this.inputDataSize = checkNotNull(inputDataSize, "inputDataSize is null");
        checkArgument(inputPositions >= 0, "inputPositions is negative");
        this.inputPositions = inputPositions;
        this.getOutputCalls = getOutputCalls;
        this.getOutputWall = checkNotNull(getOutputWall, "getOutputWall is null");
        this.getOutputCpu = checkNotNull(getOutputCpu, "getOutputCpu is null");
        this.getOutputUser = checkNotNull(getOutputUser, "getOutputUser is null");
        this.outputDataSize = checkNotNull(outputDataSize, "outputDataSize is null");
        checkArgument(outputPositions >= 0, "outputPositions is negative");
        this.outputPositions = outputPositions;
        this.blockedWall = checkNotNull(blockedWall, "blockedWall is null");
        this.finishCalls = finishCalls;
        this.finishWall = checkNotNull(finishWall, "finishWall is null");
        this.finishCpu = checkNotNull(finishCpu, "finishCpu is null");
        this.finishUser = checkNotNull(finishUser, "finishUser is null");
        this.memoryReservation = checkNotNull(memoryReservation, "memoryReservation is null");
        this.info = info;
    }

    @JsonProperty
    public int getOperatorId()
    {
        return operatorId;
    }

    @JsonProperty
    public String getOperatorType()
    {
        return operatorType;
    }

    @JsonProperty
    public long getAddInputCalls()
    {
        return addInputCalls;
    }

    @JsonProperty
    public Duration getAddInputWall()
    {
        return addInputWall;
    }

    @JsonProperty
    public Duration getAddInputCpu()
    {
        return addInputCpu;
    }

    @JsonProperty
    public Duration getAddInputUser()
    {
        return addInputUser;
    }

    @JsonProperty
    public DataSize getInputDataSize()
    {
        return inputDataSize;
    }

    @JsonProperty
    public long getInputPositions()
    {
        return inputPositions;
    }

    @JsonProperty
    public long getGetOutputCalls()
    {
        return getOutputCalls;
    }

    @JsonProperty
    public Duration getGetOutputWall()
    {
        return getOutputWall;
    }

    @JsonProperty
    public Duration getGetOutputCpu()
    {
        return getOutputCpu;
    }

    @JsonProperty
    public Duration getGetOutputUser()
    {
        return getOutputUser;
    }

    @JsonProperty
    public DataSize getOutputDataSize()
    {
        return outputDataSize;
    }

    @JsonProperty
    public long getOutputPositions()
    {
        return outputPositions;
    }

    @JsonProperty
    public Duration getBlockedWall()
    {
        return blockedWall;
    }

    @JsonProperty
    public long getFinishCalls()
    {
        return finishCalls;
    }

    @JsonProperty
    public Duration getFinishWall()
    {
        return finishWall;
    }

    @JsonProperty
    public Duration getFinishCpu()
    {
        return finishCpu;
    }

    @JsonProperty
    public Duration getFinishUser()
    {
        return finishUser;
    }

    @JsonProperty
    public DataSize getMemoryReservation()
    {
        return memoryReservation;
    }

    @Nullable
    @JsonProperty
    public Object getInfo()
    {
        return info;
    }

    /** Varargs convenience overload of {@link #add(Iterable)}. */
    public OperatorStats add(OperatorStats... operators)
    {
        return add(ImmutableList.copyOf(operators));
    }

    /**
     * Returns a new OperatorStats that is the sum of this instance and every
     * instance in {@code operators}. All inputs must share this operatorId.
     * Durations/sizes are accumulated in nanoseconds/bytes and converted back
     * to succinct units.
     *
     * @throws IllegalArgumentException if any operand has a different operatorId
     */
    public OperatorStats add(Iterable<OperatorStats> operators)
    {
        long addInputCalls = this.addInputCalls;
        long addInputWall = this.addInputWall.roundTo(NANOSECONDS);
        long addInputCpu = this.addInputCpu.roundTo(NANOSECONDS);
        long addInputUser = this.addInputUser.roundTo(NANOSECONDS);
        long inputDataSize = this.inputDataSize.toBytes();
        long inputPositions = this.inputPositions;
        long getOutputCalls = this.getOutputCalls;
        long getOutputWall = this.getOutputWall.roundTo(NANOSECONDS);
        long getOutputCpu = this.getOutputCpu.roundTo(NANOSECONDS);
        long getOutputUser = this.getOutputUser.roundTo(NANOSECONDS);
        long outputDataSize = this.outputDataSize.toBytes();
        long outputPositions = this.outputPositions;
        long blockedWall = this.blockedWall.roundTo(NANOSECONDS);
        long finishCalls = this.finishCalls;
        long finishWall = this.finishWall.roundTo(NANOSECONDS);
        long finishCpu = this.finishCpu.roundTo(NANOSECONDS);
        long finishUser = this.finishUser.roundTo(NANOSECONDS);
        long memoryReservation = this.memoryReservation.toBytes();
        // NOTE(review): if this.info is not Mergeable, the aggregated stats carry a
        // null info and the operands' infos are dropped — confirm this is intended.
        Mergeable<?> base = null;
        if (info instanceof Mergeable) {
            base = (Mergeable<?>) info;
        }
        for (OperatorStats operator : operators) {
            checkArgument(operator.getOperatorId() == operatorId, "Expected operatorId to be %s but was %s", operatorId, operator.getOperatorId());
            addInputCalls += operator.getAddInputCalls();
            addInputWall += operator.getAddInputWall().roundTo(NANOSECONDS);
            addInputCpu += operator.getAddInputCpu().roundTo(NANOSECONDS);
            addInputUser += operator.getAddInputUser().roundTo(NANOSECONDS);
            inputDataSize += operator.getInputDataSize().toBytes();
            inputPositions += operator.getInputPositions();
            getOutputCalls += operator.getGetOutputCalls();
            getOutputWall += operator.getGetOutputWall().roundTo(NANOSECONDS);
            getOutputCpu += operator.getGetOutputCpu().roundTo(NANOSECONDS);
            getOutputUser += operator.getGetOutputUser().roundTo(NANOSECONDS);
            outputDataSize += operator.getOutputDataSize().toBytes();
            outputPositions += operator.getOutputPositions();
            finishCalls += operator.getFinishCalls();
            finishWall += operator.getFinishWall().roundTo(NANOSECONDS);
            finishCpu += operator.getFinishCpu().roundTo(NANOSECONDS);
            finishUser += operator.getFinishUser().roundTo(NANOSECONDS);
            blockedWall += operator.getBlockedWall().roundTo(NANOSECONDS);
            memoryReservation += operator.getMemoryReservation().toBytes();
            Object info = operator.getInfo();
            // Only merge infos of the exact same concrete type as the accumulated base.
            if (base != null && info != null && base.getClass() == info.getClass()) {
                base = mergeInfo(base, info);
            }
        }
        return new OperatorStats(
                operatorId,
                operatorType,
                addInputCalls,
                new Duration(addInputWall, NANOSECONDS).convertToMostSuccinctTimeUnit(),
                new Duration(addInputCpu, NANOSECONDS).convertToMostSuccinctTimeUnit(),
                new Duration(addInputUser, NANOSECONDS).convertToMostSuccinctTimeUnit(),
                new DataSize(inputDataSize, BYTE).convertToMostSuccinctDataSize(),
                inputPositions,
                getOutputCalls,
                new Duration(getOutputWall, NANOSECONDS).convertToMostSuccinctTimeUnit(),
                new Duration(getOutputCpu, NANOSECONDS).convertToMostSuccinctTimeUnit(),
                new Duration(getOutputUser, NANOSECONDS).convertToMostSuccinctTimeUnit(),
                new DataSize(outputDataSize, BYTE).convertToMostSuccinctDataSize(),
                outputPositions,
                new Duration(blockedWall, NANOSECONDS).convertToMostSuccinctTimeUnit(),
                finishCalls,
                new Duration(finishWall, NANOSECONDS).convertToMostSuccinctTimeUnit(),
                new Duration(finishCpu, NANOSECONDS).convertToMostSuccinctTimeUnit(),
                new Duration(finishUser, NANOSECONDS).convertToMostSuccinctTimeUnit(),
                new DataSize(memoryReservation, BYTE).convertToMostSuccinctDataSize(),
                base);
    }

    /**
     * Merges two values that callers guarantee are the same Mergeable implementation
     * (enforced via the getClass() equality check in {@link #add(Iterable)}), which is
     * what makes the unchecked casts below safe.
     */
    @SuppressWarnings("unchecked")
    public static <T extends Mergeable<T>> Mergeable<?> mergeInfo(Object base, Object other)
    {
        return ((T) base).mergeWith(((T) other));
    }
}
| |
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.recyclerview.widget;
import android.util.Log;
import androidx.core.util.Pools;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Helper class that can enqueue and process adapter update operations.
* <p>
* To support animations, RecyclerView presents an older version of the Adapter to best represent
* previous state of the layout. Sometimes, this is not trivial when items are removed that were
* not laid out, in which case, RecyclerView has no way of providing that item's view for
* animations.
* <p>
* AdapterHelper creates an UpdateOp for each adapter data change then pre-processes them. During
* pre processing, AdapterHelper finds out which UpdateOps can be deferred to second layout pass
* and which cannot. For the UpdateOps that cannot be deferred, AdapterHelper will change them
* according to previously deferred operation and dispatch them before the first layout pass. It
* also takes care of updating deferred UpdateOps since order of operations is changed by this
* process.
* <p>
* Although operations may be forwarded to LayoutManager in different orders, the resulting
* data set is guaranteed to be consistent.
*/
class AdapterHelper implements OpReorderer.Callback {
// Position classification: no ViewHolder exists for the item (it was never laid out).
static final int POSITION_TYPE_INVISIBLE = 0;
// Position classification: a laid-out ViewHolder exists, or the item was newly added.
static final int POSITION_TYPE_NEW_OR_LAID_OUT = 1;
private static final boolean DEBUG = false;
private static final String TAG = "AHT";
// Recycling pool for UpdateOp instances to reduce allocation churn.
private Pools.Pool<UpdateOp> mUpdateOpPool = new Pools.SimplePool<UpdateOp>(UpdateOp.POOL_SIZE);
// Ops queued by the adapter, waiting for preProcess().
final ArrayList<UpdateOp> mPendingUpdates = new ArrayList<UpdateOp>();
// Ops deferred to the second layout pass (dispatched by consumePostponedUpdates()).
final ArrayList<UpdateOp> mPostponedList = new ArrayList<UpdateOp>();
final Callback mCallback;
// Optional hook run after each op is processed in preProcess().
Runnable mOnItemProcessedCallback;
final boolean mDisableRecycler;
final OpReorderer mOpReorderer;
// Bitmask of UpdateOp command types currently queued (see hasAnyUpdateTypes()).
private int mExistingUpdateTypes = 0;
// Convenience constructor: UpdateOp recycling stays enabled (disableRecycler = false).
AdapterHelper(Callback callback) {
this(callback, false);
}
// @param callback        receives dispatch and position-offset notifications
// @param disableRecycler presumably prevents returning UpdateOps to the pool;
//                        recycleUpdateOp is outside this view — confirm
AdapterHelper(Callback callback, boolean disableRecycler) {
mCallback = callback;
mDisableRecycler = disableRecycler;
mOpReorderer = new OpReorderer(this);
}
/**
 * Queues the given ops for the next pre-process pass.
 *
 * @return this helper, for call chaining
 */
AdapterHelper addUpdateOp(UpdateOp... ops) {
    for (UpdateOp queued : ops) {
        mPendingUpdates.add(queued);
    }
    return this;
}
// Drops every queued and postponed op (recycling them back to the pool) and
// clears the pending-update-type bitmask, returning the helper to an empty state.
void reset() {
recycleUpdateOpsAndClearList(mPendingUpdates);
recycleUpdateOpsAndClearList(mPostponedList);
mExistingUpdateTypes = 0;
}
/**
 * Reorders the pending ops, then applies each one via its type-specific handler,
 * running the optional per-item callback after every op. Iterates by index against
 * a pre-read size (as the original did) so list mutations from callbacks cannot
 * trigger a ConcurrentModificationException.
 */
void preProcess() {
    mOpReorderer.reorderOps(mPendingUpdates);
    final int pendingCount = mPendingUpdates.size();
    for (int index = 0; index < pendingCount; index++) {
        final UpdateOp pending = mPendingUpdates.get(index);
        if (pending.cmd == UpdateOp.ADD) {
            applyAdd(pending);
        } else if (pending.cmd == UpdateOp.REMOVE) {
            applyRemove(pending);
        } else if (pending.cmd == UpdateOp.UPDATE) {
            applyUpdate(pending);
        } else if (pending.cmd == UpdateOp.MOVE) {
            applyMove(pending);
        }
        if (mOnItemProcessedCallback != null) {
            mOnItemProcessedCallback.run();
        }
    }
    mPendingUpdates.clear();
}
/**
 * Dispatches every postponed op for the second layout pass, then recycles the
 * list and clears the pending-update-type bitmask.
 */
void consumePostponedUpdates() {
    final int postponedCount = mPostponedList.size();
    for (int index = 0; index < postponedCount; index++) {
        mCallback.onDispatchSecondPass(mPostponedList.get(index));
    }
    recycleUpdateOpsAndClearList(mPostponedList);
    mExistingUpdateTypes = 0;
}
// MOVE ops are always deferred to the second layout pass.
private void applyMove(UpdateOp op) {
// MOVE ops are pre-processed so at this point, we know that item is still in the adapter.
// otherwise, it would be converted into a REMOVE operation
postponeAndUpdateViewHolders(op);
}
// Splits a REMOVE op into maximal runs of same-classified positions: runs whose
// positions are backed by a ViewHolder (or are newly added) are postponed to the
// second layout pass, while runs with no ViewHolder are dispatched immediately.
// The position/tmpEnd adjustments after each run compensate for positions already
// consumed by a dispatched partial op; exact statement order is load-bearing here.
private void applyRemove(UpdateOp op) {
int tmpStart = op.positionStart;
int tmpCount = 0;
int tmpEnd = op.positionStart + op.itemCount;
// type tracks the classification of the current run; -1 means "no run started yet".
int type = -1;
for (int position = op.positionStart; position < tmpEnd; position++) {
boolean typeChanged = false;
RecyclerView.ViewHolder vh = mCallback.findViewHolder(position);
if (vh != null || canFindInPreLayout(position)) {
// If a ViewHolder exists or this is a newly added item, we can defer this update
// to post layout stage.
// * For existing ViewHolders, we'll fake its existence in the pre-layout phase.
// * For items that are added and removed in the same process cycle, they won't
// have any effect in pre-layout since their add ops are already deferred to
// post-layout pass.
if (type == POSITION_TYPE_INVISIBLE) {
// Looks like we have other updates that we cannot merge with this one.
// Create an UpdateOp and dispatch it to LayoutManager.
UpdateOp newOp = obtainUpdateOp(UpdateOp.REMOVE, tmpStart, tmpCount, null);
dispatchAndUpdateViewHolders(newOp);
typeChanged = true;
}
type = POSITION_TYPE_NEW_OR_LAID_OUT;
} else {
// This update cannot be recovered because we don't have a ViewHolder representing
// this position. Instead, post it to LayoutManager immediately
if (type == POSITION_TYPE_NEW_OR_LAID_OUT) {
// Looks like we have other updates that we cannot merge with this one.
// Create UpdateOp op and dispatch it to LayoutManager.
UpdateOp newOp = obtainUpdateOp(UpdateOp.REMOVE, tmpStart, tmpCount, null);
postponeAndUpdateViewHolders(newOp);
typeChanged = true;
}
type = POSITION_TYPE_INVISIBLE;
}
if (typeChanged) {
// A partial op was just emitted; rewind by the emitted count since those
// positions no longer exist, and start a fresh run at the current item.
position -= tmpCount; // also equal to tmpStart
tmpEnd -= tmpCount;
tmpCount = 1;
} else {
tmpCount++;
}
}
if (tmpCount != op.itemCount) { // all 1 effect
// The remaining run is shorter than the original op; replace op with a trimmed one.
recycleUpdateOp(op);
op = obtainUpdateOp(UpdateOp.REMOVE, tmpStart, tmpCount, null);
}
// Route the final run the same way runs were routed inside the loop.
if (type == POSITION_TYPE_INVISIBLE) {
dispatchAndUpdateViewHolders(op);
} else {
postponeAndUpdateViewHolders(op);
}
}
// Splits an UPDATE op into maximal runs: positions backed by a ViewHolder (or newly
// added) are postponed to the second pass, positions with no ViewHolder are dispatched
// immediately. Unlike applyRemove, UPDATE does not shift positions, so runs simply
// restart at the current position when the classification flips.
private void applyUpdate(UpdateOp op) {
int tmpStart = op.positionStart;
int tmpCount = 0;
int tmpEnd = op.positionStart + op.itemCount;
// type tracks the classification of the current run; -1 means "no run started yet".
int type = -1;
for (int position = op.positionStart; position < tmpEnd; position++) {
RecyclerView.ViewHolder vh = mCallback.findViewHolder(position);
if (vh != null || canFindInPreLayout(position)) { // deferred
if (type == POSITION_TYPE_INVISIBLE) {
// Classification flipped: flush the invisible run immediately.
UpdateOp newOp = obtainUpdateOp(UpdateOp.UPDATE, tmpStart, tmpCount,
op.payload);
dispatchAndUpdateViewHolders(newOp);
tmpCount = 0;
tmpStart = position;
}
type = POSITION_TYPE_NEW_OR_LAID_OUT;
} else { // applied
if (type == POSITION_TYPE_NEW_OR_LAID_OUT) {
// Classification flipped: postpone the laid-out run to the second pass.
UpdateOp newOp = obtainUpdateOp(UpdateOp.UPDATE, tmpStart, tmpCount,
op.payload);
postponeAndUpdateViewHolders(newOp);
tmpCount = 0;
tmpStart = position;
}
type = POSITION_TYPE_INVISIBLE;
}
tmpCount++;
}
if (tmpCount != op.itemCount) { // all 1 effect
// The remaining run is shorter than the original op; replace op with a trimmed
// one that preserves the payload.
Object payload = op.payload;
recycleUpdateOp(op);
op = obtainUpdateOp(UpdateOp.UPDATE, tmpStart, tmpCount, payload);
}
// Route the final run the same way runs were routed inside the loop.
if (type == POSITION_TYPE_INVISIBLE) {
dispatchAndUpdateViewHolders(op);
} else {
postponeAndUpdateViewHolders(op);
}
}
// Dispatches a REMOVE or UPDATE op for the first layout pass. Because postponed ops
// have already shifted positions, each position of the incoming op is translated via
// updatePositionWithPostponed (which also reverts the postponed ops' effect on it);
// translated positions that stay contiguous are batched into one dispatched op,
// otherwise partial ops are emitted one run at a time.
private void dispatchAndUpdateViewHolders(UpdateOp op) {
// tricky part.
// traverse all postpones and revert their changes on this op if necessary, apply updated
// dispatch to them since now they are after this op.
if (op.cmd == UpdateOp.ADD || op.cmd == UpdateOp.MOVE) {
throw new IllegalArgumentException("should not dispatch add or move for pre layout");
}
if (DEBUG) {
Log.d(TAG, "dispatch (pre)" + op);
Log.d(TAG, "postponed state before:");
for (UpdateOp updateOp : mPostponedList) {
Log.d(TAG, updateOp.toString());
}
Log.d(TAG, "----");
}
// handle each pos 1 by 1 to ensure continuity. If it breaks, dispatch partial
// TODO Since move ops are pushed to end, we should not need this anymore
int tmpStart = updatePositionWithPostponed(op.positionStart, op.cmd);
if (DEBUG) {
Log.d(TAG, "pos:" + op.positionStart + ",updatedPos:" + tmpStart);
}
int tmpCnt = 1;
int offsetPositionForPartial = op.positionStart;
// REMOVE consumes positions (each removal happens at the same index), UPDATE does not;
// the multiplier selects which raw position to translate next.
final int positionMultiplier;
switch (op.cmd) {
case UpdateOp.UPDATE:
positionMultiplier = 1;
break;
case UpdateOp.REMOVE:
positionMultiplier = 0;
break;
default:
throw new IllegalArgumentException("op should be remove or update." + op);
}
for (int p = 1; p < op.itemCount; p++) {
final int pos = op.positionStart + (positionMultiplier * p);
int updatedPos = updatePositionWithPostponed(pos, op.cmd);
if (DEBUG) {
Log.d(TAG, "pos:" + pos + ",updatedPos:" + updatedPos);
}
boolean continuous = false;
switch (op.cmd) {
case UpdateOp.UPDATE:
continuous = updatedPos == tmpStart + 1;
break;
case UpdateOp.REMOVE:
continuous = updatedPos == tmpStart;
break;
}
if (continuous) {
tmpCnt++;
} else {
// need to dispatch this separately
UpdateOp tmp = obtainUpdateOp(op.cmd, tmpStart, tmpCnt, op.payload);
if (DEBUG) {
Log.d(TAG, "need to dispatch separately " + tmp);
}
dispatchFirstPassAndUpdateViewHolders(tmp, offsetPositionForPartial);
recycleUpdateOp(tmp);
if (op.cmd == UpdateOp.UPDATE) {
offsetPositionForPartial += tmpCnt;
}
tmpStart = updatedPos; // need to remove previously dispatched
tmpCnt = 1;
}
}
// Flush the final run (there is always at least one position accumulated).
Object payload = op.payload;
recycleUpdateOp(op);
if (tmpCnt > 0) {
UpdateOp tmp = obtainUpdateOp(op.cmd, tmpStart, tmpCnt, payload);
if (DEBUG) {
Log.d(TAG, "dispatching:" + tmp);
}
dispatchFirstPassAndUpdateViewHolders(tmp, offsetPositionForPartial);
recycleUpdateOp(tmp);
}
if (DEBUG) {
Log.d(TAG, "post dispatch");
Log.d(TAG, "postponed state after:");
for (UpdateOp updateOp : mPostponedList) {
Log.d(TAG, updateOp.toString());
}
Log.d(TAG, "----");
}
}
/**
 * Immediately dispatches {@code op} for the first layout pass and applies the
 * matching position/holder bookkeeping. Only REMOVE and UPDATE are legal here;
 * ADD and MOVE are always deferred to the second pass.
 *
 * @param offsetStart the original (untranslated) start position used for the
 *                    bookkeeping callbacks
 */
void dispatchFirstPassAndUpdateViewHolders(UpdateOp op, int offsetStart) {
    mCallback.onDispatchFirstPass(op);
    if (op.cmd == UpdateOp.REMOVE) {
        mCallback.offsetPositionsForRemovingInvisible(offsetStart, op.itemCount);
    } else if (op.cmd == UpdateOp.UPDATE) {
        mCallback.markViewHoldersUpdated(offsetStart, op.itemCount, op.payload);
    } else {
        throw new IllegalArgumentException("only remove and update ops can be dispatched"
                + " in first pass");
    }
}
/**
 * Translates {@code pos} through every postponed op (newest first) so it refers to
 * the pre-layout coordinate space, while simultaneously adjusting the postponed ops
 * to account for the ADD/REMOVE ({@code cmd}) that is about to be dispatched before
 * them. Postponed ops that become no-ops after adjustment are removed and recycled.
 *
 * Fix: corrected the "dispath" typo in the debug log message; all logic is unchanged.
 *
 * @param pos the position to translate
 * @param cmd the command (UpdateOp.ADD or UpdateOp.REMOVE) being dispatched now
 * @return the translated position
 */
private int updatePositionWithPostponed(int pos, int cmd) {
    final int count = mPostponedList.size();
    for (int i = count - 1; i >= 0; i--) {
        UpdateOp postponed = mPostponedList.get(i);
        if (postponed.cmd == UpdateOp.MOVE) {
            // For MOVE, positionStart is "from" and itemCount is "to"; normalize to a range.
            int start, end;
            if (postponed.positionStart < postponed.itemCount) {
                start = postponed.positionStart;
                end = postponed.itemCount;
            } else {
                start = postponed.itemCount;
                end = postponed.positionStart;
            }
            if (pos >= start && pos <= end) {
                //i'm affected
                if (start == postponed.positionStart) {
                    if (cmd == UpdateOp.ADD) {
                        postponed.itemCount++;
                    } else if (cmd == UpdateOp.REMOVE) {
                        postponed.itemCount--;
                    }
                    // op moved to left, move it right to revert
                    pos++;
                } else {
                    if (cmd == UpdateOp.ADD) {
                        postponed.positionStart++;
                    } else if (cmd == UpdateOp.REMOVE) {
                        postponed.positionStart--;
                    }
                    // op was moved right, move left to revert
                    pos--;
                }
            } else if (pos < postponed.positionStart) {
                // postponed MV is outside the dispatched OP. if it is before, offset
                if (cmd == UpdateOp.ADD) {
                    postponed.positionStart++;
                    postponed.itemCount++;
                } else if (cmd == UpdateOp.REMOVE) {
                    postponed.positionStart--;
                    postponed.itemCount--;
                }
            }
        } else {
            if (postponed.positionStart <= pos) {
                // Revert the postponed op's shift on pos (ADD pushed it right, REMOVE left).
                if (postponed.cmd == UpdateOp.ADD) {
                    pos -= postponed.itemCount;
                } else if (postponed.cmd == UpdateOp.REMOVE) {
                    pos += postponed.itemCount;
                }
            } else {
                // Postponed op starts after pos: shift it for the op being dispatched now.
                if (cmd == UpdateOp.ADD) {
                    postponed.positionStart++;
                } else if (cmd == UpdateOp.REMOVE) {
                    postponed.positionStart--;
                }
            }
        }
        if (DEBUG) {
            // was "dispath" — typo fixed in the debug message
            Log.d(TAG, "dispatch (step" + i + ")");
            Log.d(TAG, "postponed state:" + i + ", pos:" + pos);
            for (UpdateOp updateOp : mPostponedList) {
                Log.d(TAG, updateOp.toString());
            }
            Log.d(TAG, "----");
        }
    }
    // Drop postponed ops that the adjustments above turned into no-ops.
    for (int i = mPostponedList.size() - 1; i >= 0; i--) {
        UpdateOp op = mPostponedList.get(i);
        if (op.cmd == UpdateOp.MOVE) {
            if (op.itemCount == op.positionStart || op.itemCount < 0) {
                mPostponedList.remove(i);
                recycleUpdateOp(op);
            }
        } else if (op.itemCount <= 0) {
            mPostponedList.remove(i);
            recycleUpdateOp(op);
        }
    }
    return pos;
}
/**
 * Returns true when {@code position} corresponds to an item that a postponed
 * MOVE target or a postponed ADD would place there — i.e. the item will exist
 * in the pre-layout coordinate space even without a current ViewHolder.
 */
private boolean canFindInPreLayout(int position) {
    final int postponedCount = mPostponedList.size();
    for (int idx = 0; idx < postponedCount; idx++) {
        final UpdateOp postponed = mPostponedList.get(idx);
        if (postponed.cmd == UpdateOp.MOVE) {
            // For MOVE, itemCount holds the target position; project it through
            // the ops queued after this one.
            if (findPositionOffset(postponed.itemCount, idx + 1) == position) {
                return true;
            }
        } else if (postponed.cmd == UpdateOp.ADD) {
            // TODO optimize.
            final int rangeEnd = postponed.positionStart + postponed.itemCount;
            for (int added = postponed.positionStart; added < rangeEnd; added++) {
                if (findPositionOffset(added, idx + 1) == position) {
                    return true;
                }
            }
        }
    }
    return false;
}
// ADD ops are always deferred to the second layout pass.
private void applyAdd(UpdateOp op) {
postponeAndUpdateViewHolders(op);
}
/**
 * Defers {@code op} to the second layout pass and immediately applies its
 * position/holder side effects via the callback so that subsequent ops see
 * consistent positions.
 */
private void postponeAndUpdateViewHolders(UpdateOp op) {
    if (DEBUG) {
        Log.d(TAG, "postponing " + op);
    }
    mPostponedList.add(op);
    if (op.cmd == UpdateOp.ADD) {
        mCallback.offsetPositionsForAdd(op.positionStart, op.itemCount);
    } else if (op.cmd == UpdateOp.MOVE) {
        mCallback.offsetPositionsForMove(op.positionStart, op.itemCount);
    } else if (op.cmd == UpdateOp.REMOVE) {
        mCallback.offsetPositionsForRemovingLaidOutOrNewView(op.positionStart,
                op.itemCount);
    } else if (op.cmd == UpdateOp.UPDATE) {
        mCallback.markViewHoldersUpdated(op.positionStart, op.itemCount, op.payload);
    } else {
        throw new IllegalArgumentException("Unknown update op type for " + op);
    }
}
/**
 * @return true when at least one adapter update is queued for pre-processing.
 *         (Idiom fix: {@code !isEmpty()} instead of {@code size() > 0}.)
 */
boolean hasPendingUpdates() {
    return !mPendingUpdates.isEmpty();
}
// @param updateTypes bitmask of UpdateOp command flags to test for
// @return true if any op of the given type(s) has been queued since the last reset
boolean hasAnyUpdateTypes(int updateTypes) {
return (mExistingUpdateTypes & updateTypes) != 0;
}
    // Convenience overload: rebase against the entire postponed list.
    int findPositionOffset(int position) {
        return findPositionOffset(position, 0);
    }
    /**
     * Maps a pre-layout position to its post-layout position by replaying postponed ops
     * starting at {@code firstPostponedItem}. Returns -1 if the item was removed by a
     * postponed REMOVE.
     */
    int findPositionOffset(int position, int firstPostponedItem) {
        int count = mPostponedList.size();
        for (int i = firstPostponedItem; i < count; ++i) {
            UpdateOp op = mPostponedList.get(i);
            if (op.cmd == UpdateOp.MOVE) {
                // A MOVE is modeled as remove-at-positionStart + add-at-itemCount (target).
                if (op.positionStart == position) {
                    position = op.itemCount;
                } else {
                    if (op.positionStart < position) {
                        position--; // like a remove
                    }
                    if (op.itemCount <= position) {
                        position++; // like an add
                    }
                }
            } else if (op.positionStart <= position) {
                if (op.cmd == UpdateOp.REMOVE) {
                    if (position < op.positionStart + op.itemCount) {
                        // The tracked item itself was removed.
                        return -1;
                    }
                    position -= op.itemCount;
                } else if (op.cmd == UpdateOp.ADD) {
                    position += op.itemCount;
                }
            }
        }
        return position;
    }
/**
* @return True if updates should be processed.
*/
boolean onItemRangeChanged(int positionStart, int itemCount, Object payload) {
if (itemCount < 1) {
return false;
}
mPendingUpdates.add(obtainUpdateOp(UpdateOp.UPDATE, positionStart, itemCount, payload));
mExistingUpdateTypes |= UpdateOp.UPDATE;
return mPendingUpdates.size() == 1;
}
/**
* @return True if updates should be processed.
*/
boolean onItemRangeInserted(int positionStart, int itemCount) {
if (itemCount < 1) {
return false;
}
mPendingUpdates.add(obtainUpdateOp(UpdateOp.ADD, positionStart, itemCount, null));
mExistingUpdateTypes |= UpdateOp.ADD;
return mPendingUpdates.size() == 1;
}
/**
* @return True if updates should be processed.
*/
boolean onItemRangeRemoved(int positionStart, int itemCount) {
if (itemCount < 1) {
return false;
}
mPendingUpdates.add(obtainUpdateOp(UpdateOp.REMOVE, positionStart, itemCount, null));
mExistingUpdateTypes |= UpdateOp.REMOVE;
return mPendingUpdates.size() == 1;
}
/**
* @return True if updates should be processed.
*/
boolean onItemRangeMoved(int from, int to, int itemCount) {
if (from == to) {
return false; // no-op
}
if (itemCount != 1) {
throw new IllegalArgumentException("Moving more than 1 item is not supported yet");
}
mPendingUpdates.add(obtainUpdateOp(UpdateOp.MOVE, from, to, null));
mExistingUpdateTypes |= UpdateOp.MOVE;
return mPendingUpdates.size() == 1;
}
/**
* Skips pre-processing and applies all updates in one pass.
*/
void consumeUpdatesInOnePass() {
// we still consume postponed updates (if there is) in case there was a pre-process call
// w/o a matching consumePostponedUpdates.
consumePostponedUpdates();
final int count = mPendingUpdates.size();
for (int i = 0; i < count; i++) {
UpdateOp op = mPendingUpdates.get(i);
switch (op.cmd) {
case UpdateOp.ADD:
mCallback.onDispatchSecondPass(op);
mCallback.offsetPositionsForAdd(op.positionStart, op.itemCount);
break;
case UpdateOp.REMOVE:
mCallback.onDispatchSecondPass(op);
mCallback.offsetPositionsForRemovingInvisible(op.positionStart, op.itemCount);
break;
case UpdateOp.UPDATE:
mCallback.onDispatchSecondPass(op);
mCallback.markViewHoldersUpdated(op.positionStart, op.itemCount, op.payload);
break;
case UpdateOp.MOVE:
mCallback.onDispatchSecondPass(op);
mCallback.offsetPositionsForMove(op.positionStart, op.itemCount);
break;
}
if (mOnItemProcessedCallback != null) {
mOnItemProcessedCallback.run();
}
}
recycleUpdateOpsAndClearList(mPendingUpdates);
mExistingUpdateTypes = 0;
}
    /**
     * Replays the pending (not yet dispatched) updates over {@code position} and returns
     * where that item will end up, or {@link RecyclerView#NO_POSITION} if a pending
     * REMOVE deletes it.
     */
    public int applyPendingUpdatesToPosition(int position) {
        final int size = mPendingUpdates.size();
        for (int i = 0; i < size; i++) {
            UpdateOp op = mPendingUpdates.get(i);
            switch (op.cmd) {
                case UpdateOp.ADD:
                    if (op.positionStart <= position) {
                        position += op.itemCount;
                    }
                    break;
                case UpdateOp.REMOVE:
                    if (op.positionStart <= position) {
                        final int end = op.positionStart + op.itemCount;
                        if (end > position) {
                            // The tracked item itself is removed.
                            return RecyclerView.NO_POSITION;
                        }
                        position -= op.itemCount;
                    }
                    break;
                case UpdateOp.MOVE:
                    // MOVE behaves as a remove at positionStart plus an add at itemCount.
                    if (op.positionStart == position) {
                        position = op.itemCount; //position end
                    } else {
                        if (op.positionStart < position) {
                            position -= 1;
                        }
                        if (op.itemCount <= position) {
                            position += 1;
                        }
                    }
                    break;
            }
        }
        return position;
    }
    // NOTE(review): despite the name, this returns true only when BOTH the postponed
    // list AND the pending list are non-empty ("&&", not "||") — confirm this is
    // intentional before relying on it as a general "any updates?" check.
    boolean hasUpdates() {
        return !mPostponedList.isEmpty() && !mPendingUpdates.isEmpty();
    }
/**
* Queued operation to happen when child views are updated.
*/
static class UpdateOp {
static final int ADD = 1;
static final int REMOVE = 1 << 1;
static final int UPDATE = 1 << 2;
static final int MOVE = 1 << 3;
static final int POOL_SIZE = 30;
int cmd;
int positionStart;
Object payload;
// holds the target position if this is a MOVE
int itemCount;
UpdateOp(int cmd, int positionStart, int itemCount, Object payload) {
this.cmd = cmd;
this.positionStart = positionStart;
this.itemCount = itemCount;
this.payload = payload;
}
String cmdToString() {
switch (cmd) {
case ADD:
return "add";
case REMOVE:
return "rm";
case UPDATE:
return "up";
case MOVE:
return "mv";
}
return "??";
}
@Override
public String toString() {
return Integer.toHexString(System.identityHashCode(this))
+ "[" + cmdToString() + ",s:" + positionStart + "c:" + itemCount
+ ",p:" + payload + "]";
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
UpdateOp op = (UpdateOp) o;
if (cmd != op.cmd) {
return false;
}
if (cmd == MOVE && Math.abs(itemCount - positionStart) == 1) {
// reverse of this is also true
if (itemCount == op.positionStart && positionStart == op.itemCount) {
return true;
}
}
if (itemCount != op.itemCount) {
return false;
}
if (positionStart != op.positionStart) {
return false;
}
if (payload != null) {
if (!payload.equals(op.payload)) {
return false;
}
} else if (op.payload != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = cmd;
result = 31 * result + positionStart;
result = 31 * result + itemCount;
return result;
}
}
@Override
public UpdateOp obtainUpdateOp(int cmd, int positionStart, int itemCount, Object payload) {
UpdateOp op = mUpdateOpPool.acquire();
if (op == null) {
op = new UpdateOp(cmd, positionStart, itemCount, payload);
} else {
op.cmd = cmd;
op.positionStart = positionStart;
op.itemCount = itemCount;
op.payload = payload;
}
return op;
}
@Override
public void recycleUpdateOp(UpdateOp op) {
if (!mDisableRecycler) {
op.payload = null;
mUpdateOpPool.release(op);
}
}
void recycleUpdateOpsAndClearList(List<UpdateOp> ops) {
final int count = ops.size();
for (int i = 0; i < count; i++) {
recycleUpdateOp(ops.get(i));
}
ops.clear();
}
    /**
     * Contract between AdapterHelper and RecyclerView.
     */
    interface Callback {
        // Returns the ViewHolder currently bound to the given position, if any.
        RecyclerView.ViewHolder findViewHolder(int position);
        // Offsets positions for a REMOVE whose views are not laid out.
        void offsetPositionsForRemovingInvisible(int positionStart, int itemCount);
        // Offsets positions for a REMOVE whose views are laid out (or newly added).
        void offsetPositionsForRemovingLaidOutOrNewView(int positionStart, int itemCount);
        // Marks the ViewHolders in the range as updated, carrying the change payload.
        void markViewHoldersUpdated(int positionStart, int itemCount, Object payloads);
        // Invoked for ops dispatched during pre-layout (first pass).
        void onDispatchFirstPass(UpdateOp updateOp);
        // Invoked for ops dispatched during post-layout (second pass).
        void onDispatchSecondPass(UpdateOp updateOp);
        void offsetPositionsForAdd(int positionStart, int itemCount);
        void offsetPositionsForMove(int from, int to);
    }
}
| |
/**
* Copyright 2009-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ibatis.submitted.cache;
import java.io.Reader;
import java.sql.Connection;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.jdbc.ScriptRunner;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
// issue #524
// Verifies second-level (namespace) cache behavior across commit, rollback,
// autocommit, flushCache statement options, and cache-ref'd namespaces.
public class CacheTest {
  private static SqlSessionFactory sqlSessionFactory;
  @Before
  public void setUp() throws Exception {
    // create a SqlSessionFactory
    Reader reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/cache/mybatis-config.xml");
    sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
    reader.close();
    // populate in-memory database
    SqlSession session = sqlSessionFactory.openSession();
    Connection conn = session.getConnection();
    reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/cache/CreateDB.sql");
    ScriptRunner runner = new ScriptRunner(conn);
    runner.setLogWriter(null);
    runner.runScript(reader);
    reader.close();
    session.close();
  }
  /*
   * Test Plan:
   * 1) SqlSession 1 executes "select * from A".
   * 2) SqlSession 1 closes.
   * 3) SqlSession 2 executes "delete from A where id = 1"
   * 4) SqlSession 2 executes "select * from A"
   *
   * Assert:
   * Step 4 returns 1 row. (This case fails when caching is enabled.)
   */
  @Test
  public void testplan1() {
    SqlSession sqlSession1 = sqlSessionFactory.openSession(false);
    try {
      PersonMapper pm = sqlSession1.getMapper(PersonMapper.class);
      Assert.assertEquals(2, pm.findAll().size());
    }
    finally {
      sqlSession1.close();
    }
    SqlSession sqlSession2 = sqlSessionFactory.openSession(false);
    try {
      PersonMapper pm = sqlSession2.getMapper(PersonMapper.class);
      pm.delete(1);
      // Same session must see its own uncommitted delete, not the cached result.
      Assert.assertEquals(1, pm.findAll().size());
    }
    finally {
      sqlSession2.commit();
      sqlSession2.close();
    }
  }
  /*
   * Test Plan:
   * 1) SqlSession 1 executes "select * from A".
   * 2) SqlSession 1 closes.
   * 3) SqlSession 2 executes "delete from A where id = 1"
   * 4) SqlSession 2 executes "select * from A"
   * 5) SqlSession 2 rollback
   * 6) SqlSession 3 executes "select * from A"
   *
   * Assert:
   * Step 6 returns 2 rows.
   */
  @Test
  public void testplan2() {
    SqlSession sqlSession1 = sqlSessionFactory.openSession(false);
    try {
      PersonMapper pm = sqlSession1.getMapper(PersonMapper.class);
      Assert.assertEquals(2, pm.findAll().size());
    }
    finally {
      sqlSession1.close();
    }
    SqlSession sqlSession2 = sqlSessionFactory.openSession(false);
    try {
      PersonMapper pm = sqlSession2.getMapper(PersonMapper.class);
      pm.delete(1);
    }
    finally {
      // Rolled back, so the cache must not have been flushed with stale data.
      sqlSession2.rollback();
      sqlSession2.close();
    }
    SqlSession sqlSession3 = sqlSessionFactory.openSession(false);
    try {
      PersonMapper pm = sqlSession3.getMapper(PersonMapper.class);
      Assert.assertEquals(2, pm.findAll().size());
    }
    finally {
      sqlSession3.close();
    }
  }
  /*
   * Test Plan with Autocommit on:
   * 1) SqlSession 1 executes "select * from A".
   * 2) SqlSession 1 closes.
   * 3) SqlSession 2 executes "delete from A where id = 1"
   * 4) SqlSession 2 closes.
   * 5) SqlSession 3 executes "select * from A".
   * 6) SqlSession 3 closes.
   *
   * Assert:
   * Step 5 returns 1 row.
   */
  @Test
  public void testplan3() {
    SqlSession sqlSession1 = sqlSessionFactory.openSession(true);
    try {
      PersonMapper pm = sqlSession1.getMapper(PersonMapper.class);
      Assert.assertEquals(2, pm.findAll().size());
    }
    finally {
      sqlSession1.close();
    }
    SqlSession sqlSession2 = sqlSessionFactory.openSession(true);
    try {
      PersonMapper pm = sqlSession2.getMapper(PersonMapper.class);
      pm.delete(1);
    }
    finally {
      sqlSession2.close();
    }
    SqlSession sqlSession3 = sqlSessionFactory.openSession(true);
    try {
      PersonMapper pm = sqlSession3.getMapper(PersonMapper.class);
      Assert.assertEquals(1, pm.findAll().size());
    }
    finally {
      sqlSession3.close();
    }
  }
  /*-
   * Test case for #405
   *
   * Test Plan with Autocommit on:
   * 1) SqlSession 1 executes "select * from A".
   * 2) SqlSession 1 closes.
   * 3) SqlSession 2 executes "insert into person (id, firstname, lastname) values (3, hello, world)"
   * 4) SqlSession 2 closes.
   * 5) SqlSession 3 executes "select * from A".
   * 6) SqlSession 3 closes.
   *
   * Assert:
   * Step 5 returns 3 row.
   */
  @Test
  public void shouldInsertWithOptionsFlushesCache() {
    SqlSession sqlSession1 = sqlSessionFactory.openSession(true);
    try {
      PersonMapper pm = sqlSession1.getMapper(PersonMapper.class);
      Assert.assertEquals(2, pm.findAll().size());
    } finally {
      sqlSession1.close();
    }
    SqlSession sqlSession2 = sqlSessionFactory.openSession(true);
    try {
      PersonMapper pm = sqlSession2.getMapper(PersonMapper.class);
      Person p = new Person(3, "hello", "world");
      pm.createWithOptions(p);
    } finally {
      sqlSession2.close();
    }
    SqlSession sqlSession3 = sqlSessionFactory.openSession(true);
    try {
      PersonMapper pm = sqlSession3.getMapper(PersonMapper.class);
      Assert.assertEquals(3, pm.findAll().size());
    } finally {
      sqlSession3.close();
    }
  }
  /*-
   * Test Plan with Autocommit on:
   * 1) SqlSession 1 executes select to cache result
   * 2) SqlSession 1 closes.
   * 3) SqlSession 2 executes insert without flushing cache
   * 4) SqlSession 2 closes.
   * 5) SqlSession 3 executes select (flushCache = false)
   * 6) SqlSession 3 closes.
   * 7) SqlSession 4 executes select (flushCache = true)
   * 8) SqlSession 4 closes.
   *
   * Assert:
   * Step 5 returns 2 row.
   * Step 7 returns 3 row.
   */
  @Test
  public void shouldApplyFlushCacheOptions() {
    SqlSession sqlSession1 = sqlSessionFactory.openSession(true);
    try {
      PersonMapper pm = sqlSession1.getMapper(PersonMapper.class);
      Assert.assertEquals(2, pm.findAll().size());
    } finally {
      sqlSession1.close();
    }
    SqlSession sqlSession2 = sqlSessionFactory.openSession(true);
    try {
      PersonMapper pm = sqlSession2.getMapper(PersonMapper.class);
      Person p = new Person(3, "hello", "world");
      pm.createWithoutFlushCache(p);
    } finally {
      sqlSession2.close();
    }
    SqlSession sqlSession3 = sqlSessionFactory.openSession(true);
    try {
      PersonMapper pm = sqlSession3.getMapper(PersonMapper.class);
      // Cache not flushed by the insert, so the stale cached count is expected.
      Assert.assertEquals(2, pm.findAll().size());
    } finally {
      sqlSession3.close();
    }
    SqlSession sqlSession4 = sqlSessionFactory.openSession(true);
    try {
      PersonMapper pm = sqlSession4.getMapper(PersonMapper.class);
      // flushCache=true forces a fresh read.
      Assert.assertEquals(3, pm.findWithFlushCache().size());
    } finally {
      sqlSession4.close();
    }
  }
  // Verifies that a mapper using <cache-ref> shares (and can flush) the
  // referenced namespace's cache.
  @Test
  public void shouldApplyCacheNamespaceRef() {
    {
      SqlSession sqlSession = sqlSessionFactory.openSession(true);
      try {
        PersonMapper pm = sqlSession.getMapper(PersonMapper.class);
        Assert.assertEquals(2, pm.findAll().size());
        Person p = new Person(3, "hello", "world");
        pm.createWithoutFlushCache(p);
      } finally {
        sqlSession.close();
      }
    }
    {
      SqlSession sqlSession = sqlSessionFactory.openSession(true);
      try {
        PersonMapper pm = sqlSession.getMapper(PersonMapper.class);
        // Still cached: the insert above did not flush.
        Assert.assertEquals(2, pm.findAll().size());
      } finally {
        sqlSession.close();
      }
    }
    {
      SqlSession sqlSession = sqlSessionFactory.openSession(true);
      try {
        // ImportantPersonMapper cache-refs PersonMapper's namespace.
        ImportantPersonMapper pm = sqlSession.getMapper(ImportantPersonMapper.class);
        Assert.assertEquals(3, pm.findWithFlushCache().size());
      } finally {
        sqlSession.close();
      }
    }
    {
      SqlSession sqlSession = sqlSessionFactory.openSession(true);
      try {
        PersonMapper pm = sqlSession.getMapper(PersonMapper.class);
        // The shared cache was flushed via the cache-ref'd mapper.
        Assert.assertEquals(3, pm.findAll().size());
      } finally {
        sqlSession.close();
      }
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.jdbc;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Date;
import java.sql.NClob;
import java.sql.Ref;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Time;
import java.sql.Timestamp;
import java.text.Format;
import java.util.Calendar;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.compile.ColumnProjector;
import org.apache.phoenix.compile.RowProjector;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.exception.SQLExceptionInfo;
import org.apache.phoenix.iterate.ResultIterator;
import org.apache.phoenix.schema.tuple.ResultTuple;
import org.apache.phoenix.schema.tuple.Tuple;
import org.apache.phoenix.schema.types.PBoolean;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PDate;
import org.apache.phoenix.schema.types.PDecimal;
import org.apache.phoenix.schema.types.PDouble;
import org.apache.phoenix.schema.types.PFloat;
import org.apache.phoenix.schema.types.PInteger;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.schema.types.PSmallint;
import org.apache.phoenix.schema.types.PTime;
import org.apache.phoenix.schema.types.PTimestamp;
import org.apache.phoenix.schema.types.PTinyint;
import org.apache.phoenix.schema.types.PVarbinary;
import org.apache.phoenix.schema.types.PVarchar;
import org.apache.phoenix.util.SQLCloseable;
import com.google.common.annotations.VisibleForTesting;
/**
*
* JDBC ResultSet implementation of Phoenix.
* Currently only the following data types are supported:
* - String
* - Date
* - Time
* - Timestamp
* - BigDecimal
* - Double
* - Float
* - Int
* - Short
* - Long
* - Binary
* - Array - 1D
* None of the update or delete methods are supported.
* The ResultSet only supports the following options:
* - ResultSet.FETCH_FORWARD
* - ResultSet.CONCUR_READ_ONLY
* - ResultSet.TYPE_FORWARD_ONLY
* - ResultSet.CLOSE_CURSORS_AT_COMMIT
*
*
* @since 0.1
*/
public class PhoenixResultSet implements ResultSet, SQLCloseable, org.apache.phoenix.jdbc.Jdbc7Shim.ResultSet {
    private static final Log LOG = LogFactory.getLog(PhoenixResultSet.class);
    // Canonical "false" values used by getBoolean() for non-boolean column types.
    private final static String STRING_FALSE = "0";
    private final static BigDecimal BIG_DECIMAL_FALSE = BigDecimal.valueOf(0);
    private final static Integer INTEGER_FALSE = Integer.valueOf(0);
    // Sentinel marking "cursor not yet advanced"; currentRow == null means past-last.
    private final static Tuple BEFORE_FIRST = new ResultTuple();
    private final ResultIterator scanner;
    private final RowProjector rowProjector;
    private final PhoenixStatement statement;
    // Reusable byte buffer passed to column projectors to avoid per-getter allocation.
    private final ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    private Tuple currentRow = BEFORE_FIRST;
    private boolean isClosed = false;
    // Tracks whether the last column read was SQL NULL, per the JDBC wasNull() contract.
    private boolean wasNull = false;
    public PhoenixResultSet(ResultIterator resultIterator, RowProjector rowProjector, PhoenixStatement statement) throws SQLException {
        this.rowProjector = rowProjector;
        this.scanner = resultIterator;
        this.statement = statement;
    }
    // Scrollable-cursor and row-update operations are unsupported: this result set is
    // TYPE_FORWARD_ONLY / CONCUR_READ_ONLY (see class javadoc).
    @Override
    public boolean absolute(int row) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public void afterLast() throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public void beforeFirst() throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public void cancelRowUpdates() throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    // No warnings are ever accumulated, so there is nothing to clear.
    @Override
    public void clearWarnings() throws SQLException {
    }
    // Idempotent close: releases the underlying scanner and deregisters from the statement.
    @Override
    public void close() throws SQLException {
        if (isClosed) {
            return;
        }
        try {
            scanner.close();
        } finally {
            // Mark closed even if scanner.close() throws.
            isClosed = true;
            statement.getResultSets().remove(this);
        }
    }
    @Override
    public void deleteRow() throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    // Maps a column label to its 1-based JDBC index.
    // NOTE(review): if the label is unknown and getColumnIndex returns null, the
    // unboxing in "index + 1" throws NPE rather than SQLException — confirm the
    // projector throws for unknown labels.
    @Override
    public int findColumn(String columnLabel) throws SQLException {
        Integer index = rowProjector.getColumnIndex(columnLabel);
        return index + 1;
    }
    @Override
    public boolean first() throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    // Returns the column as a SQL ARRAY using the column's declared type; sets wasNull.
    @Override
    public Array getArray(int columnIndex) throws SQLException {
        checkCursorState();
        // Get the value using the expected type instead of trying to coerce to VARCHAR.
        // We can't coerce using our formatter because we don't have enough context in PDataType.
        ColumnProjector projector = rowProjector.getColumnProjector(columnIndex-1);
        Array value = (Array)projector.getValue(currentRow, projector.getExpression().getDataType(), ptr);
        wasNull = (value == null);
        return value;
    }
    @Override
    public Array getArray(String columnLabel) throws SQLException {
        return getArray(findColumn(columnLabel));
    }
    @Override
    public InputStream getAsciiStream(int columnIndex) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public InputStream getAsciiStream(String columnLabel) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
private void checkOpen() throws SQLException {
if (isClosed) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.RESULTSET_CLOSED).build().buildException();
}
}
private void checkCursorState() throws SQLException {
checkOpen();
if (currentRow == BEFORE_FIRST) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CURSOR_BEFORE_FIRST_ROW).build().buildException();
}else if (currentRow == null) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CURSOR_PAST_LAST_ROW).build().buildException();
}
}
    // Returns the column coerced to DECIMAL; null for SQL NULL (wasNull is set).
    @Override
    public BigDecimal getBigDecimal(int columnIndex) throws SQLException {
        checkCursorState();
        BigDecimal value = (BigDecimal)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
            PDecimal.INSTANCE, ptr);
        wasNull = (value == null);
        return value;
    }
    @Override
    public BigDecimal getBigDecimal(String columnLabel) throws SQLException {
        return getBigDecimal(findColumn(columnLabel));
    }
@Override
public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException {
BigDecimal value = getBigDecimal(columnIndex);
return value.setScale(scale);
}
@Override
public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException {
return getBigDecimal(findColumn(columnLabel), scale);
}
    // Streaming and LOB accessors are unsupported (see class javadoc for supported types).
    @Override
    public InputStream getBinaryStream(int columnIndex) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public InputStream getBinaryStream(String columnLabel) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public Blob getBlob(int columnIndex) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public Blob getBlob(String columnLabel) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    /**
     * Returns the column as a boolean. For BOOLEAN columns, true iff the value equals
     * Boolean.TRUE. For VARCHAR/INTEGER/DECIMAL, anything other than the canonical
     * "false" value ("0" / 0 / 0) is treated as true. SQL NULL returns false with
     * wasNull set. Other types are rejected.
     */
    @Override
    public boolean getBoolean(int columnIndex) throws SQLException {
        checkCursorState();
        ColumnProjector colProjector = rowProjector.getColumnProjector(columnIndex-1);
        PDataType type = colProjector.getExpression().getDataType();
        Object value = colProjector.getValue(currentRow, type, ptr);
        wasNull = (value == null);
        if (value == null) {
            return false;
        }
        if (type == PBoolean.INSTANCE) {
            return Boolean.TRUE.equals(value);
        } else if (type == PVarchar.INSTANCE) {
            return !STRING_FALSE.equals(value);
        } else if (type == PInteger.INSTANCE) {
            return !INTEGER_FALSE.equals(value);
        } else if (type == PDecimal.INSTANCE) {
            return !BIG_DECIMAL_FALSE.equals(value);
        } else {
            throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_CALL_METHOD_ON_TYPE)
                .setMessage("Method: getBoolean; Type:" + type).build().buildException();
        }
    }
    @Override
    public boolean getBoolean(String columnLabel) throws SQLException {
        return getBoolean(findColumn(columnLabel));
    }
    // Returns the column as raw VARBINARY bytes; null for SQL NULL (wasNull is set).
    @Override
    public byte[] getBytes(int columnIndex) throws SQLException {
        checkCursorState();
        byte[] value = (byte[])rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
            PVarbinary.INSTANCE, ptr);
        wasNull = (value == null);
        return value;
    }
    @Override
    public byte[] getBytes(String columnLabel) throws SQLException {
        return getBytes(findColumn(columnLabel));
    }
    // Returns the column as TINYINT; 0 for SQL NULL (check wasNull to distinguish).
    @Override
    public byte getByte(int columnIndex) throws SQLException {
        checkCursorState();
        Byte value = (Byte)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
            PTinyint.INSTANCE, ptr);
        wasNull = (value == null);
        if (value == null) {
            return 0;
        }
        return value;
    }
    @Override
    public byte getByte(String columnLabel) throws SQLException {
        return getByte(findColumn(columnLabel));
    }
    // Character streams and CLOBs are unsupported.
    @Override
    public Reader getCharacterStream(int columnIndex) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public Reader getCharacterStream(String columnLabel) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public Clob getClob(int columnIndex) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public Clob getClob(String columnLabel) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    // This result set is always read-only.
    @Override
    public int getConcurrency() throws SQLException {
        return ResultSet.CONCUR_READ_ONLY;
    }
    @Override
    public String getCursorName() throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    // Returns the column as a DATE; null for SQL NULL (wasNull is set).
    @Override
    public Date getDate(int columnIndex) throws SQLException {
        checkCursorState();
        Date value = (Date)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
            PDate.INSTANCE, ptr);
        wasNull = (value == null);
        if (value == null) {
            return null;
        }
        return value;
    }
    @Override
    public Date getDate(String columnLabel) throws SQLException {
        return getDate(findColumn(columnLabel));
    }
@Override
public Date getDate(int columnIndex, Calendar cal) throws SQLException {
checkCursorState();
Date value = (Date)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
PDate.INSTANCE, ptr);
cal.setTime(value);
return new Date(cal.getTimeInMillis());
}
@Override
public Date getDate(String columnLabel, Calendar cal) throws SQLException {
return getDate(findColumn(columnLabel), cal);
}
    // Returns the column as DOUBLE; 0 for SQL NULL (check wasNull to distinguish).
    @Override
    public double getDouble(int columnIndex) throws SQLException {
        checkCursorState();
        Double value = (Double)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
            PDouble.INSTANCE, ptr);
        wasNull = (value == null);
        if (value == null) {
            return 0;
        }
        return value;
    }
    @Override
    public double getDouble(String columnLabel) throws SQLException {
        return getDouble(findColumn(columnLabel));
    }
    // Only forward iteration is supported.
    @Override
    public int getFetchDirection() throws SQLException {
        return ResultSet.FETCH_FORWARD;
    }
    @Override
    public int getFetchSize() throws SQLException {
        return statement.getFetchSize();
    }
    // Returns the column as FLOAT; 0 for SQL NULL (check wasNull to distinguish).
    @Override
    public float getFloat(int columnIndex) throws SQLException {
        checkCursorState();
        Float value = (Float)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
            PFloat.INSTANCE, ptr);
        wasNull = (value == null);
        if (value == null) {
            return 0;
        }
        return value;
    }
    @Override
    public float getFloat(String columnLabel) throws SQLException {
        return getFloat(findColumn(columnLabel));
    }
    @Override
    public int getHoldability() throws SQLException {
        return ResultSet.CLOSE_CURSORS_AT_COMMIT;
    }
    // Returns the column as INTEGER; 0 for SQL NULL (check wasNull to distinguish).
    @Override
    public int getInt(int columnIndex) throws SQLException {
        checkCursorState();
        Integer value = (Integer)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
            PInteger.INSTANCE, ptr);
        wasNull = (value == null);
        if (value == null) {
            return 0;
        }
        return value;
    }
    @Override
    public int getInt(String columnLabel) throws SQLException {
        return getInt(findColumn(columnLabel));
    }
    // Returns the column as BIGINT; 0 for SQL NULL (check wasNull to distinguish).
    @Override
    public long getLong(int columnIndex) throws SQLException {
        checkCursorState();
        Long value = (Long)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
            PLong.INSTANCE, ptr);
        wasNull = (value == null);
        if (value == null) {
            return 0;
        }
        return value;
    }
    @Override
    public long getLong(String columnLabel) throws SQLException {
        return getLong(findColumn(columnLabel));
    }
    @Override
    public ResultSetMetaData getMetaData() throws SQLException {
        return new PhoenixResultSetMetaData(statement.getConnection(), rowProjector);
    }
    // National-character (N*) accessors are unsupported.
    @Override
    public Reader getNCharacterStream(int columnIndex) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public Reader getNCharacterStream(String columnLabel) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public NClob getNClob(int columnIndex) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public NClob getNClob(String columnLabel) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public String getNString(int columnIndex) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public String getNString(String columnLabel) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    // Returns the column in its natural Java type per the column's declared PDataType;
    // null for SQL NULL (wasNull is set).
    @Override
    public Object getObject(int columnIndex) throws SQLException {
        checkCursorState();
        ColumnProjector projector = rowProjector.getColumnProjector(columnIndex-1);
        Object value = projector.getValue(currentRow, projector.getExpression().getDataType(), ptr);
        wasNull = (value == null);
        return value;
    }
    @Override
    public Object getObject(String columnLabel) throws SQLException {
        return getObject(findColumn(columnLabel));
    }
    @Override
    public Object getObject(int columnIndex, Map<String, Class<?>> map) throws SQLException {
        return getObject(columnIndex); // Just ignore map since we only support built-in types
    }
    @Override
    public Object getObject(String columnLabel, Map<String, Class<?>> map) throws SQLException {
        return getObject(findColumn(columnLabel), map);
    }
    @Override
    public Ref getRef(int columnIndex) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public Ref getRef(String columnLabel) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public int getRow() throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public RowId getRowId(int columnIndex) throws SQLException {
        // TODO: support?
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public RowId getRowId(String columnLabel) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    // SQLXML columns are unsupported.
    @Override
    public SQLXML getSQLXML(int columnIndex) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    @Override
    public SQLXML getSQLXML(String columnLabel) throws SQLException {
        throw new SQLFeatureNotSupportedException();
    }
    // Returns the column as SMALLINT; 0 for SQL NULL (check wasNull to distinguish).
    @Override
    public short getShort(int columnIndex) throws SQLException {
        checkCursorState();
        Short value = (Short)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, PSmallint.INSTANCE, ptr);
        wasNull = (value == null);
        if (value == null) {
            return 0;
        }
        return value;
    }
    @Override
    public short getShort(String columnLabel) throws SQLException {
        return getShort(findColumn(columnLabel));
    }
    // Returns the statement that produced this result set (covariant return).
    @Override
    public PhoenixStatement getStatement() throws SQLException {
        return statement;
    }
    // Returns the column rendered as a String via the statement's per-type formatter,
    // falling back to toString(); null for SQL NULL (wasNull is set).
    @Override
    public String getString(int columnIndex) throws SQLException {
        checkCursorState();
        // Get the value using the expected type instead of trying to coerce to VARCHAR.
        // We can't coerce using our formatter because we don't have enough context in PDataType.
        ColumnProjector projector = rowProjector.getColumnProjector(columnIndex-1);
        PDataType type = projector.getExpression().getDataType();
        Object value = projector.getValue(currentRow,type, ptr);
        if (wasNull = (value == null)) {
            return null;
        }
        // Run Object through formatter to get String.
        // This provides a simple way of getting a reasonable string representation
        // for types like DATE and TIME
        Format formatter = statement.getFormatter(type);
        return formatter == null ? value.toString() : formatter.format(value);
    }
    @Override
    public String getString(String columnLabel) throws SQLException {
        return getString(findColumn(columnLabel));
    }
@Override
public Time getTime(int columnIndex) throws SQLException {
checkCursorState();
Time value = (Time)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
PTime.INSTANCE, ptr);
wasNull = (value == null);
return value;
}
@Override
public Time getTime(String columnLabel) throws SQLException {
return getTime(findColumn(columnLabel));
}
@Override
public Time getTime(int columnIndex, Calendar cal) throws SQLException {
checkCursorState();
Time value = (Time)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
PTime.INSTANCE, ptr);
wasNull = (value == null);
if (value == null) {
return null;
}
// NOTE(review): copying value into cal and then cal's millis back into
// value round-trips the same millisecond instant, so the caller's
// Calendar/time zone never actually adjusts the result — confirm this
// is the intended JDBC Calendar semantics here.
cal.setTime(value);
value.setTime(cal.getTimeInMillis());
return value;
}
@Override
public Time getTime(String columnLabel, Calendar cal) throws SQLException {
return getTime(findColumn(columnLabel),cal);
}
@Override
public Timestamp getTimestamp(int columnIndex) throws SQLException {
// Project the column as SQL TIMESTAMP; null sets wasNull and returns null.
checkCursorState();
Timestamp value = (Timestamp)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
PTimestamp.INSTANCE, ptr);
wasNull = (value == null);
return value;
}
@Override
public Timestamp getTimestamp(String columnLabel) throws SQLException {
return getTimestamp(findColumn(columnLabel));
}
@Override
public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException {
// NOTE(review): the Calendar argument is ignored; the timestamp is
// returned exactly as stored — confirm this is intentional.
return getTimestamp(columnIndex);
}
@Override
public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException {
return getTimestamp(findColumn(columnLabel),cal);
}
@Override
public int getType() throws SQLException {
// The cursor only moves forward (see next()/previous()).
return ResultSet.TYPE_FORWARD_ONLY;
}
@Override
public URL getURL(int columnIndex) throws SQLException {
    checkCursorState();
    // URLs are projected as VARCHAR and parsed on the way out.
    String spec = (String) rowProjector.getColumnProjector(columnIndex - 1)
            .getValue(currentRow, PVarchar.INSTANCE, ptr);
    wasNull = (spec == null);
    if (wasNull) {
        return null;
    }
    try {
        return new URL(spec);
    } catch (MalformedURLException e) {
        // Surface parse failures as the driver's MALFORMED_URL SQLException.
        throw new SQLExceptionInfo.Builder(SQLExceptionCode.MALFORMED_URL)
                .setRootCause(e).build().buildException();
    }
}
@Override
public URL getURL(String columnLabel) throws SQLException {
// Delegate to the index-based getter after resolving the label.
return getURL(findColumn(columnLabel));
}
// getUnicodeStream is deprecated in JDBC and not supported here.
@Override
public InputStream getUnicodeStream(int columnIndex) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public InputStream getUnicodeStream(String columnLabel) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public SQLWarning getWarnings() throws SQLException {
// No warnings are tracked; always null per the JDBC "no warnings" convention.
return null;
}
// This result set is read-only: row insertion is unsupported.
@Override
public void insertRow() throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public boolean isAfterLast() throws SQLException {
// currentRow becomes null once next() exhausts the scanner.
return currentRow == null;
}
@Override
public boolean isBeforeFirst() throws SQLException {
// BEFORE_FIRST is a sentinel tuple set before the first next() call.
return currentRow == BEFORE_FIRST;
}
@Override
public boolean isClosed() throws SQLException {
return isClosed;
}
// Positional queries and cursor repositioning are unsupported on a
// forward-only cursor.
@Override
public boolean isFirst() throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public boolean isLast() throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public boolean last() throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void moveToCurrentRow() throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void moveToInsertRow() throws SQLException {
throw new SQLFeatureNotSupportedException();
}
// Exposes the tuple the cursor currently points at (internal/testing use).
public Tuple getCurrentRow() {
return currentRow;
}
// Advances the cursor to the next row; returns false once the scanner is
// exhausted. SQLExceptions wrapped in RuntimeExceptions by expression
// evaluation are unwrapped and rethrown as-is.
@Override
public boolean next() throws SQLException {
checkOpen();
try {
currentRow = scanner.next();
rowProjector.reset();
} catch (RuntimeException e) {
// FIXME: Expression.evaluate does not throw SQLException
// so this will unwrap throws from that.
if (e.getCause() instanceof SQLException) {
throw (SQLException) e.getCause();
}
throw e;
}
return currentRow != null;
}
// Backward/relative scrolling and updatable-row status queries are all
// unsupported: the cursor is TYPE_FORWARD_ONLY and read-only.
@Override
public boolean previous() throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void refreshRow() throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public boolean relative(int rows) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public boolean rowDeleted() throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public boolean rowInserted() throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public boolean rowUpdated() throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void setFetchDirection(int direction) throws SQLException {
// Only forward fetch makes sense for a forward-only cursor.
if (direction != ResultSet.FETCH_FORWARD) {
throw new SQLFeatureNotSupportedException();
}
}
@Override
public void setFetchSize(int rows) throws SQLException {
// The fetch-size hint is deliberately ignored; a warning is logged instead.
LOG.warn("Ignoring setFetchSize(" + rows + ")");
}
// All updateXxx mutators below are unsupported: this result set is
// read-only, so each one throws SQLFeatureNotSupportedException.
@Override
public void updateArray(int columnIndex, Array x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateArray(String columnLabel, Array x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBinaryStream(String columnLabel, InputStream x, int length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBlob(int columnIndex, Blob x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBlob(String columnLabel, Blob x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBoolean(int columnIndex, boolean x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBoolean(String columnLabel, boolean x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateByte(int columnIndex, byte x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateByte(String columnLabel, byte x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBytes(int columnIndex, byte[] x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateBytes(String columnLabel, byte[] x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateCharacterStream(int columnIndex, Reader x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader, int length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateClob(int columnIndex, Clob x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateClob(String columnLabel, Clob x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateClob(int columnIndex, Reader reader) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateClob(String columnLabel, Reader reader) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateClob(int columnIndex, Reader reader, long length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateClob(String columnLabel, Reader reader, long length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateDate(int columnIndex, Date x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateDate(String columnLabel, Date x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateDouble(int columnIndex, double x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateDouble(String columnLabel, double x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateFloat(int columnIndex, float x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateFloat(String columnLabel, float x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateInt(int columnIndex, int x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateInt(String columnLabel, int x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateLong(int columnIndex, long x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateLong(String columnLabel, long x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateNClob(int columnIndex, NClob nClob) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateNClob(String columnLabel, NClob nClob) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateNClob(int columnIndex, Reader reader) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateNClob(String columnLabel, Reader reader) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateNString(int columnIndex, String nString) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateNString(String columnLabel, String nString) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateNull(int columnIndex) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateNull(String columnLabel) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateObject(int columnIndex, Object x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateObject(String columnLabel, Object x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateRef(int columnIndex, Ref x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateRef(String columnLabel, Ref x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateRow() throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateRowId(int columnIndex, RowId x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateRowId(String columnLabel, RowId x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateShort(int columnIndex, short x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateShort(String columnLabel, short x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateString(int columnIndex, String x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateString(String columnLabel, String x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateTime(int columnIndex, Time x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateTime(String columnLabel, Time x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@Override
public boolean wasNull() throws SQLException {
// True when the most recently read column value was SQL NULL.
return wasNull;
}
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
// Only wraps types this instance directly implements.
return iface.isInstance(this);
}
@SuppressWarnings("unchecked")
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
// Only unwraps to a type this instance already is; anything else is a
// CLASS_NOT_UNWRAPPABLE SQLException.
if (!iface.isInstance(this)) {
throw new SQLExceptionInfo.Builder(SQLExceptionCode.CLASS_NOT_UNWRAPPABLE)
.setMessage(this.getClass().getName() + " not unwrappable from " + iface.getName())
.build().buildException();
}
return (T)this;
}
@SuppressWarnings("unchecked")
@Override
public <T> T getObject(int columnIndex, Class<T> type) throws SQLException {
return (T) getObject(columnIndex); // Just ignore type since we only support built-in types
}
@SuppressWarnings("unchecked")
@Override
public <T> T getObject(String columnLabel, Class<T> type) throws SQLException {
return (T) getObject(columnLabel); // Just ignore type since we only support built-in types
}
// Test hook exposing the underlying scanner.
@VisibleForTesting
public ResultIterator getUnderlyingIterator() {
return scanner;
}
}
| |
package net.floodlightcontroller.qos;
/**
* Copyright 2012 Marist College, New York
* Author Ryan Wallner (ryan.wallner1@marist.edu)
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* Provides Queuing and L2/L3 Quality of Service Policies to a
* virtualized network using DiffServ/ToS class based model, and queuing techniques.
* This module provides overlapping flowspace for policies that governed by their priority.
* This QoS modules acts in a pro-active manner having to abide by existing "Policies"
* within a network.
*
**/
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.openflow.protocol.OFFlowMod;
import org.openflow.protocol.OFMatch;
import org.openflow.protocol.OFMessage;
import org.openflow.protocol.OFPacketOut;
import org.openflow.protocol.OFPort;
import org.openflow.protocol.OFType;
import org.openflow.protocol.action.OFAction;
import org.openflow.protocol.action.OFActionEnqueue;
import org.openflow.protocol.action.OFActionNetworkTypeOfService;
import org.openflow.protocol.action.OFActionType;
import org.openflow.util.HexString;
import net.floodlightcontroller.core.FloodlightContext;
import net.floodlightcontroller.core.IOFMessageListener;
import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.FloodlightModuleException;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.floodlightcontroller.restserver.IRestApiService;
import net.floodlightcontroller.staticflowentry.IStaticFlowEntryPusherService;
import net.floodlightcontroller.storage.IResultSet;
import net.floodlightcontroller.storage.IStorageSourceService;
import net.floodlightcontroller.storage.StorageException;
import net.floodlightcontroller.qos.QoSPolicy;
import net.floodlightcontroller.qos.QoSTypeOfService;
import net.floodlightcontroller.core.IFloodlightProviderService;
import java.util.ArrayList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class QoS implements IQoSService, IFloodlightModule,
IOFMessageListener {
// Floodlight services resolved in init().
protected IFloodlightProviderService floodlightProvider;
protected IStaticFlowEntryPusherService flowPusher;
// In-memory mirrors of the storage tables; access is synchronized.
protected List<QoSPolicy> policies; //Synchronized
protected List<QoSTypeOfService> services; //Synchronized
protected IRestApiService restApi;
protected FloodlightContext cntx;
protected IStorageSourceService storageSource;
// Loaded from tools.properties during init().
protected Properties props = new Properties();
protected String[] tools;
protected static Logger logger;
// Master switch: PACKET_INs are only inspected when true.
protected boolean enabled;
//regex for dpid string, this can/needs to be more elegant. Maybe use of a Matcher
// NOTE(review): the class [\d|\D] matches ANY character (digit,
// non-digit, or literal '|'), so this pattern accepts any 8
// colon-separated 2-character groups; tighten to [0-9a-fA-F] if a
// strict DPID check is intended.
protected String dpidPattern = "^[\\d|\\D][\\d|\\D]:[\\d|\\D][\\d|\\D]:" +
"[\\d|\\D][\\d|\\D]:[\\d|\\D][\\d|\\D]:" +
"[\\d|\\D][\\d|\\D]:[\\d|\\D][\\d|\\D]:" +
"[\\d|\\D][\\d|\\D]:[\\d|\\D][\\d|\\D]$";
// Storage schema for the policy table.
public static final String TABLE_NAME = "controller_qos";
public static final String COLUMN_POLID = "policyid";
public static final String COLUMN_NAME = "name";
public static final String COLUMN_MATCH_PROTOCOL = "protocol";
public static final String COLUMN_MATCH_ETHTYPE = "eth-type";
public static final String COLUMN_MATCH_INGRESSPRT = "ingressport";
public static final String COLUMN_MATCH_IPDST = "ipdst";
public static final String COLUMN_MATCH_IPSRC = "ipsrc";
public static final String COLUMN_MATCH_VLANID = "vlanid";
public static final String COLUMN_MATCH_ETHSRC = "ethsrc";
public static final String COLUMN_MATCH_ETHDST = "ethdst";
public static final String COLUMN_MATCH_TCPUDP_SRCPRT = "tcpudpsrcport";
public static final String COLUMN_MATCH_TCPUDP_DSTPRT = "tcpudpdstport";
public static final String COLUMN_NW_TOS = "nw_tos";
public static final String COLUMN_SW = "switches";
public static final String COLUMN_QUEUE = "queue";
// NOTE(review): "equeueport" looks like a typo for "enqueueport", but it
// is a persisted column name — do not rename without a storage migration.
public static final String COLUMN_ENQPORT = "equeueport";
public static final String COLUMN_PRIORITY = "priority";
public static final String COLUMN_SERVICE = "service";
public static String ColumnNames[] = { COLUMN_POLID,
COLUMN_NAME,COLUMN_MATCH_PROTOCOL, COLUMN_MATCH_ETHTYPE,COLUMN_MATCH_INGRESSPRT,
COLUMN_MATCH_IPDST,COLUMN_MATCH_IPSRC,COLUMN_MATCH_VLANID,
COLUMN_MATCH_ETHSRC,COLUMN_MATCH_ETHDST,COLUMN_MATCH_TCPUDP_SRCPRT,
COLUMN_MATCH_TCPUDP_DSTPRT,COLUMN_NW_TOS,COLUMN_SW,
COLUMN_QUEUE,COLUMN_ENQPORT,COLUMN_PRIORITY,COLUMN_SERVICE,};
// Storage schema for the type-of-service table.
public static final String TOS_TABLE_NAME = "controller_qos_tos";
public static final String COLUMN_SID = "serviceid";
public static final String COLUMN_SNAME = "servicename";
public static final String COLUMN_TOSBITS = "tosbits";
public static String TOSColumnNames[] = {COLUMN_SID,
COLUMN_SNAME,
COLUMN_TOSBITS};
@Override
public String getName() {
// Listener name registered with the Floodlight provider.
return "qos";
}
@Override
public boolean isCallbackOrderingPrereq(OFType type, String name) {
// No ordering constraints relative to other message listeners.
return false;
}
@Override
public boolean isCallbackOrderingPostreq(OFType type, String name) {
return false;
}
@Override
public Collection<Class<? extends IFloodlightService>> getModuleServices() {
    // This module exports exactly one service interface: IQoSService.
    Collection<Class<? extends IFloodlightService>> provided =
            new ArrayList<Class<? extends IFloodlightService>>();
    provided.add(IQoSService.class);
    return provided;
}
@Override
public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() {
    // Map each exported service interface to its implementation: this
    // module itself implements IQoSService.
    Map<Class<? extends IFloodlightService>, IFloodlightService> impls =
            new HashMap<Class<? extends IFloodlightService>, IFloodlightService>();
    impls.put(IQoSService.class, this);
    return impls;
}
@Override
public Collection<Class<? extends IFloodlightService>> getModuleDependencies() {
    // QoS needs the provider, storage source, REST API and static flow
    // entry pusher services.
    Collection<Class<? extends IFloodlightService>> deps =
            new ArrayList<Class<? extends IFloodlightService>>();
    deps.add(IFloodlightProviderService.class);
    deps.add(IStorageSourceService.class);
    deps.add(IRestApiService.class);
    deps.add(IStaticFlowEntryPusherService.class);
    return deps;
}
/**
 * Reads the policies from the storage and creates a sorted
 * ArrayList of QoSPolicy's from them.
 * Rows missing required columns and rows with malformed data are
 * skipped (with an error logged) rather than aborting the whole load.
 * @return the sorted ArrayList of Policy instances (rules from storage)
 *
 * Based on work from below, Credit to
 * @author Amer Tahir
 * @edited KC Wang
 * @author (re-authored) Ryan Wallner
 */
protected ArrayList<QoSPolicy> readPoliciesFromStorage() {
    ArrayList<QoSPolicy> l = new ArrayList<QoSPolicy>();
    try {
        Map<String, Object> row;
        IResultSet policySet = storageSource
                .executeQuery(TABLE_NAME, ColumnNames, null, null);
        for (Iterator<IResultSet> iter = policySet.iterator(); iter.hasNext();) {
            row = iter.next().getRow();
            QoSPolicy p = new QoSPolicy();
            // A usable row must carry id, switch, queue, enqueue port and service.
            if (!row.containsKey(COLUMN_POLID)
                    || !row.containsKey(COLUMN_SW)
                    || !row.containsKey(COLUMN_QUEUE)
                    || !row.containsKey(COLUMN_ENQPORT)
                    || !row.containsKey(COLUMN_SERVICE)) {
                logger.error("Skipping entry missing required fields {}", row);
                continue;
            }
            try {
                p.policyid = Integer.parseInt((String) row.get(COLUMN_POLID));
                p.queue = Short.parseShort((String) row.get(COLUMN_QUEUE));
                p.enqueueport = Short.parseShort((String) row.get(COLUMN_ENQPORT));
                p.service = (String) row.get(COLUMN_SERVICE);
                //TODO change for String[] of switches
                p.sw = (String) row.get(COLUMN_SW);
                // Optional match columns: copy whatever is present.
                for (String key : row.keySet()) {
                    if (row.get(key) == null) {
                        continue;
                    }
                    else if (key.equals(COLUMN_POLID)
                            || key.equals(COLUMN_SW)
                            || key.equals(COLUMN_QUEUE)
                            || key.equals(COLUMN_ENQPORT)
                            || key.equals(COLUMN_SERVICE)) {
                        continue; // already handled above
                    }
                    else if (key.equals(COLUMN_NAME)) {
                        p.name = (String) row.get(COLUMN_NAME);
                    }
                    else if (key.equals(COLUMN_MATCH_ETHDST)) {
                        p.ethdst = (String) row.get(COLUMN_MATCH_ETHDST);
                    }
                    else if (key.equals(COLUMN_MATCH_ETHSRC)) {
                        p.ethsrc = (String) row.get(COLUMN_MATCH_ETHSRC);
                    }
                    else if (key.equals(COLUMN_MATCH_ETHTYPE)) {
                        p.ethtype = Short.parseShort((String)
                                row.get(COLUMN_MATCH_ETHTYPE));
                    }
                    else if (key.equals(COLUMN_MATCH_INGRESSPRT)) {
                        p.ingressport = Short.parseShort((String)
                                row.get(COLUMN_MATCH_INGRESSPRT));
                    }
                    else if (key.equals(COLUMN_MATCH_IPDST)) {
                        p.ipdst = Integer.parseInt((String)
                                row.get(COLUMN_MATCH_IPDST));
                    }
                    else if (key.equals(COLUMN_MATCH_IPSRC)) {
                        p.ipsrc = Integer.parseInt((String)
                                row.get(COLUMN_MATCH_IPSRC));
                    }
                    else if (key.equals(COLUMN_MATCH_PROTOCOL)) {
                        p.protocol = Byte.parseByte((String)
                                row.get(COLUMN_MATCH_PROTOCOL));
                    }
                    else if (key.equals(COLUMN_MATCH_TCPUDP_DSTPRT)) {
                        p.tcpudpdstport = Short.parseShort((String)
                                row.get(COLUMN_MATCH_TCPUDP_DSTPRT));
                    }
                    else if (key.equals(COLUMN_MATCH_TCPUDP_SRCPRT)) {
                        p.tcpudpsrcport = Short.parseShort((String)
                                row.get(COLUMN_MATCH_TCPUDP_SRCPRT));
                    }
                    else if (key.equals(COLUMN_MATCH_VLANID)) {
                        p.vlanid = Short.parseShort((String)
                                row.get(COLUMN_MATCH_VLANID));
                    }
                    else if (key.equals(COLUMN_NW_TOS)) {
                        p.tos = Byte.parseByte((String)
                                row.get(COLUMN_NW_TOS));
                    }
                    else if (key.equals(COLUMN_PRIORITY)) {
                        p.priority = Short.parseShort((String)
                                row.get(COLUMN_PRIORITY));
                    }
                }
            } catch (ClassCastException e) {
                logger.error("Error, Skipping rule, Bad Data "
                        + e.getMessage() + " on Rule {}", p.policyid);
            } catch (NumberFormatException e) {
                // FIX: malformed numeric columns previously escaped this
                // handler and aborted the whole load.
                logger.error("Error, Skipping rule, Bad Data "
                        + e.getMessage() + " on Rule {}", p.policyid);
            }
            //make sure its a queueing rule or service rule only.
            if (p.enqueueport != -1 && p.queue != -1 && p.service != null) {
                l.add(p);
            }
            else if (p.enqueueport > -1 && p.queue > -1 && p.service == null) {
                l.add(p);
            }
            else {
                continue;//not a valid rule
            }
        }
    } catch (StorageException e) {
        logger.error("Error with storage source: {}", e);
    }
    // Sort by the natural (priority-based) ordering of QoSPolicy.
    Collections.sort(l);
    return l;
}
/**
 * Reads the types of services from the storage and creates a
 * sorted ArrayList of QoSTypeOfService from them.
 * Rows missing required columns and rows with malformed data are
 * skipped (with an error logged) rather than aborting the whole load.
 * @return the sorted ArrayList of Type of Service instances (rules from storage)
 *
 * Based on work from below, Credit to
 * @author Amer Tahir
 * @edited KC Wang
 * @author (re-authored) Ryan Wallner
 */
protected ArrayList<QoSTypeOfService> readServicesFromStorage() {
    ArrayList<QoSTypeOfService> l = new ArrayList<QoSTypeOfService>();
    try {
        Map<String, Object> row;
        IResultSet serviceSet = storageSource
                .executeQuery(TOS_TABLE_NAME, TOSColumnNames, null, null);
        for (Iterator<IResultSet> iter = serviceSet.iterator(); iter.hasNext();) {
            row = iter.next().getRow();
            QoSTypeOfService s = new QoSTypeOfService();
            // A usable row must carry the service id and ToS bits.
            if (!row.containsKey(COLUMN_SID)
                    || !row.containsKey(COLUMN_TOSBITS)) {
                logger.error("Skipping entry missing required fields {}", row);
                continue;
            }
            try {
                s.sid = Integer.parseInt((String) row.get(COLUMN_SID));
                s.tos = Byte.parseByte((String) row.get(COLUMN_TOSBITS));
                // Optional columns: only the display name at present.
                for (String key : row.keySet()) {
                    if (row.get(key) == null) {
                        continue;
                    }
                    else if (key.equals(COLUMN_SID)
                            || key.equals(COLUMN_TOSBITS)) {
                        continue; // already handled above
                    }
                    else if (key.equals(COLUMN_SNAME)) {
                        s.name = (String) row.get(COLUMN_SNAME);
                    }
                }
            } catch (ClassCastException e) {
                logger.error("Error, Skipping rule, Bad Data "
                        + e.getMessage() + " on Rule {}", s.sid);
            } catch (NumberFormatException e) {
                // FIX: malformed numeric columns previously escaped this
                // handler and aborted the whole load.
                logger.error("Error, Skipping rule, Bad Data "
                        + e.getMessage() + " on Rule {}", s.sid);
            }
            if (s.tos != -1) {
                l.add(s);
            }
        }
    } catch (StorageException e) {
        logger.error("Error with storage source: {}", e);
    }
    // Sort by the natural ordering of QoSTypeOfService.
    Collections.sort(l);
    return l;
}
/**
 * Resolves module dependencies, initializes in-memory state, and reads
 * tools.properties to decide whether QoS starts enabled.
 * @param context the module context supplying service implementations
 * @throws FloodlightModuleException per the IFloodlightModule contract
 */
@Override
public void init(FloodlightModuleContext context)
        throws FloodlightModuleException {
    //initiate services
    floodlightProvider = context.getServiceImpl(IFloodlightProviderService.class);
    flowPusher = context.getServiceImpl(IStaticFlowEntryPusherService.class);
    storageSource = context.getServiceImpl(IStorageSourceService.class);
    restApi = context.getServiceImpl(IRestApiService.class);
    // FIX: logger was previously assigned twice.
    logger = LoggerFactory.getLogger(QoS.class);
    policies = new ArrayList<QoSPolicy>();
    services = new ArrayList<QoSTypeOfService>();
    // start disabled
    // can be overridden by tools.properties.
    enabled = false;
    FileInputStream in = null;
    try {
        //load a properties file
        in = new FileInputStream("src/main/resources/tools.properties");
        props.load(in);
        // FIX: guard against missing properties (previously an NPE).
        String toolsProp = props.getProperty("tools");
        if (toolsProp != null) {
            tools = toolsProp.split(",");
        }
        String qosProp = props.getProperty("qos");
        logger.debug("tools.properties qos = {}", qosProp);
        if ("enabled".equalsIgnoreCase(qosProp)) {
            logger.info("Enabling QoS on Start-up. Edit tools.properties to change this.");
            this.enableQoS(true);
        }
    } catch (FileNotFoundException e) {
        // Missing properties file is non-fatal: QoS simply starts disabled.
        logger.warn("tools.properties not found, QoS starts disabled", e);
    } catch (IOException e) {
        logger.error("Error reading tools.properties", e);
    } finally {
        // FIX: the stream was previously never closed (resource leak).
        if (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                logger.warn("Error closing tools.properties stream", e);
            }
        }
    }
}
@Override
public void startUp(FloodlightModuleContext context) {
// initialize REST interface
restApi.addRestletRoutable(new QoSWebRoutable());
// Register for PACKET_IN so incoming packets can be checked against policies.
floodlightProvider.addOFMessageListener(OFType.PACKET_IN, this);
//Storage for policies
storageSource.createTable(TABLE_NAME, null);
storageSource.setTablePrimaryKeyName(TABLE_NAME, COLUMN_POLID);
//avoid thread issues for concurrency
// NOTE(review): the lock is taken on the current list object, but the
// field is reassigned to a new list inside the block, so other threads
// synchronizing on `policies` afterwards lock a different object —
// confirm this provides the intended protection.
synchronized (policies) {
this.policies = readPoliciesFromStorage();
}
//Storage for services
storageSource.createTable(TOS_TABLE_NAME, null);
storageSource.setTablePrimaryKeyName(TOS_TABLE_NAME, COLUMN_SID);
//avoid thread issues for concurrency
// NOTE(review): same reassignment-under-lock pattern as above.
synchronized (services) {
this.services = readServicesFromStorage();
}
// create default "Best Effort" service
// most networks use this as default, adding here for defaulting
try{
QoSTypeOfService service = new QoSTypeOfService();
service.name = "Best Effort";
service.tos = (byte)0x00;
service.sid = service.genID();
this.addService(service);
}catch(Exception e){
logger.error("Error adding default Best Effort {}", e);
}
}
// Handles OpenFlow messages. Only PACKET_IN is inspected, and only while
// the module is enabled; in every case processing continues to the next
// listener (Command.CONTINUE), so QoS never consumes the packet.
@Override
public net.floodlightcontroller.core.IListener.Command receive(
IOFSwitch sw, OFMessage msg, FloodlightContext cntx) {
//do not process packet if not enabled
if (!this.enabled) {
return Command.CONTINUE;
}
//logger.debug("Message Recieved: Type - {}",msg.getType().toString());
//Listen for Packets that match Policies
switch (msg.getType()) {
case PACKET_IN:
//logger.debug("PACKET_IN recieved");
byte[] packetData = OFMessage.getData(sw, msg, cntx);
//Temporary match from packet to compare
OFMatch tmpMatch = new OFMatch();
tmpMatch.loadFromPacket(packetData, OFPort.OFPP_NONE.getValue());
checkIfQoSApplied(tmpMatch);
break;
default:
return Command.CONTINUE;
}
return Command.CONTINUE;
}
/**
 * Enables or disables QoS processing at runtime.
 * @param enable true to inspect PACKET_INs against policies, false to ignore them
 */
@Override
public void enableQoS(boolean enable) {
logger.info("Setting QoS to {}", enable);
this.enabled = enable;
}
/**
 * Returns whether or not the module is enabled.
 */
@Override
public boolean isEnabled(){
return this.enabled;
}
/**
 * Return a List of Quality of Service Policies.
 * NOTE(review): this hands out the internal list directly, so callers can
 * mutate it outside the module's synchronization — confirm intended.
 */
@Override
public List<QoSPolicy> getPolicies() {
return this.policies;
}
/**
 * Returns a list of services available for Network Type of Service.
 * NOTE(review): also exposes the internal list directly (see above).
 * @return List
 */
@Override
public List<QoSTypeOfService> getServices() {
return this.services;
}
/**
 * Add a service class to use in policies.
 * Used to make ToS/DiffServ Bits human readable.
 * Bit notation 000000 becomes "Best Effort".
 * The service is appended to the in-memory list and persisted to the
 * controller_qos_tos storage table.
 * @param service the service to add; its sid is (re)generated here
 */
@Override
public synchronized void addService(QoSTypeOfService service) {
    //debug
    logger.debug("Adding Service to List and Storage");
    //create the UID
    service.sid = service.genID();
    // The DiffServ/ToS field is six bits wide: valid values are 0x00-0x3F (0-63).
    if (service.tos >= (byte) 0x00 && service.tos <= (byte) 0x3F) {
        try {
            //Add to the list of services
            //un-ordered, naturally a short list
            this.services.add(service);
            //add to the storage source
            Map<String, Object> serviceEntry = new HashMap<String, Object>();
            serviceEntry.put(COLUMN_SID, Integer.toString(service.sid));
            serviceEntry.put(COLUMN_SNAME, service.name);
            serviceEntry.put(COLUMN_TOSBITS, Byte.toString(service.tos));
            //add to storage
            storageSource.insertRow(TOS_TABLE_NAME, serviceEntry);
        } catch (Exception e) {
            logger.debug("Error adding service, error: {}", e);
        }
    }
    else {
        // FIX: message previously said "0-64"; the accepted range is 0-63.
        logger.debug("Type of Service must be 0-63");
    }
}
/**
 * Removes a Network Type of Service by its unique id, both from the
 * in-memory list and from persistent storage.
 *
 * @param sid unique service id (as generated by QoSTypeOfService#genID())
 */
@Override
public synchronized void deleteService(int sid) {
    Iterator<QoSTypeOfService> sIter = this.services.iterator();
    while (sIter.hasNext()) {
        QoSTypeOfService s = sIter.next();
        if (s.sid == sid) {
            sIter.remove();
            // Fixed: the persisted row was never deleted, so removed
            // services reappeared after a restart. addService() keys rows
            // by COLUMN_SID (the table's primary key) serialized with
            // Integer.toString().
            storageSource.deleteRow(TOS_TABLE_NAME, Integer.toString(sid));
            break; // ids are unique; only one entry can exist
        }
    }
}
/**
 * Adds a policy: assigns a unique id, inserts it into the priority-sorted
 * in-memory list, persists it to storage, and installs the corresponding
 * flow according to {@code policy.sw}: "all" = every switch in the
 * network, "none" = keep on the controller only, otherwise a single
 * switch DPID.
 *
 * @author wallnerryan
 * @param policy the policy to add
 */
@Override
public synchronized void addPolicy(QoSPolicy policy) {
    logger.debug("Adding Policy to List and Storage");
    // Create the unique id.
    policy.policyid = policy.genID();
    // Insertion sort on priority: find the first slot whose existing policy
    // has priority >= the new one. Gets hairy as the number of policies
    // grows; larger networks may need a merge sort.
    int p = 0;
    for (p = 0; p < this.policies.size(); p++) {
        if (this.policies.isEmpty()) {
            break; // p stays zero
        }
        if (this.policies.get(p).priority >= policy.priority) {
            break; // "p" is the correct position for the new policy
        }
    }
    if (p <= this.policies.size()) {
        this.policies.add(p, policy);
    } else {
        this.policies.add(policy);
    }
    // Persist the policy to the storage source.
    Map<String, Object> policyEntry = new HashMap<String, Object>();
    policyEntry.put(COLUMN_POLID, Long.toString(policy.policyid));
    policyEntry.put(COLUMN_NAME, policy.name);
    policyEntry.put(COLUMN_MATCH_PROTOCOL, Short.toString(policy.protocol));
    policyEntry.put(COLUMN_MATCH_ETHTYPE, Short.toString(policy.ethtype));
    policyEntry.put(COLUMN_MATCH_INGRESSPRT, Short.toString(policy.ingressport));
    policyEntry.put(COLUMN_MATCH_IPSRC, Integer.toString(policy.ipsrc));
    // Fixed: ipdst was serialized with Integer.toBinaryString(), unlike
    // every other numeric column (including ipsrc), so the stored value
    // could not be read back with the same parsing as the rest of the row.
    policyEntry.put(COLUMN_MATCH_IPDST, Integer.toString(policy.ipdst));
    policyEntry.put(COLUMN_MATCH_VLANID, Short.toString(policy.vlanid));
    policyEntry.put(COLUMN_MATCH_ETHSRC, policy.ethsrc);
    policyEntry.put(COLUMN_MATCH_ETHDST, policy.ethdst);
    policyEntry.put(COLUMN_MATCH_TCPUDP_SRCPRT, Short.toString(policy.tcpudpsrcport));
    policyEntry.put(COLUMN_MATCH_TCPUDP_DSTPRT, Short.toString(policy.tcpudpdstport));
    policyEntry.put(COLUMN_NW_TOS, policy.service);
    policyEntry.put(COLUMN_SW, policy.sw);
    policyEntry.put(COLUMN_QUEUE, Short.toString(policy.queue));
    policyEntry.put(COLUMN_ENQPORT, Short.toString(policy.enqueueport));
    policyEntry.put(COLUMN_PRIORITY, Short.toString(policy.priority));
    policyEntry.put(COLUMN_SERVICE, policy.service);
    storageSource.insertRow(TABLE_NAME, policyEntry);
    // TODO Morph this to use a String[] of switches
    if (policy.sw.equals("all")) {
        logger.debug("Adding Policy {} to Entire Network", policy.toString());
        addPolicyToNetwork(policy);
    }
    // [NOTE] Not utilized yet; future revisions will "save" policies on the
    // controller and later modify them to be added to switches.
    else if (policy.sw.equals("none")) {
        logger.debug("Adding Policy {} to Controller", policy.toString());
    }
    // A specific switch DPID, since "all"/"none" were not given.
    else if (policy.sw.matches(dpidPattern)) {
        logger.debug("Adding policy {} to Switch {}", policy.toString(), policy.sw);
        addPolicy(policy, policy.sw);
    } else {
        logger.error("***Policy {} error at switch input {} ***" +
                "", policy.toString(), policy.sw);
    }
}
/**
 * Pushes a policy's flow-mod to every connected switch in the network.
 *
 * @param policy the policy to install network-wide
 */
@Override
public void addPolicyToNetwork(QoSPolicy policy) {
    OFFlowMod flow = policyToFlowMod(policy);
    logger.info("adding policy-flow {} to all switches", flow.toString());
    Map<Long, IOFSwitch> switches = floodlightProvider.getSwitches();
    if (!(switches.isEmpty())) {
        for (IOFSwitch sw : switches.values()) {
            if (!(sw.isConnected())) {
                // Fixed: was "break", which silently skipped every
                // remaining switch as soon as a single disconnected switch
                // was encountered.
                continue;
            }
            logger.info("Add flow Name: {} Flow: {} Switch " + sw.getStringId(),
                    policy.name, flow.toString());
            // Per-switch flow names are made unique via the DPID string's
            // hashCode.
            flowPusher.addFlow(policy.name + Integer.toString(sw.getStringId().hashCode()),
                    flow, sw.getStringId());
        }
    }
}
/**
 * Installs a single policy's flow on one switch.
 * Future revisions will accept a String[] of switches, including one.
 *
 * @param policy the policy to install
 * @param swid   DPID string of the target switch
 */
@Override
public void addPolicy(QoSPolicy policy, String swid) {
    OFFlowMod flow = policyToFlowMod(policy);
    logger.info("Adding policy-flow {} to switch {}", flow.toString(), swid);
    // Per-switch flow names are made unique via the DPID string's hashCode.
    String flowName = policy.name + Integer.toString(swid.hashCode());
    flowPusher.addFlow(flowName, flow, swid);
}
/**
 * Removes a policy's flows from every connected switch in the network.
 *
 * @param policyName the name of the policy whose flows should be removed
 */
@Override
public void deletePolicyFromNetwork(String policyName) {
    Map<Long, IOFSwitch> switches = floodlightProvider.getSwitches();
    if (!(switches.isEmpty())) {
        for (IOFSwitch sw : switches.values()) {
            if (!(sw.isConnected())) {
                // Fixed: was "break", which skipped all remaining switches
                // as soon as one disconnected switch was encountered.
                continue;
            }
            logger.debug("{} has {}", sw.getStringId(), flowPusher.getFlows());
            // Per-switch flow names are derived from the DPID's hashCode.
            flowPusher.deleteFlow(policyName + Integer.toString(sw.getStringId().hashCode()));
        }
    }
}
/**
 * Deletes a policy (called by the REST API @DELETE handler): removes the
 * installed flows from the switch(es) it was applied to, then removes the
 * policy from the in-memory list and from persistent storage.
 *
 * @author wallnerryan
 * @param policy the policy to delete (matched by policyid)
 */
@Override
public synchronized void deletePolicy(QoSPolicy policy) {
    logger.info("Deleting policy {} attached to switches: {}", policy.name, policy.sw);
    if (policy.sw.equalsIgnoreCase("none")) {
        // Policy was never pushed to a switch; nothing to uninstall.
        logger.info("policy match no switches, removeing from storage");
    } else if (policy.sw.equalsIgnoreCase("all")) {
        logger.info("Delete flows from network!");
        deletePolicyFromNetwork(policy.name);
    } else if (policy.sw.matches(dpidPattern)) {
        deletePolicy(policy.sw, policy.name);
    } else {
        logger.error("Error!, Unrecognized switches! Switch is : {}", policy.sw);
    }
    // Remove from the in-memory list.
    Iterator<QoSPolicy> sIter = this.policies.iterator();
    while (sIter.hasNext()) {
        QoSPolicy p = sIter.next();
        if (p.policyid == policy.policyid) {
            sIter.remove();
            break; // policy ids are unique; only one can exist
        }
    }
    // Fixed: the "remove from storage" step only touched the in-memory
    // list, so deleted policies reappeared after a restart. addPolicy()
    // keys rows by COLUMN_POLID serialized with Long.toString().
    // NOTE(review): assumes COLUMN_POLID is the policy table's primary
    // key -- confirm against the table creation in startUp().
    storageSource.deleteRow(TABLE_NAME, Long.toString(policy.policyid));
}
/**
 * Deletes a policy-flow from a specific switch.
 *
 * @param switches   the switch DPID (hex string) the policy was pushed to
 * @param policyName the name of the policy to delete
 */
@Override
public void deletePolicy(String switches, String policyName) {
    // TODO Morph this to use a String[] of switches
    IOFSwitch sw = floodlightProvider.getSwitches()
            .get(HexString.toLong(switches));
    // Fixed: previously sw was dereferenced unconditionally after a null
    // check that guarded only an assert, causing an NPE for unknown DPIDs.
    if (sw == null) {
        logger.error("Cannot delete policy {}: switch {} not found", policyName, switches);
        return;
    }
    assert (sw.isActive());
    // Per-switch flow names are derived from the DPID's hashCode.
    flowPusher.deleteFlow(policyName + sw.getStringId().hashCode());
}
/**
 * Builds an OpenFlow flow-mod from a QoS policy. Every policy field that
 * is set (!= -1 for numerics, != null for strings) becomes part of the
 * exact match (wildcards are disabled). The action depends on the policy
 * type:
 * <ul>
 * <li>queue &gt;= 0 and no service: enqueue on the given port/queue</li>
 * <li>queue == -1 and a service name: rewrite the IP ToS bits</li>
 * </ul>
 *
 * @param policy the policy to convert
 * @return the assembled flow-mod; on misconfiguration an error is logged
 *         and the flow-mod is returned without match/actions set
 */
public OFFlowMod policyToFlowMod(QoSPolicy policy) {
    // Exact match: start with no wildcards, then set each specified field.
    OFMatch match = new OFMatch();
    match.setWildcards(0);
    if (policy.ethtype != -1) {
        match.setDataLayerType((policy.ethtype));
    }
    if (policy.protocol != -1) {
        match.setNetworkProtocol(policy.protocol);
    }
    if (policy.ingressport != -1) {
        match.setInputPort(policy.ingressport);
    }
    if (policy.ipdst != -1) {
        match.setNetworkDestination(policy.ipdst);
    }
    if (policy.ipsrc != -1) {
        match.setNetworkSource(policy.ipsrc);
    }
    if (policy.vlanid != -1) {
        match.setDataLayerVirtualLan(policy.vlanid);
    }
    if (policy.tos != -1) {
        match.setNetworkTypeOfService(policy.tos);
    }
    if (policy.ethsrc != null) {
        match.setDataLayerSource(policy.ethsrc);
    }
    if (policy.ethdst != null) {
        match.setDataLayerDestination(policy.ethdst);
    }
    if (policy.tcpudpsrcport != -1) {
        match.setTransportSource(policy.tcpudpsrcport);
    }
    if (policy.tcpudpdstport != -1) {
        match.setTransportDestination(policy.tcpudpdstport);
    }
    // Create the flow-mod that carries the match plus the policy's action.
    OFFlowMod fm = new OFFlowMod();
    fm.setType(OFType.FLOW_MOD);
    if (policy.queue > -1 && policy.service == null) {
        // Queuing policy: enqueue matching packets on a switch queue.
        logger.info("This policy is a queuing policy");
        List<OFAction> actions = new ArrayList<OFAction>();
        OFActionEnqueue enqueue = new OFActionEnqueue();
        enqueue.setLength((short) 0xffff);
        enqueue.setType(OFActionType.OPAQUE_ENQUEUE); // set in the constructor too; kept for clarity
        enqueue.setPort(policy.enqueueport);
        enqueue.setQueueId(policy.queue);
        actions.add((OFAction) enqueue);
        logger.info("Match is : {}", match.toString());
        fm.setMatch(match)
            .setActions(actions)
            .setIdleTimeout((short) 0) // infinite
            .setHardTimeout((short) 0) // infinite
            .setBufferId(OFPacketOut.BUFFER_ID_NONE)
            .setFlags((short) 0)
            .setOutPort(OFPort.OFPP_NONE.getValue())
            .setPriority(policy.priority)
            .setLengthU((short) OFFlowMod.MINIMUM_LENGTH + OFActionEnqueue.MINIMUM_LENGTH);
    } else if (policy.queue == -1 && policy.service != null) {
        // Type-of-service policy: rewrite the packet's ToS bits.
        logger.info("This policy is a type of service policy");
        // Resolve the service name to its ToS bits (last matching name wins).
        Byte pTos = null;
        List<QoSTypeOfService> serviceList = this.getServices();
        for (QoSTypeOfService s : serviceList) {
            if (s.name.equals(policy.service)) {
                pTos = s.tos;
            }
        }
        if (pTos == null) {
            // Fixed: an unknown service name previously caused an NPE when
            // unboxing pTos below; report it as a misconfiguration instead.
            logger.error("Policy Misconfiguration: unknown service {} in policy {}",
                    policy.service, policy.name);
            return fm;
        }
        List<OFAction> actions = new ArrayList<OFAction>();
        OFActionNetworkTypeOfService tosAction = new OFActionNetworkTypeOfService();
        tosAction.setType(OFActionType.SET_NW_TOS);
        tosAction.setLength((short) 0xffff);
        tosAction.setNetworkTypeOfService(pTos);
        actions.add((OFAction) tosAction);
        logger.info("Match is : {}", match.toString());
        // NOTE(review): this branch pins priority to Short.MAX_VALUE while
        // the queuing branch uses policy.priority -- confirm the asymmetry
        // is intended.
        fm.setMatch(match)
            .setActions(actions)
            .setIdleTimeout((short) 0) // infinite
            .setHardTimeout((short) 0) // infinite
            .setBufferId(OFPacketOut.BUFFER_ID_NONE)
            .setFlags((short) 0)
            .setOutPort(OFPort.OFPP_NONE.getValue())
            .setPriority(Short.MAX_VALUE)
            .setLengthU((short) OFFlowMod.MINIMUM_LENGTH + OFActionNetworkTypeOfService.MINIMUM_LENGTH);
    } else {
        logger.error("Policy Misconfiguration");
    }
    return fm;
}
/**
 * Logs which installed QoS policy, if any, a PACKET_IN's match
 * corresponds to. Policies don't apply to wildcards (yet).
 *
 * @param tmpMatch match extracted from the PACKET_IN payload
 */
private void checkIfQoSApplied(OFMatch tmpMatch) {
    // Iterating an empty list is a no-op, so no explicit emptiness check.
    for (QoSPolicy policy : this.getPolicies()) {
        OFMatch policyMatch = policyToFlowMod(policy).getMatch();
        if (tmpMatch.equals(policyMatch)) {
            logger.info("PACKET_IN matched, Applied QoS Policy {}", policy.toString());
        }
        // Non-matches are intentionally not logged -- too noisy in the
        // console; re-enable a debug log here if needed.
    }
}
}
| |
package il.co.codeguru.corewars8086.war;
import il.co.codeguru.corewars8086.cpu.Cpu;
import il.co.codeguru.corewars8086.cpu.CpuException;
import il.co.codeguru.corewars8086.cpu.CpuState;
import il.co.codeguru.corewars8086.memory.MemoryException;
import il.co.codeguru.corewars8086.memory.RealModeAddress;
import il.co.codeguru.corewars8086.memory.RealModeMemory;
import il.co.codeguru.corewars8086.memory.RealModeMemoryRegion;
import il.co.codeguru.corewars8086.memory.RestrictedAccessRealModeMemory;
/**
 * A single CoreWars warrior.
 *
 * @author DL
 */
public class Warrior {

    /** Warrior's name. */
    private final String name;
    /** Warrior's initial code size. */
    private final int codeSize;
    /** Warrior's initial load address. */
    private final RealModeAddress loadAddress;
    /** Current state of registers & flags. */
    private final CpuState state;
    /** Applies restricted access logic on top of the actual core memory. */
    private final RestrictedAccessRealModeMemory memory;
    /** CPU instance. */
    private final Cpu cpu;
    /** Whether or not the warrior is still alive. */
    private boolean alive;

    /**
     * Constructor.
     *
     * @param name                  Warrior's name.
     * @param codeSize              Warrior's code size.
     * @param core                  Real mode memory used as core.
     * @param loadAddress           Warrior's load address in the core (initial CS:IP).
     * @param initialStack          Warrior's private stack in the core (initial SS:SP).
     * @param groupSharedMemory     Warrior group's shared memory address (initial ES).
     * @param groupSharedMemorySize Warrior group's shared memory size.
     */
    public Warrior(
        String name,
        int codeSize,
        RealModeMemory core,
        RealModeAddress loadAddress,
        RealModeAddress initialStack,
        RealModeAddress groupSharedMemory,
        short groupSharedMemorySize) {

        this.name = name;
        this.codeSize = codeSize;
        this.loadAddress = loadAddress;

        this.state = new CpuState();
        initializeCpuState(loadAddress, initialStack, groupSharedMemory);

        // Boundary addresses of the regions this warrior may touch.
        RealModeAddress lowestStackAddress =
            new RealModeAddress(initialStack.getSegment(), (short) 0);
        RealModeAddress lowestCoreAddress =
            new RealModeAddress(loadAddress.getSegment(), (short) 0);
        RealModeAddress highestCoreAddress =
            new RealModeAddress(loadAddress.getSegment(), (short) -1);
        RealModeAddress highestGroupSharedMemoryAddress =
            new RealModeAddress(groupSharedMemory.getSegment(),
                (short) (groupSharedMemorySize - 1));

        // Reads and writes are allowed in the private stack, the core
        // segment, and the group's shared memory.
        RealModeMemoryRegion[] readAccessRegions =
            new RealModeMemoryRegion[] {
                new RealModeMemoryRegion(lowestStackAddress, initialStack),
                new RealModeMemoryRegion(lowestCoreAddress, highestCoreAddress),
                new RealModeMemoryRegion(groupSharedMemory, highestGroupSharedMemoryAddress)
            };
        RealModeMemoryRegion[] writeAccessRegions =
            new RealModeMemoryRegion[] {
                new RealModeMemoryRegion(lowestStackAddress, initialStack),
                new RealModeMemoryRegion(lowestCoreAddress, highestCoreAddress),
                new RealModeMemoryRegion(groupSharedMemory, highestGroupSharedMemoryAddress)
            };
        // Execution is permitted only inside the core segment.
        RealModeMemoryRegion[] executeAccessRegions =
            new RealModeMemoryRegion[] {
                new RealModeMemoryRegion(lowestCoreAddress, highestCoreAddress)
            };

        this.memory = new RestrictedAccessRealModeMemory(
            core, readAccessRegions, writeAccessRegions, executeAccessRegions);

        this.cpu = new Cpu(this.state, this.memory);

        this.alive = true;
    }

    /**
     * @return whether or not the warrior is still alive.
     */
    public boolean isAlive() {
        return alive;
    }

    /**
     * Kills the warrior.
     */
    public void kill() {
        alive = false;
    }

    /**
     * @return the warrior's name.
     */
    public String getName() {
        return name;
    }

    /**
     * @return the warrior's load offset.
     */
    public short getLoadOffset() {
        return loadAddress.getOffset();
    }

    /**
     * @return the warrior's initial code size.
     */
    public int getCodeSize() {
        return codeSize;
    }

    /**
     * Accessors for the warrior's Energy value (used to calculate
     * the warrior's speed).
     */
    public short getEnergy() {
        return state.getEnergy();
    }

    public void setEnergy(short value) {
        state.setEnergy(value);
    }

    /**
     * Performs the warrior's next turn (= next opcode).
     *
     * @throws CpuException on any CPU error.
     * @throws MemoryException on any Memory error.
     */
    public void nextOpcode() throws CpuException, MemoryException {
        cpu.nextOpcode();
    }

    /**
     * Initializes the Cpu registers & flags:
     * CS,DS - set to the core's segment.
     * ES - set to the group's shared memory segment.
     * AX,IP - set to the load address.
     * SS - set to the private stack's segment.
     * SP - set to the private stack's offset.
     * BX,CX,DX,SI,DI,BP, flags - set to zero.
     *
     * @param loadAddress Warrior's load address in the core.
     * @param initialStack Warrior's private stack (initial SS:SP).
     * @param groupSharedMemory The warrior's group shared memory.
     */
    private void initializeCpuState(
        RealModeAddress loadAddress, RealModeAddress initialStack,
        RealModeAddress groupSharedMemory) {

        // General-purpose and segment registers.
        state.setAX(loadAddress.getOffset());
        state.setBX((short) 0);
        state.setCX((short) 0);
        state.setDX((short) 0);
        state.setDS(loadAddress.getSegment());
        state.setES(groupSharedMemory.getSegment());
        state.setSI((short) 0);
        state.setDI((short) 0);
        state.setSS(initialStack.getSegment());
        state.setBP((short) 0);
        state.setSP(initialStack.getOffset());
        state.setCS(loadAddress.getSegment());
        state.setIP(loadAddress.getOffset());
        state.setFlags((short) 0);

        // Energy starts at zero.
        state.setEnergy((short) 0);

        // Initial bomb inventory.
        state.setBomb1Count((byte) 2);
        state.setBomb2Count((byte) 1);
    }

    public CpuState getCpuState() {
        return state;
    }
}
| |
/**
*
* PixelFlow | Copyright (C) 2017 Thomas Diewald - www.thomasdiewald.com
*
* https://github.com/diwi/PixelFlow.git
*
* A Processing/Java library for high performance GPU-Computing.
* MIT License: https://opensource.org/licenses/MIT
*
*/
package com.thomasdiewald.pixelflow.java.imageprocessing;
import com.jogamp.opengl.GL;
import com.jogamp.opengl.GL2;
import com.thomasdiewald.pixelflow.java.DwPixelFlow;
import com.thomasdiewald.pixelflow.java.dwgl.DwGLSLProgram;
import com.thomasdiewald.pixelflow.java.dwgl.DwGLTexture;
import com.thomasdiewald.pixelflow.java.imageprocessing.filter.DwFilter;
import processing.opengl.PGraphicsOpenGL;
import processing.opengl.Texture;
/**
*
* Builds a gradient image from a given input.<br>
* The gradient is stored in a two-channel float texture.<br>
* <br>
* This class includes some GLSL programs for rendering:
* <ul>
* <li> Velocity Pixel Shader </li>
* <li> Velocity Line Shader </li>
* <li> Line Integral Convolution - LIC </li>
* </ul>
*
* @author Thomas Diewald
*
*/
public class DwFlowField {

  /** Tunable parameters for flow-field creation and line/pixel rendering. */
  public static class Param {
    public float   line_spacing = 15;
    public float   line_width   = 1.0f;
    public float   line_scale   = 1.5f;
    public boolean line_smooth  = true;
    public int     line_mode    = 0; // 0 or 1, in velocity direction, or normal to it
    public int     line_shading = 0; // 0 = col_A/col_B, 1 = velocity
    public float[] line_col_A = {1,1,1,1.0f};
    public float[] line_col_B = {1,1,1,0.1f};
    public int     blend_mode = 0; // BLEND=0; ADD=1
    public int     blur_radius = 2;
    public int     blur_iterations = 1;
    public boolean HIGHP_FLOAT = false; // false=GL2.GL_RG16F, true=GL2.GL_RG32F;
  }

  public DwPixelFlow context;

  public Param param = new Param();

  // GLSL programs: field creation plus the three display modes.
  public DwGLSLProgram shader_create;
  public DwGLSLProgram shader_display_lines;
  public DwGLSLProgram shader_display_pixel;
  public DwGLSLProgram shader_display_lic;

  // Two-channel float texture holding the gradient/velocity field, and a
  // scratch texture used by the separable blur.
  public DwGLTexture tex_vel = new DwGLTexture();
  public DwGLTexture tex_tmp = new DwGLTexture();

  int tex_wrap = GL2.GL_CLAMP_TO_EDGE;

  public DwFlowField(DwPixelFlow context){
    this.context = context;
    // Let Processing call dispose() when the sketch shuts down.
    context.papplet.registerMethod("dispose", this);
    String data_path = DwPixelFlow.SHADER_DIR+"Filter/";
    shader_create        = context.createShader(data_path+"flowfield_create.frag");
    shader_display_pixel = context.createShader(data_path+"flowfield_display_pixel.frag");
    // The same .glsl file is compiled as both vertex and fragment shader,
    // distinguished by the SHADER_VERT / SHADER_FRAG defines below.
    shader_display_lines = context.createShader(data_path+"flowfield_display_lines.glsl", data_path+"flowfield_display_lines.glsl");
    shader_display_lines.frag.setDefine("SHADER_FRAG", 1);
    shader_display_lines.vert.setDefine("SHADER_VERT", 1);
    shader_display_lic = context.createShader(data_path+"flowfield_display_line_integral_convolution.frag");
  }

  public void dispose(){
    release();
  }

  /** Releases all GL textures owned by this instance. */
  public void release(){
    tex_vel.release();
    tex_tmp.release();
    tex_lic.release();
  }

  /** Clears the velocity field to zero. */
  public void reset(){
    tex_vel.clear(0);
  }

  /**
   * (Re)allocates the velocity texture to w x h; RG16F by default,
   * RG32F when param.HIGHP_FLOAT is set.
   *
   * @return true if the texture was actually reallocated (it is then cleared)
   */
  public boolean resize(int w, int h){
    int internalFormat = GL2.GL_RG16F;
    int byte_per_channel = 2;
    if(param.HIGHP_FLOAT){
      internalFormat = GL2.GL_RG32F;
      byte_per_channel = 4;
    }
    boolean resized = tex_vel.resize(context, internalFormat, w, h, GL2.GL_RG, GL.GL_FLOAT, GL2.GL_LINEAR, tex_wrap, 2, byte_per_channel);
    if(resized){
      tex_vel.clear(0);
    }
    return resized;
  }

  /** Builds the flow field from a PGraphics' backing texture. */
  public void create(PGraphicsOpenGL pg_src){
    Texture tex_src = pg_src.getTexture(); if(!tex_src.available()) return;
    create(tex_src.glName, tex_src.glWidth, tex_src.glHeight);
  }

  /** Builds the flow field from a DwGLTexture. */
  public void create(DwGLTexture tex_src){
    create(tex_src.HANDLE[0], tex_src.w, tex_src.h);
  }

  /**
   * Builds the flow field from a raw GL texture handle: runs the creation
   * shader into tex_vel, then applies the configured blur passes.
   */
  public void create(int tex_src, int w_src, int h_src){
    context.begin();
    resize(w_src, h_src);
    int w_dst = tex_vel.w;
    int h_dst = tex_vel.h;
    context.beginDraw(tex_vel);
    shader_create.begin();
    shader_create.uniform2f     ("wh_rcp" , 1f/w_dst, 1f/h_dst);
    shader_create.uniformTexture("tex_src", tex_src);
    shader_create.drawFullScreenQuad();
    shader_create.end();
    context.endDraw("FlowField.create");
    blur(param.blur_iterations, param.blur_radius);
    context.end();
  }

  /** Blurs the velocity field using the parameters in {@link Param}. */
  public void blur(){
    blur(param.blur_iterations, param.blur_radius);
  }

  /**
   * Applies an iterated gaussian blur to the velocity field.
   * No-op when the field is unallocated or iterations/radius are <= 0.
   */
  public void blur(int iterations, int radius){
    if(!tex_vel.isTexture() || iterations <= 0 || radius <= 0){
      return;
    }
    tex_tmp.resize(context, tex_vel);
    // Mirror-wrap while blurring so edges don't bleed the clamp color.
    tex_tmp.setParamWrap(GL2.GL_MIRRORED_REPEAT);
    tex_vel.setParamWrap(GL2.GL_MIRRORED_REPEAT);
    for(int i = 0; i < iterations; i++){
      DwFilter.get(context).gaussblur.apply(tex_vel, tex_vel, tex_tmp, radius);
    }
    // Restore the default wrap mode afterwards.
    tex_vel.setParamWrap(tex_wrap);
    context.errorCheck("FlowField.blur()");
  }

  /** Renders the field as a grid of short velocity-aligned lines. */
  public void displayLines(PGraphicsOpenGL dst){
    int w = dst.width;
    int h = dst.height;
    // One line per line_spacing x line_spacing cell.
    int lines_x = (int) Math.ceil(w/param.line_spacing);
    int lines_y = (int) Math.ceil(h/param.line_spacing);
    int num_lines = lines_x * lines_y;
    float scale = param.line_scale;
    context.begin();
    context.beginDraw(dst);
    blendMode();
    shader_display_lines.vert.setDefine("LINE_MODE"   , param.line_mode);
    shader_display_lines.vert.setDefine("LINE_SHADING", param.line_shading);
    shader_display_lines.begin();
    shader_display_lines.uniform4fv    ("col_A"       , 1, param.line_col_A);
    shader_display_lines.uniform4fv    ("col_B"       , 1, param.line_col_B);
    shader_display_lines.uniform2i     ("wh_lines"    ,    lines_x,    lines_y);
    shader_display_lines.uniform2f     ("wh_lines_rcp", 1f/lines_x, 1f/lines_y);
    shader_display_lines.uniform1f     ("vel_scale"   , scale);
    shader_display_lines.uniformTexture("tex_velocity", tex_vel);
    shader_display_lines.drawFullScreenLines(num_lines, param.line_width, param.line_smooth);
    shader_display_lines.end();
    context.endDraw("FlowField.displayLines");
    context.end();
  }

  /** Renders the raw velocity field per pixel. */
  public void displayPixel(PGraphicsOpenGL dst){
    int w = dst.width;
    int h = dst.height;
    context.begin();
    context.beginDraw(dst);
    blendMode();
    shader_display_pixel.begin();
    shader_display_pixel.uniform2f     ("wh_rcp"      , 1f/w, 1f/h);
    shader_display_pixel.uniformTexture("tex_velocity", tex_vel);
    shader_display_pixel.drawFullScreenQuad();
    shader_display_pixel.end();
    // NOTE(review): the error-check label is attached to end() here, while
    // displayLines() attaches it to endDraw() -- both overloads exist, but
    // confirm the placement is intentional.
    context.endDraw();
    context.end("FlowField.displayPixel");
  }

  /** Applies the blend mode selected by param.blend_mode (0=BLEND, 1=ADD). */
  protected void blendMode(){
    context.gl.glEnable(GL.GL_BLEND);
    context.gl.glBlendEquation(GL.GL_FUNC_ADD);
    switch(param.blend_mode){
      case 0:  context.gl.glBlendFuncSeparate(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA, GL.GL_ONE, GL.GL_ONE); break; // BLEND
      case 1:  context.gl.glBlendFuncSeparate(GL.GL_SRC_ALPHA, GL.GL_ONE                , GL.GL_ONE, GL.GL_ONE); break; // ADD
      default: context.gl.glBlendFuncSeparate(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA, GL.GL_ONE, GL.GL_ONE); break; // BLEND
    }
  }

  /** Tunable parameters for Line Integral Convolution rendering. */
  public static class ParamLIC{
    public int     iterations     = 1;
    public int     num_samples    = 25;
    public float   acc_mult       = 1.0f;
    public float   vel_mult       = 1.00f;
    public float   intensity_mult = 1f;
    public float   intensity_exp  = 1f;
    public boolean TRACE_FORWARD  = false;
    public boolean TRACE_BACKWARD = true;
  }

  public ParamLIC param_lic = new ParamLIC();

  // Ping-pong texture pair used by the iterative LIC passes.
  public DwGLTexture.TexturePingPong tex_lic = new DwGLTexture.TexturePingPong();

  /** LIC display into/out of PGraphics targets. */
  public void displayLineIntegralConvolution(PGraphicsOpenGL dst, PGraphicsOpenGL src){
    resizeLic(dst.width, dst.height);
    DwFilter.get(context).copy.apply(src, tex_lic.src);
    applyLineIntegralConvolution();
    DwFilter.get(context).copy.apply(tex_lic.src, dst);
  }

  /** LIC display into/out of DwGLTexture targets. */
  public void displayLineIntegralConvolution(DwGLTexture dst, DwGLTexture src){
    resizeLic(dst.w, dst.h);
    DwFilter.get(context).copy.apply(src, tex_lic.src);
    applyLineIntegralConvolution();
    DwFilter.get(context).copy.apply(tex_lic.src, dst);
  }

  // TODO, custom formats?
  private void resizeLic(int w, int h){
    tex_lic.resize(context, GL2.GL_RGBA8  , w, h, GL2.GL_RGBA, GL2.GL_UNSIGNED_BYTE, GL2.GL_LINEAR, GL2.GL_MIRRORED_REPEAT, 4, 1);
//    tex_lic.resize(context, GL2.GL_RGBA16F, w, h, GL2.GL_RGBA, GL2.GL_FLOAT        , GL2.GL_LINEAR, GL2.GL_MIRRORED_REPEAT, 4, 2);
  }

  /**
   * Runs the configured number of LIC iterations over the ping-pong pair;
   * the result ends up in tex_lic.src after the final swap.
   */
  private void applyLineIntegralConvolution(){
    if(!param_lic.TRACE_FORWARD && !param_lic.TRACE_BACKWARD){
      return;
    }

    int w_dst = tex_lic.dst.w;
    int h_dst = tex_lic.dst.h;
    int w_vel = tex_vel.w;
    int h_vel = tex_vel.h;

    boolean APPLY_EXP_SHADING = param_lic.intensity_exp != 0.0;

    // Split the sample budget across the enabled trace directions.
    int TRACE_B = param_lic.TRACE_BACKWARD ? 1 : 0;
    int TRACE_F = param_lic.TRACE_FORWARD  ? 1 : 0;
    int num_samples = (int) Math.ceil(param_lic.num_samples / (float)(TRACE_B + TRACE_F));

    shader_display_lic.frag.setDefine("NUM_SAMPLES"      , num_samples);
    shader_display_lic.frag.setDefine("TRACE_FORWARD"    , TRACE_F);
    shader_display_lic.frag.setDefine("TRACE_BACKWARD"   , TRACE_B);
    shader_display_lic.frag.setDefine("APPLY_EXP_SHADING", APPLY_EXP_SHADING ? 1 : 0);

    float[] acc_minmax = {0, 1};
    // NOTE(review): vel_minmax is scaled by acc_mult (not vel_mult) --
    // confirm this is intentional.
    float[] vel_minmax = {0, Math.max(1, param_lic.acc_mult)};

    context.begin();
    for(int i = 0; i < param_lic.iterations; i++){
      context.beginDraw(tex_lic.dst);
      shader_display_lic.begin();
      shader_display_lic.uniform2f     ("wh_rcp"    , 1f/w_dst, 1f/h_dst);
      shader_display_lic.uniform2f     ("wh_vel_rcp", 1f/w_vel, 1f/h_vel);
      shader_display_lic.uniform1f     ("acc_mult"  , param_lic.acc_mult);
      shader_display_lic.uniform1f     ("vel_mult"  , param_lic.vel_mult);
      shader_display_lic.uniform2f     ("acc_minmax", acc_minmax[0], acc_minmax[1]);
      shader_display_lic.uniform2f     ("vel_minmax", vel_minmax[0], vel_minmax[1]);
      shader_display_lic.uniform1f     ("intensity_mult", param_lic.intensity_mult);
      if(APPLY_EXP_SHADING){
        shader_display_lic.uniform1f   ("intensity_exp" , param_lic.intensity_exp);
      }
      shader_display_lic.uniformTexture("tex_src"   , tex_lic.src);
      shader_display_lic.uniformTexture("tex_acc"   , tex_vel);
      shader_display_lic.drawFullScreenQuad();
      shader_display_lic.end();
      context.endDraw();
      tex_lic.swap();
    }
    context.end("FlowField.displayLineIntegralConvolution");
  }

}
| |
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.rpc.client;
import java.net.SocketAddress;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelHandler;
import org.jboss.netty.util.HashedWheelTimer;
import org.jboss.netty.util.Timeout;
import org.jboss.netty.util.Timer;
import org.jboss.netty.util.TimerTask;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.navercorp.pinpoint.rpc.ChannelWriteCompleteListenableFuture;
import com.navercorp.pinpoint.rpc.ChannelWriteFailListenableFuture;
import com.navercorp.pinpoint.rpc.DefaultFuture;
import com.navercorp.pinpoint.rpc.Future;
import com.navercorp.pinpoint.rpc.PinpointSocketException;
import com.navercorp.pinpoint.rpc.ResponseMessage;
import com.navercorp.pinpoint.rpc.client.ConnectFuture.Result;
import com.navercorp.pinpoint.rpc.common.SocketStateChangeResult;
import com.navercorp.pinpoint.rpc.common.SocketStateCode;
import com.navercorp.pinpoint.rpc.packet.ClientClosePacket;
import com.navercorp.pinpoint.rpc.packet.ControlHandshakeResponsePacket;
import com.navercorp.pinpoint.rpc.packet.HandshakeResponseCode;
import com.navercorp.pinpoint.rpc.packet.Packet;
import com.navercorp.pinpoint.rpc.packet.PacketType;
import com.navercorp.pinpoint.rpc.packet.PingPacket;
import com.navercorp.pinpoint.rpc.packet.RequestPacket;
import com.navercorp.pinpoint.rpc.packet.ResponsePacket;
import com.navercorp.pinpoint.rpc.packet.SendPacket;
import com.navercorp.pinpoint.rpc.packet.stream.StreamPacket;
import com.navercorp.pinpoint.rpc.stream.ClientStreamChannelContext;
import com.navercorp.pinpoint.rpc.stream.ClientStreamChannelMessageListener;
import com.navercorp.pinpoint.rpc.stream.DisabledServerStreamChannelMessageListener;
import com.navercorp.pinpoint.rpc.stream.ServerStreamChannelMessageListener;
import com.navercorp.pinpoint.rpc.stream.StreamChannelContext;
import com.navercorp.pinpoint.rpc.stream.StreamChannelManager;
import com.navercorp.pinpoint.rpc.util.ClassUtils;
import com.navercorp.pinpoint.rpc.util.IDGenerator;
import com.navercorp.pinpoint.rpc.util.TimerFactory;
/**
* @author emeroad
* @author netspider
* @author koo.taejin
*/
public class PinpointSocketHandler extends SimpleChannelHandler implements SocketHandler {
// Default ping interval: 5 minutes.
private static final long DEFAULT_PING_DELAY = 60 * 1000 * 5;
// Default request timeout: 3 seconds.
private static final long DEFAULT_TIMEOUTMILLIS = 3 * 1000;
// Default handshake retry interval: 1 minute.
private static final long DEFAULT_ENABLE_WORKER_PACKET_DELAY = 60 * 1000 * 1;
// Handshakes are retried effectively forever by default.
private static final int DEFAULT_ENABLE_WORKER_PACKET_RETRY_COUNT = Integer.MAX_VALUE;

private final Logger logger = LoggerFactory.getLogger(this.getClass());

// Unique id issued by the factory for this socket instance.
private final int socketId;
// State machine tracking this handler's lifecycle.
private final PinpointSocketHandlerState state;

// Netty channel; assigned in channelOpen(), hence volatile.
private volatile Channel channel;

private long timeoutMillis = DEFAULT_TIMEOUTMILLIS;
private long pingDelay = DEFAULT_PING_DELAY;

private long handshakeRetryInterval = DEFAULT_ENABLE_WORKER_PACKET_DELAY;
private int maxHandshakeCount = DEFAULT_ENABLE_WORKER_PACKET_RETRY_COUNT;

// Timer driving pings, handshake retries and request timeouts.
private final Timer channelTimer;

private final PinpointSocketFactory pinpointSocketFactory;
private SocketAddress connectSocketAddress;
// Facade exposed to users; assigned via setPinpointSocket().
private volatile PinpointSocket pinpointSocket;

private final MessageListener messageListener;
private final ServerStreamChannelMessageListener serverStreamChannelMessageListener;

// Tracks outstanding request/response pairs and their timeouts.
private final RequestManager requestManager;

// Write-completion listeners that log write successes/failures.
private final ChannelFutureListener pingWriteFailFutureListener = new WriteFailFutureListener(this.logger, "ping write fail.", "ping write success.");
private final ChannelFutureListener sendWriteFailFutureListener = new WriteFailFutureListener(this.logger, "send() write fail.", "send() write success.");
private final ChannelFutureListener sendClosePacketFailFutureListener = new WriteFailFutureListener(this.logger, "sendClosedPacket() write fail.", "sendClosedPacket() write success.");

private final PinpointClientSocketHandshaker handshaker;

// Completed once the connection attempt succeeds or fails.
private final ConnectFuture connectFuture = new ConnectFuture();

// "SimpleClassName@hashCode" tag used to correlate log messages.
private final String objectUniqName;
/**
 * Creates a handler with the default ping delay, handshake retry interval
 * and request timeout.
 *
 * @param pinpointSocketFactory owning factory; must not be null
 */
public PinpointSocketHandler(PinpointSocketFactory pinpointSocketFactory) {
    this(pinpointSocketFactory, DEFAULT_PING_DELAY, DEFAULT_ENABLE_WORKER_PACKET_DELAY, DEFAULT_TIMEOUTMILLIS);
}
public PinpointSocketHandler(PinpointSocketFactory pinpointSocketFactory, long pingDelay, long handshakeRetryInterval, long timeoutMillis) {
if (pinpointSocketFactory == null) {
throw new NullPointerException("pinpointSocketFactory must not be null");
}
HashedWheelTimer timer = TimerFactory.createHashedWheelTimer("Pinpoint-SocketHandler-Timer", 100, TimeUnit.MILLISECONDS, 512);
timer.start();
this.channelTimer = timer;
this.pinpointSocketFactory = pinpointSocketFactory;
this.requestManager = new RequestManager(timer, timeoutMillis);
this.pingDelay = pingDelay;
this.timeoutMillis = timeoutMillis;
this.messageListener = pinpointSocketFactory.getMessageListener(SimpleLoggingMessageListener.LISTENER);
this.serverStreamChannelMessageListener = pinpointSocketFactory.getServerStreamChannelMessageListener(DisabledServerStreamChannelMessageListener.INSTANCE);
this.objectUniqName = ClassUtils.simpleClassNameAndHashCodeString(this);
this.handshaker = new PinpointClientSocketHandshaker(channelTimer, (int) handshakeRetryInterval, maxHandshakeCount);
this.socketId = pinpointSocketFactory.issueNewSocketId();
this.state = new PinpointSocketHandlerState(this.objectUniqName);
}
public void setPinpointSocket(PinpointSocket pinpointSocket) {
if (pinpointSocket == null) {
throw new NullPointerException("pinpointSocket must not be null");
}
this.pinpointSocket = pinpointSocket;
}
public void setConnectSocketAddress(SocketAddress connectSocketAddress) {
if (connectSocketAddress == null) {
throw new NullPointerException("connectSocketAddress must not be null");
}
this.connectSocketAddress = connectSocketAddress;
}
@Override
public void channelOpen(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
Channel channel = e.getChannel();
logger.debug("{} channelOpen() started. channel:{}", objectUniqName, channel);
this.channel = channel;
}
@Override
public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
Channel channel = ctx.getChannel();
if ((null == channel) || (this.channel != channel)) {
throw new IllegalArgumentException("Invalid channel variable. this.channel:" + this.channel + ", channel:" + channel + ".");
}
logger.info("{} channelConnected() started. channel:{}", objectUniqName, channel);
SocketStateChangeResult stateChangeResult = state.toConnected();
if (!stateChangeResult.isChange()) {
throw new IllegalStateException("Invalid state:" + stateChangeResult.getCurrentState());
}
prepareChannel(channel);
stateChangeResult = state.toRunWithoutHandshake();
if (!stateChangeResult.isChange()) {
throw new IllegalStateException("Failed to execute channelConnected() method. Error:" + stateChangeResult);
}
registerPing();
Map<String, Object> handshakeData = this.pinpointSocketFactory.getProperties();
handshaker.handshakeStart(channel, handshakeData);
connectFuture.setResult(Result.SUCCESS);
logger.info("{} channelConnected() completed.", objectUniqName);
}
private void prepareChannel(Channel channel) {
StreamChannelManager streamChannelManager = new StreamChannelManager(channel, IDGenerator.createOddIdGenerator(), serverStreamChannelMessageListener);
PinpointSocketHandlerContext context = new PinpointSocketHandlerContext(channel, streamChannelManager);
channel.setAttachment(context);
}
@Override
public void initReconnect() {
logger.info("{} initReconnect() started.", objectUniqName);
SocketStateChangeResult stateChangeResult = state.toBeingConnect();
if (!stateChangeResult.isChange()) {
throw new IllegalStateException("Failed to execute initReconnect() method. Error:" + stateChangeResult);
}
logger.info("{} initReconnect() completed.", objectUniqName);
}
private void registerPing() {
final PingTask pingTask = new PingTask();
newPingTimeout(pingTask);
}
private void newPingTimeout(TimerTask pingTask) {
this.channelTimer.newTimeout(pingTask, pingDelay, TimeUnit.MILLISECONDS);
}
private class PingTask implements TimerTask {
@Override
public void run(Timeout timeout) throws Exception {
if (timeout.isCancelled()) {
newPingTimeout(this);
return;
}
if (state.isClosed()) {
return;
}
writePing();
newPingTimeout(this);
}
}
void writePing() {
if (!state.isEnableCommunication()) {
return;
}
logger.debug("{} writePing() started. channel:{}", objectUniqName, channel);
PingPacket pingPacket = new PingPacket(socketId, (byte) 0, state.getCurrentStateCode().getId());
write0(pingPacket, pingWriteFailFutureListener);
}
public void sendPing() {
if (!state.isEnableCommunication()) {
return;
}
logger.debug("{} sendPing() started.", objectUniqName);
ChannelFuture future = write0(PingPacket.PING_PACKET);
future.awaitUninterruptibly();
if (!future.isSuccess()) {
Throwable cause = future.getCause();
throw new PinpointSocketException("send ping failed. Error:" + cause.getMessage(), cause);
}
logger.debug("{} sendPing() completed.", objectUniqName);
}
public void send(byte[] bytes) {
ChannelFuture future = send0(bytes);
future.addListener(sendWriteFailFutureListener);
}
public Future sendAsync(byte[] bytes) {
ChannelFuture channelFuture = send0(bytes);
final ChannelWriteCompleteListenableFuture future = new ChannelWriteCompleteListenableFuture(timeoutMillis);
channelFuture.addListener(future);
return future ;
}
public void sendSync(byte[] bytes) {
ChannelFuture write = send0(bytes);
await(write);
}
private void await(ChannelFuture channelFuture) {
try {
channelFuture.await(timeoutMillis, TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
if (channelFuture.isDone()) {
boolean success = channelFuture.isSuccess();
if (success) {
return;
} else {
final Throwable cause = channelFuture.getCause();
throw new PinpointSocketException(cause);
}
} else {
boolean cancel = channelFuture.cancel();
if (cancel) {
// if IO not finished in 3 seconds, dose it mean timeout?
throw new PinpointSocketException("io timeout");
} else {
// same logic as above because of success
boolean success = channelFuture.isSuccess();
if (success) {
return;
} else {
final Throwable cause = channelFuture.getCause();
throw new PinpointSocketException(cause);
}
}
}
}
private ChannelFuture send0(byte[] bytes) {
if (bytes == null) {
throw new NullPointerException("bytes");
}
ensureOpen();
SendPacket send = new SendPacket(bytes);
return write0(send);
}
public Future<ResponseMessage> request(byte[] bytes) {
if (bytes == null) {
throw new NullPointerException("bytes");
}
boolean isEnable = state.isEnableCommunication();
if (!isEnable) {
DefaultFuture<ResponseMessage> closedException = new DefaultFuture<ResponseMessage>();
closedException.setFailure(new PinpointSocketException("invalid state:" + state.getCurrentStateCode() + " channel:" + channel));
return closedException;
}
RequestPacket request = new RequestPacket(bytes);
final ChannelWriteFailListenableFuture<ResponseMessage> messageFuture = this.requestManager.register(request, this.timeoutMillis);
write0(request, messageFuture);
return messageFuture;
}
@Override
public ClientStreamChannelContext createStreamChannel(byte[] payload, ClientStreamChannelMessageListener clientStreamChannelMessageListener) {
ensureOpen();
PinpointSocketHandlerContext context = getChannelContext(channel);
return context.createStream(payload, clientStreamChannelMessageListener);
}
@Override
public StreamChannelContext findStreamChannel(int streamChannelId) {
ensureOpen();
PinpointSocketHandlerContext context = getChannelContext(channel);
return context.getStreamChannel(streamChannelId);
}
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) throws Exception {
final Object message = e.getMessage();
if (message instanceof Packet) {
final Packet packet = (Packet) message;
final short packetType = packet.getPacketType();
switch (packetType) {
case PacketType.APPLICATION_RESPONSE:
this.requestManager.messageReceived((ResponsePacket) message, objectUniqName);
return;
// have to handle a request message through connector
case PacketType.APPLICATION_REQUEST:
this.messageListener.handleRequest((RequestPacket) message, e.getChannel());
return;
case PacketType.APPLICATION_SEND:
this.messageListener.handleSend((SendPacket) message, e.getChannel());
return;
case PacketType.APPLICATION_STREAM_CREATE:
case PacketType.APPLICATION_STREAM_CLOSE:
case PacketType.APPLICATION_STREAM_CREATE_SUCCESS:
case PacketType.APPLICATION_STREAM_CREATE_FAIL:
case PacketType.APPLICATION_STREAM_RESPONSE:
case PacketType.APPLICATION_STREAM_PING:
case PacketType.APPLICATION_STREAM_PONG:
PinpointSocketHandlerContext context = getChannelContext(channel);
context.handleStreamEvent((StreamPacket) message);
return;
case PacketType.CONTROL_SERVER_CLOSE:
handleClosedPacket(e.getChannel());
return;
case PacketType.CONTROL_HANDSHAKE_RESPONSE:
handleHandshakePacket((ControlHandshakeResponsePacket)message, e.getChannel());
return;
default:
logger.warn("{} messageReceived() failed. unexpectedMessage received:{} address:{}", objectUniqName, message, e.getRemoteAddress());
}
} else {
logger.warn("{} messageReceived() failed. invalid messageReceived:{}", objectUniqName, message);
}
}
private void handleClosedPacket(Channel channel) {
logger.info("{} handleClosedPacket() started. channel:{}", objectUniqName, channel);
state.toBeingCloseByPeer();
}
private void handleHandshakePacket(ControlHandshakeResponsePacket message, Channel channel) {
boolean isCompleted = handshaker.handshakeComplete(message);
logger.info("{} handleHandshakePacket() started. message:{}", objectUniqName, message);
if (isCompleted) {
HandshakeResponseCode code = handshaker.getHandshakeResult();
if (code == HandshakeResponseCode.SUCCESS || code == HandshakeResponseCode.ALREADY_KNOWN) {
state.toRunSimplex();
} else if (code == HandshakeResponseCode.DUPLEX_COMMUNICATION || code == HandshakeResponseCode.ALREADY_DUPLEX_COMMUNICATION) {
state.toRunDuplex();
} else if (code == HandshakeResponseCode.SIMPLEX_COMMUNICATION || code == HandshakeResponseCode.ALREADY_SIMPLEX_COMMUNICATION) {
state.toRunSimplex();
} else {
logger.warn("{} handleHandshakePacket() failed. Error:Invalid Handshake Packet(code:{}).", objectUniqName, code);
return;
}
logger.info("{} handleHandshakePacket() completed. code:{}", channel, code);
} else if (handshaker.isFinished()){
logger.warn("{} handleHandshakePacket() failed. Error:Handshake already completed.");
} else {
logger.warn("{} handleHandshakePacket() failed. Error:Handshake not yet started.");
}
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception {
Throwable cause = e.getCause();
SocketStateCode currentStateCode = state.getCurrentStateCode();
if (currentStateCode == SocketStateCode.BEING_CONNECT) {
// removed stackTrace when reconnect. so many logs.
logger.info("{} exceptionCaught() occured. state:{}, caused:{}.", objectUniqName, currentStateCode, cause.getMessage());
} else {
logger.warn("{} exceptionCaught() occured. state:{}. Caused:{}", objectUniqName, currentStateCode, cause.getMessage(), cause);
}
// need to handle a error more precisely.
// below code dose not reconnect when node on channel is just hang up or dead without specific reasons.
// state.setClosed();
// Channel channel = e.getChannel();
// if (channel.isConnected()) {
// channel.close();
// }
}
private void ensureOpen() {
SocketStateCode currentStateCode = state.getCurrentStateCode();
if (state.isEnableCommunication(currentStateCode)) {
return;
}
if (state.isReconnect(currentStateCode)) {
throw new PinpointSocketException("reconnecting...");
}
throw new PinpointSocketException("Invalid socket state:" + currentStateCode);
}
// Calling this method on a closed SocketHandler has no effect.
public void close() {
logger.debug("{} close() started.", objectUniqName);
SocketStateCode currentStateCode = state.getCurrentStateCode();
if (currentStateCode.isRun()) {
state.toBeingClose();
closeChannel();
} else if (currentStateCode.isBeforeConnected()) {
state.toClosed();
closeResources();
} else if (currentStateCode.onClose() || currentStateCode.isClosed()) {
logger.warn("close() failed. Already closed.");
} else {
logger.warn("Illegal State :{}.", currentStateCode);
}
}
private void closeChannel() {
Channel channel = this.channel;
if (channel != null) {
closeStreamChannelManager(channel);
sendClosedPacket(channel);
ChannelFuture closeFuture = channel.close();
closeFuture.addListener(new WriteFailFutureListener(logger, "close() event failed.", "close() event success."));
closeFuture.awaitUninterruptibly();
}
}
// Calling this method on a closed SocketHandler has no effect.
private void closeResources() {
logger.debug("{} closeResources() started.", objectUniqName);
this.handshaker.handshakeAbort();
this.requestManager.close();
this.channelTimer.stop();
}
private void closeStreamChannelManager(Channel channel) {
if (!channel.isConnected()) {
logger.debug("channel already closed. skip closeStreamChannelManager() {}", channel);
return;
}
// stream channel clear and send stream close packet
PinpointSocketHandlerContext context = getChannelContext(channel);
if (context != null) {
context.closeAllStreamChannel();
}
}
private void sendClosedPacket(Channel channel) {
if (!channel.isConnected()) {
logger.debug("{} sendClosedPacket() failed. Error:channel already closed.", objectUniqName);
return;
}
logger.debug("{} sendClosedPacket() started.", objectUniqName);
ClientClosePacket clientClosePacket = new ClientClosePacket();
ChannelFuture write = write0(clientClosePacket, sendClosePacketFailFutureListener);
write.awaitUninterruptibly(3000, TimeUnit.MILLISECONDS);
}
@Override
public void channelClosed(final ChannelHandlerContext ctx, final ChannelStateEvent e) throws Exception {
logger.info("{} channelClosed() started.", objectUniqName);
try {
boolean factoryReleased = pinpointSocketFactory.isReleased();
boolean needReconnect = false;
SocketStateCode currentStateCode = state.getCurrentStateCode();
if (currentStateCode == SocketStateCode.BEING_CLOSE_BY_CLIENT) {
state.toClosed();
} else if (currentStateCode == SocketStateCode.BEING_CLOSE_BY_SERVER) {
needReconnect = state.toClosedByPeer().isChange();
} else if (currentStateCode.isRun() && factoryReleased) {
state.toUnexpectedClosed();
} else if (currentStateCode.isRun()) {
needReconnect = state.toUnexpectedClosedByPeer().isChange();
} else if (currentStateCode.isBeforeConnected()) {
state.toConnectFailed();
} else {
state.toErrorUnknown();
}
if (needReconnect) {
pinpointSocketFactory.reconnect(this.pinpointSocket, this.connectSocketAddress);
}
} finally {
closeResources();
connectFuture.setResult(Result.FAIL);
}
}
private ChannelFuture write0(Object message) {
return write0(message, null);
}
private ChannelFuture write0(Object message, ChannelFutureListener futureListener) {
ChannelFuture future = channel.write(message);
if (futureListener != null) {
future.addListener(futureListener);
}
return future;
}
public Timer getChannelTimer() {
return channelTimer;
}
@Override
public ConnectFuture getConnectFuture() {
return connectFuture;
}
@Override
public SocketStateCode getCurrentStateCode() {
return state.getCurrentStateCode();
}
private PinpointSocketHandlerContext getChannelContext(Channel channel) {
if (channel == null) {
throw new NullPointerException("channel must not be null");
}
return (PinpointSocketHandlerContext) channel.getAttachment();
}
@Override
public boolean isConnected() {
return this.state.isEnableCommunication();
}
@Override
public boolean isSupportServerMode() {
return messageListener != SimpleLoggingMessageListener.LISTENER;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder(objectUniqName);
sb.append('{');
sb.append("channel=").append(channel);
sb.append("state=").append(state);
sb.append('}');
return sb.toString();
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.histogram;
import com.google.common.collect.Lists;
import org.apache.lucene.util.CollectionUtil;
import org.apache.lucene.util.PriorityQueue;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
/**
* TODO should be renamed to InternalNumericHistogram (see comment on {@link Histogram})?
*/
public class InternalHistogram<B extends InternalHistogram.Bucket> extends InternalMultiBucketAggregation implements Histogram {

    final static Type TYPE = new Type("histogram", "histo");

    // Deserializes a whole histogram aggregation from the transport stream.
    private final static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() {
        @Override
        public InternalHistogram readResult(StreamInput in) throws IOException {
            InternalHistogram histogram = new InternalHistogram();
            histogram.readFrom(in);
            return histogram;
        }
    };

    // Deserializes a single bucket; the bucket factory travels via the stream context
    // because the wire form of a bucket does not encode which histogram flavor owns it.
    private final static BucketStreams.Stream<Bucket> BUCKET_STREAM = new BucketStreams.Stream<Bucket>() {
        @Override
        public Bucket readResult(StreamInput in, BucketStreamContext context) throws IOException {
            Factory<?> factory = (Factory<?>) context.attributes().get("factory");
            if (factory == null) {
                throw new ElasticsearchIllegalStateException("No factory found for histogram buckets");
            }
            Bucket histogram = new Bucket(context.keyed(), context.formatter(), factory);
            histogram.readFrom(in);
            return histogram;
        }

        @Override
        public BucketStreamContext getBucketStreamContext(Bucket bucket) {
            BucketStreamContext context = new BucketStreamContext();
            context.formatter(bucket.formatter);
            context.keyed(bucket.keyed);
            return context;
        }
    };

    // Registers both streams under this aggregation's type name; called once at startup.
    public static void registerStream() {
        AggregationStreams.registerStream(STREAM, TYPE.stream());
        BucketStreams.registerStream(BUCKET_STREAM, TYPE.stream());
    }

    /**
     * One histogram bucket: a numeric key, a doc count and its sub-aggregations.
     * {@code keyed}/{@code formatter} are transient rendering state and are not serialized.
     */
    public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Histogram.Bucket {

        long key;
        long docCount;
        InternalAggregations aggregations;
        private transient final boolean keyed;
        protected transient final @Nullable ValueFormatter formatter;
        private Factory<?> factory;

        public Bucket(boolean keyed, @Nullable ValueFormatter formatter, Factory<?> factory) {
            this.formatter = formatter;
            this.keyed = keyed;
            this.factory = factory;
        }

        public Bucket(long key, long docCount, boolean keyed, @Nullable ValueFormatter formatter, Factory factory,
                InternalAggregations aggregations) {
            this(keyed, formatter, factory);
            this.key = key;
            this.docCount = docCount;
            this.aggregations = aggregations;
        }

        protected Factory<?> getFactory() {
            return factory;
        }

        @Override
        public String getKeyAsString() {
            // fall back to the raw formatter when no explicit formatter was configured
            return formatter != null ? formatter.format(key) : ValueFormatter.RAW.format(key);
        }

        @Override
        public Object getKey() {
            return key;
        }

        @Override
        public long getDocCount() {
            return docCount;
        }

        @Override
        public Aggregations getAggregations() {
            return aggregations;
        }

        /**
         * Merges same-key buckets coming from different shards: doc counts are summed
         * and sub-aggregations reduced. The method-level {@code <B>} shadows the class
         * type parameter; the cast mirrors the unchecked factory contract.
         */
        <B extends Bucket> B reduce(List<B> buckets, ReduceContext context) {
            List<InternalAggregations> aggregations = new ArrayList<>(buckets.size());
            long docCount = 0;
            for (Bucket bucket : buckets) {
                docCount += bucket.docCount;
                aggregations.add((InternalAggregations) bucket.getAggregations());
            }
            InternalAggregations aggs = InternalAggregations.reduce(aggregations, context);
            return (B) getFactory().createBucket(key, docCount, aggs, keyed, formatter);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            if (formatter != null && formatter != ValueFormatter.RAW) {
                Text keyTxt = new StringText(formatter.format(key));
                if (keyed) {
                    // keyed mode: the formatted key becomes the enclosing object's field name
                    builder.startObject(keyTxt.string());
                } else {
                    builder.startObject();
                }
                builder.field(CommonFields.KEY_AS_STRING, keyTxt);
            } else {
                if (keyed) {
                    builder.startObject(String.valueOf(getKey()));
                } else {
                    builder.startObject();
                }
            }
            builder.field(CommonFields.KEY, key);
            builder.field(CommonFields.DOC_COUNT, docCount);
            aggregations.toXContentInternal(builder, params);
            builder.endObject();
            return builder;
        }

        // NOTE: read/write order must match exactly: key (long), docCount (vlong), aggs.
        @Override
        public void readFrom(StreamInput in) throws IOException {
            key = in.readLong();
            docCount = in.readVLong();
            aggregations = InternalAggregations.readAggregations(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeLong(key);
            out.writeVLong(docCount);
            aggregations.writeTo(out);
        }
    }

    /**
     * Data needed to synthesize empty buckets when {@code min_doc_count == 0}:
     * the rounding that generates successive keys, the empty sub-aggregations to
     * place in each synthetic bucket, and optional extended bounds.
     */
    static class EmptyBucketInfo {

        final Rounding rounding;
        final InternalAggregations subAggregations;
        final ExtendedBounds bounds;

        EmptyBucketInfo(Rounding rounding, InternalAggregations subAggregations) {
            this(rounding, subAggregations, null);
        }

        EmptyBucketInfo(Rounding rounding, InternalAggregations subAggregations, ExtendedBounds bounds) {
            this.rounding = rounding;
            this.subAggregations = subAggregations;
            this.bounds = bounds;
        }

        public static EmptyBucketInfo readFrom(StreamInput in) throws IOException {
            Rounding rounding = Rounding.Streams.read(in);
            InternalAggregations aggs = InternalAggregations.readAggregations(in);
            // boolean flag marks whether extended bounds follow on the wire
            if (in.readBoolean()) {
                return new EmptyBucketInfo(rounding, aggs, ExtendedBounds.readFrom(in));
            }
            return new EmptyBucketInfo(rounding, aggs);
        }

        public static void writeTo(EmptyBucketInfo info, StreamOutput out) throws IOException {
            Rounding.Streams.write(info.rounding, out);
            info.subAggregations.writeTo(out);
            out.writeBoolean(info.bounds != null);
            if (info.bounds != null) {
                info.bounds.writeTo(out);
            }
        }
    }

    /**
     * Creates histograms and buckets of the right concrete flavor; subclassed by
     * the date-histogram variant (see {@link #resolveFactory}).
     */
    static class Factory<B extends InternalHistogram.Bucket> {

        protected Factory() {
        }

        public String type() {
            return TYPE.name();
        }

        public InternalHistogram<B> create(String name, List<B> buckets, InternalOrder order, long minDocCount,
                EmptyBucketInfo emptyBucketInfo, @Nullable ValueFormatter formatter, boolean keyed, Map<String, Object> metaData) {
            return new InternalHistogram<>(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed, this, metaData);
        }

        public B createBucket(long key, long docCount, InternalAggregations aggregations, boolean keyed, @Nullable ValueFormatter formatter) {
            return (B) new Bucket(key, docCount, keyed, formatter, this, aggregations);
        }

        protected B createEmptyBucket(boolean keyed, @Nullable ValueFormatter formatter) {
            return (B) new Bucket(keyed, formatter, this);
        }
    }

    protected List<B> buckets;
    private InternalOrder order;
    private @Nullable ValueFormatter formatter;
    private boolean keyed;
    private long minDocCount;
    private EmptyBucketInfo emptyBucketInfo;    // non-null exactly when minDocCount == 0
    protected Factory<B> factory;

    InternalHistogram() {} // for serialization

    InternalHistogram(String name, List<B> buckets, InternalOrder order, long minDocCount,
                      EmptyBucketInfo emptyBucketInfo,
                      @Nullable ValueFormatter formatter, boolean keyed, Factory<B> factory, Map<String, Object> metaData) {
        super(name, metaData);
        this.buckets = buckets;
        this.order = order;
        assert (minDocCount == 0) == (emptyBucketInfo != null);
        this.minDocCount = minDocCount;
        this.emptyBucketInfo = emptyBucketInfo;
        this.formatter = formatter;
        this.keyed = keyed;
        this.factory = factory;
    }

    @Override
    public Type type() {
        return TYPE;
    }

    @Override
    public List<B> getBuckets() {
        return buckets;
    }

    protected Factory<B> getFactory() {
        return factory;
    }

    // Pairs a per-shard bucket iterator with its current head, for the merge-sort below.
    private static class IteratorAndCurrent<B> {

        private final Iterator<B> iterator;
        private B current;

        IteratorAndCurrent(Iterator<B> iterator) {
            this.iterator = iterator;
            current = iterator.next();
        }
    }

    /**
     * k-way merge of the per-shard bucket lists (each already sorted by key, asserted
     * below): same-key buckets are collected then reduced into one, and buckets whose
     * reduced doc count falls below {@code minDocCount} are dropped.
     */
    private List<B> reduceBuckets(List<InternalAggregation> aggregations, ReduceContext reduceContext) {

        final PriorityQueue<IteratorAndCurrent<B>> pq = new PriorityQueue<IteratorAndCurrent<B>>(aggregations.size()) {
            @Override
            protected boolean lessThan(IteratorAndCurrent<B> a, IteratorAndCurrent<B> b) {
                return a.current.key < b.current.key;
            }
        };
        for (InternalAggregation aggregation : aggregations) {
            InternalHistogram<B> histogram = (InternalHistogram) aggregation;
            if (histogram.buckets.isEmpty() == false) {
                pq.add(new IteratorAndCurrent<>(histogram.buckets.iterator()));
            }
        }

        List<B> reducedBuckets = new ArrayList<>();
        if (pq.size() > 0) {
            // list of buckets coming from different shards that have the same key
            List<B> currentBuckets = new ArrayList<>();
            long key = pq.top().current.key;

            do {
                final IteratorAndCurrent<B> top = pq.top();

                if (top.current.key != key) {
                    // the key changes, reduce what we already buffered and reset the buffer for current buckets
                    final B reduced = currentBuckets.get(0).reduce(currentBuckets, reduceContext);
                    if (reduced.getDocCount() >= minDocCount) {
                        reducedBuckets.add(reduced);
                    }
                    currentBuckets.clear();
                    key = top.current.key;
                }

                currentBuckets.add(top.current);

                if (top.iterator.hasNext()) {
                    final B next = top.iterator.next();
                    assert next.key > top.current.key : "shards must return data sorted by key";
                    top.current = next;
                    pq.updateTop();
                } else {
                    pq.pop();
                }
            } while (pq.size() > 0);

            // flush the final same-key group left in the buffer
            if (currentBuckets.isEmpty() == false) {
                final B reduced = currentBuckets.get(0).reduce(currentBuckets, reduceContext);
                if (reduced.getDocCount() >= minDocCount) {
                    reducedBuckets.add(reduced);
                }
            }
        }

        return reducedBuckets;
    }

    /**
     * Inserts zero-doc-count buckets in place so the histogram is gap-free:
     * before the data (down to extended_bounds.min), between existing buckets,
     * and after the data (up to extended_bounds.max). Assumes {@code list} is
     * sorted by key ascending (the output of {@link #reduceBuckets}).
     */
    private void addEmptyBuckets(List<B> list) {
        B lastBucket = null;
        ExtendedBounds bounds = emptyBucketInfo.bounds;
        ListIterator<B> iter = list.listIterator();

        // first adding all the empty buckets *before* the actual data (based on the extended_bounds.min the user requested)
        if (bounds != null) {
            B firstBucket = iter.hasNext() ? list.get(iter.nextIndex()) : null;
            if (firstBucket == null) {
                // no data at all: fill the entire [min, max] range with empty buckets
                if (bounds.min != null && bounds.max != null) {
                    long key = bounds.min;
                    long max = bounds.max;
                    while (key <= max) {
                        iter.add(getFactory().createBucket(key, 0, emptyBucketInfo.subAggregations, keyed, formatter));
                        key = emptyBucketInfo.rounding.nextRoundingValue(key);
                    }
                }
            } else {
                if (bounds.min != null) {
                    long key = bounds.min;
                    if (key < firstBucket.key) {
                        while (key < firstBucket.key) {
                            iter.add(getFactory().createBucket(key, 0, emptyBucketInfo.subAggregations, keyed, formatter));
                            key = emptyBucketInfo.rounding.nextRoundingValue(key);
                        }
                    }
                }
            }
        }

        // now adding the empty buckets within the actual data,
        // e.g. if the data series is [1,2,3,7] there're 3 empty buckets that will be created for 4,5,6
        while (iter.hasNext()) {
            B nextBucket = list.get(iter.nextIndex());
            if (lastBucket != null) {
                long key = emptyBucketInfo.rounding.nextRoundingValue(lastBucket.key);
                while (key < nextBucket.key) {
                    iter.add(getFactory().createBucket(key, 0, emptyBucketInfo.subAggregations, keyed, formatter));
                    key = emptyBucketInfo.rounding.nextRoundingValue(key);
                }
                assert key == nextBucket.key;
            }
            lastBucket = iter.next();
        }

        // finally, adding the empty buckets *after* the actual data (based on the extended_bounds.max requested by the user)
        if (bounds != null && lastBucket != null && bounds.max != null && bounds.max > lastBucket.key) {
            long key = emptyBucketInfo.rounding.nextRoundingValue(lastBucket.key);
            long max = bounds.max;
            while (key <= max) {
                iter.add(getFactory().createBucket(key, 0, emptyBucketInfo.subAggregations, keyed, formatter));
                key = emptyBucketInfo.rounding.nextRoundingValue(key);
            }
        }
    }

    /**
     * Full reduce: merge shard results, optionally fill gaps with empty buckets,
     * then apply the requested ordering before building the final aggregation.
     */
    @Override
    public InternalAggregation reduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
        List<B> reducedBuckets = reduceBuckets(aggregations, reduceContext);

        // adding empty buckets if needed
        if (minDocCount == 0) {
            addEmptyBuckets(reducedBuckets);
        }

        if (order == InternalOrder.KEY_ASC) {
            // nothing to do, data are already sorted since shards return
            // sorted buckets and the merge-sort performed by reduceBuckets
            // maintains order
        } else if (order == InternalOrder.KEY_DESC) {
            // we just need to reverse here...
            reducedBuckets = Lists.reverse(reducedBuckets);
        } else {
            // sorted by sub-aggregation, need to fall back to a costly n*log(n) sort
            CollectionUtil.introSort(reducedBuckets, order.comparator());
        }

        return getFactory().create(getName(), reducedBuckets, order, minDocCount, emptyBucketInfo, formatter, keyed, getMetaData());
    }

    // NOTE: field order must mirror doWriteTo exactly:
    // factory type, order, minDocCount, [emptyBucketInfo], formatter, keyed, bucket count, buckets.
    @Override
    protected void doReadFrom(StreamInput in) throws IOException {
        this.factory = resolveFactory(in.readString());
        order = InternalOrder.Streams.readOrder(in);
        minDocCount = in.readVLong();
        if (minDocCount == 0) {
            emptyBucketInfo = EmptyBucketInfo.readFrom(in);
        }
        formatter = ValueFormatterStreams.readOptional(in);
        keyed = in.readBoolean();
        int size = in.readVInt();
        List<B> buckets = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            B bucket = getFactory().createEmptyBucket(keyed, formatter);
            bucket.readFrom(in);
            buckets.add(bucket);
        }
        this.buckets = buckets;
    }

    // Maps the serialized factory type name back to the concrete factory implementation.
    @SuppressWarnings("unchecked")
    private static <B extends InternalHistogram.Bucket> Factory<B> resolveFactory(String factoryType) {
        if (factoryType.equals(InternalDateHistogram.TYPE.name())) {
            return (Factory<B>) new InternalDateHistogram.Factory();
        } else if (factoryType.equals(TYPE.name())) {
            return new Factory<>();
        } else {
            throw new ElasticsearchIllegalStateException("Invalid histogram factory type [" + factoryType + "]");
        }
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        out.writeString(factory.type());
        InternalOrder.Streams.writeOrder(order, out);
        out.writeVLong(minDocCount);
        if (minDocCount == 0) {
            EmptyBucketInfo.writeTo(emptyBucketInfo, out);
        }
        ValueFormatterStreams.writeOptional(formatter, out);
        out.writeBoolean(keyed);
        out.writeVInt(buckets.size());
        for (B bucket : buckets) {
            bucket.writeTo(out);
        }
    }

    // Renders buckets either as an object keyed by bucket key (keyed=true) or as an array.
    @Override
    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        if (keyed) {
            builder.startObject(CommonFields.BUCKETS);
        } else {
            builder.startArray(CommonFields.BUCKETS);
        }
        for (B bucket : buckets) {
            bucket.toXContent(builder, params);
        }
        if (keyed) {
            builder.endObject();
        } else {
            builder.endArray();
        }
        return builder;
    }
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java.intellij;
import com.facebook.buck.io.MorePaths;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.jvm.core.JavaPackageFinder;
import com.facebook.buck.rules.TargetNode;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Ordering;
import org.immutables.value.Value;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.FileVisitor;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
/**
 * Does the converting of abstract data structures to a format immediately consumable by the
 * StringTemplate-based templates employed by {@link IjProjectWriter}. This is a separate class
 * mainly for testing convenience.
 */
@VisibleForTesting
public class IjProjectTemplateDataPreparer {
  // Parameter names referenced by the StringTemplate templates for the Android facet section.
  private static final String ANDROID_MANIFEST_TEMPLATE_PARAMETER = "android_manifest";
  private static final String APK_PATH_TEMPLATE_PARAMETER = "apk_path";
  private static final String ASSETS_FOLDER_TEMPLATE_PARAMETER = "asset_folder";
  private static final String PROGUARD_CONFIG_TEMPLATE_PARAMETER = "proguard_config";
  private static final String RESOURCES_RELATIVE_PATH_TEMPLATE_PARAMETER = "res";
  private static final String EMPTY_STRING = "";

  private JavaPackageFinder javaPackageFinder;
  private IjModuleGraph moduleGraph;
  private ProjectFilesystem projectFilesystem;
  private IjSourceRootSimplifier sourceRootSimplifier;
  // Every folder path referenced by any module, plus all their ancestors; directories NOT in
  // this set are candidates for IntelliJ "exclude" folders (see createExcludes).
  private ImmutableSet<Path> referencedFolderPaths;
  // Module base paths at which a filesystem walk for one module must stop descending,
  // because the subtree belongs to a different (nested) module.
  private ImmutableSet<Path> filesystemTraversalBoundaryPaths;
  private ImmutableSet<IjModule> modulesToBeWritten;
  private ImmutableSet<IjLibrary> librariesToBeWritten;

  /**
   * Derives all the per-module and per-project lookup sets from the module graph so the
   * accessors below can answer queries without re-walking the graph.
   */
  public IjProjectTemplateDataPreparer(
      JavaPackageFinder javaPackageFinder,
      IjModuleGraph moduleGraph,
      ProjectFilesystem projectFilesystem) {
    this.javaPackageFinder = javaPackageFinder;
    this.moduleGraph = moduleGraph;
    this.projectFilesystem = projectFilesystem;
    this.sourceRootSimplifier = new IjSourceRootSimplifier(javaPackageFinder);
    this.modulesToBeWritten = createModulesToBeWritten(moduleGraph);
    this.librariesToBeWritten =
        FluentIterable.from(moduleGraph.getNodes()).filter(IjLibrary.class).toSet();
    this.filesystemTraversalBoundaryPaths =
        createFilesystemTraversalBoundaryPathSet(modulesToBeWritten);
    this.referencedFolderPaths = createReferencedFolderPathsSet(modulesToBeWritten);
  }

  // Adds path and each of its ancestors to pathSet, stopping early once an ancestor is
  // already present (everything above it was added by a previous call).
  private static void addPathAndParents(Set<Path> pathSet, Path path) {
    do {
      pathSet.add(path);
      path = path.getParent();
    } while(path != null && !pathSet.contains(path));
  }

  /** Collects every module base path and folder path, closed under taking parents. */
  public static ImmutableSet<Path> createReferencedFolderPathsSet(ImmutableSet<IjModule> modules) {
    Set<Path> pathSet = new HashSet<>();
    for (IjModule module : modules) {
      addPathAndParents(pathSet, module.getModuleBasePath());
      for (IjFolder folder : module.getFolders()) {
        addPathAndParents(pathSet, folder.getPath());
      }
    }
    return ImmutableSet.copyOf(pathSet);
  }

  /**
   * The set of directories where a walk on behalf of one module must not descend: all
   * module base paths plus the .idea config directory.
   */
  public static ImmutableSet<Path> createFilesystemTraversalBoundaryPathSet(
      ImmutableSet<IjModule> modules) {
    return FluentIterable.from(modules)
        .transform(IjModule.TO_MODULE_BASE_PATH)
        .append(IjProjectWriter.IDEA_CONFIG_DIR_PREFIX)
        .toSet();
  }

  /**
   * For every folder that wants a package prefix, picks one representative .java file
   * (if any) whose package declaration can later be looked up.
   */
  public static ImmutableSet<Path> createPackageLookupPathSet(IjModuleGraph moduleGraph) {
    ImmutableSet.Builder<Path> builder = ImmutableSet.builder();
    for (IjModule module : moduleGraph.getModuleNodes()) {
      for (IjFolder folder : module.getFolders()) {
        if (!folder.getWantsPackagePrefix()) {
          continue;
        }
        Optional<Path> firstJavaFile = FluentIterable.from(folder.getInputs())
            .filter(
                new Predicate<Path>() {
                  @Override
                  public boolean apply(Path input) {
                    return input.getFileName().toString().endsWith(".java");
                  }
                })
            .first();
        if (firstJavaFile.isPresent()) {
          builder.add(firstJavaFile.get());
        }
      }
    }
    return builder.build();
  }

  // Ensures a module rooted at the project root always exists: IntelliJ needs a root
  // module, so a synthetic, target-less one is appended when the graph lacks one.
  private static ImmutableSet<IjModule> createModulesToBeWritten(IjModuleGraph graph) {
    Path rootModuleBasePath = Paths.get("");
    boolean hasRootModule = FluentIterable.from(graph.getModuleNodes())
        .transform(IjModule.TO_MODULE_BASE_PATH)
        .contains(rootModuleBasePath);
    ImmutableSet<IjModule> supplementalModules = ImmutableSet.of();
    if (!hasRootModule) {
      supplementalModules = ImmutableSet.of(
          IjModule.builder()
              .setModuleBasePath(rootModuleBasePath)
              .setTargets(ImmutableSet.<TargetNode<?>>of())
              .build());
    }
    return FluentIterable.from(graph.getModuleNodes())
        .append(supplementalModules)
        .toSet();
  }

  /**
   * @param path path to folder.
   * @param moduleLocationBasePath path to the location of the .iml file.
   * @return a path, relative to the module .iml file location describing a folder
   * in IntelliJ format.
   */
  private static String toModuleDirRelativeString(Path path, Path moduleLocationBasePath) {
    String moduleRelativePath = moduleLocationBasePath.relativize(path).toString();
    if (moduleRelativePath.isEmpty()) {
      return "file://$MODULE_DIR$";
    } else {
      return "file://$MODULE_DIR$/" + moduleRelativePath;
    }
  }

  /**
   * @param projectRelativePath path relative to the project root.
   * @return the path as an IntelliJ $PROJECT_DIR$-anchored URL, with Unix separators.
   */
  private static String toProjectDirRelativeString(Path projectRelativePath) {
    String path = projectRelativePath.toString();
    if (path.isEmpty()) {
      return "file://$PROJECT_DIR$";
    } else {
      return "file://$PROJECT_DIR$/" + MorePaths.pathWithUnixSeparators(path);
    }
  }

  /** Immutable value describing one source folder entry in an .iml file; ordered by URL. */
  @Value.Immutable
  @BuckStyleImmutable
  abstract static class AbstractIjSourceFolder implements Comparable<IjSourceFolder> {
    public abstract String getType();
    public abstract String getUrl();
    public abstract boolean getIsTestSource();
    public abstract boolean getIsAndroidResources();
    @Nullable public abstract String getPackagePrefix();

    @Override
    public int compareTo(IjSourceFolder o) {
      if (this == o) {
        return 0;
      }
      return getUrl().compareTo(o.getUrl());
    }
  }

  /** Immutable value describing a module content root and its source folders; ordered by URL. */
  @Value.Immutable
  @BuckStyleImmutable
  abstract static class AbstractContentRoot implements Comparable<ContentRoot> {
    public abstract String getUrl();
    public abstract ImmutableSortedSet<IjSourceFolder> getFolders();

    @Override
    public int compareTo(ContentRoot o) {
      if (this == o) {
        return 0;
      }
      return getUrl().compareTo(o.getUrl());
    }
  }

  public ImmutableSet<IjModule> getModulesToBeWritten() {
    return modulesToBeWritten;
  }

  public ImmutableSet<IjLibrary> getLibrariesToBeWritten() {
    return librariesToBeWritten;
  }

  // Simplifies the folder set (merging source roots where possible), converts folders to
  // their template representation, and wraps everything in a ContentRoot value.
  private ContentRoot createContentRoot(
      final IjModule module,
      Path contentRootPath,
      ImmutableSet<IjFolder> folders,
      final Path moduleLocationBasePath) {
    String url = toModuleDirRelativeString(contentRootPath, moduleLocationBasePath);
    ImmutableSet<IjFolder> simplifiedFolders = sourceRootSimplifier.simplify(
        SimplificationLimit.of(contentRootPath.getNameCount()),
        folders);
    ImmutableSortedSet<IjSourceFolder> sourceFolders = FluentIterable.from(simplifiedFolders)
        .transform(new IjFolderToIjSourceFolderTransform(module))
        .toSortedSet(Ordering.natural());
    return ContentRoot.builder()
        .setUrl(url)
        .setFolders(sourceFolders)
        .build();
  }

  /**
   * Walks the module's subtree and collects every directory that should be marked
   * "excluded" in IntelliJ: anything not referenced by a folder of some module.
   * Nested modules and root Android resource directories are skipped wholesale.
   */
  public ImmutableSet<IjFolder> createExcludes(final IjModule module) throws IOException {
    final ImmutableSet.Builder<IjFolder> excludesBuilder = ImmutableSet.builder();
    final Path moduleBasePath = module.getModuleBasePath();
    projectFilesystem.walkRelativeFileTree(
        moduleBasePath, new FileVisitor<Path>() {
          @Override
          public FileVisitResult preVisitDirectory(
              Path dir, BasicFileAttributes attrs) throws IOException {
            // This is another module that's nested in this one. The entire subtree will be handled
            // When we create excludes for that module.
            if (filesystemTraversalBoundaryPaths.contains(dir) && !moduleBasePath.equals(dir)) {
              return FileVisitResult.SKIP_SUBTREE;
            }
            if (isRootAndroidResourceDirectory(module, dir)) {
              return FileVisitResult.SKIP_SUBTREE;
            }
            if (!referencedFolderPaths.contains(dir)) {
              excludesBuilder.add(new ExcludeFolder(dir));
              return FileVisitResult.SKIP_SUBTREE;
            }
            return FileVisitResult.CONTINUE;
          }

          @Override
          public FileVisitResult visitFile(
              Path file, BasicFileAttributes attrs) throws IOException {
            return FileVisitResult.CONTINUE;
          }

          @Override
          public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
            // Best-effort: unreadable entries simply don't contribute excludes.
            return FileVisitResult.CONTINUE;
          }

          @Override
          public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
            return FileVisitResult.CONTINUE;
          }
        });
    return excludesBuilder.build();
  }

  // True when dir is one of the module's declared Android resource directories.
  private boolean isRootAndroidResourceDirectory(IjModule module, Path dir) {
    if (!module.getAndroidFacet().isPresent()) {
      return false;
    }
    for (Path resourcePath : module.getAndroidFacet().get().getResourcePaths()) {
      if (dir.equals(resourcePath)) {
        return true;
      }
    }
    return false;
  }

  /** Builds the single content root for a module: its folders plus computed excludes. */
  public ContentRoot getContentRoot(IjModule module) throws IOException {
    Path moduleBasePath = module.getModuleBasePath();
    Path moduleLocation = module.getModuleImlFilePath();
    final Path moduleLocationBasePath =
        (moduleLocation.getParent() == null) ? Paths.get("") : moduleLocation.getParent();
    ImmutableSet<IjFolder> sourcesAndExcludes = FluentIterable.from(module.getFolders())
        .append(createExcludes(module))
        .toSet();
    return createContentRoot(module, moduleBasePath, sourcesAndExcludes, moduleLocationBasePath);
  }

  public ImmutableSet<IjSourceFolder> getGeneratedSourceFolders(final IjModule module) {
    return FluentIterable.from(module.getGeneratedSourceCodeFolders())
        .transform(new IjFolderToIjSourceFolderTransform(module))
        .toSet();
  }

  /** Converts the module's graph dependencies into ordered .iml dependency entries. */
  public ImmutableSet<DependencyEntry> getDependencies(IjModule module) {
    ImmutableMap<IjProjectElement, IjModuleGraph.DependencyType> deps =
        moduleGraph.getDepsFor(module);
    IjDependencyListBuilder dependencyListBuilder = new IjDependencyListBuilder();
    for (Map.Entry<IjProjectElement, IjModuleGraph.DependencyType> entry : deps.entrySet()) {
      IjProjectElement element = entry.getKey();
      IjModuleGraph.DependencyType dependencyType = entry.getValue();
      element.addAsDependency(dependencyType, dependencyListBuilder);
    }
    return dependencyListBuilder.build();
  }

  /** Immutable value describing one entry in modules.xml; ordered by file path. */
  @Value.Immutable
  @BuckStyleImmutable
  abstract static class AbstractModuleIndexEntry implements Comparable<ModuleIndexEntry> {
    public abstract String getFileUrl();
    public abstract Path getFilePath();
    @Nullable public abstract String getGroup();

    @Override
    public int compareTo(ModuleIndexEntry o) {
      if (this == o) {
        return 0;
      }
      return getFilePath().compareTo(o.getFilePath());
    }
  }

  public ImmutableSortedSet<ModuleIndexEntry> getModuleIndexEntries() {
    return FluentIterable.from(modulesToBeWritten)
        .filter(IjModule.class)
        .transform(
            new Function<IjModule, ModuleIndexEntry>() {
              @Override
              public ModuleIndexEntry apply(IjModule module) {
                Path moduleOutputFilePath = module.getModuleImlFilePath();
                String fileUrl = toProjectDirRelativeString(moduleOutputFilePath);
                // The root project module cannot belong to any group.
                String group = (module.getModuleBasePath().toString().isEmpty()) ? null : "modules";
                return ModuleIndexEntry.builder()
                    .setFileUrl(fileUrl)
                    .setFilePath(moduleOutputFilePath)
                    .setGroup(group)
                    .build();
              }
            })
        .toSortedSet(Ordering.natural());
  }

  /**
   * Builds the map of template parameters for a module's Android facet section.
   * Always contains "enabled"; the remaining keys are only populated when the
   * module actually has an Android facet.
   */
  public Map<String, Object> getAndroidProperties(IjModule module) throws IOException {
    Map<String, Object> androidProperties = new HashMap<>();
    Optional<IjModuleAndroidFacet> androidFacetOptional = module.getAndroidFacet();
    boolean isAndroidFacetPresent = androidFacetOptional.isPresent();
    androidProperties.put("enabled", isAndroidFacetPresent);
    if (!isAndroidFacetPresent) {
      return androidProperties;
    }
    IjModuleAndroidFacet androidFacet = androidFacetOptional.get();
    androidProperties.put("is_android_library_project", androidFacet.isAndroidLibrary());
    Path basePath = module.getModuleBasePath();
    addAndroidConstants(androidProperties);
    addAndroidApkPaths(androidProperties, module, basePath, androidFacet);
    addAndroidAssetPaths(androidProperties, androidFacet);
    addAndroidGenPath(androidProperties, basePath);
    addAndroidManifestPath(androidProperties, basePath, androidFacet);
    addAndroidProguardPath(androidProperties, androidFacet);
    addAndroidResourcePaths(androidProperties, module, androidFacet);
    return androidProperties;
  }

  // Values that are currently the same for every Android module.
  private void addAndroidConstants(Map<String, Object> androidProperties) {
    androidProperties.put("enable_sources_autogeneration", true);
    androidProperties.put("run_proguard", false);
    // TODO(alsutton): Fix keystore detection
    androidProperties.put("keystore", "");
    // TODO(alsutton): See if we need nativeLibs and libs
    androidProperties.put("libs_path", "/libs");
  }

  private void addAndroidApkPaths(
      Map<String, Object> androidProperties,
      IjModule module,
      Path moduleBasePath,
      IjModuleAndroidFacet androidFacet) {
    if (androidFacet.isAndroidLibrary()) {
      // Libraries produce no APK.
      androidProperties.put(APK_PATH_TEMPLATE_PARAMETER, EMPTY_STRING);
      return;
    }
    // relativize(Paths.get("")) yields the "../.." chain back up to the project root, so the
    // final path is rooted at the APK output dir and mirrors the module's location under it.
    Path apkPath = moduleBasePath
        .relativize(Paths.get(""))
        .resolve(Project.getAndroidApkDir(projectFilesystem))
        .resolve(Paths.get("").relativize(moduleBasePath))
        .resolve(module.getName() + ".apk");
    androidProperties.put(APK_PATH_TEMPLATE_PARAMETER, apkPath);
  }

  private void addAndroidAssetPaths(
      Map<String, Object> androidProperties,
      IjModuleAndroidFacet androidFacet) {
    ImmutableSet<Path> assetPaths = androidFacet.getAssetPaths();
    if (assetPaths.isEmpty()) {
      androidProperties.put(ASSETS_FOLDER_TEMPLATE_PARAMETER, "/assets");
    } else {
      // Multiple asset dirs are joined as ";/a;/b" per the IntelliJ facet format.
      androidProperties.put(
          ASSETS_FOLDER_TEMPLATE_PARAMETER,
          "/" + Joiner.on(";/").join(androidFacet.getAssetPaths()));
    }
  }

  private void addAndroidGenPath(
      Map<String, Object> androidProperties,
      Path moduleBasePath) {
    // Same up-and-back-down construction as addAndroidApkPaths, anchored at the gen dir.
    Path genPath = moduleBasePath
        .relativize(Paths.get(""))
        .resolve(Project.getAndroidGenDir(projectFilesystem))
        .resolve(Paths.get("").relativize(moduleBasePath))
        .resolve("gen");
    androidProperties.put(
        "module_gen_path",
        "/" + MorePaths.pathWithUnixSeparators(genPath));
  }

  private void addAndroidManifestPath(
      Map<String, Object> androidProperties,
      Path moduleBasePath,
      IjModuleAndroidFacet androidFacet) {
    Optional<Path> androidManifestPath = androidFacet.getManifestPath();
    Path manifestPath;
    if (androidManifestPath.isPresent()) {
      // NOTE(review): Path.relativize throws IllegalArgumentException when exactly one of
      // the two paths is absolute; here the (likely project-relative) manifest path is
      // relativized against moduleBasePath.toAbsolutePath(). Confirm both sides are of the
      // same kind, or that callers guarantee an absolute manifest path.
      manifestPath = androidManifestPath.get()
          .relativize(moduleBasePath.toAbsolutePath())
          .resolve(androidManifestPath.get().getFileName());
    } else {
      // Fall back to the conventional location under android_res.
      manifestPath = moduleBasePath
          .relativize(
              Paths
                  .get("")
                  .resolve("android_res/AndroidManifest.xml"));
    }
    androidProperties.put(ANDROID_MANIFEST_TEMPLATE_PARAMETER, "/" + manifestPath);
  }

  private void addAndroidProguardPath(
      Map<String, Object> androidProperties,
      IjModuleAndroidFacet androidFacet) {
    Optional<Path> proguardPath = androidFacet.getProguardConfigPath();
    if (proguardPath.isPresent()) {
      androidProperties.put(PROGUARD_CONFIG_TEMPLATE_PARAMETER, proguardPath.get());
    } else {
      androidProperties.put(PROGUARD_CONFIG_TEMPLATE_PARAMETER, EMPTY_STRING);
    }
  }

  private void addAndroidResourcePaths(
      Map<String, Object> androidProperties,
      IjModule module,
      IjModuleAndroidFacet androidFacet) {
    ImmutableSet<Path> resourcePaths = androidFacet.getResourcePaths();
    if (resourcePaths.isEmpty()) {
      androidProperties.put(RESOURCES_RELATIVE_PATH_TEMPLATE_PARAMETER, EMPTY_STRING);
    } else {
      Set<Path> relativeResourcePaths = new HashSet<>(resourcePaths.size());
      Path moduleBase = module.getModuleBasePath();
      for (Path resourcePath : resourcePaths) {
        relativeResourcePaths.add(moduleBase.relativize(resourcePath));
      }
      // Same ";/"-joined format as the assets folder parameter.
      androidProperties.put(
          RESOURCES_RELATIVE_PATH_TEMPLATE_PARAMETER,
          "/" + Joiner.on(";/").join(relativeResourcePaths));
    }
  }

  /**
   * Converts an IjFolder into its IjSourceFolder template representation for a given
   * module, resolving the package prefix either from the Android facet's package name
   * or from a representative source file's package declaration.
   */
  private class IjFolderToIjSourceFolderTransform implements Function<IjFolder, IjSourceFolder> {
    private Path moduleBasePath;
    private Optional<IjModuleAndroidFacet> androidFacet;

    IjFolderToIjSourceFolderTransform(IjModule module) {
      moduleBasePath = module.getModuleBasePath();
      androidFacet = module.getAndroidFacet();
    }

    @Override
    public IjSourceFolder apply(IjFolder input) {
      String packagePrefix;
      if (input instanceof AndroidResourceFolder &&
          androidFacet.isPresent() &&
          androidFacet.get().getPackageName().isPresent()) {
        // Android resource folders take their prefix from the facet's declared package.
        packagePrefix = androidFacet.get().getPackageName().get();
      } else {
        packagePrefix = getPackagePrefix(input);
      }
      return createSourceFolder(input, moduleBasePath, packagePrefix);
    }

    private IjSourceFolder createSourceFolder(
        IjFolder folder,
        Path moduleLocationBasePath,
        String packagePrefix) {
      return IjSourceFolder.builder()
          .setType(folder.getIjName())
          .setUrl(toModuleDirRelativeString(folder.getPath(), moduleLocationBasePath))
          .setIsTestSource(folder instanceof TestFolder)
          .setIsAndroidResources(folder instanceof AndroidResourceFolder)
          .setPackagePrefix(packagePrefix)
          .build();
    }

    // Returns the Java package prefix for the folder, or null when no prefix is wanted
    // or the package finder reports an empty one.
    @Nullable
    private String getPackagePrefix(IjFolder folder) {
      if (!folder.getWantsPackagePrefix()) {
        return null;
      }
      Path fileToLookupPackageIn;
      if (!folder.getInputs().isEmpty() &&
          folder.getInputs().first().getParent().equals(folder.getPath())) {
        // A direct child of the folder exists; use it for the package lookup.
        fileToLookupPackageIn = folder.getInputs().first();
      } else {
        // Synthesize a path inside the folder so the finder derives the package from it.
        fileToLookupPackageIn = folder.getPath().resolve("notfound");
      }
      String packagePrefix = javaPackageFinder.findJavaPackage(fileToLookupPackageIn);
      if (packagePrefix.isEmpty()) {
        // It doesn't matter either way, but an empty prefix looks confusing.
        return null;
      }
      return packagePrefix;
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pinterest.secor.tools;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.net.HostAndPort;
import com.pinterest.secor.common.KafkaClient;
import com.pinterest.secor.common.SecorConfig;
import com.pinterest.secor.common.TopicPartition;
import com.pinterest.secor.common.ZookeeperConnector;
import com.pinterest.secor.message.Message;
import com.pinterest.secor.parser.MessageParser;
import com.pinterest.secor.parser.TimestampedMessageParser;
import com.pinterest.secor.util.ReflectionUtil;
import com.timgroup.statsd.NonBlockingStatsDClient;
import net.minidev.json.JSONArray;
import net.minidev.json.JSONObject;
import net.minidev.json.JSONValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
/**
 * Progress monitor exports offset lags per topic partition.
 *
 * @author Pawel Garbacki (pawel@pinterest.com)
 */
public class ProgressMonitor {
    private static final Logger LOG = LoggerFactory.getLogger(ProgressMonitor.class);
    private static final String PERIOD = ".";

    private SecorConfig mConfig;
    private ZookeeperConnector mZookeeperConnector;
    private KafkaClient mKafkaClient;
    private MessageParser mMessageParser;
    // Prefix prepended to every exported metric name; defaults to "secor".
    private String mPrefix;

    /**
     * @param config Secor configuration; also used to instantiate the message parser
     *               reflectively.
     * @throws Exception if the parser class cannot be created.
     */
    public ProgressMonitor(SecorConfig config) throws Exception {
        mConfig = config;
        mZookeeperConnector = new ZookeeperConnector(mConfig);
        mKafkaClient = new KafkaClient(mConfig);
        mMessageParser = (MessageParser) ReflectionUtil.createMessageParser(
                mConfig.getMessageParserClass(), mConfig);
        mPrefix = mConfig.getMonitoringPrefix();
        if (Strings.isNullOrEmpty(mPrefix)) {
            mPrefix = "secor";
        }
    }

    /**
     * Sends a request to the configured OpenTSDB endpoint (POST when a body is given)
     * and verifies that no data points were rejected.
     *
     * Fixes over the previous version: the response streams are now closed and the
     * connection is always disconnected (previously both leaked on the success path);
     * the payload is encoded once as UTF-8 so Content-Length matches the bytes actually
     * sent (DataOutputStream.writeBytes truncated each char to its low byte); and an
     * unparseable response raises a descriptive RuntimeException instead of an NPE.
     *
     * @param body JSON payload to send, or null for a body-less request.
     * @throws IOException on connection failure.
     * @throws RuntimeException if OpenTSDB reports failed data points or the response
     *         cannot be parsed.
     */
    private void makeRequest(String body) throws IOException {
        URL url = new URL("http://" + mConfig.getTsdbHostport() + "/api/put?details");
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        try {
            connection.setRequestProperty("Content-Type", "application/json");
            connection.setRequestProperty("Accepts", "application/json");
            connection.setRequestProperty("Accept", "*/*");
            byte[] payload = null;
            if (body != null) {
                payload = body.getBytes(StandardCharsets.UTF_8);
                connection.setRequestMethod("POST");
                connection.setRequestProperty("Content-Length",
                        Integer.toString(payload.length));
            }
            connection.setUseCaches(false);
            connection.setDoInput(true);
            connection.setDoOutput(true);
            if (payload != null) {
                // Send request.
                try (DataOutputStream dataOutputStream = new DataOutputStream(
                        connection.getOutputStream())) {
                    dataOutputStream.write(payload);
                    dataOutputStream.flush();
                }
            }
            // Get Response. OpenTSDB's "?details" response reports rejected points
            // under the "failed" key.
            try (InputStream inputStream = connection.getInputStream();
                 BufferedReader reader = new BufferedReader(
                         new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
                Map response = (Map) JSONValue.parse(reader);
                if (response == null || !Integer.valueOf(0).equals(response.get("failed"))) {
                    throw new RuntimeException("url " + url + " with body " + body + " failed " +
                            (response == null
                                    ? "with unparseable response"
                                    : JSONObject.toJSONString(response)));
                }
            }
        } finally {
            connection.disconnect();
        }
    }

    private void exportToTsdb(Stat stat) throws IOException {
        LOG.info("exporting metric to tsdb {}", stat);
        makeRequest(stat.toString());
    }

    /**
     * Computes the per-partition lag stats and exports them to whichever backends
     * (OpenTSDB and/or statsD) are configured.
     */
    public void exportStats() throws Exception {
        List<Stat> stats = getStats();
        LOG.info("Stats: {}", JSONArray.toJSONString(stats));
        // if there is a valid openTSDB port configured export to openTSDB
        if (mConfig.getTsdbHostport() != null && !mConfig.getTsdbHostport().isEmpty()) {
            for (Stat stat : stats) {
                exportToTsdb(stat);
            }
        }
        // if there is a valid statsD port configured export to statsD
        if (mConfig.getStatsDHostPort() != null && !mConfig.getStatsDHostPort().isEmpty()) {
            exportToStatsD(stats);
        }
    }

    /**
     * Helper to publish stats to statsD client. Gauges are named
     * {@code <metric>.<topic>.<partition>} under the Kafka group prefix.
     */
    private void exportToStatsD(List<Stat> stats) {
        HostAndPort hostPort = HostAndPort.fromString(mConfig.getStatsDHostPort());
        // group stats by kafka group
        NonBlockingStatsDClient client = new NonBlockingStatsDClient(mConfig.getKafkaGroup(),
                hostPort.getHostText(), hostPort.getPort());
        for (Stat stat : stats) {
            @SuppressWarnings("unchecked")
            Map<String, String> tags = (Map<String, String>) stat.get(Stat.STAT_KEYS.TAGS.getName());
            String aspect = new StringBuilder((String) stat.get(Stat.STAT_KEYS.METRIC.getName()))
                    .append(PERIOD)
                    .append(tags.get(Stat.STAT_KEYS.TOPIC.getName()))
                    .append(PERIOD)
                    .append(tags.get(Stat.STAT_KEYS.PARTITION.getName()))
                    .toString();
            client.recordGaugeValue(aspect, Long.parseLong((String) stat.get(Stat.STAT_KEYS.VALUE.getName())));
        }
    }

    /**
     * Builds the lag stats (offset lag and timestamp lag in seconds) for every
     * committed topic/partition that passes the monitoring topic filters.
     */
    private List<Stat> getStats() throws Exception {
        List<String> topics = mZookeeperConnector.getCommittedOffsetTopics();
        List<Stat> stats = Lists.newArrayList();
        for (String topic : topics) {
            if (topic.matches(mConfig.getMonitoringBlacklistTopics()) ||
                    !topic.matches(mConfig.getKafkaTopicFilter())) {
                LOG.info("skipping topic {}", topic);
                continue;
            }
            List<Integer> partitions = mZookeeperConnector.getCommittedOffsetPartitions(topic);
            for (Integer partition : partitions) {
                TopicPartition topicPartition = new TopicPartition(topic, partition);
                Message committedMessage = mKafkaClient.getCommittedMessage(topicPartition);
                long committedOffset = -1;
                long committedTimestampMillis = -1;
                if (committedMessage == null) {
                    LOG.warn("no committed message found in topic {} partition {}", topic, partition);
                } else {
                    committedOffset = committedMessage.getOffset();
                    committedTimestampMillis = getTimestamp(committedMessage);
                }
                Message lastMessage = mKafkaClient.getLastMessage(topicPartition);
                if (lastMessage == null) {
                    LOG.warn("no message found in topic {} partition {}", topic, partition);
                } else {
                    long lastOffset = lastMessage.getOffset();
                    long lastTimestampMillis = getTimestamp(lastMessage);
                    assert committedOffset <= lastOffset : Long.toString(committedOffset) + " <= " +
                            lastOffset;
                    long offsetLag = lastOffset - committedOffset;
                    long timestampMillisLag = lastTimestampMillis - committedTimestampMillis;
                    Map<String, String> tags = ImmutableMap.of(
                            Stat.STAT_KEYS.TOPIC.getName(), topic,
                            Stat.STAT_KEYS.PARTITION.getName(), Integer.toString(partition),
                            Stat.STAT_KEYS.GROUP.getName(), mConfig.getKafkaGroup()
                    );
                    // OpenTSDB expects timestamps in epoch seconds.
                    long timestamp = System.currentTimeMillis() / 1000;
                    stats.add(Stat.createInstance(metricName("lag.offsets"), tags, Long.toString(offsetLag), timestamp));
                    stats.add(Stat.createInstance(metricName("lag.seconds"), tags, Long.toString(timestampMillisLag / 1000), timestamp));
                    LOG.debug("topic {} partition {} committed offset {} last offset {} committed timestamp {} last timestamp {}",
                            topic, partition, committedOffset, lastOffset,
                            (committedTimestampMillis / 1000), (lastTimestampMillis / 1000));
                }
            }
        }
        return stats;
    }

    // Prepends the configured monitoring prefix, e.g. "secor.lag.offsets".
    private String metricName(String key) {
        return Joiner.on(".").join(mPrefix, key);
    }

    // Returns the parser-extracted timestamp in millis, or -1 when the configured
    // parser does not expose timestamps.
    private long getTimestamp(Message message) throws Exception {
        if (mMessageParser instanceof TimestampedMessageParser) {
            return ((TimestampedMessageParser) mMessageParser).extractTimestampMillis(message);
        } else {
            return -1;
        }
    }

    /**
     *
     * JSON hash map extension to store statistics
     *
     */
    private static class Stat extends JSONObject {
        // definition of all the stat keys
        public enum STAT_KEYS {
            METRIC("metric"),
            TAGS("tags"),
            VALUE("value"),
            TIMESTAMP("timestamp"),
            TOPIC("topic"),
            PARTITION("partition"),
            GROUP("group");

            STAT_KEYS(String name) {
                this.mName = name;
            }

            private final String mName;

            public String getName() {
                return this.mName;
            }
        }

        /** Factory for a stat in the OpenTSDB data-point JSON shape. */
        public static Stat createInstance(String metric, Map<String, String> tags, String value, long timestamp) {
            return new Stat(ImmutableMap.of(
                    STAT_KEYS.METRIC.getName(), metric,
                    STAT_KEYS.TAGS.getName(), tags,
                    STAT_KEYS.VALUE.getName(), value,
                    STAT_KEYS.TIMESTAMP.getName(), timestamp
            ));
        }

        public Stat(Map<String, Object> map) {
            super(map);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.client;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Predicate;
import com.google.common.collect.Ordering;
import com.google.inject.Inject;
import org.apache.druid.client.selector.QueryableDruidServer;
import org.apache.druid.client.selector.ServerSelector;
import org.apache.druid.client.selector.TierSelectorStrategy;
import org.apache.druid.guice.ManageLifecycle;
import org.apache.druid.guice.annotations.EscalatedClient;
import org.apache.druid.guice.annotations.Smile;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.lifecycle.LifecycleStart;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryToolChestWarehouse;
import org.apache.druid.query.QueryWatcher;
import org.apache.druid.query.TableDataSource;
import org.apache.druid.query.planning.DataSourceAnalysis;
import org.apache.druid.server.coordination.DruidServerMetadata;
import org.apache.druid.server.coordination.ServerType;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.SegmentId;
import org.apache.druid.timeline.VersionedIntervalTimeline;
import org.apache.druid.timeline.partition.PartitionChunk;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
*
*/
@ManageLifecycle
public class BrokerServerView implements TimelineServerView
{
private static final Logger log = new Logger(BrokerServerView.class);
private final Object lock = new Object();
private final ConcurrentMap<String, QueryableDruidServer> clients;
private final Map<SegmentId, ServerSelector> selectors;
private final Map<String, VersionedIntervalTimeline<String, ServerSelector>> timelines;
private final ConcurrentMap<TimelineCallback, Executor> timelineCallbacks = new ConcurrentHashMap<>();
private final QueryToolChestWarehouse warehouse;
private final QueryWatcher queryWatcher;
private final ObjectMapper smileMapper;
private final HttpClient httpClient;
private final FilteredServerInventoryView baseView;
private final TierSelectorStrategy tierSelectorStrategy;
private final ServiceEmitter emitter;
private final BrokerSegmentWatcherConfig segmentWatcherConfig;
private final Predicate<Pair<DruidServerMetadata, DataSegment>> segmentFilter;
private final CountDownLatch initialized = new CountDownLatch(1);
@Inject
public BrokerServerView(
    final QueryToolChestWarehouse warehouse,
    final QueryWatcher queryWatcher,
    final @Smile ObjectMapper smileMapper,
    final @EscalatedClient HttpClient httpClient,
    final FilteredServerInventoryView baseView,
    final TierSelectorStrategy tierSelectorStrategy,
    final ServiceEmitter emitter,
    final BrokerSegmentWatcherConfig segmentWatcherConfig
)
{
  this.warehouse = warehouse;
  this.queryWatcher = queryWatcher;
  this.smileMapper = smileMapper;
  this.httpClient = httpClient;
  this.baseView = baseView;
  this.tierSelectorStrategy = tierSelectorStrategy;
  this.emitter = emitter;
  this.segmentWatcherConfig = segmentWatcherConfig;
  this.clients = new ConcurrentHashMap<>();
  this.selectors = new HashMap<>();
  this.timelines = new HashMap<>();

  // Accept a (server, segment) pair only if it matches both the watched-tiers and
  // watched-datasources allowlists; a null allowlist means "watch everything".
  this.segmentFilter = (Pair<DruidServerMetadata, DataSegment> metadataAndSegment) -> {
    if (segmentWatcherConfig.getWatchedTiers() != null
        && !segmentWatcherConfig.getWatchedTiers().contains(metadataAndSegment.lhs.getTier())) {
      return false;
    }
    if (segmentWatcherConfig.getWatchedDataSources() != null
        && !segmentWatcherConfig.getWatchedDataSources().contains(metadataAndSegment.rhs.getDataSource())) {
      return false;
    }
    return true;
  };

  // All inventory callbacks run on a single thread, so segment add/remove handling
  // is serialized and does not race with itself.
  ExecutorService exec = Execs.singleThreaded("BrokerServerView-%s");
  baseView.registerSegmentCallback(
      exec,
      new ServerView.SegmentCallback()
      {
        @Override
        public ServerView.CallbackAction segmentAdded(DruidServerMetadata server, DataSegment segment)
        {
          serverAddedSegment(server, segment);
          return ServerView.CallbackAction.CONTINUE;
        }

        @Override
        public ServerView.CallbackAction segmentRemoved(final DruidServerMetadata server, DataSegment segment)
        {
          serverRemovedSegment(server, segment);
          return ServerView.CallbackAction.CONTINUE;
        }

        @Override
        public CallbackAction segmentViewInitialized()
        {
          // Release waiters in awaitInitialization() and notify timeline listeners.
          initialized.countDown();
          runTimelineCallbacks(TimelineCallback::timelineInitialized);
          return ServerView.CallbackAction.CONTINUE;
        }
      },
      segmentFilter
  );

  baseView.registerServerRemovedCallback(
      exec,
      server -> {
        removeServer(server);
        return CallbackAction.CONTINUE;
      }
  );
}
@LifecycleStart
public void start() throws InterruptedException
{
  // Unless configured to block, startup completes immediately and the view
  // fills in asynchronously via the inventory callbacks.
  if (!segmentWatcherConfig.isAwaitInitializationOnStart()) {
    return;
  }
  final long startNanos = System.nanoTime();
  log.debug("%s waiting for initialization.", getClass().getSimpleName());
  awaitInitialization();
  log.info("%s initialized in [%,d] ms.", getClass().getSimpleName(), (System.nanoTime() - startNanos) / 1000000);
}
public boolean isInitialized()
{
  // The latch is counted down exactly once, when the segment view delivers its
  // initial snapshot; a zero count therefore means "ready".
  final long pending = initialized.getCount();
  return pending == 0;
}
/**
 * Blocks the calling thread until the initial segment inventory snapshot has been processed
 * (i.e. until {@link #isInitialized()} would return true).
 *
 * @throws InterruptedException if the calling thread is interrupted while waiting
 */
public void awaitInitialization() throws InterruptedException
{
initialized.await();
}
/**
 * Wraps the given server with a direct query client and registers it in the client map,
 * replacing (with a warning) any entry already present under the same server name.
 */
private QueryableDruidServer addServer(DruidServer server)
{
  final QueryableDruidServer queryable = new QueryableDruidServer<>(server, makeDirectClient(server));
  final QueryableDruidServer previous = clients.put(server.getName(), queryable);
  if (previous != null) {
    log.warn("QueryRunner for server[%s] already exists!? Well it's getting replaced", server);
  }
  return queryable;
}
/**
 * Builds a client that issues queries directly to the given server over HTTP,
 * using this view's shared query toolchain (warehouse, watcher, mapper, http client, emitter).
 */
private DirectDruidClient makeDirectClient(DruidServer server)
{
  final DirectDruidClient directClient = new DirectDruidClient(
      warehouse,
      queryWatcher,
      smileMapper,
      httpClient,
      server.getScheme(),
      server.getHost(),
      emitter
  );
  return directClient;
}
/**
 * Drops a server from this view: first detaches every segment it was serving
 * (via {@link #serverRemovedSegment}), then removes its query client.
 *
 * @return the removed client wrapper, or null if the server was unknown
 */
private QueryableDruidServer removeServer(DruidServer server)
{
  final DruidServerMetadata metadata = server.getMetadata();
  for (DataSegment dataSegment : server.iterateAllSegments()) {
    serverRemovedSegment(metadata, dataSegment);
  }
  return clients.remove(server.getName());
}
/**
 * Handles a segment appearing on a server: attaches the server to the segment's
 * {@code ServerSelector}, creating the selector and the per-datasource timeline entry on first
 * sight of the segment. Segments announced by brokers are not added to the timeline (to avoid
 * broker-to-broker query loops), but timeline callbacks still fire for them.
 *
 * @param server  metadata of the server announcing the segment
 * @param segment the announced segment
 */
private void serverAddedSegment(final DruidServerMetadata server, final DataSegment segment)
{
  SegmentId segmentId = segment.getId();
  synchronized (lock) {
    // in theory we could probably just filter this to ensure we don't put ourselves in here, to make broker tree
    // query topologies, but for now just skip all brokers, so we don't create some sort of wild infinite query
    // loop...
    if (!server.getType().equals(ServerType.BROKER)) {
      log.debug("Adding segment[%s] for server[%s]", segment, server);
      ServerSelector selector = selectors.get(segmentId);
      if (selector == null) {
        // First time we see this segment: create its selector and register it on the
        // (lazily created) timeline for its datasource.
        selector = new ServerSelector(segment, tierSelectorStrategy);
        VersionedIntervalTimeline<String, ServerSelector> timeline = timelines.computeIfAbsent(
            segment.getDataSource(),
            dataSource -> new VersionedIntervalTimeline<>(Ordering.natural())
        );
        timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(selector));
        selectors.put(segmentId, selector);
      }
      QueryableDruidServer queryableDruidServer = clients.get(server.getName());
      if (queryableDruidServer == null) {
        // Lazily register a query client for servers we have not seen before.
        queryableDruidServer = addServer(baseView.getInventoryValue(server.getName()));
      }
      selector.addServerAndUpdateSegment(queryableDruidServer, segment);
    }
    // run the callbacks, even if the segment came from a broker, lets downstream watchers decide what to do with it
    runTimelineCallbacks(callback -> callback.segmentAdded(server, segment));
  }
}
/**
 * Handles a segment disappearing from a server: detaches the server from the segment's
 * {@code ServerSelector}; when the selector has no servers left, removes the selector and its
 * timeline entry. Broker-announced segments are never on the timeline, so for them only the
 * serverSegmentRemoved callback fires.
 *
 * @param server  metadata of the server dropping the segment
 * @param segment the dropped segment
 */
private void serverRemovedSegment(DruidServerMetadata server, DataSegment segment)
{
SegmentId segmentId = segment.getId();
final ServerSelector selector;
synchronized (lock) {
log.debug("Removing segment[%s] from server[%s].", segmentId, server);
// we don't store broker segments here, but still run the callbacks for the segment being removed from the server
// since the broker segments are not stored on the timeline, do not fire segmentRemoved event
if (server.getType().equals(ServerType.BROKER)) {
runTimelineCallbacks(callback -> callback.serverSegmentRemoved(server, segment));
return;
}
selector = selectors.get(segmentId);
if (selector == null) {
log.warn("Told to remove non-existant segment[%s]", segmentId);
return;
}
// Detach this server from the selector; fire serverSegmentRemoved only if the association existed.
QueryableDruidServer queryableDruidServer = clients.get(server.getName());
if (!selector.removeServer(queryableDruidServer)) {
log.warn(
"Asked to disassociate non-existant association between server[%s] and segment[%s]",
server,
segmentId
);
} else {
runTimelineCallbacks(callback -> callback.serverSegmentRemoved(server, segment));
}
if (selector.isEmpty()) {
// Last server serving this segment is gone: drop the selector and its timeline chunk.
VersionedIntervalTimeline<String, ServerSelector> timeline = timelines.get(segment.getDataSource());
selectors.remove(segmentId);
final PartitionChunk<ServerSelector> removedPartition = timeline.remove(
segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(selector)
);
if (removedPartition == null) {
log.warn(
"Asked to remove timeline entry[interval: %s, version: %s] that doesn't exist",
segment.getInterval(),
segment.getVersion()
);
} else {
// Fire segmentRemoved only when the timeline actually dropped the entry.
runTimelineCallbacks(callback -> callback.segmentRemoved(segment));
}
}
}
}
/**
 * Looks up the timeline for the base table of the given datasource analysis.
 *
 * @return the timeline for the analysis' base table, or empty if none exists yet
 * @throws ISE if the analysis has no base table datasource
 */
@Override
public Optional<VersionedIntervalTimeline<String, ServerSelector>> getTimeline(final DataSourceAnalysis analysis)
{
  final TableDataSource table = analysis.getBaseTableDataSource().orElseThrow(
      () -> new ISE("Cannot handle datasource: %s", analysis.getDataSource())
  );
  synchronized (lock) {
    final VersionedIntervalTimeline<String, ServerSelector> timeline = timelines.get(table.getName());
    return Optional.ofNullable(timeline);
  }
}
/**
 * Registers a callback to be notified of timeline changes; notifications run on the supplied
 * executor. The callback remains registered until one of its notifications returns
 * {@code CallbackAction.UNREGISTER} (see {@code runTimelineCallbacks}).
 *
 * @param exec     executor on which notifications are delivered
 * @param callback callback to register
 */
@Override
public void registerTimelineCallback(final Executor exec, final TimelineCallback callback)
{
timelineCallbacks.put(callback, exec);
}
/**
 * Returns the query runner for the given server, or null (with an error logged)
 * when no client wrapper is registered under that server's name.
 */
@Override
public <T> QueryRunner<T> getQueryRunner(DruidServer server)
{
  synchronized (lock) {
    final QueryableDruidServer found = clients.get(server.getName());
    if (found != null) {
      return found.getQueryRunner();
    }
    log.error("No QueryableDruidServer found for %s", server.getName());
    return null;
  }
}
/**
 * Registers a server-removed callback directly with the underlying base view;
 * notifications are delivered on the supplied executor.
 *
 * @param exec     executor on which notifications are delivered
 * @param callback callback to register
 */
@Override
public void registerServerRemovedCallback(Executor exec, ServerRemovedCallback callback)
{
baseView.registerServerRemovedCallback(exec, callback);
}
/**
 * Registers a segment callback with the underlying base view, applying this view's
 * {@code segmentFilter} (watched tiers / watched datasources) so the callback only sees
 * segments this view itself watches.
 *
 * @param exec     executor on which notifications are delivered
 * @param callback callback to register
 */
@Override
public void registerSegmentCallback(Executor exec, SegmentCallback callback)
{
baseView.registerSegmentCallback(exec, callback, segmentFilter);
}
/**
 * Dispatches a timeline notification to every registered callback on that callback's own
 * executor. A callback that returns {@code CallbackAction.UNREGISTER} is removed and will
 * receive no further notifications.
 */
private void runTimelineCallbacks(final Function<TimelineCallback, CallbackAction> function)
{
  timelineCallbacks.forEach(
      (callback, executor) -> executor.execute(
          () -> {
            if (CallbackAction.UNREGISTER == function.apply(callback)) {
              timelineCallbacks.remove(callback);
            }
          }
      )
  );
}
/**
 * Returns immutable snapshots of every server currently known to this view.
 */
@Override
public List<ImmutableDruidServer> getDruidServers()
{
  final List<ImmutableDruidServer> druidServers = clients.values()
                                                         .stream()
                                                         .map(client -> client.getServer().toImmutableDruidServer())
                                                         .collect(Collectors.toList());
  return druidServers;
}
}
| |
/*
* Copyright (c) 2010-2015 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2012.05.20 at 05:41:15 PM CEST
//
package com.evolveum.prism.xml.ns._public.types_3;
import com.evolveum.midpoint.prism.Raw;
import com.evolveum.midpoint.util.JAXBUtil;
import com.evolveum.midpoint.util.MiscUtil;
import org.w3c.dom.Element;
import javax.activation.MimeType;
import javax.activation.MimeTypeParseException;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.*;
import javax.xml.datatype.Duration;
import javax.xml.datatype.XMLGregorianCalendar;
import javax.xml.namespace.QName;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.lang.reflect.InvocationTargetException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.*;
/**
*
* Describe a change to a single attribute.
* In this case the path expression used in the "property"
* attribute must select exactly one property.
*
 * (Note: an older TODO asked for a rename to ItemDeltaType; that rename has been done.)
*
*
* <p>Java class for ItemDeltaType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ItemDeltaType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="modificationType" type="{http://prism.evolveum.com/xml/ns/public/types-3}ModificationTypeType"/>
* <element name="path" type="{http://prism.evolveum.com/xml/ns/public/types-3}XPathType" minOccurs="0"/>
* <element name="value">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <any processContents='lax' maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ItemDeltaType", propOrder = {
    "modificationType",
    "path",
    "value",
    "estimatedOldValue"
})
public class ItemDeltaType implements Serializable, Cloneable {

    public static final QName COMPLEX_TYPE = new QName("http://prism.evolveum.com/xml/ns/public/types-3", "ItemDeltaType");
    public static final QName F_PATH = new QName("http://prism.evolveum.com/xml/ns/public/types-3", "path");
    public static final QName F_VALUE = new QName("http://prism.evolveum.com/xml/ns/public/types-3", "value");

    // Kind of modification (add/replace/delete per ModificationTypeType). Required by the schema.
    @XmlElement(required = true)
    protected ModificationTypeType modificationType;

    // Path of the modified item; per the class contract it must select exactly one item.
    // @XmlAnyElement
    protected ItemPathType path;

    @XmlElement(required = true)
    @Raw
    protected List<Object> value; // Object is here to show as xsd:anyType in WSDL; at runtime the list holds RawType

    @XmlElement(required = true)
    @Raw
    protected List<Object> estimatedOldValue; // Object is here to show as xsd:anyType in WSDL; at runtime the list holds RawType

    /**
     * Gets the value of the modificationType property.
     *
     * @return possible object is {@link ModificationTypeType }
     */
    public ModificationTypeType getModificationType() {
        return modificationType;
    }

    /**
     * Sets the value of the modificationType property.
     *
     * @param value allowed object is {@link ModificationTypeType }
     */
    public void setModificationType(ModificationTypeType value) {
        this.modificationType = value;
    }

    /**
     * Gets the value of the path property.
     *
     * @return possible object is {@link ItemPathType }
     */
    public ItemPathType getPath() {
        return path;
    }

    /**
     * Sets the value of the path property.
     *
     * @param value allowed object is {@link ItemPathType }
     */
    public void setPath(ItemPathType value) {
        this.path = value;
    }

    /**
     * Returns the live list of new values for this delta, lazily created on first access.
     * Modifications to the returned list are reflected in this object (JAXB convention;
     * there is therefore no setter for this property).
     *
     * @return live, never-null list of {@link RawType} values
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    public List<RawType> getValue() {
        if (value == null) {
            value = new ArrayList<>();
        }
        // The field is declared as List<Object> only so that it shows as xsd:anyType in WSDL;
        // it really contains RawType elements ("brutal hack" kept from the original code).
        return (List<RawType>) (List) value;
    }

    /**
     * Returns the live list of estimated old values, lazily created on first access.
     * Same live-list semantics and RawType content as {@link #getValue()}.
     *
     * @return live, never-null list of {@link RawType} values
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    public List<RawType> getEstimatedOldValue() {
        if (estimatedOldValue == null) {
            estimatedOldValue = new ArrayList<>();
        }
        return (List<RawType>) (List) estimatedOldValue; // same "brutal hack" as getValue()
    }

    /**
     * <p>Java class for anonymous complex type.
     *
     * <p>The following schema fragment specifies the expected content contained within this class.
     *
     * <pre>
     * &lt;complexType>
     *   &lt;complexContent>
     *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
     *       &lt;sequence>
     *         &lt;any processContents='lax' maxOccurs="unbounded" minOccurs="0"/>
     *       &lt;/sequence>
     *     &lt;/restriction>
     *   &lt;/complexContent>
     * &lt;/complexType>
     * </pre>
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "any"
    })
    public static class Value implements Serializable, Cloneable {

        @XmlAnyElement(lax = true)
        protected List<Object> any;

        /**
         * Gets the value of the any property.
         *
         * <p>
         * This accessor method returns a reference to the live list,
         * not a snapshot. Therefore any modification you make to the
         * returned list will be present inside the JAXB object.
         * This is why there is not a <CODE>set</CODE> method for the any property.
         *
         * <p>
         * Objects of the following type(s) are allowed in the list:
         * {@link Element }
         * {@link Object }
         */
        public List<Object> getAny() {
            if (any == null) {
                any = new ArrayList<>();
            }
            return this.any;
        }

        @Override
        public Value clone() {
            Value value = new Value();
            copyContent(this.any, value.getAny());
            return value;
        }

        @Override
        public int hashCode() {
            // NOTE(review): equals() delegates to JAXBUtil.compareElementList(..., false); if that
            // comparison is order-insensitive, the order-sensitive List.hashCode() used here can
            // violate the equals/hashCode contract — confirm against JAXBUtil before using Value
            // as a hash key.
            final int prime = 31;
            int result = 1;
            result = prime * result + ((any == null) ? 0 : any.hashCode());
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Value other = (Value) obj;
            if (any == null) {
                if (other.any != null) {
                    return false;
                }
            } else if (!JAXBUtil.compareElementList(any, other.any, false)) {
                return false;
            }
            return true;
        }

        @Override
        public String toString() {
            return "Value(any=" + any + ")";
        }
    }

    /**
     * Creates a deep copy of this delta. Both {@code value} and {@code estimatedOldValue} raw
     * values are cloned element-by-element: otherwise the clone would share the original raw
     * objects and manipulations with the clone (e.g. parsing while taking data from raw objects)
     * would take effect on the originals — which used to break equals().
     */
    @Override
    public ItemDeltaType clone() {
        ItemDeltaType clone = new ItemDeltaType();
        clone.setModificationType(getModificationType());
        clone.setPath(getPath()); // TODO clone path
        for (RawType rawType : getValue()) {
            clone.getValue().add(rawType.clone());
        }
        // Deep-clone estimatedOldValue for the same reason as value (the original code
        // shallow-copied here, inconsistently with the value handling above).
        for (RawType rawType : getEstimatedOldValue()) {
            clone.getEstimatedOldValue().add(rawType.clone());
        }
        return clone;
    }

    /**
     * Copies all values of property {@code Content} deeply.
     *
     * @param source The source to copy from.
     * @param target The target to copy {@code source} to.
     * @throws NullPointerException if {@code target} is {@code null}.
     */
    @SuppressWarnings("unchecked")
    private static void copyContent(final List<Object> source, final List<Object> target) {
        // CC-XJC Version 2.0 Build 2011-09-16T18:27:24+0000
        if ((source != null) && (!source.isEmpty())) {
            for (final Object next : source) {
                if (next instanceof JAXBElement) {
                    // Referenced elements without classes.
                    if (((JAXBElement) next).getValue() instanceof String) {
                        // CElementInfo: javax.xml.bind.JAXBElement<java.lang.String>
                        target.add(copyOfStringElement(((JAXBElement) next)));
                        continue;
                    }
                }
                if (next instanceof String) {
                    // CBuiltinLeafInfo: java.lang.String (immutable, safe to share)
                    target.add(((String) next));
                    continue;
                }
                if (next instanceof Object) {
                    // CBuiltinLeafInfo: java.lang.Object — generic deep copy
                    target.add(copyOf(next));
                    continue;
                }
                // Only null elements reach this point.
                // Please report this at https://apps.sourceforge.net/mantisbt/ccxjc/
                // (Fixed: the original message named the wrong class, PolyStringType.)
                throw new AssertionError((("Unexpected instance '" + next) + "' for property 'Content' of class 'com.evolveum.prism.xml.ns._public.types_3.ItemDeltaType'."));
            }
        }
    }

    /**
     * Creates and returns a deep copy of the given object. Immutable values are shared;
     * arrays, DOM elements, well-known cloneable types and JAXBElements are copied; anything
     * else is copied via a public {@code clone()} method or, failing that, a serialization
     * round-trip.
     *
     * @param o the object to copy or {@code null}
     * @return a deep copy of {@code o}, or {@code null} if {@code o} is {@code null}
     */
    @SuppressWarnings("unchecked")
    private static Object copyOf(final Object o) {
        // CC-XJC Version 2.0 Build 2011-09-16T18:27:24+0000
        try {
            if (o != null) {
                if (o.getClass().isPrimitive()) {
                    return o;
                }
                if (o.getClass().isArray()) {
                    return copyOfArray(o);
                }
                // Immutable types: safe to share the same instance.
                if (o instanceof Boolean || o instanceof Byte || o instanceof Character
                        || o instanceof Double || o instanceof Enum || o instanceof Float
                        || o instanceof Integer || o instanceof Long || o instanceof Short
                        || o instanceof String || o instanceof BigDecimal || o instanceof BigInteger
                        || o instanceof UUID || o instanceof QName || o instanceof Duration
                        || o instanceof Currency) {
                    return o;
                }
                // String based types.
                if (o instanceof File) {
                    return new File(o.toString());
                }
                if (o instanceof URI) {
                    return new URI(o.toString());
                }
                if (o instanceof URL) {
                    return new URL(o.toString());
                }
                if (o instanceof MimeType) {
                    return new MimeType(o.toString());
                }
                // Cloneable types.
                if (o instanceof XMLGregorianCalendar) {
                    return ((XMLGregorianCalendar) o).clone();
                }
                if (o instanceof Date) {
                    return ((Date) o).clone();
                }
                if (o instanceof Calendar) {
                    return ((Calendar) o).clone();
                }
                if (o instanceof TimeZone) {
                    return ((TimeZone) o).clone();
                }
                if (o instanceof Locale) {
                    return ((Locale) o).clone();
                }
                if (o instanceof Element) {
                    return ((Element) o).cloneNode(true);
                }
                if (o instanceof JAXBElement) {
                    // Rebuild the element and deep-copy its payload. (The original generated code
                    // called copyOf(Object) with the same argument here, causing infinite recursion
                    // because the typed copyOf(JAXBElement) overload was not generated.)
                    final JAXBElement element = (JAXBElement) o;
                    final JAXBElement copy = new JAXBElement(element.getName(), element.getDeclaredType(), element.getScope(), copyOf(element.getValue()));
                    copy.setNil(element.isNil());
                    return copy;
                }
                try {
                    return o.getClass().getMethod("clone", ((Class[]) null)).invoke(o, ((Object[]) null));
                } catch (NoSuchMethodException e) {
                    if (o instanceof Serializable) {
                        // Fall back to a serialization round-trip. (The original generated code
                        // called copyOf(Object) with the same argument here, causing infinite
                        // recursion because the copyOf(Serializable) overload was not generated.)
                        return copyOfSerializable((Serializable) o);
                    }
                    // Please report this at https://apps.sourceforge.net/mantisbt/ccxjc/
                    throw (AssertionError) new AssertionError((("Unexpected instance during copying object '" + o) + "'.")).initCause(e);
                } catch (IllegalAccessException | InvocationTargetException | SecurityException | IllegalArgumentException e) {
                    // Please report this at https://apps.sourceforge.net/mantisbt/ccxjc/
                    throw (AssertionError) new AssertionError((("Unexpected instance during copying object '" + o) + "'.")).initCause(e);
                } catch (ExceptionInInitializerError e) {
                    // Please report this at https://apps.sourceforge.net/mantisbt/ccxjc/
                    throw (AssertionError) new AssertionError((("Unexpected instance during copying object '" + o) + "'.")).initCause(e);
                }
            }
            return null;
        } catch (MalformedURLException | URISyntaxException | MimeTypeParseException e) {
            throw (AssertionError) new AssertionError((("Unexpected instance during copying object '" + o) + "'.")).initCause(e);
        }
    }

    /**
     * Creates a deep copy of a {@link Serializable} object via a serialization round-trip.
     *
     * @param o the object to copy; must not be {@code null}
     * @return a deep copy of {@code o}
     */
    private static Object copyOfSerializable(final Serializable o) {
        try {
            final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            final ObjectOutputStream out = new ObjectOutputStream(buffer);
            out.writeObject(o);
            out.close();
            final ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray()));
            try {
                return in.readObject();
            } finally {
                in.close();
            }
        } catch (IOException | ClassNotFoundException e) {
            throw (AssertionError) new AssertionError((("Unexpected instance during copying object '" + o) + "'.")).initCause(e);
        }
    }

    /**
     * Creates and returns a deep copy of a given array.
     *
     * @param array The array to copy or {@code null}.
     * @return A deep copy of {@code array} or {@code null} if {@code array} is {@code null}.
     */
    private static Object copyOfArray(final Object array) {
        // CC-XJC Version 2.0 Build 2011-09-16T18:27:24+0000
        if (array != null) {
            // Primitive arrays hold no references, so clone() already yields a deep copy.
            // (The original generated code routed these through copyOf(Object), which bounced
            // straight back into copyOfArray — infinite recursion.)
            if (array.getClass() == boolean[].class) {
                return ((boolean[]) array).clone();
            }
            if (array.getClass() == byte[].class) {
                return ((byte[]) array).clone();
            }
            if (array.getClass() == char[].class) {
                return ((char[]) array).clone();
            }
            if (array.getClass() == double[].class) {
                return ((double[]) array).clone();
            }
            if (array.getClass() == float[].class) {
                return ((float[]) array).clone();
            }
            if (array.getClass() == int[].class) {
                return ((int[]) array).clone();
            }
            if (array.getClass() == long[].class) {
                return ((long[]) array).clone();
            }
            if (array.getClass() == short[].class) {
                return ((short[]) array).clone();
            }
            // Object arrays: copy element-by-element, deep-copying each element.
            final int len = Array.getLength(array);
            final Object copy = Array.newInstance(array.getClass().getComponentType(), len);
            for (int i = (len - 1); (i >= 0); i--) {
                Array.set(copy, i, copyOf(Array.get(array, i)));
            }
            return copy;
        }
        return null;
    }

    /**
     * Creates and returns a deep copy of a given {@code javax.xml.bind.JAXBElement<java.lang.String>} instance.
     *
     * @param e The instance to copy or {@code null}.
     * @return A deep copy of {@code e} or {@code null} if {@code e} is {@code null}.
     */
    @SuppressWarnings("unchecked")
    private static JAXBElement<String> copyOfStringElement(final JAXBElement<String> e) {
        // CC-XJC Version 2.0 Build 2011-09-16T18:27:24+0000
        if (e != null) {
            // Strings are immutable, so sharing the payload is a deep copy.
            final JAXBElement<String> copy = new JAXBElement<String>(e.getName(), e.getDeclaredType(), e.getScope(), e.getValue());
            copy.setNil(e.isNil());
            return copy;
        }
        return null;
    }

    @Override
    public int hashCode() {
        // Only order-insensitive fields contribute here: equals() compares 'value' and
        // 'estimatedOldValue' with MiscUtil.unorderedCollectionEquals, so the order-sensitive
        // List.hashCode() used by the original code broke the equals/hashCode contract for
        // lists that differ only in element order.
        final int prime = 31;
        int result = 1;
        result = prime * result + ((modificationType == null) ? 0 : modificationType.hashCode());
        result = prime * result + ((path == null) ? 0 : path.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        ItemDeltaType other = (ItemDeltaType) obj;
        if (modificationType != other.modificationType) {
            return false;
        }
        if (path == null) {
            if (other.path != null) {
                return false;
            }
        } else if (!path.equals(other.path)) {
            return false;
        }
        if (value == null) {
            if (other.value != null) {
                return false;
            }
        } else if (!MiscUtil.unorderedCollectionEquals(value, other.value)) {
            return false;
        }
        if (estimatedOldValue == null) {
            if (other.estimatedOldValue != null) {
                return false;
            }
        } else if (!MiscUtil.unorderedCollectionEquals(estimatedOldValue, other.estimatedOldValue)) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        return "ItemDeltaType(modificationType=" + modificationType
                + ", path=" + path + ", value=" + value + ", estimatedOldValue=" + estimatedOldValue + ")";
    }
}
| |
/*
* oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/
package org.xdi.oxauth.ws.rs;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import org.xdi.oxauth.BaseTest;
import org.xdi.oxauth.client.*;
import org.xdi.oxauth.model.common.AuthenticationMethod;
import org.xdi.oxauth.model.common.GrantType;
import org.xdi.oxauth.model.common.ResponseType;
import org.xdi.oxauth.model.crypto.signature.RSAPublicKey;
import org.xdi.oxauth.model.crypto.signature.SignatureAlgorithm;
import org.xdi.oxauth.model.jws.RSASigner;
import org.xdi.oxauth.model.jwt.Jwt;
import org.xdi.oxauth.model.jwt.JwtClaimName;
import org.xdi.oxauth.model.jwt.JwtHeaderName;
import org.xdi.oxauth.model.register.ApplicationType;
import org.xdi.oxauth.model.util.StringUtils;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import static org.testng.Assert.*;
import static org.xdi.oxauth.model.register.RegisterRequestParam.*;
/**
* Test cases for the authorization code flow (HTTP)
*
* @author Javier Rojas Blum
* @version June 19, 2015
*/
public class AuthorizationCodeFlowHttpTest extends BaseTest {
/**
* Test for the complete Authorization Code Flow.
*/
@Parameters({"userId", "userSecret", "redirectUris", "redirectUri"})
@Test
public void authorizationCodeFlow(final String userId, final String userSecret, final String redirectUris,
final String redirectUri) throws Exception {
showTitle("authorizationCodeFlow");
// code + id_token: hybrid-style response so the id_token can be validated alongside the code.
List<ResponseType> responseTypes = Arrays.asList(
ResponseType.CODE,
ResponseType.ID_TOKEN);
List<String> scopes = Arrays.asList("openid", "profile", "address", "email", "user_name");
// 1. Register client
RegisterResponse registerResponse = registerClient(redirectUris, responseTypes, scopes);
String clientId = registerResponse.getClientId();
String clientSecret = registerResponse.getClientSecret();
// 2. Request authorization and receive the authorization code.
String nonce = UUID.randomUUID().toString();
AuthorizationResponse authorizationResponse = requestAuthorization(userId, userSecret, redirectUri, responseTypes, scopes, clientId, nonce);
String scope = authorizationResponse.getScope();
String authorizationCode = authorizationResponse.getCode();
String idToken = authorizationResponse.getIdToken();
// 3. Request access token using the authorization code.
// The client authenticates at the token endpoint with HTTP Basic (client_secret_basic).
TokenRequest tokenRequest = new TokenRequest(GrantType.AUTHORIZATION_CODE);
tokenRequest.setCode(authorizationCode);
tokenRequest.setRedirectUri(redirectUri);
tokenRequest.setAuthUsername(clientId);
tokenRequest.setAuthPassword(clientSecret);
tokenRequest.setAuthenticationMethod(AuthenticationMethod.CLIENT_SECRET_BASIC);
TokenClient tokenClient1 = new TokenClient(tokenEndpoint);
tokenClient1.setRequest(tokenRequest);
TokenResponse tokenResponse1 = tokenClient1.exec();
showClient(tokenClient1);
assertEquals(tokenResponse1.getStatus(), 200, "Unexpected response code: " + tokenResponse1.getStatus());
assertNotNull(tokenResponse1.getEntity(), "The entity is null");
assertNotNull(tokenResponse1.getAccessToken(), "The access token is null");
assertNotNull(tokenResponse1.getExpiresIn(), "The expires in value is null");
assertNotNull(tokenResponse1.getTokenType(), "The token type is null");
assertNotNull(tokenResponse1.getRefreshToken(), "The refresh token is null");
String refreshToken = tokenResponse1.getRefreshToken();
// 4. Validate id_token
// Standard header/claim presence checks; "user_name" must be present because its scope was
// registered, while "org_name"/"work_phone" must be absent (their scopes were NOT registered).
Jwt jwt = Jwt.parse(idToken);
assertNotNull(jwt.getHeader().getClaimAsString(JwtHeaderName.TYPE));
assertNotNull(jwt.getHeader().getClaimAsString(JwtHeaderName.ALGORITHM));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.ISSUER));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.AUDIENCE));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.EXPIRATION_TIME));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.ISSUED_AT));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.SUBJECT_IDENTIFIER));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.CODE_HASH));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.AUTHENTICATION_TIME));
assertNotNull(jwt.getClaims().getClaimAsString("oxValidationURI"));
assertNotNull(jwt.getClaims().getClaimAsString("oxOpenIDConnectVersion"));
assertNotNull(jwt.getClaims().getClaimAsString("user_name"));
assertNull(jwt.getClaims().getClaimAsString("org_name"));
assertNull(jwt.getClaims().getClaimAsString("work_phone"));
// Verify the RS256 signature using the public key fetched from jwks_uri by key id.
RSAPublicKey publicKey = JwkClient.getRSAPublicKey(
jwksUri,
jwt.getHeader().getClaimAsString(JwtHeaderName.KEY_ID));
RSASigner rsaSigner = new RSASigner(SignatureAlgorithm.RS256, publicKey);
assertTrue(rsaSigner.validate(jwt));
// 5. Request new access token using the refresh token.
TokenClient tokenClient2 = new TokenClient(tokenEndpoint);
TokenResponse tokenResponse2 = tokenClient2.execRefreshToken(scope, refreshToken, clientId, clientSecret);
showClient(tokenClient2);
assertEquals(tokenResponse2.getStatus(), 200, "Unexpected response code: " + tokenResponse2.getStatus());
assertNotNull(tokenResponse2.getEntity(), "The entity is null");
assertNotNull(tokenResponse2.getAccessToken(), "The access token is null");
assertNotNull(tokenResponse2.getTokenType(), "The token type is null");
assertNotNull(tokenResponse2.getRefreshToken(), "The refresh token is null");
assertNotNull(tokenResponse2.getScope(), "The scope is null");
String accessToken = tokenResponse2.getAccessToken();
// 6. Request user info
// The refreshed access token must grant access to the userinfo endpoint.
UserInfoClient userInfoClient = new UserInfoClient(userInfoEndpoint);
UserInfoResponse response2 = userInfoClient.execUserInfo(accessToken);
showClient(userInfoClient);
assertEquals(response2.getStatus(), 200, "Unexpected response code: " + response2.getStatus());
assertNotNull(response2.getClaim(JwtClaimName.SUBJECT_IDENTIFIER));
assertNotNull(response2.getClaim(JwtClaimName.NAME));
assertNotNull(response2.getClaim("user_name"));
}
/**
 * Authorization Code Flow where the authorization request asks for a scope
 * ("mobile_phone") that the client did not register. Verifies that the issued
 * id_token still carries the registered "user_name" claim but no claim backed
 * by the not-allowed scope.
 */
@Parameters({"userId", "userSecret", "redirectUris", "redirectUri"})
@Test
public void authorizationCodeWithNotAllowedScopeFlow(final String userId, final String userSecret, final String redirectUris,
final String redirectUri) throws Exception {
showTitle("authorizationCodeWithNotAllowedScopeFlow");
List<ResponseType> responseTypes = Arrays.asList(
ResponseType.CODE,
ResponseType.ID_TOKEN);
List<String> scopes = Arrays.asList("openid", "profile", "address", "email", "user_name");
// 1. Register client
RegisterResponse registerResponse = registerClient(redirectUris, responseTypes, scopes);
String clientId = registerResponse.getClientId();
// 2. Request authorization and receive the authorization code.
// "mobile_phone" is requested here but was NOT among the registered scopes above.
List<String> authorizationScopes = Arrays.asList("openid", "profile", "address", "email", "user_name", "mobile_phone");
String nonce = UUID.randomUUID().toString();
AuthorizationResponse authorizationResponse = requestAuthorization(userId, userSecret, redirectUri, responseTypes, authorizationScopes, clientId, nonce);
String idToken = authorizationResponse.getIdToken();
// 3. Validate id_token
Jwt jwt = Jwt.parse(idToken);
assertNotNull(jwt.getHeader().getClaimAsString(JwtHeaderName.TYPE));
assertNotNull(jwt.getHeader().getClaimAsString(JwtHeaderName.ALGORITHM));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.ISSUER));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.AUDIENCE));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.EXPIRATION_TIME));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.ISSUED_AT));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.SUBJECT_IDENTIFIER));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.CODE_HASH));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.AUTHENTICATION_TIME));
assertNotNull(jwt.getClaims().getClaimAsString("oxValidationURI"));
assertNotNull(jwt.getClaims().getClaimAsString("oxOpenIDConnectVersion"));
assertNotNull(jwt.getClaims().getClaimAsString("user_name"));
// The claim for the not-allowed "mobile_phone" scope must be absent.
assertNull(jwt.getClaims().getClaimAsString("phone_mobile_number"));
}
/**
 * Authorization Code Flow that registers and requests the additional scopes
 * "org_name" and "work_phone". Verifies that the corresponding claims then
 * appear in the issued id_token (unlike in {@code authorizationCodeFlow},
 * where those scopes are not registered and the claims must be absent).
 */
@Parameters({"userId", "userSecret", "redirectUris", "redirectUri"})
@Test
public void authorizationCodeDynamicScopeFlow(final String userId, final String userSecret, final String redirectUris,
final String redirectUri) throws Exception {
showTitle("authorizationCodeDynamicScopeFlow");
List<ResponseType> responseTypes = Arrays.asList(
ResponseType.CODE,
ResponseType.ID_TOKEN);
// Includes the extra "org_name" and "work_phone" scopes.
List<String> scopes = Arrays.asList("openid", "profile", "address", "email", "user_name", "org_name", "work_phone");
// 1. Register client
RegisterResponse registerResponse = registerClient(redirectUris, responseTypes, scopes);
String clientId = registerResponse.getClientId();
// 2. Request authorization and receive the authorization code.
String nonce = UUID.randomUUID().toString();
AuthorizationResponse authorizationResponse = requestAuthorization(userId, userSecret, redirectUri, responseTypes, scopes, clientId, nonce);
String idToken = authorizationResponse.getIdToken();
// 3. Validate id_token
Jwt jwt = Jwt.parse(idToken);
assertNotNull(jwt.getHeader().getClaimAsString(JwtHeaderName.TYPE));
assertNotNull(jwt.getHeader().getClaimAsString(JwtHeaderName.ALGORITHM));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.ISSUER));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.AUDIENCE));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.EXPIRATION_TIME));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.ISSUED_AT));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.SUBJECT_IDENTIFIER));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.CODE_HASH));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.AUTHENTICATION_TIME));
assertNotNull(jwt.getClaims().getClaimAsString("oxValidationURI"));
assertNotNull(jwt.getClaims().getClaimAsString("oxOpenIDConnectVersion"));
assertNotNull(jwt.getClaims().getClaimAsString("user_name"));
// Claims for the additionally registered scopes must be present.
assertNotNull(jwt.getClaims().getClaimAsString("org_name"));
assertNotNull(jwt.getClaims().getClaimAsString("work_phone"));
}
/**
 * End-to-end authorization code flow test for the "code id_token" response type:
 * registers a client dynamically, authorizes with a nonce, exchanges the code for
 * tokens, checks the id_token claims and RS256 signature, and finally uses the
 * refresh token to obtain a new access token.
 *
 * <p>Despite the method name, a nonce is sent here and the test asserts that the
 * very same nonce value is echoed back inside the id_token claims.
 *
 * <p>NOTE(review): assertEquals argument order used throughout is TestNG's
 * (actual, expected, message) — confirm against the file's assertion imports.
 *
 * @param userId       resource-owner login used to authenticate at the authorization endpoint
 * @param userSecret   resource-owner password
 * @param redirectUris space-separated redirect URIs to register for the client
 * @param redirectUri  the redirect URI actually used in the authorization request
 */
@Parameters({"userId", "userSecret", "redirectUris", "redirectUri"})
@Test
public void authorizationCodeFlowWithOptionalNonce(final String userId, final String userSecret,
final String redirectUris,
final String redirectUri) throws Exception {
showTitle("authorizationCodeFlowWithOptionalNonce");
// Hybrid-style response: an authorization code plus an id_token from the authorization endpoint.
List<ResponseType> responseTypes = Arrays.asList(
ResponseType.CODE,
ResponseType.ID_TOKEN);
// 1. Register client (dynamic registration; no explicit scopes, server defaults apply).
RegisterRequest registerRequest = new RegisterRequest(ApplicationType.WEB, "oxAuth test app",
StringUtils.spaceSeparatedToList(redirectUris));
registerRequest.setResponseTypes(responseTypes);
RegisterClient registerClient = new RegisterClient(registrationEndpoint);
registerClient.setRequest(registerRequest);
RegisterResponse registerResponse = registerClient.exec();
showClient(registerClient);
assertEquals(registerResponse.getStatus(), 200, "Unexpected response code: " + registerResponse.getEntity());
assertNotNull(registerResponse.getClientId());
assertNotNull(registerResponse.getClientSecret());
assertNotNull(registerResponse.getRegistrationAccessToken());
assertNotNull(registerResponse.getClientIdIssuedAt());
assertNotNull(registerResponse.getClientSecretExpiresAt());
String clientId = registerResponse.getClientId();
String clientSecret = registerResponse.getClientSecret();
// 2. Request authorization and receive the authorization code.
List<String> scopes = Arrays.asList("openid", "profile", "address", "email");
String state = UUID.randomUUID().toString();
// Random nonce; asserted later to round-trip through the id_token.
String nonce = UUID.randomUUID().toString();
AuthorizationRequest authorizationRequest = new AuthorizationRequest(responseTypes, clientId, scopes, redirectUri, nonce);
authorizationRequest.setState(state);
AuthorizationResponse authorizationResponse = authenticateResourceOwnerAndGrantAccess(
authorizationEndpoint, authorizationRequest, userId, userSecret);
assertNotNull(authorizationResponse.getLocation(), "The location is null");
assertNotNull(authorizationResponse.getCode(), "The authorization code is null");
assertNotNull(authorizationResponse.getState(), "The state is null");
assertNotNull(authorizationResponse.getScope(), "The scope is null");
String scope = authorizationResponse.getScope();
String authorizationCode = authorizationResponse.getCode();
String idToken = authorizationResponse.getIdToken();
// 3. Request access token using the authorization code (client authenticates via HTTP Basic).
TokenRequest tokenRequest = new TokenRequest(GrantType.AUTHORIZATION_CODE);
tokenRequest.setCode(authorizationCode);
tokenRequest.setRedirectUri(redirectUri);
tokenRequest.setAuthUsername(clientId);
tokenRequest.setAuthPassword(clientSecret);
tokenRequest.setAuthenticationMethod(AuthenticationMethod.CLIENT_SECRET_BASIC);
TokenClient tokenClient1 = new TokenClient(tokenEndpoint);
tokenClient1.setRequest(tokenRequest);
TokenResponse tokenResponse1 = tokenClient1.exec();
showClient(tokenClient1);
assertEquals(tokenResponse1.getStatus(), 200, "Unexpected response code: " + tokenResponse1.getStatus());
assertNotNull(tokenResponse1.getEntity(), "The entity is null");
assertNotNull(tokenResponse1.getAccessToken(), "The access token is null");
assertNotNull(tokenResponse1.getExpiresIn(), "The expires in value is null");
assertNotNull(tokenResponse1.getTokenType(), "The token type is null");
assertNotNull(tokenResponse1.getRefreshToken(), "The refresh token is null");
String refreshToken = tokenResponse1.getRefreshToken();
// 4. Validate id_token: the nonce must round-trip, required header/claims must
// be present, and the RS256 signature must verify against the key published at
// jwksUri under the token's "kid" header.
Jwt jwt = Jwt.parse(idToken);
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.NONCE));
assertEquals(jwt.getClaims().getClaimAsString(JwtClaimName.NONCE), nonce);
assertNotNull(jwt.getHeader().getClaimAsString(JwtHeaderName.TYPE));
assertNotNull(jwt.getHeader().getClaimAsString(JwtHeaderName.ALGORITHM));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.ISSUER));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.AUDIENCE));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.EXPIRATION_TIME));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.ISSUED_AT));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.SUBJECT_IDENTIFIER));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.CODE_HASH));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.AUTHENTICATION_TIME));
assertNotNull(jwt.getClaims().getClaimAsString("oxValidationURI"));
assertNotNull(jwt.getClaims().getClaimAsString("oxOpenIDConnectVersion"));
RSAPublicKey publicKey = JwkClient.getRSAPublicKey(
jwksUri,
jwt.getHeader().getClaimAsString(JwtHeaderName.KEY_ID));
RSASigner rsaSigner = new RSASigner(SignatureAlgorithm.RS256, publicKey);
assertTrue(rsaSigner.validate(jwt));
// 5. Request new access token using the refresh token.
TokenClient tokenClient2 = new TokenClient(tokenEndpoint);
TokenResponse tokenResponse2 = tokenClient2.execRefreshToken(scope, refreshToken, clientId, clientSecret);
showClient(tokenClient2);
assertEquals(tokenResponse2.getStatus(), 200, "Unexpected response code: " + tokenResponse2.getStatus());
assertNotNull(tokenResponse2.getEntity(), "The entity is null");
assertNotNull(tokenResponse2.getAccessToken(), "The access token is null");
assertNotNull(tokenResponse2.getTokenType(), "The token type is null");
assertNotNull(tokenResponse2.getRefreshToken(), "The refresh token is null");
assertNotNull(tokenResponse2.getScope(), "The scope is null");
}
/**
 * When an authorization code is used more than once, all the tokens issued
 * for that authorization code must be revoked.
 *
 * <p>Flow: register a client, read it back via the registration client URI,
 * obtain an authorization code plus id_token, validate the id_token signature,
 * redeem the code once (expects 200), redeem the same code a second time
 * (expects 400), then verify that the refresh token (expects 401) and the
 * access token (userinfo expects 400) from the first redemption are revoked.
 *
 * <p>NOTE(review): assertEquals argument order used throughout is TestNG's
 * (actual, expected, message) — confirm against the file's assertion imports.
 *
 * @param userId       resource-owner login used to authenticate at the authorization endpoint
 * @param userSecret   resource-owner password
 * @param redirectUris space-separated redirect URIs to register for the client
 * @param redirectUri  the redirect URI actually used in the authorization request
 */
@Parameters({"userId", "userSecret", "redirectUris", "redirectUri"})
@Test
public void revokeTokens(final String userId, final String userSecret, final String redirectUris,
final String redirectUri) throws Exception {
showTitle("revokeTokens");
// Hybrid-style response: an authorization code plus an id_token.
List<ResponseType> responseTypes = Arrays.asList(
ResponseType.CODE,
ResponseType.ID_TOKEN);
// 1. Register client
RegisterRequest registerRequest = new RegisterRequest(ApplicationType.WEB, "oxAuth test app",
StringUtils.spaceSeparatedToList(redirectUris));
registerRequest.setResponseTypes(responseTypes);
RegisterClient registerClient = new RegisterClient(registrationEndpoint);
registerClient.setRequest(registerRequest);
RegisterResponse registerResponse = registerClient.exec();
showClient(registerClient);
assertEquals(registerResponse.getStatus(), 200, "Unexpected response code: " + registerResponse.getEntity());
assertNotNull(registerResponse.getClientId());
assertNotNull(registerResponse.getClientSecret());
assertNotNull(registerResponse.getRegistrationAccessToken());
assertNotNull(registerResponse.getClientIdIssuedAt());
assertNotNull(registerResponse.getClientSecretExpiresAt());
String clientId = registerResponse.getClientId();
String clientSecret = registerResponse.getClientSecret();
String registrationAccessToken = registerResponse.getRegistrationAccessToken();
String registrationClientUri = registerResponse.getRegistrationClientUri();
// 2. Client read — fetch the registration back using the registration access
// token and assert the stored metadata is all present.
RegisterRequest readClientRequest = new RegisterRequest(registrationAccessToken);
RegisterClient readClient = new RegisterClient(registrationClientUri);
readClient.setRequest(readClientRequest);
RegisterResponse readClientResponse = readClient.exec();
showClient(readClient);
assertEquals(readClientResponse.getStatus(), 200, "Unexpected response code: " + readClientResponse.getEntity());
assertNotNull(readClientResponse.getClientId());
assertNotNull(readClientResponse.getClientSecret());
assertNotNull(readClientResponse.getClientIdIssuedAt());
assertNotNull(readClientResponse.getClientSecretExpiresAt());
assertNotNull(readClientResponse.getClaims().get(RESPONSE_TYPES.toString()));
assertNotNull(readClientResponse.getClaims().get(REDIRECT_URIS.toString()));
assertNotNull(readClientResponse.getClaims().get(APPLICATION_TYPE.toString()));
assertNotNull(readClientResponse.getClaims().get(CLIENT_NAME.toString()));
assertNotNull(readClientResponse.getClaims().get(ID_TOKEN_SIGNED_RESPONSE_ALG.toString()));
assertNotNull(readClientResponse.getClaims().get("scopes"));
// 3. Request authorization and receive the authorization code.
List<String> scopes = Arrays.asList("openid", "profile", "address", "email");
String state = UUID.randomUUID().toString();
String nonce = UUID.randomUUID().toString();
AuthorizationRequest authorizationRequest = new AuthorizationRequest(responseTypes, clientId, scopes, redirectUri, nonce);
authorizationRequest.setState(state);
AuthorizationResponse authorizationResponse = authenticateResourceOwnerAndGrantAccess(
authorizationEndpoint, authorizationRequest, userId, userSecret);
assertNotNull(authorizationResponse.getLocation(), "The location is null");
assertNotNull(authorizationResponse.getCode(), "The authorization code is null");
assertNotNull(authorizationResponse.getState(), "The state is null");
assertNotNull(authorizationResponse.getScope(), "The scope is null");
assertNotNull(authorizationResponse.getIdToken(), "The id token is null");
String scope = authorizationResponse.getScope();
String authorizationCode = authorizationResponse.getCode();
String idToken = authorizationResponse.getIdToken();
// 4. Validate id_token: required header/claims present and RS256 signature
// verifies against the key published at jwksUri under the token's "kid".
Jwt jwt = Jwt.parse(idToken);
assertNotNull(jwt.getHeader().getClaimAsString(JwtHeaderName.TYPE));
assertNotNull(jwt.getHeader().getClaimAsString(JwtHeaderName.ALGORITHM));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.ISSUER));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.AUDIENCE));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.EXPIRATION_TIME));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.ISSUED_AT));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.SUBJECT_IDENTIFIER));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.CODE_HASH));
assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.AUTHENTICATION_TIME));
RSAPublicKey publicKey = JwkClient.getRSAPublicKey(
jwksUri,
jwt.getHeader().getClaimAsString(JwtHeaderName.KEY_ID));
RSASigner rsaSigner = new RSASigner(SignatureAlgorithm.RS256, publicKey);
assertTrue(rsaSigner.validate(jwt));
// 5. Request access token using the authorization code.
TokenClient tokenClient1 = new TokenClient(tokenEndpoint);
TokenResponse response2 = tokenClient1.execAuthorizationCode(authorizationCode, redirectUri,
clientId, clientSecret);
showClient(tokenClient1);
assertEquals(response2.getStatus(), 200, "Unexpected response code: " + response2.getStatus());
assertNotNull(response2.getEntity(), "The entity is null");
assertNotNull(response2.getAccessToken(), "The access token is null");
assertNotNull(response2.getTokenType(), "The token type is null");
assertNotNull(response2.getRefreshToken(), "The refresh token is null");
String accessToken = response2.getAccessToken();
String refreshToken = response2.getRefreshToken();
// 6. Request access token using the same authorization code one more time. This call must fail.
TokenClient tokenClient2 = new TokenClient(tokenEndpoint);
TokenResponse response4 = tokenClient2.execAuthorizationCode(authorizationCode, redirectUri,
clientId, clientSecret);
showClient(tokenClient2);
assertEquals(response4.getStatus(), 400, "Unexpected response code: " + response4.getStatus());
assertNotNull(response4.getEntity(), "The entity is null");
assertNotNull(response4.getErrorType(), "The error type is null");
assertNotNull(response4.getErrorDescription(), "The error description is null");
// 7. Request new access token using the refresh token. This call must fail too,
// because reuse of the code revokes every token issued for it.
TokenClient tokenClient3 = new TokenClient(tokenEndpoint);
TokenResponse response5 = tokenClient3.execRefreshToken(scope, refreshToken, clientId, clientSecret);
showClient(tokenClient3);
assertEquals(response5.getStatus(), 401, "Unexpected response code: " + response5.getStatus());
assertNotNull(response5.getEntity(), "The entity is null");
assertNotNull(response5.getErrorType(), "The error type is null");
assertNotNull(response5.getErrorDescription(), "The error description is null");
// 8. Request user info should fail — the access token is revoked as well.
UserInfoClient userInfoClient = new UserInfoClient(userInfoEndpoint);
UserInfoResponse response7 = userInfoClient.execUserInfo(accessToken);
showClient(userInfoClient);
assertEquals(response7.getStatus(), 400, "Unexpected response code: " + response7.getStatus());
assertNotNull(response7.getErrorType(), "Unexpected result: errorType not found");
assertNotNull(response7.getErrorDescription(), "Unexpected result: errorDescription not found");
}
/**
 * Drives the authorization endpoint for the given client: builds an
 * authorization request with a fresh random state, authenticates the resource
 * owner and grants access, asserts the response carries a location, code,
 * state and scope, and hands the response back to the caller.
 */
private AuthorizationResponse requestAuthorization(final String userId, final String userSecret, final String redirectUri,
        List<ResponseType> responseTypes, List<String> scopes, String clientId, String nonce) {
    final String freshState = UUID.randomUUID().toString();

    AuthorizationRequest request = new AuthorizationRequest(responseTypes, clientId, scopes, redirectUri, nonce);
    request.setState(freshState);

    AuthorizationResponse response =
            authenticateResourceOwnerAndGrantAccess(authorizationEndpoint, request, userId, userSecret);

    // Sanity-check the mandatory pieces of a successful authorization response.
    assertNotNull(response.getLocation(), "The location is null");
    assertNotNull(response.getCode(), "The authorization code is null");
    assertNotNull(response.getState(), "The state is null");
    assertNotNull(response.getScope(), "The scope is null");

    return response;
}
/**
 * Dynamically registers a web client with the given redirect URIs, response
 * types and scopes, asserts the registration succeeded (HTTP 200) and that all
 * credential fields were issued, then returns the registration response.
 */
private RegisterResponse registerClient(final String redirectUris, List<ResponseType> responseTypes, List<String> scopes) {
    RegisterRequest request = new RegisterRequest(ApplicationType.WEB, "oxAuth test app",
            StringUtils.spaceSeparatedToList(redirectUris));
    request.setResponseTypes(responseTypes);
    request.setScopes(scopes);

    RegisterClient client = new RegisterClient(registrationEndpoint);
    client.setRequest(request);
    RegisterResponse response = client.exec();
    showClient(client);

    // The server must issue a full set of client credentials and metadata.
    assertEquals(response.getStatus(), 200, "Unexpected response code: " + response.getEntity());
    assertNotNull(response.getClientId());
    assertNotNull(response.getClientSecret());
    assertNotNull(response.getRegistrationAccessToken());
    assertNotNull(response.getClientIdIssuedAt());
    assertNotNull(response.getClientSecretExpiresAt());

    return response;
}
}
| |
/**
*
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "[]"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright 2016 Alibaba Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.taobao.weex.dom;
import android.os.Message;
import android.text.TextUtils;
import com.alibaba.fastjson.JSONObject;
import com.taobao.weex.WXSDKManager;
import com.taobao.weex.common.WXModule;
import com.taobao.weex.common.WXModuleAnno;
import java.util.ArrayList;
/**
* Module class for dom operation. Methods in this class will run in dom thread by default.
* Actually, methods in this class are wrapper classes, they just wrap method call info, and hand
* the wrapped info to the {@link WXDomHandler} for further process. This class is also singleton
* in the {@link com.taobao.weex.WXSDKInstance}
*/
public final class WXDomModule extends WXModule {

  /**
   * Create a body for the current {@link com.taobao.weex.WXSDKInstance} according to given
   * parameter.
   *
   * @param element info about how to create a body; ignored when null
   */
  @WXModuleAnno(moduleMethod = true, runOnUIThread = false)
  public void createBody(JSONObject element) {
    if (element == null) {
      return;
    }
    postToDomThread(WXDomHandler.MsgType.WX_DOM_CREATE_BODY, element);
  }

  /**
   * Update {@link WXDomObject#attr}
   *
   * @param ref  {@link WXDomObject#ref}
   * @param attr the expected attr; ignored when null or empty
   */
  @WXModuleAnno(moduleMethod = true, runOnUIThread = false)
  public void updateAttrs(String ref, JSONObject attr) {
    if (TextUtils.isEmpty(ref) || attr == null || attr.size() < 1) {
      return;
    }
    postToDomThread(WXDomHandler.MsgType.WX_DOM_UPDATE_ATTRS, ref, attr);
  }

  /**
   * Update {@link WXDomObject#style}
   *
   * @param ref   {@link WXDomObject#ref}
   * @param style the expected style; ignored when null or empty
   */
  @WXModuleAnno(moduleMethod = true, runOnUIThread = false)
  public void updateStyle(String ref, JSONObject style) {
    if (TextUtils.isEmpty(ref) || style == null || style.size() < 1) {
      return;
    }
    postToDomThread(WXDomHandler.MsgType.WX_DOM_UPDATE_STYLE, ref, style);
  }

  /**
   * Remove a node from the node tree.
   *
   * @param ref {@link WXDomObject#ref} of the node to be removed; ignored when empty
   */
  @WXModuleAnno(moduleMethod = true, runOnUIThread = false)
  public void removeElement(String ref) {
    if (TextUtils.isEmpty(ref)) {
      return;
    }
    postToDomThread(WXDomHandler.MsgType.WX_DOM_REMOVE_DOM, ref);
  }

  /**
   * Add a {@link WXDomObject} to the specified parent as its given n-th child.
   *
   * @param parentRef {@link WXDomObject#ref} of the parent
   * @param element   the node to be added; ignored when null
   * @param index     the expected index that the new dom has in its new parent
   */
  @WXModuleAnno(moduleMethod = true, runOnUIThread = false)
  public void addElement(String parentRef, JSONObject element, Integer index) {
    if (element == null || TextUtils.isEmpty(parentRef)) {
      return;
    }
    postToDomThread(WXDomHandler.MsgType.WX_DOM_ADD_DOM, parentRef, element, index);
  }

  /**
   * Move the DomElement to the specified parent as its given n-th child.
   *
   * @param ref       {@link WXDomObject#ref} of the node to be moved
   * @param parentRef {@link WXDomObject#ref} of the parent
   * @param index     the expected index that the dom has in its new parent
   */
  @WXModuleAnno(moduleMethod = true, runOnUIThread = false)
  public void moveElement(String ref, String parentRef, Integer index) {
    if (TextUtils.isEmpty(ref) || TextUtils.isEmpty(parentRef)) {
      return;
    }
    postToDomThread(WXDomHandler.MsgType.WX_DOM_MOVE_DOM, ref, parentRef, index);
  }

  /**
   * Add eventListener for the specified {@link WXDomObject}
   *
   * @param ref  {@link WXDomObject#ref} of the node
   * @param type the type of the event listener to be added; ignored when empty
   */
  @WXModuleAnno(moduleMethod = true, runOnUIThread = false)
  public void addEvent(String ref, String type) {
    if (TextUtils.isEmpty(ref) || TextUtils.isEmpty(type)) {
      return;
    }
    postToDomThread(WXDomHandler.MsgType.WX_DOM_ADD_EVENT, ref, type);
  }

  /**
   * Remove eventListener for the specified {@link WXDomObject}
   *
   * @param ref  {@link WXDomObject#ref} of the node
   * @param type the type of the event listener to be removed; ignored when empty
   */
  @WXModuleAnno(moduleMethod = true, runOnUIThread = false)
  public void removeEvent(String ref, String type) {
    if (TextUtils.isEmpty(ref) || TextUtils.isEmpty(type)) {
      return;
    }
    postToDomThread(WXDomHandler.MsgType.WX_DOM_REMOVE_EVENT, ref, type);
  }

  /**
   * Notify the {@link WXDomManager} that creation of dom tree is finished.
   * This notify is given by JS.
   */
  @WXModuleAnno(moduleMethod = true, runOnUIThread = false)
  public void createFinish() {
    postToDomThread(WXDomHandler.MsgType.WX_DOM_CREATE_FINISH);
  }

  /**
   * Notify the {@link WXDomManager} that refreshing of dom tree is finished.
   * This notify is given by JS.
   */
  @WXModuleAnno(moduleMethod = true, runOnUIThread = false)
  public void refreshFinish() {
    postToDomThread(WXDomHandler.MsgType.WX_DOM_REFRESH_FINISH);
  }

  /**
   * Scroll the specified {@link WXDomObject} to given offset in given duration
   *
   * @param ref     {@link WXDomObject#ref} of specified dom object
   * @param options scroll option, like {offset:0, duration:300}; ignored when null
   */
  @WXModuleAnno(moduleMethod = true, runOnUIThread = false)
  public void scrollToElement(String ref, JSONObject options) {
    if (TextUtils.isEmpty(ref) || options == null) {
      return;
    }
    postToDomThread(WXDomHandler.MsgType.WX_DOM_SCROLLTO, ref, options);
  }

  /**
   * Wraps the given message type and arguments into a {@link WXDomTask} bound to the
   * current instance and posts it to the dom thread via {@link WXSDKManager}.
   *
   * <p>This factors out the Message/WXDomTask boilerplate that was previously
   * duplicated in every public method of this module.
   *
   * @param what one of {@link WXDomHandler.MsgType}
   * @param args optional task arguments, stored in call order; when no args are
   *             given, {@code task.args} is left null, matching the original
   *             behavior of {@link #createFinish()} and {@link #refreshFinish()}
   */
  private void postToDomThread(int what, Object... args) {
    Message msg = Message.obtain();
    WXDomTask task = new WXDomTask();
    task.instanceId = mWXSDKInstance.getInstanceId();
    if (args.length > 0) {
      task.args = new ArrayList<>();
      for (Object arg : args) {
        task.args.add(arg);
      }
    }
    msg.what = what;
    msg.obj = task;
    WXSDKManager.getInstance().getWXDomManager().sendMessage(msg);
  }
}
| |
/*
* This file is part of Unusuals, licensed under the MIT license (MIT).
*
* Copyright (c) 2014-2018 Max Roncace <me@caseif.net>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/*
* Updater for Bukkit.
*
* This class provides the means to safely and easily update a plugin, or check to see if it is updated using dev.bukkit.org
*/
package net.caseif.unusuals;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.plugin.Plugin;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.Enumeration;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
* Check dev.bukkit.org to find updates for a given plugin, and download the updates if needed.
* <p/>
* <b>VERY, VERY IMPORTANT</b>: Because there are no standards for adding auto-update toggles in your plugin's config, this system provides NO CHECK WITH YOUR CONFIG to make sure the user has allowed auto-updating.
* <br>
* It is a <b>BUKKIT POLICY</b> that you include a boolean value in your config that prevents the auto-updater from running <b>AT ALL</b>.
* <br>
* If you fail to include this option in your config, your plugin will be <b>REJECTED</b> when you attempt to submit it to dev.bukkit.org.
* <p/>
* An example of a good configuration option would be something similar to 'auto-update: true' - if this value is set to false you may NOT run the auto-updater.
* <br>
* If you are unsure about these rules, please read the plugin submission guidelines: http://goo.gl/8iU5l
*
* @author Gravity
* @version 2.0
*/
class Updater {
private Plugin plugin;
private UpdateType type;
private String versionName;
private String versionLink;
private String versionType;
private String versionGameVersion;
private boolean announce; // Whether to announce file downloads
private URL url; // Connecting to RSS
private File file; // The plugin's file
private Thread thread; // Updater thread
private int id = -1; // Project's Curse ID
private String apiKey = null; // BukkitDev ServerMods API key
private static final String TITLE_VALUE = "name"; // Gets remote file's title
private static final String LINK_VALUE = "downloadUrl"; // Gets remote file's download link
private static final String TYPE_VALUE = "releaseType"; // Gets remote file's release type
private static final String VERSION_VALUE = "gameVersion"; // Gets remote file's build version
private static final String QUERY = "/servermods/files?projectIds="; // Path to GET
private static final String HOST = "https://api.curseforge.com"; // Slugs will be appended to this to get to the project's RSS feed
private static final String[] NO_UPDATE_TAG = {"-DEV", "-SNAPSHOT"}; // If the version number contains one of these, don't update.
private static final int BYTE_SIZE = 1024; // Used for downloading files
private YamlConfiguration config; // Config file
private String updateFolder;// The folder that downloads will be placed in
private Updater.UpdateResult result = Updater.UpdateResult.SUCCESS; // Used for determining the outcome of the update process
/**
 * Gives the dev the result of the update process. Can be obtained by calling getResult().
 */
public enum UpdateResult {
/**
 * The updater found an update, and has readied it to be loaded the next time the server restarts/reloads.
 */
SUCCESS,
/**
 * The updater did not find an update, and nothing was downloaded.
 */
NO_UPDATE,
/**
 * The server administrator has disabled the updating system
 */
DISABLED,
/**
 * The updater found an update, but was unable to download it.
 */
FAIL_DOWNLOAD,
/**
 * For some reason, the updater was unable to contact dev.bukkit.org to download the file.
 */
FAIL_DBO,
/**
 * When running the version check, the file on DBO did not contain a version in the format 'vVersion' such as 'v1.0'.
 */
FAIL_NOVERSION,
/**
 * The id provided by the plugin running the updater was invalid and doesn't exist on DBO.
 */
FAIL_BADID,
/**
 * The server administrator has improperly configured their API key in the configuration
 */
FAIL_APIKEY,
/**
 * The updater found an update, but because of the UpdateType being set to NO_DOWNLOAD, it wasn't downloaded.
 */
UPDATE_AVAILABLE
}
/**
 * Allows the dev to specify the type of update that will be run.
 */
public enum UpdateType {
/**
 * Run a version check, and then if the file is out of date, download the newest version.
 */
DEFAULT,
/**
 * Don't run a version check, just find the latest update and download it.
 */
NO_VERSION_CHECK,
/**
 * Get information about the version and the download size, but don't actually download anything.
 */
NO_DOWNLOAD
}
/**
 * Initialize the updater
 *
 * <p>Bootstraps the shared {@code plugins/Updater/config.yml} (api-key and global
 * disable flag), then kicks off the update check on a background thread.
 *
 * @param plugin The plugin that is checking for an update.
 * @param id The dev.bukkit.org id of the project
 * @param file The file that the plugin is running from, get this by doing this.getFile() from within your main class.
 * @param type Specify the type of update this will be. See {@link UpdateType}
 * @param announce True if the program should announce the progress of new updates in console
 */
public Updater(Plugin plugin, int id, File file, UpdateType type, boolean announce) {
this.plugin = plugin;
this.type = type;
this.announce = announce;
this.file = file;
this.id = id;
this.updateFolder = plugin.getServer().getUpdateFolder();
final File pluginFile = plugin.getDataFolder().getParentFile();
final File updaterFile = new File(pluginFile, "Updater");
final File updaterConfigFile = new File(updaterFile, "config.yml");
if (!updaterFile.exists()) {
// NOTE(review): mkdir() return value is ignored; if it fails, the
// createNewFile() below presumably reports the problem instead.
updaterFile.mkdir();
}
if (!updaterConfigFile.exists()) {
try {
updaterConfigFile.createNewFile();
} catch (final IOException e) {
plugin.getLogger().severe("The updater could not create a configuration in " + updaterFile.getAbsolutePath());
e.printStackTrace();
}
}
this.config = YamlConfiguration.loadConfiguration(updaterConfigFile);
this.config.options().header("This configuration file affects all plugins using the Updater system (version 2+ - http://forums.bukkit.org/threads/96681/ )" + '\n'
+ "If you wish to use your API key, read http://wiki.bukkit.org/ServerMods_API and place it below." + '\n'
+ "Some updating systems will not adhere to the disabled value, but these may be turned off in their plugin's configuration.");
this.config.addDefault("api-key", "PUT_API_KEY_HERE");
this.config.addDefault("disable", false);
// First run: the file has no api-key entry yet, so persist the defaults.
if (this.config.get("api-key", null) == null) {
this.config.options().copyDefaults(true);
try {
this.config.save(updaterConfigFile);
} catch (final IOException e) {
plugin.getLogger().severe("The updater could not save the configuration in " + updaterFile.getAbsolutePath());
e.printStackTrace();
}
}
// Global kill switch: the server admin disabled all Updater-based checks.
if (this.config.getBoolean("disable")) {
this.result = UpdateResult.DISABLED;
return;
}
// Treat the placeholder or an empty string as "no API key configured".
String key = this.config.getString("api-key");
if (key.equalsIgnoreCase("PUT_API_KEY_HERE") || key.equals("")) {
key = null;
}
this.apiKey = key;
try {
this.url = new URL(Updater.HOST + Updater.QUERY + id);
} catch (final MalformedURLException e) {
plugin.getLogger().severe("The project ID provided for updating, " + id + " is invalid.");
this.result = UpdateResult.FAIL_BADID;
e.printStackTrace();
}
// Run the network check off the main server thread; callers synchronize
// with it through waitForThread() in the getters below.
this.thread = new Thread(new UpdateRunnable());
this.thread.start();
}
/**
 * Get the result of the update process.
 *
 * <p>Blocks until the updater thread has finished.
 */
public Updater.UpdateResult getResult() {
this.waitForThread();
return this.result;
}
/**
 * Get the latest version's release type (release, beta, or alpha).
 *
 * <p>Blocks until the updater thread has finished.
 */
public String getLatestType() {
this.waitForThread();
return this.versionType;
}
/**
 * Get the latest version's game version.
 *
 * <p>Blocks until the updater thread has finished.
 */
public String getLatestGameVersion() {
this.waitForThread();
return this.versionGameVersion;
}
/**
 * Get the latest version's name.
 *
 * <p>Blocks until the updater thread has finished.
 */
public String getLatestName() {
this.waitForThread();
return this.versionName;
}
/**
 * Get the latest version's file link.
 *
 * <p>Blocks until the updater thread has finished.
 */
public String getLatestFileLink() {
this.waitForThread();
return this.versionLink;
}
/**
 * As the result of Updater output depends on the thread's completion, it is necessary to wait for the thread to finish
 * before allowing anyone to check the result.
 */
private void waitForThread() {
if ((this.thread != null) && this.thread.isAlive()) {
try {
this.thread.join();
} catch (final InterruptedException e) {
// NOTE(review): the interrupt flag is swallowed here; re-interrupting
// the current thread would be the safer idiom.
e.printStackTrace();
}
}
}
/**
 * Save an update from dev.bukkit.org into the server's update folder.
 *
 * <p>Downloads {@code u} into {@code folder/file}; any downloaded .zip is
 * unpacked via {@link #unzip(String)}. On failure the result is set to
 * {@link UpdateResult#FAIL_DOWNLOAD}.
 */
private void saveFile(File folder, String file, String u) {
if (!folder.exists()) {
folder.mkdir();
}
BufferedInputStream in = null;
FileOutputStream fout = null;
try {
// Download the file
final URL url = new URL(u);
final int fileLength = url.openConnection().getContentLength();
in = new BufferedInputStream(url.openStream());
fout = new FileOutputStream(folder.getAbsolutePath() + "/" + file);
final byte[] data = new byte[Updater.BYTE_SIZE];
int count;
if (this.announce) {
this.plugin.getLogger().info("About to download a new update: " + this.versionName);
}
long downloaded = 0;
while ((count = in.read(data, 0, Updater.BYTE_SIZE)) != -1) {
downloaded += count;
fout.write(data, 0, count);
final int percent = (int) ((downloaded * 100) / fileLength);
if (this.announce && ((percent % 10) == 0)) {
this.plugin.getLogger().info("Downloading update: " + percent + "% of " + fileLength + " bytes.");
}
}
//Just a quick check to make sure we didn't leave any files from last time...
// NOTE(review): listFiles() can return null if the update folder is missing
// or unreadable, which would NPE here — TODO confirm the folder always exists.
for (final File xFile : new File(this.plugin.getDataFolder().getParent(), this.updateFolder).listFiles()) {
if (xFile.getName().endsWith(".zip")) {
xFile.delete();
}
}
// Check to see if it's a zip file, if it is, unzip it.
final File dFile = new File(folder.getAbsolutePath() + "/" + file);
if (dFile.getName().endsWith(".zip")) {
// Unzip
this.unzip(dFile.getCanonicalPath());
}
if (this.announce) {
this.plugin.getLogger().info("Finished updating.");
}
} catch (final Exception ex) {
this.plugin.getLogger().warning("The auto-updater tried to download a new update, but was unsuccessful.");
this.result = Updater.UpdateResult.FAIL_DOWNLOAD;
} finally {
// Best-effort cleanup of both streams; close failures are deliberately ignored.
try {
if (in != null) {
in.close();
}
if (fout != null) {
fout.close();
}
} catch (final Exception ex) {
}
}
}
/**
 * Part of Zip-File-Extractor, modified by Gravity for use with Bukkit
 *
 * <p>Extracts {@code file} next to itself, moves contained .jar files for
 * installed plugins into the server update folder, merges any bundled plugin
 * data folders, then deletes the archive and the temporary extraction dir.
 */
private void unzip(String file) {
try {
final File fSourceZip = new File(file);
// Strip the ".zip" suffix to get the extraction directory path.
final String zipPath = file.substring(0, file.length() - 4);
ZipFile zipFile = new ZipFile(fSourceZip);
Enumeration<? extends ZipEntry> e = zipFile.entries();
while (e.hasMoreElements()) {
ZipEntry entry = e.nextElement();
File destinationFilePath = new File(zipPath, entry.getName());
destinationFilePath.getParentFile().mkdirs();
if (entry.isDirectory()) {
continue;
} else {
// NOTE(review): bis/fos/bos are not closed if an exception is thrown
// mid-copy; try-with-resources would be safer here.
final BufferedInputStream bis = new BufferedInputStream(zipFile.getInputStream(entry));
int b;
final byte buffer[] = new byte[Updater.BYTE_SIZE];
final FileOutputStream fos = new FileOutputStream(destinationFilePath);
final BufferedOutputStream bos = new BufferedOutputStream(fos, Updater.BYTE_SIZE);
while ((b = bis.read(buffer, 0, Updater.BYTE_SIZE)) != -1) {
bos.write(buffer, 0, b);
}
bos.flush();
bos.close();
bis.close();
final String name = destinationFilePath.getName();
// Only stage jars that correspond to a currently-installed plugin.
if (name.endsWith(".jar") && this.pluginFile(name)) {
destinationFilePath.renameTo(new File(this.plugin.getDataFolder().getParent(), this.updateFolder + "/" + name));
}
}
entry = null;
destinationFilePath = null;
}
e = null;
zipFile.close();
zipFile = null;
// Move any plugin data folders that were included to the right place, Bukkit won't do this for us.
for (final File dFile : new File(zipPath).listFiles()) {
if (dFile.isDirectory()) {
if (this.pluginFile(dFile.getName())) {
final File oFile = new File(this.plugin.getDataFolder().getParent(), dFile.getName()); // Get current dir
final File[] contents = oFile.listFiles(); // List of existing files in the current dir
for (final File cFile : dFile.listFiles()) // Loop through all the files in the new dir
{
boolean found = false;
for (final File xFile : contents) // Loop through contents to see if it exists
{
if (xFile.getName().equals(cFile.getName())) {
found = true;
break;
}
}
if (!found) {
// Move the new file into the current dir
cFile.renameTo(new File(oFile.getCanonicalFile() + "/" + cFile.getName()));
} else {
// This file already exists, so we don't need it anymore.
cFile.delete();
}
}
}
}
dFile.delete();
}
new File(zipPath).delete();
fSourceZip.delete();
} catch (final IOException ex) {
this.plugin.getLogger().warning("The auto-updater tried to unzip a new update file, but was unsuccessful.");
this.result = Updater.UpdateResult.FAIL_DOWNLOAD;
ex.printStackTrace();
}
// Always remove the downloaded archive, even if extraction failed.
new File(file).delete();
}
/**
 * Check if the name of a jar is one of the plugins currently installed, used for extracting the correct files out of a zip.
 */
private boolean pluginFile(String name) {
// NOTE(review): assumes the server working directory contains "plugins";
// listFiles() would return null (and NPE) otherwise — TODO confirm.
for (final File file : new File("plugins").listFiles()) {
if (file.getName().equals(name)) {
return true;
}
}
return false;
}
/**
 * Check to see if the program should continue by evaluating whether the plugin is already updated, or shouldn't be updated
 *
 * @param title remote file title, expected in the form "PluginName vVERSION"
 * @return true if an update should proceed
 */
private boolean versionCheck(String title) {
if (this.type != UpdateType.NO_VERSION_CHECK) {
final String version = this.plugin.getDescription().getVersion();
if (title.split(" v").length == 2) {
final String remoteVersion = title.split(" v")[1].split(" ")[0]; // Get the newest file's version number
if (this.hasTag(version) || version.equalsIgnoreCase(remoteVersion)) {
// We already have the latest version, or this build is tagged for no-update
this.result = Updater.UpdateResult.NO_UPDATE;
return false;
}
} else {
// The file's name did not contain the string 'vVersion'
final String authorInfo = this.plugin.getDescription().getAuthors().size() == 0 ? "" : " (" + this.plugin.getDescription().getAuthors().get(0) + ")";
this.plugin.getLogger().warning("The author of this plugin" + authorInfo + " has misconfigured their Auto Update system");
this.plugin.getLogger().warning("File versions should follow the format 'PluginName vVERSION'");
this.plugin.getLogger().warning("Please notify the author of this error.");
this.result = Updater.UpdateResult.FAIL_NOVERSION;
return false;
}
}
return true;
}
/**
 * Evaluate whether the version number is marked showing that it should not be updated by this program
 * (i.e. contains one of the {@link #NO_UPDATE_TAG} markers, case-insensitively).
 */
private boolean hasTag(String version) {
for (final String string : Updater.NO_UPDATE_TAG) {
if (version.toLowerCase().contains(string.toLowerCase())) {
return true;
}
}
return false;
}
/**
 * Query the ServerMods API and populate versionName/Link/Type/GameVersion
 * from the newest file entry.
 *
 * @return true if the feed was read successfully
 */
private boolean read() {
try {
final URLConnection conn = this.url.openConnection();
conn.setConnectTimeout(5000);
if (this.apiKey != null) {
conn.addRequestProperty("X-API-Key", this.apiKey);
}
conn.addRequestProperty("User-Agent", "Updater (by Gravity)");
conn.setDoOutput(true);
// NOTE(review): the reader (and underlying connection stream) is never
// closed; the API response is assumed to be a single JSON line.
final BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
final String response = reader.readLine();
final JSONArray array = (JSONArray) JSONValue.parse(response);
if (array.size() == 0) {
this.plugin.getLogger().warning("The updater could not find any files for the project id " + this.id);
this.result = UpdateResult.FAIL_BADID;
return false;
}
// The feed is ordered oldest-first; the last element is the newest file.
this.versionName = (String) ((JSONObject) array.get(array.size() - 1)).get(Updater.TITLE_VALUE);
this.versionLink = (String) ((JSONObject) array.get(array.size() - 1)).get(Updater.LINK_VALUE);
this.versionType = (String) ((JSONObject) array.get(array.size() - 1)).get(Updater.TYPE_VALUE);
this.versionGameVersion = (String) ((JSONObject) array.get(array.size() - 1)).get(Updater.VERSION_VALUE);
return true;
} catch (final IOException e) {
if (e.getMessage().contains("HTTP response code: 403")) {
this.plugin.getLogger().warning("dev.bukkit.org rejected the API key provided in plugins/Updater/config.yml");
this.plugin.getLogger().warning("Please double-check your configuration to ensure it is correct.");
this.result = UpdateResult.FAIL_APIKEY;
} else {
this.plugin.getLogger().warning("The updater could not contact dev.bukkit.org for updating.");
this.plugin.getLogger().warning("If you have not recently modified your configuration and this is the first time you are seeing this message, the site may be experiencing temporary downtime.");
this.result = UpdateResult.FAIL_DBO;
}
e.printStackTrace();
return false;
}
}
/**
 * Background worker: reads the project feed, runs the version check, and
 * downloads the newest file unless the type is NO_DOWNLOAD.
 */
private class UpdateRunnable implements Runnable {
@Override
public void run() {
if (Updater.this.url != null) {
// Obtain the results of the project's file feed
if (Updater.this.read()) {
if (Updater.this.versionCheck(Updater.this.versionName)) {
if ((Updater.this.versionLink != null) && (Updater.this.type != UpdateType.NO_DOWNLOAD)) {
String name = Updater.this.file.getName();
// If it's a zip file, it shouldn't be downloaded as the plugin's name
if (Updater.this.versionLink.endsWith(".zip")) {
final String[] split = Updater.this.versionLink.split("/");
name = split[split.length - 1];
}
Updater.this.saveFile(new File(Updater.this.plugin.getDataFolder().getParent(), Updater.this.updateFolder), name, Updater.this.versionLink);
} else {
Updater.this.result = UpdateResult.UPDATE_AVAILABLE;
}
}
}
}
}
}
}
| |
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.dse.driver.internal.core.protocol;
import com.datastax.dse.driver.internal.core.graph.binary.buffer.DseNettyBufferFactory;
import com.datastax.oss.driver.internal.core.protocol.ByteBufPrimitiveCodec;
import com.datastax.oss.driver.shaded.guava.common.base.Charsets;
import com.datastax.oss.protocol.internal.PrimitiveCodec;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.zip.CRC32;
import org.apache.tinkerpop.gremlin.structure.io.Buffer;
/**
* Minimal implementation of {@link PrimitiveCodec} for Tinkerpop {@link Buffer} instances.
*
* <p>This approach represents a temporary design compromise. PrimitiveCodec is primarily used for
* handling data directly from Netty, a task satisfied by {@link ByteBufPrimitiveCodec}. But
* PrimitiveCodec is also used to implement graph serialization for some of the "dynamic" types
* (notably UDTs and tuples). Since we're converting graph serialization to use the new Tinkerpop
* Buffer API we need just enough of a PrimitiveCodec impl to satisfy the needs of graph
* serialization... and nothing more.
*
* <p>A more explicit approach would be to change graph serialization to use a different interface,
* some kind of subset of PrimitiveCodec.... and then make PrimitiveCodec extend this interface.
* This is left as future work for now since it involves changes to the native-protocol lib(s).
*/
public class TinkerpopBufferPrimitiveCodec implements PrimitiveCodec<Buffer> {
// Factory used to create heap/io/composite Buffer instances.
private final DseNettyBufferFactory factory;
public TinkerpopBufferPrimitiveCodec(DseNettyBufferFactory factory) {
this.factory = factory;
}
@Override
public Buffer allocate(int size) {
// Note: we use io() here to match up to what ByteBufPrimitiveCodec does, but be warned that
// ByteBufs created in this way don't support the array() method used elsewhere in this codec
// (readString() specifically). As such usage of this method to create Buffer instances is
// discouraged; we have a factory for that.
return this.factory.io(size, size);
}
@Override
public void release(Buffer toRelease) {
toRelease.release();
}
@Override
public int sizeOf(Buffer toMeasure) {
return toMeasure.readableBytes();
}
// Concatenates two buffers, returning one of the inputs unchanged when the
// other has nothing readable. Non-trivial cases produce a composite buffer.
@Override
public Buffer concat(Buffer left, Buffer right) {
boolean leftReadable = left.readableBytes() > 0;
boolean rightReadable = right.readableBytes() > 0;
// Neither side has data: return a fresh empty heap buffer.
if (!(leftReadable || rightReadable)) {
return factory.heap();
}
if (!leftReadable) {
return right;
}
if (!rightReadable) {
return left;
}
Buffer rv = factory.composite(left, right);
// c.readerIndex() is 0, which is the first readable byte in left
rv.writerIndex(
left.writerIndex() - left.readerIndex() + right.writerIndex() - right.readerIndex());
return rv;
}
// Mark/reset are not needed by graph serialization, so they are unsupported here.
@Override
public void markReaderIndex(Buffer source) {
throw new UnsupportedOperationException();
}
@Override
public void resetReaderIndex(Buffer source) {
throw new UnsupportedOperationException();
}
@Override
public byte readByte(Buffer source) {
return source.readByte();
}
@Override
public int readInt(Buffer source) {
return source.readInt();
}
@Override
public int readInt(Buffer source, int offset) {
throw new UnsupportedOperationException();
}
// Reads a [length byte][address bytes] encoded InetAddress.
@Override
public InetAddress readInetAddr(Buffer source) {
int length = readByte(source) & 0xFF;
byte[] bytes = new byte[length];
source.readBytes(bytes);
return newInetAddress(bytes);
}
@Override
public long readLong(Buffer source) {
return source.readLong();
}
@Override
public int readUnsignedShort(Buffer source) {
// Mask to reinterpret the signed short as an unsigned 16-bit value.
return source.readShort() & 0xFFFF;
}
// Reads an [int length][bytes] value; a negative length encodes null.
@Override
public ByteBuffer readBytes(Buffer source) {
int length = readInt(source);
if (length < 0) return null;
// NOTE(review): nioBuffer() views the bytes without advancing the reader
// index past the payload (cf. the manual adjustment in readString below) —
// presumably callers account for this; verify against usage.
return source.nioBuffer(source.readerIndex(), length);
}
// Reads a [unsigned short length][bytes] value.
@Override
public byte[] readShortBytes(Buffer source) {
try {
int length = readUnsignedShort(source);
byte[] bytes = new byte[length];
source.readBytes(bytes);
return bytes;
} catch (IndexOutOfBoundsException e) {
throw new IllegalArgumentException(
"Not enough bytes to read a byte array preceded by its 2 bytes length");
}
}
// Copy of PrimitiveCodec<ByteBuf> impl
@Override
public String readString(Buffer source) {
int length = readUnsignedShort(source);
return readString(source, length);
}
@Override
public String readLongString(Buffer source) {
int length = readInt(source);
return readString(source, length);
}
@Override
public Buffer readRetainedSlice(Buffer source, int sliceLength) {
throw new UnsupportedOperationException();
}
@Override
public void updateCrc(Buffer source, CRC32 crc) {
throw new UnsupportedOperationException();
}
@Override
public void writeByte(byte b, Buffer dest) {
dest.writeByte(b);
}
@Override
public void writeInt(int i, Buffer dest) {
dest.writeInt(i);
}
// Writes an InetAddress as [length byte][address bytes], mirroring readInetAddr.
@Override
public void writeInetAddr(InetAddress address, Buffer dest) {
byte[] bytes = address.getAddress();
writeByte((byte) bytes.length, dest);
dest.writeBytes(bytes);
}
@Override
public void writeLong(long l, Buffer dest) {
dest.writeLong(l);
}
@Override
public void writeUnsignedShort(int i, Buffer dest) {
dest.writeShort(i);
}
// Copy of PrimitiveCodec<ByteBuf> impl
@Override
public void writeString(String s, Buffer dest) {
byte[] bytes = s.getBytes(Charsets.UTF_8);
writeUnsignedShort(bytes.length, dest);
dest.writeBytes(bytes);
}
@Override
public void writeLongString(String s, Buffer dest) {
byte[] bytes = s.getBytes(Charsets.UTF_8);
writeInt(bytes.length, dest);
dest.writeBytes(bytes);
}
// Writes [int length][bytes]; null is encoded as length -1 with no payload.
@Override
public void writeBytes(ByteBuffer bytes, Buffer dest) {
if (bytes == null) {
writeInt(-1, dest);
} else {
writeInt(bytes.remaining(), dest);
// duplicate() so the caller's position/limit are left untouched.
dest.writeBytes(bytes.duplicate());
}
}
@Override
public void writeBytes(byte[] bytes, Buffer dest) {
if (bytes == null) {
writeInt(-1, dest);
} else {
writeInt(bytes.length, dest);
dest.writeBytes(bytes);
}
}
@Override
public void writeShortBytes(byte[] bytes, Buffer dest) {
writeUnsignedShort(bytes.length, dest);
dest.writeBytes(bytes);
}
// Based on PrimitiveCodec<ByteBuf> impl, although that method leverages some
// Netty built-ins which we have to do manually here
private static String readString(Buffer buff, int length) {
try {
// Basically what io.netty.buffer.ByteBufUtil.decodeString() does minus some extra
// ByteBuf-specific ops
int offset;
byte[] bytes;
ByteBuffer byteBuff = buff.nioBuffer();
if (byteBuff.hasArray()) {
// Backing array available: decode in place without copying.
bytes = byteBuff.array();
offset = byteBuff.arrayOffset();
} else {
// Direct buffer: copy the payload out before decoding.
bytes = new byte[length];
byteBuff.get(bytes, 0, length);
offset = 0;
}
String str = new String(bytes, offset, length, Charsets.UTF_8);
// Ops against the NIO buffers don't impact the read/write indexes for the Buffer
// itself so we have to do that manually
buff.readerIndex(buff.readerIndex() + length);
return str;
} catch (IndexOutOfBoundsException e) {
throw new IllegalArgumentException(
"Not enough bytes to read an UTF-8 serialized string of size " + length, e);
}
}
// TODO: Code below copied directly from ByteBufPrimitiveCodec, probably want to consolidate this
// somewhere
private static InetAddress newInetAddress(byte[] bytes) {
try {
return InetAddress.getByAddress(bytes);
} catch (UnknownHostException e) {
// Per the Javadoc, the only way this can happen is if the length is illegal
throw new IllegalArgumentException(
String.format("Invalid address length: %d (%s)", bytes.length, Arrays.toString(bytes)));
}
}
}
| |
/*
* Copyright (c) 2009, 2010, Oracle and/or its affiliates. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* - Neither the name of Oracle nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.sun.nio.zipfs;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.Map;
import static com.sun.nio.zipfs.ZipConstants.*;
import static com.sun.nio.zipfs.ZipUtils.*;
/**
* Print all loc and cen headers of the ZIP file
*
* @author Xueming Shen
*/
public class ZipInfo {
public static void main(String[] args) throws Throwable {
if (args.length < 1) {
print("Usage: java ZipInfo zfname");
} else {
Map<String, ?> env = Collections.emptyMap();
ZipFileSystem zfs = (ZipFileSystem)(new ZipFileSystemProvider()
.newFileSystem(Paths.get(args[0]), env));
byte[] cen = zfs.cen;
if (cen == null) {
print("zip file is empty%n");
return;
}
int pos = 0;
byte[] buf = new byte[1024];
int no = 1;
while (pos + CENHDR < cen.length) {
print("----------------#%d--------------------%n", no++);
printCEN(cen, pos);
// use size CENHDR as the extra bytes to read, just in case the
// loc.extra is bigger than the cen.extra, try to avoid to read
// twice
long len = LOCHDR + CENNAM(cen, pos) + CENEXT(cen, pos) + CENHDR;
if (zfs.readFullyAt(buf, 0, len, locoff(cen, pos)) != len)
zfs.zerror("read loc header failed");
if (LOCEXT(buf) > CENEXT(cen, pos) + CENHDR) {
// have to read the second time;
len = LOCHDR + LOCNAM(buf) + LOCEXT(buf);
if (zfs.readFullyAt(buf, 0, len, locoff(cen, pos)) != len)
zfs.zerror("read loc header failed");
}
printLOC(buf);
pos += CENHDR + CENNAM(cen, pos) + CENEXT(cen, pos) + CENCOM(cen, pos);
}
zfs.close();
}
}
static void print(String fmt, Object... objs) {
System.out.printf(fmt, objs);
}
static void printLOC(byte[] loc) {
print("%n");
print("[Local File Header]%n");
print(" Signature : %#010x%n", LOCSIG(loc));
if (LOCSIG(loc) != LOCSIG) {
print(" Wrong signature!");
return;
}
print(" Version : %#6x [%d.%d]%n",
LOCVER(loc), LOCVER(loc) / 10, LOCVER(loc) % 10);
print(" Flag : %#6x%n", LOCFLG(loc));
print(" Method : %#6x%n", LOCHOW(loc));
print(" LastMTime : %#10x [%tc]%n",
LOCTIM(loc), dosToJavaTime(LOCTIM(loc)));
print(" CRC : %#10x%n", LOCCRC(loc));
print(" CSize : %#10x%n", LOCSIZ(loc));
print(" Size : %#10x%n", LOCLEN(loc));
print(" NameLength : %#6x [%s]%n",
LOCNAM(loc), new String(loc, LOCHDR, LOCNAM(loc)));
print(" ExtraLength : %#6x%n", LOCEXT(loc));
if (LOCEXT(loc) != 0)
printExtra(loc, LOCHDR + LOCNAM(loc), LOCEXT(loc));
}
static void printCEN(byte[] cen, int off) {
print("[Central Directory Header]%n");
print(" Signature : %#010x%n", CENSIG(cen, off));
if (CENSIG(cen, off) != CENSIG) {
print(" Wrong signature!");
return;
}
print(" VerMadeby : %#6x [%d, %d.%d]%n",
CENVEM(cen, off), (CENVEM(cen, off) >> 8),
(CENVEM(cen, off) & 0xff) / 10,
(CENVEM(cen, off) & 0xff) % 10);
print(" VerExtract : %#6x [%d.%d]%n",
CENVER(cen, off), CENVER(cen, off) / 10, CENVER(cen, off) % 10);
print(" Flag : %#6x%n", CENFLG(cen, off));
print(" Method : %#6x%n", CENHOW(cen, off));
print(" LastMTime : %#10x [%tc]%n",
CENTIM(cen, off), dosToJavaTime(CENTIM(cen, off)));
print(" CRC : %#10x%n", CENCRC(cen, off));
print(" CSize : %#10x%n", CENSIZ(cen, off));
print(" Size : %#10x%n", CENLEN(cen, off));
print(" NameLen : %#6x [%s]%n",
CENNAM(cen, off), new String(cen, off + CENHDR, CENNAM(cen, off)));
print(" ExtraLen : %#6x%n", CENEXT(cen, off));
if (CENEXT(cen, off) != 0)
printExtra(cen, off + CENHDR + CENNAM(cen, off), CENEXT(cen, off));
print(" CommentLen : %#6x%n", CENCOM(cen, off));
print(" DiskStart : %#6x%n", CENDSK(cen, off));
print(" Attrs : %#6x%n", CENATT(cen, off));
print(" AttrsEx : %#10x%n", CENATX(cen, off));
print(" LocOff : %#10x%n", CENOFF(cen, off));
}
/**
 * Returns the local-header offset of the central directory entry at
 * {@code pos}. When the 32-bit field holds the ZIP64 sentinel value,
 * the real 64-bit offset is recovered from the entry's ZIP64 extra
 * field (skipping any 8-byte size/csize values that precede it there).
 */
static long locoff(byte[] cen, int pos) {
    long locoff = CENOFF(cen, pos);
    if (locoff != ZIP64_MINVAL)
        return locoff;
    // ZIP64: walk the extra-field records looking for the ZIP64 tag
    int off = pos + CENHDR + CENNAM(cen, pos);
    final int end = off + CENEXT(cen, pos);
    while (off + 4 < end) {
        final int tag = SH(cen, off);
        final int sz = SH(cen, off + 2);
        off += 4;
        if (tag == EXTID_ZIP64) {
            // uncompressed size and compressed size, when present in the
            // ZIP64 record, come before the local-header offset
            if (CENLEN(cen, pos) == ZIP64_MINVAL)
                off += 8;
            if (CENSIZ(cen, pos) == ZIP64_MINVAL)
                off += 8;
            return LL(cen, off);
        }
        off += sz;
    }
    // should never be here for a well-formed ZIP64 entry
    return locoff;
}
/**
 * Dumps the extra-field area {@code [off, off + len)} of {@code extra}.
 * Each record consists of a 2-byte tag, a 2-byte data size and
 * {@code sz} data bytes; ZIP64, PKWare NTFS and Info-ZIP extended
 * timestamp records are additionally decoded.
 */
static void printExtra(byte[] extra, int off, int len) {
    int end = off + len;
    while (off + 4 <= end) {    // need at least the tag + size header
        int tag = SH(extra, off);
        int sz = SH(extra, off + 2);
        print(" [tag=0x%04x, sz=%d, data= ", tag, sz);
        // The 4-byte tag/size header precedes the data, so the record
        // occupies off..off+4+sz. The previous check (off + sz > end)
        // ignored the header and could read 4 bytes past the declared
        // extra length.
        if (off + 4 + sz > end) {
            print(" Error: Invalid extra data, beyond extra length");
            break;
        }
        off += 4;
        for (int i = 0; i < sz; i++)
            print("%02x ", extra[off + i]);
        print("]%n");
        switch (tag) {
        case EXTID_ZIP64 :
            // a sequence of 8-byte values; which values are present
            // depends on which 32-bit CEN fields overflowed
            print(" ->ZIP64: ");
            int pos = off;
            while (pos + 8 <= off + sz) {
                print(" *0x%x ", LL(extra, pos));
                pos += 8;
            }
            print("%n");
            break;
        case EXTID_NTFS:
            print(" ->PKWare NTFS%n");
            // 4 bytes reserved, then sub-tag 0x0001 with 24 bytes of data
            // (three 8-byte Windows FILETIME values)
            if (SH(extra, off + 4) != 0x0001 || SH(extra, off + 6) != 24)
                print(" Error: Invalid NTFS sub-tag or subsz");
            print(" mtime:%tc%n",
                winToJavaTime(LL(extra, off + 8)));
            print(" atime:%tc%n",
                winToJavaTime(LL(extra, off + 16)));
            print(" ctime:%tc%n",
                winToJavaTime(LL(extra, off + 24)));
            break;
        case EXTID_EXTT:
            // label typo fixed: "Inof-ZIP" -> "Info-ZIP"
            print(" ->Info-ZIP Extended Timestamp: flag=%x%n",extra[off]);
            // one 4-byte unix time per flag bit set (central copy may
            // carry fewer times than the flag advertises)
            pos = off + 1;
            while (pos + 4 <= off + sz) {
                print(" *%tc%n",
                    unixToJavaTime(LG(extra, pos)));
                pos += 4;
            }
            break;
        default:
        }
        off += sz;
    }
}
}
| |
/**
*
* Copyright (c) Microsoft and contributors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
package com.microsoft.azure.management.sql.models;
import com.microsoft.windowsazure.core.LazyArrayList;
import java.util.ArrayList;
import java.util.Calendar;
/**
 * Represents the properties of an Azure SQL Database.
 *
 * NOTE: tool-generated model class (see file header); keep edits to
 * documentation only — code changes will be lost on regeneration.
 */
public class DatabaseProperties {
    private String collation;

    /**
     * Optional. Gets the collation of the Azure SQL Database.
     * @return The Collation value.
     */
    public String getCollation() {
        return this.collation;
    }

    /**
     * Optional. Sets the collation of the Azure SQL Database.
     * @param collationValue The Collation value.
     */
    public void setCollation(final String collationValue) {
        this.collation = collationValue;
    }

    private Calendar creationDate;

    /**
     * Optional. Gets the creation date of the Azure SQL Database.
     * @return The CreationDate value.
     */
    public Calendar getCreationDate() {
        return this.creationDate;
    }

    /**
     * Optional. Sets the creation date of the Azure SQL Database.
     * @param creationDateValue The CreationDate value.
     */
    public void setCreationDate(final Calendar creationDateValue) {
        this.creationDate = creationDateValue;
    }

    private String currentServiceObjectiveId;

    /**
     * Optional. Gets the current Service Level Objective Id of the Azure SQL
     * Database. This is the Id of the Service Level Objective that is
     * currently active.
     * @return The CurrentServiceObjectiveId value.
     */
    public String getCurrentServiceObjectiveId() {
        return this.currentServiceObjectiveId;
    }

    /**
     * Optional. Sets the current Service Level Objective Id of the Azure SQL
     * Database. This is the Id of the Service Level Objective that is
     * currently active.
     * @param currentServiceObjectiveIdValue The CurrentServiceObjectiveId value.
     */
    public void setCurrentServiceObjectiveId(final String currentServiceObjectiveIdValue) {
        this.currentServiceObjectiveId = currentServiceObjectiveIdValue;
    }

    private String databaseId;

    /**
     * Optional. Gets the Id of the Azure SQL Database.
     * @return The DatabaseId value.
     */
    public String getDatabaseId() {
        return this.databaseId;
    }

    /**
     * Optional. Sets the Id of the Azure SQL Database.
     * @param databaseIdValue The DatabaseId value.
     */
    public void setDatabaseId(final String databaseIdValue) {
        this.databaseId = databaseIdValue;
    }

    private Calendar earliestRestoreDate;

    /**
     * Optional. Gets the recovery period start date of the Azure SQL Database.
     * This records the start date and time when recovery is available for this
     * Azure SQL Database.
     * @return The EarliestRestoreDate value.
     */
    public Calendar getEarliestRestoreDate() {
        return this.earliestRestoreDate;
    }

    /**
     * Optional. Sets the recovery period start date of the Azure SQL Database.
     * This records the start date and time when recovery is available for this
     * Azure SQL Database.
     * @param earliestRestoreDateValue The EarliestRestoreDate value.
     */
    public void setEarliestRestoreDate(final Calendar earliestRestoreDateValue) {
        this.earliestRestoreDate = earliestRestoreDateValue;
    }

    private String edition;

    /**
     * Optional. Gets the edition of the Azure SQL Database. The
     * DatabaseEditions enumeration contains all the valid editions.
     * @return The Edition value.
     */
    public String getEdition() {
        return this.edition;
    }

    /**
     * Optional. Sets the edition of the Azure SQL Database. The
     * DatabaseEditions enumeration contains all the valid editions.
     * @param editionValue The Edition value.
     */
    public void setEdition(final String editionValue) {
        this.edition = editionValue;
    }

    private String elasticPoolName;

    /**
     * Optional. Gets the name of the Azure SQL Database Elastic Pool the
     * database is in.
     * @return The ElasticPoolName value.
     */
    public String getElasticPoolName() {
        return this.elasticPoolName;
    }

    /**
     * Optional. Sets the name of the Azure SQL Database Elastic Pool the
     * database is in.
     * @param elasticPoolNameValue The ElasticPoolName value.
     */
    public void setElasticPoolName(final String elasticPoolNameValue) {
        this.elasticPoolName = elasticPoolNameValue;
    }

    private long maxSizeBytes;

    /**
     * Optional. Gets the max size of the Azure SQL Database expressed in bytes.
     * @return The MaxSizeBytes value.
     */
    public long getMaxSizeBytes() {
        return this.maxSizeBytes;
    }

    /**
     * Optional. Sets the max size of the Azure SQL Database expressed in bytes.
     * @param maxSizeBytesValue The MaxSizeBytes value.
     */
    public void setMaxSizeBytes(final long maxSizeBytesValue) {
        this.maxSizeBytes = maxSizeBytesValue;
    }

    private String requestedServiceObjectiveId;

    /**
     * Optional. Gets the configured Service Level Objective Id of the Azure SQL
     * Database. This is the Service Level Objective that is being applied to
     * the Azure SQL Database.
     * @return The RequestedServiceObjectiveId value.
     */
    public String getRequestedServiceObjectiveId() {
        return this.requestedServiceObjectiveId;
    }

    /**
     * Optional. Sets the configured Service Level Objective Id of the Azure SQL
     * Database. This is the Service Level Objective that is being applied to
     * the Azure SQL Database.
     * @param requestedServiceObjectiveIdValue The RequestedServiceObjectiveId
     * value.
     */
    public void setRequestedServiceObjectiveId(final String requestedServiceObjectiveIdValue) {
        this.requestedServiceObjectiveId = requestedServiceObjectiveIdValue;
    }

    private String requestedServiceObjectiveName;

    /**
     * Optional. Gets the name of the configured Service Level Objective of the
     * Azure SQL Database. This is the Service Level Objective that is being
     * applied to the Azure SQL Database.
     * @return The RequestedServiceObjectiveName value.
     */
    public String getRequestedServiceObjectiveName() {
        return this.requestedServiceObjectiveName;
    }

    /**
     * Optional. Sets the name of the configured Service Level Objective of the
     * Azure SQL Database. This is the Service Level Objective that is being
     * applied to the Azure SQL Database.
     * @param requestedServiceObjectiveNameValue The
     * RequestedServiceObjectiveName value.
     */
    public void setRequestedServiceObjectiveName(final String requestedServiceObjectiveNameValue) {
        this.requestedServiceObjectiveName = requestedServiceObjectiveNameValue;
    }

    private ArrayList<Schema> schemas;

    /**
     * Optional. Gets the schemas from this database.
     * @return The Schemas value.
     */
    public ArrayList<Schema> getSchemas() {
        return this.schemas;
    }

    /**
     * Optional. Sets the schemas from this database.
     * @param schemasValue The Schemas value.
     */
    public void setSchemas(final ArrayList<Schema> schemasValue) {
        this.schemas = schemasValue;
    }

    private String serviceObjective;

    /**
     * Optional. Gets the Service Level Objective of the Azure SQL Database.
     * @return The ServiceObjective value.
     */
    public String getServiceObjective() {
        return this.serviceObjective;
    }

    /**
     * Optional. Sets the Service Level Objective of the Azure SQL Database.
     * @param serviceObjectiveValue The ServiceObjective value.
     */
    public void setServiceObjective(final String serviceObjectiveValue) {
        this.serviceObjective = serviceObjectiveValue;
    }

    private ArrayList<ServiceTierAdvisor> serviceTierAdvisors;

    /**
     * Optional. Gets the list of service tier advisors for this database.
     * Expanded property
     * @return The ServiceTierAdvisors value.
     */
    public ArrayList<ServiceTierAdvisor> getServiceTierAdvisors() {
        return this.serviceTierAdvisors;
    }

    /**
     * Optional. Sets the list of service tier advisors for this database.
     * Expanded property
     * @param serviceTierAdvisorsValue The ServiceTierAdvisors value.
     */
    public void setServiceTierAdvisors(final ArrayList<ServiceTierAdvisor> serviceTierAdvisorsValue) {
        this.serviceTierAdvisors = serviceTierAdvisorsValue;
    }

    private String status;

    /**
     * Optional. Gets the status of the Azure SQL Database.
     * @return The Status value.
     */
    public String getStatus() {
        return this.status;
    }

    /**
     * Optional. Sets the status of the Azure SQL Database.
     * @param statusValue The Status value.
     */
    public void setStatus(final String statusValue) {
        this.status = statusValue;
    }

    private UpgradeHint upgradeHint;

    /**
     * Optional. Gets the upgrade hint for this database.
     * @return The UpgradeHint value.
     */
    public UpgradeHint getUpgradeHint() {
        return this.upgradeHint;
    }

    /**
     * Optional. Sets the upgrade hint for this database.
     * @param upgradeHintValue The UpgradeHint value.
     */
    public void setUpgradeHint(final UpgradeHint upgradeHintValue) {
        this.upgradeHint = upgradeHintValue;
    }

    /**
     * Initializes a new instance of the DatabaseProperties class.
     *
     */
    public DatabaseProperties() {
        this.setSchemas(new LazyArrayList<Schema>());
        this.setServiceTierAdvisors(new LazyArrayList<ServiceTierAdvisor>());
    }
}
| |
package com.github.q115.goalie_android.ui.main;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.design.widget.AppBarLayout;
import android.support.design.widget.TabLayout;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import com.github.q115.goalie_android.R;
import com.github.q115.goalie_android.services.MessagingServiceUtil;
import com.github.q115.goalie_android.ui.DelayedProgressDialog;
import com.github.q115.goalie_android.ui.friends.FriendsActivity;
import com.github.q115.goalie_android.ui.login.LoginActivity;
import com.github.q115.goalie_android.ui.main.feeds.FeedsPresenter;
import com.github.q115.goalie_android.ui.main.my_goals.MyGoalsPresenter;
import com.github.q115.goalie_android.ui.main.requests.RequestsPresenter;
import com.github.q115.goalie_android.ui.profile.ProfileActivity;
import com.github.q115.goalie_android.utils.UserHelper;
/*
* Copyright 2017 Qi Li
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Hosts the app's three main tabs (goals, requests, feeds) in a ViewPager
 * and routes push-notification events to a full reload of all tabs.
 *
 * Registers itself as the "Main" {@link MessagingServiceUtil.MessagingServiceListener}
 * in onResume and unregisters in onDestroy.
 */
public class MainActivity extends AppCompatActivity implements MainActivityView,
        MessagingServiceUtil.MessagingServiceListener, ViewPager.OnPageChangeListener {
    private MainActivityPresenter mPresenter;
    private ViewPager mViewPager;
    private MainActivityPagerAdapter mViewPagerAdapter;
    private DelayedProgressDialog progressDialog;

    /**
     * Builds a launch intent for this activity.
     * @param tab zero-based index of the tab to select on open.
     */
    public static Intent newIntent(Context context, int tab) {
        Intent newIntent = new Intent(context, MainActivity.class);
        newIntent.putExtra("tab", tab);
        return newIntent;
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Create the presenter
        mPresenter = new MainActivityPresenter(this);

        Toolbar toolbar = findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);

        // Set up the ViewPager with the sections adapter.
        TabLayout tabLayout = findViewById(R.id.tabs);
        mViewPager = findViewById(R.id.container);
        mViewPagerAdapter = new MainActivityPagerAdapter(this, getSupportFragmentManager());
        mViewPager.setAdapter(mViewPagerAdapter);
        mViewPager.addOnPageChangeListener(this);
        tabLayout.setupWithViewPager(mViewPager);
        progressDialog = new DelayedProgressDialog();

        // Delegate tab selection to onNewIntent so launch and re-launch share one path.
        if (getIntent() != null)
            onNewIntent(getIntent());
    }

    @Override
    protected void onNewIntent(Intent intent) {
        super.onNewIntent(intent);
        if (intent != null && intent.hasExtra("tab")) {
            final int tab = intent.getIntExtra("tab", 0);
            // Posted so the pager has finished layout before the page switch.
            mViewPager.post(new Runnable() {
                @Override
                public void run() {
                    mViewPager.setCurrentItem(tab);
                }
            });
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Start receiving push-notification callbacks while visible.
        MessagingServiceUtil.setMessagingServiceListener("Main", this);
    }

    @Override
    protected void onStart() {
        super.onStart();
        mPresenter.syncIfNeeded(this);
    }

    @Override
    protected void onDestroy() {
        // Unregister to avoid leaking this activity through the static listener map.
        MessagingServiceUtil.setMessagingServiceListener("Main", null);
        super.onDestroy();
    }

    @Override
    public void setPresenter(MainActivityPresenter presenter) {
        mPresenter = presenter;
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int id = item.getItemId();
        switch (id) {
            case R.id.action_friends:
                startActivity(FriendsActivity.newIntent(this, UserHelper.getInstance().getOwnerProfile().username));
                return true;
            case R.id.action_profile:
                startActivity(ProfileActivity.newIntent(this, UserHelper.getInstance().getOwnerProfile().username));
                return true;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
        // intentionally left blank
    }

    @Override
    public void onPageSelected(int position) {
        // Close the floating-action-button menu and re-expand the app bar
        // whenever the user switches tabs.
        MyGoalsPresenter myGoalsPresenter = mViewPagerAdapter.getMyGoalsPresenter();
        if (myGoalsPresenter != null)
            myGoalsPresenter.closeFABMenu();

        AppBarLayout appBarLayout = findViewById(R.id.appbar);
        appBarLayout.setExpanded(true, true);
    }

    @Override
    public void onPageScrollStateChanged(int state) {
        // intentionally left blank
    }

    @Override
    public void onBackPressed() {
        // Back first closes the FAB menu if it is open; otherwise default behavior.
        MyGoalsPresenter myGoalsPresenter = mViewPagerAdapter.getMyGoalsPresenter();
        if (myGoalsPresenter == null || !myGoalsPresenter.isFABOpen()) {
            super.onBackPressed();
        } else {
            myGoalsPresenter.closeFABMenu();
        }
    }

    @Override
    public void showLogin() {
        startActivity(LoginActivity.newIntent(this));
    }

    @Override
    public void reloadAll() {
        // Refresh every tab whose presenter has been created.
        MyGoalsPresenter myGoalsPresenter = mViewPagerAdapter.getMyGoalsPresenter();
        RequestsPresenter requestsPresenter = mViewPagerAdapter.getRequestsPresenter();
        FeedsPresenter feedsPresenter = mViewPagerAdapter.getFeedsPresenter();
        if (myGoalsPresenter != null)
            myGoalsPresenter.reload();
        if (requestsPresenter != null)
            requestsPresenter.reload();
        if (feedsPresenter != null)
            feedsPresenter.reload();
    }

    @Override
    public void onNotification() {
        // Push callbacks may arrive off the main thread; marshal to UI thread.
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                reloadAll();
            }
        });
    }

    @Override
    public void updateProgress(boolean shouldShow) {
        if (shouldShow) {
            progressDialog.show(getSupportFragmentManager(), "DelayedProgressDialog");
        } else {
            progressDialog.cancel();
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.backup.example;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.master.cleaner.BaseHFileCleanerDelegate;
import org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.hbase.util.StoppableImplementation;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.zookeeper.KeeperException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
/**
 * Spin up a small cluster and check that the hfiles of region are properly long-term archived as
 * specified via the {@link ZKTableArchiveClient}.
 */
@Category(MediumTests.class)
public class TestZooKeeperTableArchiveClient {

  private static final Log LOG = LogFactory.getLog(TestZooKeeperTableArchiveClient.class);
  private static final HBaseTestingUtility UTIL = HBaseTestingUtility.createLocalHTU();
  private static final String STRING_TABLE_NAME = "test";
  private static final byte[] TEST_FAM = Bytes.toBytes("fam");
  private static final byte[] TABLE_NAME = Bytes.toBytes(STRING_TABLE_NAME);
  private static ZKTableArchiveClient archivingClient;
  // directories created by a test, deleted again in tearDown
  private final List<Path> toCleanup = new ArrayList<Path>();

  /**
   * Setup the config for the cluster
   */
  @BeforeClass
  public static void setupCluster() throws Exception {
    setupConf(UTIL.getConfiguration());
    UTIL.startMiniZKCluster();
    archivingClient = new ZKTableArchiveClient(UTIL.getConfiguration(), UTIL.getHBaseAdmin()
        .getConnection());
    // make hfile archiving node so we can archive files
    ZooKeeperWatcher watcher = UTIL.getZooKeeperWatcher();
    String archivingZNode = ZKTableArchiveClient.getArchiveZNode(UTIL.getConfiguration(), watcher);
    ZKUtil.createWithParents(watcher, archivingZNode);
  }

  private static void setupConf(Configuration conf) {
    // only compact with 3 files
    conf.setInt("hbase.hstore.compaction.min", 3);
  }

  @After
  public void tearDown() throws Exception {
    try {
      FileSystem fs = UTIL.getTestFileSystem();
      // cleanup each of the files/directories registered
      for (Path file : toCleanup) {
        // remove the table and archive directories
        FSUtils.delete(fs, file, true);
      }
    } catch (IOException e) {
      LOG.warn("Failure to delete archive directory", e);
    } finally {
      toCleanup.clear();
    }
    // make sure that backups are off for all tables
    archivingClient.disableHFileBackup();
  }

  @AfterClass
  public static void cleanupTest() throws Exception {
    try {
      UTIL.shutdownMiniZKCluster();
    } catch (Exception e) {
      LOG.warn("problem shutting down cluster", e);
    }
  }

  /**
   * Test turning on/off archiving
   */
  @Test (timeout=300000)
  public void testArchivingEnableDisable() throws Exception {
    // 1. turn on hfile backups
    LOG.debug("----Starting archiving");
    archivingClient.enableHFileBackupAsync(TABLE_NAME);
    assertTrue("Archiving didn't get turned on", archivingClient
        .getArchivingEnabled(TABLE_NAME));

    // 2. Turn off archiving and make sure its off
    archivingClient.disableHFileBackup();
    assertFalse("Archiving didn't get turned off.", archivingClient.getArchivingEnabled(TABLE_NAME));

    // 3. Check enable/disable on a single table
    archivingClient.enableHFileBackupAsync(TABLE_NAME);
    assertTrue("Archiving didn't get turned on", archivingClient
        .getArchivingEnabled(TABLE_NAME));

    // 4. Turn off archiving and make sure its off
    archivingClient.disableHFileBackup(TABLE_NAME);
    assertFalse("Archiving didn't get turned off for " + STRING_TABLE_NAME,
      archivingClient.getArchivingEnabled(TABLE_NAME));
  }

  @Test (timeout=300000)
  public void testArchivingOnSingleTable() throws Exception {
    createArchiveDirectory();
    FileSystem fs = UTIL.getTestFileSystem();
    Path archiveDir = getArchiveDir();
    Path tableDir = getTableDir(STRING_TABLE_NAME);
    toCleanup.add(archiveDir);
    toCleanup.add(tableDir);

    Configuration conf = UTIL.getConfiguration();
    // setup the delegate
    Stoppable stop = new StoppableImplementation();
    HFileCleaner cleaner = setupAndCreateCleaner(conf, fs, archiveDir, stop);
    List<BaseHFileCleanerDelegate> cleaners = turnOnArchiving(STRING_TABLE_NAME, cleaner);
    final LongTermArchivingHFileCleaner delegate = (LongTermArchivingHFileCleaner) cleaners.get(0);

    // create the region
    HColumnDescriptor hcd = new HColumnDescriptor(TEST_FAM);
    HRegion region = UTIL.createTestRegion(STRING_TABLE_NAME, hcd);
    loadFlushAndCompact(region, TEST_FAM);

    // get the current hfiles in the archive directory
    List<Path> files = getAllFiles(fs, archiveDir);
    if (files == null) {
      FSUtils.logFileSystemState(fs, UTIL.getDataTestDir(), LOG);
      throw new RuntimeException("Didn't archive any files!");
    }
    CountDownLatch finished = setupCleanerWatching(delegate, cleaners, files.size());

    runCleaner(cleaner, finished, stop);

    // know the cleaner ran, so now check all the files again to make sure they are still there
    List<Path> archivedFiles = getAllFiles(fs, archiveDir);
    assertEquals("Archived files changed after running archive cleaner.", files, archivedFiles);

    // but we still have the archive directory
    assertTrue(fs.exists(HFileArchiveUtil.getArchivePath(UTIL.getConfiguration())));
  }

  /**
   * Test archiving/cleaning across multiple tables, where some are retained, and others aren't
   * @throws Exception on failure
   */
  @Test (timeout=300000)
  public void testMultipleTables() throws Exception {
    createArchiveDirectory();
    String otherTable = "otherTable";

    FileSystem fs = UTIL.getTestFileSystem();
    Path archiveDir = getArchiveDir();
    Path tableDir = getTableDir(STRING_TABLE_NAME);
    Path otherTableDir = getTableDir(otherTable);

    // register cleanup for the created directories
    toCleanup.add(archiveDir);
    toCleanup.add(tableDir);
    toCleanup.add(otherTableDir);
    Configuration conf = UTIL.getConfiguration();
    // setup the delegate
    Stoppable stop = new StoppableImplementation();
    HFileCleaner cleaner = setupAndCreateCleaner(conf, fs, archiveDir, stop);
    List<BaseHFileCleanerDelegate> cleaners = turnOnArchiving(STRING_TABLE_NAME, cleaner);
    final LongTermArchivingHFileCleaner delegate = (LongTermArchivingHFileCleaner) cleaners.get(0);

    // create the region
    HColumnDescriptor hcd = new HColumnDescriptor(TEST_FAM);
    HRegion region = UTIL.createTestRegion(STRING_TABLE_NAME, hcd);
    loadFlushAndCompact(region, TEST_FAM);

    // create the another table that we don't archive
    hcd = new HColumnDescriptor(TEST_FAM);
    HRegion otherRegion = UTIL.createTestRegion(otherTable, hcd);
    loadFlushAndCompact(otherRegion, TEST_FAM);

    // get the current hfiles in the archive directory
    List<Path> files = getAllFiles(fs, archiveDir);
    if (files == null) {
      FSUtils.logFileSystemState(fs, archiveDir, LOG);
      throw new RuntimeException("Didn't archive any files!");
    }

    // make sure we have files from both tables
    int initialCountForPrimary = 0;
    int initialCountForOtherTable = 0;
    for (Path file : files) {
      String tableName = file.getParent().getParent().getParent().getName();
      // check to which table this file belongs
      if (tableName.equals(otherTable)) initialCountForOtherTable++;
      else if (tableName.equals(STRING_TABLE_NAME)) initialCountForPrimary++;
    }

    assertTrue("Didn't archive files for:" + STRING_TABLE_NAME, initialCountForPrimary > 0);
    assertTrue("Didn't archive files for:" + otherTable, initialCountForOtherTable > 0);

    // run the cleaners, checking for each of the directories + files (both should be deleted and
    // need to be checked) in 'otherTable' and the files (which should be retained) in the 'table'
    CountDownLatch finished = setupCleanerWatching(delegate, cleaners, files.size() + 3);
    // run the cleaner via the shared helper (was previously inlined here)
    runCleaner(cleaner, finished, stop);

    // know the cleaner ran, so now check all the files again to make sure they are still there
    List<Path> archivedFiles = getAllFiles(fs, archiveDir);
    int archivedForPrimary = 0;
    for(Path file: archivedFiles) {
      String tableName = file.getParent().getParent().getParent().getName();
      // ensure we don't have files from the non-archived table
      assertFalse("Have a file from the non-archived table: " + file, tableName.equals(otherTable));
      if (tableName.equals(STRING_TABLE_NAME)) archivedForPrimary++;
    }

    assertEquals("Not all archived files for the primary table were retained.", initialCountForPrimary,
      archivedForPrimary);

    // but we still have the archive directory
    assertTrue("Archive directory was deleted via archiver", fs.exists(archiveDir));
  }

  private void createArchiveDirectory() throws IOException {
    //create the archive and test directory
    FileSystem fs = UTIL.getTestFileSystem();
    Path archiveDir = getArchiveDir();
    fs.mkdirs(archiveDir);
  }

  private Path getArchiveDir() throws IOException {
    return new Path(UTIL.getDataTestDir(), HConstants.HFILE_ARCHIVE_DIRECTORY);
  }

  private Path getTableDir(String tableName) throws IOException {
    Path testDataDir = UTIL.getDataTestDir();
    FSUtils.setRootDir(UTIL.getConfiguration(), testDataDir);
    return new Path(testDataDir, tableName);
  }

  private HFileCleaner setupAndCreateCleaner(Configuration conf, FileSystem fs, Path archiveDir,
      Stoppable stop) {
    conf.setStrings(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS,
      LongTermArchivingHFileCleaner.class.getCanonicalName());
    return new HFileCleaner(1000, stop, conf, fs, archiveDir);
  }

  /**
   * Start archiving table for given hfile cleaner
   * @param tableName table to archive
   * @param cleaner cleaner to check to make sure change propagated
   * @return underlying {@link LongTermArchivingHFileCleaner} that is managing archiving
   * @throws IOException on failure
   * @throws KeeperException on failure
   */
  private List<BaseHFileCleanerDelegate> turnOnArchiving(String tableName, HFileCleaner cleaner)
      throws IOException, KeeperException {
    // turn on hfile retention
    LOG.debug("----Starting archiving for table:" + tableName);
    archivingClient.enableHFileBackupAsync(Bytes.toBytes(tableName));
    assertTrue("Archiving didn't get turned on", archivingClient.getArchivingEnabled(tableName));

    // wait for the archiver to get the notification
    List<BaseHFileCleanerDelegate> cleaners = cleaner.getDelegatesForTesting();
    LongTermArchivingHFileCleaner delegate = (LongTermArchivingHFileCleaner) cleaners.get(0);
    // was hard-coded to STRING_TABLE_NAME, which broke the method for any other table
    while (!delegate.archiveTracker.keepHFiles(tableName)) {
      // spin until propagation - should be fast; yield so we don't hog the CPU
      Thread.yield();
    }
    return cleaners;
  }

  /**
   * Spy on the {@link LongTermArchivingHFileCleaner} to ensure we can catch when the cleaner has
   * seen all the files
   * @return a {@link CountDownLatch} to wait on that releases when the cleaner has been called at
   *         least the expected number of times.
   */
  private CountDownLatch setupCleanerWatching(LongTermArchivingHFileCleaner cleaner,
      List<BaseHFileCleanerDelegate> cleaners, final int expected) {
    // replace the cleaner with one that we can can check
    BaseHFileCleanerDelegate delegateSpy = Mockito.spy(cleaner);
    final int[] counter = new int[] { 0 };
    final CountDownLatch finished = new CountDownLatch(1);
    Mockito.doAnswer(new Answer<Iterable<FileStatus>>() {

      @Override
      public Iterable<FileStatus> answer(InvocationOnMock invocation) throws Throwable {
        counter[0]++;
        LOG.debug(counter[0] + "/ " + expected + ") Wrapping call to getDeletableFiles for files: "
            + invocation.getArguments()[0]);

        @SuppressWarnings("unchecked")
        Iterable<FileStatus> ret = (Iterable<FileStatus>) invocation.callRealMethod();
        if (counter[0] >= expected) finished.countDown();
        return ret;
      }
    }).when(delegateSpy).getDeletableFiles(Mockito.anyListOf(FileStatus.class));
    cleaners.set(0, delegateSpy);

    return finished;
  }

  /**
   * Get all the files (non-directory entries) in the file system under the passed directory
   * @param dir directory to investigate
   * @return all files under the directory, or null if the directory is empty/missing
   */
  private List<Path> getAllFiles(FileSystem fs, Path dir) throws IOException {
    FileStatus[] files = FSUtils.listStatus(fs, dir, null);
    if (files == null) {
      LOG.warn("No files under:" + dir);
      return null;
    }

    List<Path> allFiles = new ArrayList<Path>();
    for (FileStatus file : files) {
      if (file.isDir()) {
        // recurse into subdirectories
        List<Path> subFiles = getAllFiles(fs, file.getPath());
        if (subFiles != null) allFiles.addAll(subFiles);
        continue;
      }
      allFiles.add(file.getPath());
    }
    return allFiles;
  }

  private void loadFlushAndCompact(HRegion region, byte[] family) throws IOException {
    // create two hfiles in the region
    createHFileInRegion(region, family);
    createHFileInRegion(region, family);

    Store s = region.getStore(family);
    int count = s.getStorefilesCount();
    assertTrue("Don't have the expected store files, wanted >= 2 store files, but was:" + count,
      count >= 2);

    // compact the two files into one file to get files in the archive
    LOG.debug("Compacting stores");
    region.compactStores(true);
  }

  /**
   * Create a new hfile in the passed region
   * @param region region to operate on
   * @param columnFamily family for which to add data
   * @throws IOException
   */
  private void createHFileInRegion(HRegion region, byte[] columnFamily) throws IOException {
    // put one row in the region
    Put p = new Put(Bytes.toBytes("row"));
    p.add(columnFamily, Bytes.toBytes("Qual"), Bytes.toBytes("v1"));
    region.put(p);
    // flush the region to make a store file
    region.flushcache();
  }

  /**
   * Start the given cleaner, wait for it to examine the expected number of files, then stop it.
   * @param cleaner cleaner chore to run
   * @param finished latch from {@link #setupCleanerWatching} released when enough files were seen
   * @param stop used to stop the cleaner once the latch has released
   */
  private void runCleaner(HFileCleaner cleaner, CountDownLatch finished, Stoppable stop)
      throws InterruptedException {
    // run the cleaner
    cleaner.start();
    // wait for the cleaner to check all the files
    finished.await();
    // stop the cleaner
    stop.stop("");
  }
}
| |
/*************************GO-LICENSE-START*********************************
* Copyright 2015 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.cruise.page;
import com.thoughtworks.cruise.CruiseAgents;
import com.thoughtworks.cruise.Regex;
import com.thoughtworks.cruise.Urls;
import com.thoughtworks.cruise.api.UsingAgentsApi;
import com.thoughtworks.cruise.api.response.AgentInformation;
import com.thoughtworks.cruise.editpipelinewizard.AutoCompleteSuggestions;
import com.thoughtworks.cruise.preconditions.AgentLauncher;
import com.thoughtworks.cruise.state.CurrentPageState;
import com.thoughtworks.cruise.state.CurrentPageState.Page;
import com.thoughtworks.cruise.state.ScenarioState;
import com.thoughtworks.cruise.util.TriStateScope;
import com.thoughtworks.cruise.utils.Assertions;
import com.thoughtworks.cruise.utils.Assertions.Assertion;
import com.thoughtworks.cruise.utils.Assertions.Function;
import com.thoughtworks.cruise.utils.Assertions.Predicate;
import com.thoughtworks.cruise.utils.Timeout;
import net.sf.sahi.client.Browser;
import net.sf.sahi.client.ElementStub;
import org.apache.commons.lang.StringUtils;
import org.hamcrest.Matchers;
import org.hamcrest.core.Is;
import org.hamcrest.core.StringContains;
import org.junit.Assert;
import java.net.InetAddress;
import java.util.*;
import java.util.regex.Pattern;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertTrue;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.StringContains.containsString;
import static org.junit.Assert.assertThat;
@SuppressWarnings("serial")
public class OnAgentsPage extends CruisePage {
// Predicate that polls the agents table for the idx-th agent row having the
// given status, capturing that row's DOM id once it is rendered (non-blank).
private final class AgentRowFinderPredicate implements Predicate {
private final String status;
private final int idx;
// row id of the matching agent; populated by call(), read by agentRowIdFor()
private String id;
private AgentRowFinderPredicate(String status, int idx) {
this.status = status;
this.idx = idx;
}
@Override
public boolean call() throws Exception {
// re-fetch on every poll: the row may not have rendered yet
id = agentsByStatus(status).get(idx).fetch("id");
return !StringUtils.isBlank(id);
}
}
private static final String ENVIRONMENT_EDIT_BUTTON = "show_environments_panel";
private static final String RESOURCES_EDIT_BUTTON = "show_resources_panel";
private static final String SCOPE_RESOURCES_PANEL = "resources_panel";
private static final String SCOPE_ENVIRONMENT_PANEL = "environments_panel";
private static final List<String> AGENT_TABLE_HEADERS = Arrays.asList(
"CHECKBOX", "AGENT NAME", "SANDBOX", "OS", "IP ADDRESS", "STATUS",
"FREE SPACE", "RESOURCES", "ENVIRONMENTS");
private final UsingAgentsApi agentsApi;
private final CruiseAgents createdAgents;
private final CurrentPageState currentPageState;
private boolean autoRefresh = false;
private Set<String> notEnabled = new HashSet<String>() {
{
add("pending");
add("disabled");
}
};
private Set<String> notAcceptedOrUnavailable = new HashSet<String>(
notEnabled) {
{
add("missing");
add("lost_contact");
}
};
public OnAgentsPage(ScenarioState state, UsingAgentsApi agentsApi,
CruiseAgents createdAgents, Browser browser,
CurrentPageState currentPageState) {
super(state, browser);
this.agentsApi = agentsApi;
this.createdAgents = createdAgents;
this.currentPageState = currentPageState;
}
@Override
protected String url() {
return Urls.urlFor("/agents?autoRefresh=" + autoRefresh);
}
@com.thoughtworks.gauge.Step("Wait for agent to show status <status>")
public void waitForAgentToShowStatus(String status) throws Exception {
waitForStatusWithTimeout(status, Timeout.TWO_MINUTES);
// Make sure we are REALLY building instead of just assigned (bug #3275)
Thread.sleep(5000);
waitForStatusWithTimeout(status, Timeout.ONE_MINUTE);
}
private ElementStub waitForStatusWithTimeout(final String status,
final Timeout timeout) {
return Assertions.waitFor(timeout, new Function<ElementStub>() {
public ElementStub call() {
reloadPage();
return browser.cell(status);
}
});
}
@com.thoughtworks.gauge.Step("Verify clicking the link <linkText> navigates to the job detail page")
public void verifyClickingTheLinkNavigatesToTheJobDetailPage(
final String linkText) throws Exception {
Assertions.waitUntil(Timeout.FIVE_MINUTES, new Predicate() {
public boolean call() {
reloadPage();
browser.link(linkText).click();
return true;
}
});
Assertions.assertWillHappen(browser.fetch("top.location.href"),
StringContains
.containsString("go/tab/build/detail/basic-pipeline"));
}
public int numberOfEnabledAgents() {
return agentsExcept(notEnabled);
}
public int numberOfAvailableAgents() {
return agentsExcept(notAcceptedOrUnavailable);
}
// Counts visible agent rows (up to 100) whose status cell text is NOT in the
// given exclusion set. Reloads the page first so the count is current.
private int agentsExcept(Set<String> statusExclusionList) {
    reloadPage();
    int matching = 0;
    int rowIndex = 0;
    while (rowIndex < 100) {
        ElementStub statusCell = browser.cell(String.format("status[%d]", rowIndex));
        if (!statusCell.exists()) {
            break;
        }
        if (!statusExclusionList.contains(statusCell.getText())) {
            matching++;
        }
        rowIndex++;
    }
    return matching;
}
@com.thoughtworks.gauge.Step("Sort column <column>")
public void sortColumn(final String column) throws Exception {
elementColumnHeaderLink(column).click();
}
private ElementStub elementColumnHeaderLink(String column) {
return browser.link(0).in(browser.tableHeader(column));
}
@com.thoughtworks.gauge.Step("Verify agents in column <column> have order <order>")
public void verifyAgentsInColumnHaveOrder(String column, String order)
        throws Exception {
    // Collapse runs of equal consecutive column values, then compare the
    // collapsed sequence against the expected comma-separated order.
    List<String> uniqueConsecutiveValues = new ArrayList<String>();
    for (String value : getColumnValues(column)) {
        int size = uniqueConsecutiveValues.size();
        if (size == 0 || !uniqueConsecutiveValues.get(size - 1).equals(value)) {
            uniqueConsecutiveValues.add(value);
        }
    }
    // normalize ", " separators to "," before splitting
    List<String> expectedOrder = Arrays.asList(
            order.replaceAll(",\\s*", ",").split(","));
    Assert.assertThat(uniqueConsecutiveValues, Is.is(expectedOrder));
}
private List<String> getColumnValues(String column) {
List<String> values = new ArrayList<String>();
ElementStub table = browser.byId("agent_details");
for (int i = 1; i < 100; i++) {
ElementStub element = browser.cell(table, i,
AGENT_TABLE_HEADERS.indexOf(column.toUpperCase()));
if (!element.exists())
break;
values.add(element.getText());
}
return values;
}
// retains query params
@com.thoughtworks.gauge.Step("Reload page")
public void reloadPage() {
browserWrapper.reload();
}
// reloads the agents page with the autoRefresh setting only (needed because
// IE sometimes returns an empty string for driver.getCurrentUrl())
private void reloadUrl() {
reload(url());
}
private void reload(String url) {
if (!autoRefresh) {
browser.navigateTo(url, true);
}
}
@com.thoughtworks.gauge.Step("Verify url contains <s>")
public void verifyUrlContains(String s) throws Exception {
assertThat(browserWrapper.getCurrentUrl(), containsString(s));
}
@com.thoughtworks.gauge.Step("Verify url does not contain <s>")
public void verifyUrlDoesNotContain(String s) throws Exception {
assertThat(browserWrapper.getCurrentUrl(), not(containsString(s)));
}
@com.thoughtworks.gauge.Step("Verify the pipeline is building only on agents in <environmentName>")
public void verifyThePipelineIsBuildingOnlyOnAgentsIn(String environmentName)
throws Exception {
Set<String> agentsUnderEnvironment = scenarioState
.agentsUnderEnvironment(environmentName);
waitForAgentToShowStatus("building");
List<ElementStub> buildingAgents = buildingAgents();
assertThat(buildingAgents.size(), Matchers.greaterThan(0));
for (ElementStub row : buildingAgents) {
Assert.assertTrue(agentsUnderEnvironment.contains(row.fetch("id")));
}
}
@com.thoughtworks.gauge.Step("Verify none of the agents are building")
public void verifyNoneOfTheAgentsAreBuilding() throws Exception {
Assertions.assertOverTime(Timeout.THIRTY_SECONDS,
new Function<Boolean>() {
@Override
public Boolean call() {
reloadPage();
return buildingAgents().size() == 0;
}
});
}
private List<ElementStub> buildingAgents() {
return agentsByStatus("Building");
}
// Returns every rendered agent row (capped at 100) whose CSS class name
// contains the given status string. Reloads the page before scanning.
private List<ElementStub> agentsByStatus(String status) {
    reloadPage();
    List<ElementStub> matches = new ArrayList<ElementStub>();
    for (int index = 0; index < 100; index++) {
        ElementStub candidate = browser.row(
                String.format("/agent_instance/[%d]", index));
        if (!candidate.exists()) {
            break;
        }
        if (candidate.fetch("className").contains(status)) {
            matches.add(candidate);
        }
    }
    return matches;
}
public List<String> idleAgentUuids() {
List<ElementStub> agentsByStatus = agentsByStatus("Idle");
List<String> uuids = new ArrayList<String>();
for (ElementStub row : agentsByStatus) {
uuids.add(row.fetch("id"));
}
return uuids;
}
public void verifyAllAgentsHaveForResources(String resource)
throws Exception {
for (ElementStub resourceElement : elementsAgentResources()) {
assertThat(resourceElement.getText(), Is.is(resource));
}
}
private List<ElementStub> elementsAgentResources() {
return browserWrapper.collectIn("span", "", browser.cell("resources"));
}
@com.thoughtworks.gauge.Step("Verify the <status> agent has <freeSpace> free space")
public void verifyTheAgentHasFreeSpace(String status, String freeSpace)
throws Exception {
assertThat(elementUsableSpaceFor(status).getText(), Is.is(freeSpace));
}
private ElementStub elementUsableSpaceFor(String status) {
ElementStub usableSpace = browser.cell("usable_space").near(
browser.cell("/" + status + "/"));
return usableSpace;
}
private ElementStub elementAgentCountForStatus(String configStatus) {
reloadUrl();
return browser.listItem(
String.format("/%s/", configStatus.toLowerCase())).in(
browser.list("/agent_counts/"));
}
@com.thoughtworks.gauge.Step("Verify has <count> idle agents")
public void verifyHasIdleAgents(final Integer count) throws Exception {
Assertions.waitUntil(Timeout.ONE_MINUTE, new Assertion<Integer>() {
public Integer actual() {
try {
reloadPage();
return idleAgentUuids().size();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public Integer expected() {
return count;
}
});
}
private void bulkEdit(ElementStub element, String operation) {
selectAgent(element);
browser.submit(operation).click();
}
@com.thoughtworks.gauge.Step("Select agent <element> - On Agents Page")
public void selectAgent(ElementStub element) {
final ElementStub checkbox = agentCheckbox(element);
assertTrue(element.exists());
Assertions.waitUntil(Timeout.TEN_SECONDS, new Predicate() {
public boolean call() throws Exception {
checkbox.check();
return checkbox.checked();
}
});
}
private ElementStub agentCheckbox(ElementStub element) {
return browser.checkbox("/selected/").in(element);
}
@com.thoughtworks.gauge.Step("Enable an agent showing status <status>")
public void enableAnAgentShowingStatus(String status) throws Exception {
bulkEdit(elementAgentRowWithStatus(status), "Enable");
}
@com.thoughtworks.gauge.Step("Disable an agent showing status <status>")
public void disableAnAgentShowingStatus(String status) throws Exception {
bulkEdit(elementAgentRowWithStatus(status), "Disable");
}
@com.thoughtworks.gauge.Step("On Agents Page")
public void goToAgentsPage() throws Exception {
navigateToURL();
}
@com.thoughtworks.gauge.Step("Disable agent <agentIndex>")
public void disableAgent(Integer agentIndex) throws Exception {
int prevDisabled = disabledAgentCount();
AgentLauncher agent = createdAgents.get(agentIndex);
ElementStub element = elementAgentRowWithUuid(agent.getUuid());
bulkEdit(element, "Disable");
Assert.assertThat("incorrect disabled agents count",
disabledAgentCount(), Matchers.is(prevDisabled + 1));
}
@com.thoughtworks.gauge.Step("Delete a disabled agent")
public void deleteADisabledAgent() {
bulkEdit(elementAgentRowWithStatus("disabled"), "Delete");
}
@com.thoughtworks.gauge.Step("Delete an idle agent")
public void deleteAnIdleAgent() {
bulkEdit(elementAgentRowWithStatus("idle"), "Delete");
}
private int disabledAgentCount() {
return countFrom(elementAgentCountForStatus("DISABLED").getText());
}
private int enabledAgentCount() {
return countFrom(elementAgentCountForStatus("ENABLED").getText());
}
/**
 * Extracts the trailing integer from an agent-count label such as
 * "Disabled: 3".
 * @param text label text, expected in the form "label: count"
 * @return the parsed count
 * @throws NumberFormatException if no integer follows the separator
 */
private int countFrom(String text) {
    // The original used indexOf(": ") + 1, which relied on trim() to hide an
    // off-by-one and, when the separator was absent (indexOf == -1), silently
    // passed the whole label to parseInt. Slice on ':' robustly instead.
    int separator = text.lastIndexOf(':');
    String count = (separator >= 0 ? text.substring(separator + 1) : text).trim();
    return Integer.parseInt(count);
}
@com.thoughtworks.gauge.Step("Enable agent <agentIndex>")
public void enableAgent(Integer agentIndex) throws Exception {
int prevEnabled = enabledAgentCount();
AgentLauncher agent = createdAgents.get(agentIndex);
bulkEdit(elementAgentRowWithUuid(agent.getUuid()), "Enable");
Assert.assertThat("incorrect enabled agents count",
enabledAgentCount(), Matchers.is(prevEnabled + 1));
}
@com.thoughtworks.gauge.Step("Add resource <resource> to all agents")
public void addResourceToAllAgents(String resource) throws Exception {
selectAllAgents();
clickResourcesButton();
addResource(resource, false);
assertAllAgentsHaveResource(resource);
}
private void assertAllAgentsHaveResource(String resource) {
List<ElementStub> afterAgentRows = allAgentRows();
for (int i = 0; i < afterAgentRows.size(); i++) {
assertAgentHasResource(i, resource);
}
}
@com.thoughtworks.gauge.Step("Select all agents")
public void selectAllAgents() {
ElementStub selectAllAgents = browser.checkbox("select_all_agents");
selectAllAgents.check();
}
private void assertVisible(final String xpath) {
Assertions.waitUntil(Timeout.THIRTY_SECONDS, new Predicate() {
public boolean call() {
return browser.div(xpath).isVisible();
}
});
}
private ElementStub elementAgentRowWithUuid(String uuid) {
return browser.row(uuid);
}
private ElementStub elementAgentRowWithStatus(String status) {
return browser.cell(status).parentNode("TR");
}
@com.thoughtworks.gauge.Step("Verify agents show operating system")
public void verifyAgentsShowOperatingSystem() throws Exception {
boolean showOS = false;
for(String os : getColumnValues("OS")){
if(os.matches("CentOS (\\d+\\.?)+ Final"))
showOS = true;
}
Assert.assertTrue(showOS);
}
private void enableUsingApi(String id) {
agentsApi.enable(id);
}
private String agentRowIdFor(final String status, final int idx) {
AgentRowFinderPredicate agentRowIdFinder = new AgentRowFinderPredicate(
status, idx);
Assertions.waitUntil(Timeout.FIVE_SECONDS, agentRowIdFinder);
return agentRowIdFinder.id;
}
@com.thoughtworks.gauge.Step("Add resource <resource> to agent <oneBasedIndex>")
public void addResourceToAgent(String resource, Integer oneBasedIndex)
throws Exception {
addResourceToAgent(resource, oneBasedIndex, false);
}
private void addResourceToAgent(String resource, Integer oneBasedIndex,
boolean pressReturn) {
unselectAllAgents();
selectAgent(oneBasedIndex);
browser.waitFor(3000);
clickResourcesButton();
addResource(resource, pressReturn);
assertAgentHasResource(oneBasedIndex - 1, resource);
}
@com.thoughtworks.gauge.Step("Add environment <environmentName> to agents <oneBasedIndexes>")
public void addEnvironmentToAgents(String environmentName,
String oneBasedIndexes) throws Exception {
setEnvironmentTo(environmentName, oneBasedIndexes, "add");
for (String index : oneBasedIndexes.split(",")) {
assertAgentHasEnvironment(Integer.valueOf(index.trim()) - 1,
environmentName);
}
}
@com.thoughtworks.gauge.Step("Remove environment <environmentName> from agents <oneBasedIndexes>")
public void removeEnvironmentFromAgents(String environmentName,
String oneBasedIndexes) throws Exception {
setEnvironmentTo(environmentName, oneBasedIndexes, "remove");
}
private void setEnvironmentTo(String environmentName,
String oneBasedIndexes, String action) throws Exception {
selectAgents(oneBasedIndexes);
setOnEnvironmentPopup(environmentName, action);
}
private void setOnEnvironmentPopup(String environmentName, String action)
throws Exception {
onEnvironmentPopupSet(environmentName + ":" + action);
}
private void unselectAllAgents() {
List<ElementStub> allAgentRows = allAgentRows();
for (ElementStub agentRow : allAgentRows) {
unselectAgent(agentRow);
}
}
private void unselectAgent(ElementStub agentRow) {
ElementStub checkbox = agentCheckbox(agentRow);
if (checkbox.checked()) {
checkbox.uncheck();
}
assertThat("agent row could not be unselected", checkbox.checked(),
Matchers.is(false));
}
private void assertAgentHasEnvironment(final int index,
final String environmentName) {
assertThat(browser.cell("environments").in(getAgentRow(index + 1))
.getText(), containsString(environmentName));
}
private void assertAgentHasResource(final Integer index,
final String resource) {
Assertions.waitUntil(Timeout.ONE_MINUTE, new Predicate() {
@Override
public boolean call() throws Exception {
reloadUrl();
return browser.cell("resources").in(getAgentRow(index + 1))
.getText().contains(resource);
}
});
}
/**
 * Scrapes every visible agent row on the agents tab into AgentInformation
 * beans (uuid, hostname, ip, status, sandbox, OS, free space, resources,
 * environments).
 * @return one AgentInformation per rendered agent row
 */
public AgentInformation[] getAllAgentInformationOnUI() {
    List<AgentInformation> agentInformationList = new ArrayList<AgentInformation>();
    for (ElementStub agentRow : allAgentRows()) {
        String uuid = agentRow.fetch("id");
        String agent_name = browser.cell("hostname").in(agentRow).getText();
        String ipAddress = browser.cell("ip_address").in(agentRow).getText();
        String status = browser.cell("status").in(agentRow).getText();
        String sandbox = browser.cell("location wrapped_word").in(agentRow).getText();
        String os = browser.cell("operating_system").in(agentRow).getText();
        String free_space = browser.cell("usable_space").in(agentRow).getText();
        List<String> resources = splitCellList(
                browser.cell("resources").in(agentRow).getText(),
                "no resources specified");
        List<String> environments = splitCellList(
                browser.cell("environments").in(agentRow).getText(),
                "no environments specified");
        agentInformationList.add(new AgentInformation(uuid, null,
                agent_name, ipAddress, "", status, sandbox, os, free_space,
                resources, environments));
    }
    return agentInformationList
            .toArray(new AgentInformation[agentInformationList.size()]);
}

// Splits a " | "-separated cell value into entries; returns an empty list for
// null/blank cells or the "no ... specified" placeholder text.
// BUG FIX: String.split takes a REGEX, so the original split(" | ") meant
// "space OR space" and split on every single space, producing bogus "|"
// entries; the literal separator must be quoted.
private List<String> splitCellList(String cellText, String emptyPlaceholder) {
    if (cellText == null || cellText.trim().isEmpty()
            || cellText.trim().equalsIgnoreCase(emptyPlaceholder)) {
        return new ArrayList<String>();
    }
    return Arrays.asList(cellText.split(Pattern.quote(" | ")));
}
// Collects every rendered agent row, stopping at the first missing row
// (capped at 100 rows).
private List<ElementStub> allAgentRows() {
    List<ElementStub> rows = new ArrayList<ElementStub>();
    for (int index = 0; index < 100; index++) {
        ElementStub candidate = browser.row(
                String.format("/agent_instance/[%d]", index));
        if (!candidate.exists()) {
            break;
        }
        rows.add(candidate);
    }
    return rows;
}
private void addResource(String resource, boolean pressReturn) {
ElementStub addResourceTextbox = browser.textbox("add_resource");
addResourceTextbox.setValue("");
if (pressReturn) {
addResourceTextbox.setValue(resource);
browser.execute("_sahi._keyPress(" + addResourceTextbox + ", 13)");
} else {
addResourceTextbox.setValue(resource);
applyAgentEdit(SCOPE_RESOURCES_PANEL);
}
}
private void applyAgentEdit(final String scope) {
final ElementStub applyButtonBeforePost = applyButtonForPanel(scope);
applyButtonBeforePost.click();
browser.waitFor(5000);
}
private ElementStub applyButtonForPanel(String scope) {
return browser.submit("/submit/").in(browser.div(scope));
}
private void selectAgent(Integer oneBasedIndex) {
selectAgent(getAgentRow(oneBasedIndex));
}
private ElementStub getAgentRow(Integer oneBasedIndex) {
return browser.row(String.format("/agent_instance/[%s]",
oneBasedIndex - 1));
}
private void clickResourcesButton() {
browser.byId(RESOURCES_EDIT_BUTTON).click();
Assertions.waitUntil(Timeout.THIRTY_SECONDS, new Predicate() {
public boolean call() throws Exception {
assertPanelVisible(SCOPE_RESOURCES_PANEL);
return true;
}
});
}
private void clickEnvironmentButton() {
browser.byId(ENVIRONMENT_EDIT_BUTTON).click();
assertPanelVisible(SCOPE_ENVIRONMENT_PANEL);
}
private void assertPanelVisible(String scope) {
assertVisible(scope);
}
@com.thoughtworks.gauge.Step("Select agents <indexes>")
public void selectAgents(String indexes) throws Exception {
unselectAllAgents();
for (String index : indexes.split(", ")) {
selectAgent(Integer.parseInt(index));
}
}
@com.thoughtworks.gauge.Step("Verify resource popup shows <resourceStateMap>")
public void verifyResourcePopupShows(String resourceStateMap)
throws Exception {
clickResourcesButton();
assertAgentEditPopopShows(SCOPE_RESOURCES_PANEL, resourceStateMap);
closeMicrocontentPanel();
}
private void assertAgentEditPopopShows(String scope, String triStatesMap)
throws Exception {
TriStateScope triStateScope = new TriStateScope(browser, scope);
triStateScope.assertShows(triStatesMap);
}
@com.thoughtworks.gauge.Step("Verify environment popup shows <environmentStateMap>")
public void verifyEnvironmentPopupShows(String environmentStateMap)
throws Exception {
clickEnvironmentButton();
assertAgentEditPopopShows(SCOPE_ENVIRONMENT_PANEL, environmentStateMap);
closeMicrocontentPanel();
}
private void closeMicrocontentPanel() {
browser.div("edit_panel").click();
}
@com.thoughtworks.gauge.Step("On resource popup verify <resourceName> transitions between <states>")
public void onResourcePopupVerifyTransitionsBetween(
final String resourceName, String states) throws Exception {
clickResourcesButton();
assertTriStateTransition(SCOPE_RESOURCES_PANEL, resourceName, states);
}
@com.thoughtworks.gauge.Step("On environment popup verify <environmentName> transitions between <states>")
public void onEnvironmentPopupVerifyTransitionsBetween(
final String environmentName, String states) throws Exception {
clickEnvironmentButton();
assertTriStateTransition(SCOPE_ENVIRONMENT_PANEL, environmentName,
states);
}
private void assertTriStateTransition(String scope,
final String triStateName, String states) throws Exception {
TriStateScope triStateScope = new TriStateScope(browser, scope);
triStateScope.assertTransitions(triStateName, states);
}
@com.thoughtworks.gauge.Step("On resource popup set <resourceStateMap>")
public void onResourcePopupSet(String resourceStateMap) throws Exception {
clickResourcesButton();
setAgentEditTristateState(SCOPE_RESOURCES_PANEL, resourceStateMap);
applyAgentEdit(SCOPE_RESOURCES_PANEL);
}
@com.thoughtworks.gauge.Step("On environment popup set <environmentStateMap>")
public void onEnvironmentPopupSet(String environmentStateMap)
throws Exception {
clickEnvironmentButton();
setAgentEditTristateState(SCOPE_ENVIRONMENT_PANEL, environmentStateMap);
applyAgentEdit(SCOPE_ENVIRONMENT_PANEL);
}
private void setAgentEditTristateState(String scope, String resourceStateMap)
throws Exception {
TriStateScope triStateScope = new TriStateScope(browser, scope);
triStateScope.set(resourceStateMap);
}
@com.thoughtworks.gauge.Step("Verify agent <oneBasedIndex> has resources <resourceFieldValue>")
public void verifyAgentHasResources(Integer oneBasedIndex,
String resourceFieldValue) throws Exception {
assertThat(elementCellWithClass(oneBasedIndex, "resources").getText(),
Matchers.is(resourceFieldValue));
}
@com.thoughtworks.gauge.Step("Verify agent <oneBasedIndex> has environments <environmentFieldValue>")
public void verifyAgentHasEnvironments(Integer oneBasedIndex,
String environmentFieldValue) throws Exception {
assertThat(elementCellWithClass(oneBasedIndex, "environments")
.getText(), Matchers.is(environmentFieldValue));
}
private ElementStub elementCellWithClass(Integer oneBasedIndex,
String cssClass) {
ElementStub agentRow = getAgentRow(oneBasedIndex);
ElementStub resourcesCell = browser.cell(cssClass).in(agentRow);
return resourcesCell;
}
@com.thoughtworks.gauge.Step("Turn on autoRefresh")
public void turnOnAutoRefresh() throws Exception {
autoRefresh = true;
open();
}
private ElementStub agentCheckbox(int oneBasedIndex) {
return browser.checkbox(
String.format("agent_select[%d]", oneBasedIndex)).in(
browser.byId("ajax_agents_table"));
}
/**
 * Checks the checkbox for the agent at the given one-based index, retrying
 * until the checkbox reports itself as checked.
 * @param oneBasedIndex one-based position of the agent row
 */
public void selectAgent(final int oneBasedIndex) throws Exception {
    Assertions.waitUntil(Timeout.TWENTY_SECONDS, new Predicate() {
        public boolean call() {
            ElementStub checkbox = agentCheckbox(oneBasedIndex);
            if (!checkbox.checked()) {
                checkbox.check();
            }
            // BUG FIX: the original returned false for an already-checked box,
            // so selecting an already-selected agent spun until timeout.
            // Succeed whenever the checkbox is actually checked, matching the
            // sibling selectAgent(ElementStub) above.
            return checkbox.checked();
        }
        @Override
        public String toString() {
            return "Could not select agent " + oneBasedIndex
                    + " should be selected";
        }
    });
}
public void verifyAgentIsSelected(final int index) throws Exception {
Assertions.waitUntil(Timeout.TWENTY_SECONDS, new Predicate() {
public boolean call() {
ElementStub checkbox = agentCheckbox(index);
return checkbox.checked();
}
@Override
public String toString() {
return "Agent " + index + " should be selected";
}
});
}
@com.thoughtworks.gauge.Step("Type new resource <resource> for agent <oneBasedIndex> and press return")
public void typeNewResourceForAgentAndPressReturn(String resource,
Integer oneBasedIndex) throws Exception {
addResourceToAgent(resource, oneBasedIndex, true);
}
public void verifyRedirectedToServerDetailPage() throws Exception {
super.verifyRedirectedToServerDetailPage();
}
public void verifyNotRedirected() throws Exception {
super.verifyNotRedirected();
}
/**
 * Fails the test if a link with the given text exists on the page.
 * @param linkText visible text of the link that must be absent
 */
public void verifyLinkIsNotPresent(String linkText) throws Exception {
    ElementStub link = browser.link(linkText);
    if (link.exists()) {
        // BUG FIX: the original format string was "%s%" — the trailing '%'
        // starts an invalid conversion and String.format throws
        // UnknownFormatConversionException instead of failing with a message.
        Assert.fail(String.format(
                "Element with text %s should not be present.", linkText));
    }
}
@com.thoughtworks.gauge.Step("Verify cruise footer - On Agents Page")
@Override
public void verifyCruiseFooter() throws Exception {
super.verifyCruiseFooter();
}
@com.thoughtworks.gauge.Step("Verify <canDo> operate on agents")
public void verifyOperateOnAgents(String canDo) throws Exception {
assertThat(browser.div("edit_panel").exists(),
Is.is(Can.CAN.matches(canDo)));
}
// Whether the logged-in user can or cannot operate on agents; matched
// case-insensitively against the spec's "can"/"cannot" wording.
enum Can {
CAN, CANNOT;
// true when this constant's name equals the given word, ignoring case
boolean matches(String can) {
return name().equalsIgnoreCase(can);
}
}
@com.thoughtworks.gauge.Step("Click on the live agent to go to the details page")
public void clickOnTheLiveAgentToGoToTheDetailsPage() throws Exception {
ElementStub agentRow = elementAgentRowWithStatus("idle");
String localAgentHostName = InetAddress.getLocalHost().getHostName();
ElementStub agentLink = browser.link(localAgentHostName).in(agentRow);
Assert.assertThat(
"Could not navigate to agent details page as no link exists",
agentLink.exists(), Is.is(true));
agentLink.click();
currentPageState.currentPageIs(Page.AGENT_DETAILS);
}
public void filterWith(String filterCriteria) throws Exception {
enterFilterValue(filterCriteria);
performFilter();
}
@com.thoughtworks.gauge.Step("Perform filter")
public void performFilter() {
ElementStub filterButton = browser.submit("/Filter/i").in(
browser.byId("agents_filter_form"));
filterButton.click();
}
@com.thoughtworks.gauge.Step("Enter filter value <filterCriteria>")
public void enterFilterValue(String filterCriteria) {
ElementStub filterTextBox = browser.textbox("filter_text");
filterTextBox.setValue(filterCriteria);
}
@com.thoughtworks.gauge.Step("Verify total agent count is <totalAgents>")
public void verifyTotalAgentCountIs(Integer totalAgents) throws Exception {
ElementStub row = browser.row(Regex.startsWith("agent_instance"));
int count = row.countSimilar();
assertThat(count, Is.is(totalAgents));
}
public void clearFilter() throws Exception {
browser.link("Clear").click();
}
public void verifySuggestionsShowUp(int numberOfSuggestions)
throws Exception {
AutoCompleteSuggestions suggestions = new AutoCompleteSuggestions(
browser, browserWrapper);
assertThat(suggestions.allSuggestion(0).size(),
Is.is(numberOfSuggestions));
}
@com.thoughtworks.gauge.Step("Auto completes should show suggestion <suggestion>")
public void autoCompletesShouldShowSuggestion(String suggestion)
throws Exception {
AutoCompleteSuggestions suggestions = new AutoCompleteSuggestions(
browser, browserWrapper);
suggestions.autoCompletesShouldShowSuggestion(suggestion);
}
@com.thoughtworks.gauge.Step("Auto completes should show suggestions <expectedSuggestions>")
public void autoCompletesShouldShowSuggestions(String expectedSuggestions)
throws Exception {
AutoCompleteSuggestions suggestions = new AutoCompleteSuggestions(
browser, browserWrapper);
String[] expected = expectedSuggestions.split(",");
for (String suggestion : expected) {
suggestions.autoCompletesShouldShowSuggestion(suggestion.trim());
}
assertThat(suggestions.allSuggestion(0).size(), Is.is(expected.length));
}
// Selects the first auto-complete suggestion in the popup.
// NOTE(review): the index parameter is currently ignored — selectFirstOption()
// is always called; confirm whether callers expect the index-th option.
public void selectOption(int index) throws Exception {
AutoCompleteSuggestions suggestions = new AutoCompleteSuggestions(
browser, browserWrapper);
suggestions.selectFirstOption();
}
public void verifyAgentUpgradeMessageDoesNotExist() throws Exception {
Assertions.waitUntil(Timeout.ONE_MINUTE, new Predicate() {
public boolean call() {
return !agentUpgradeMessageExists();
}
});
}
public void verifyThatNoAgentsNeedToBeUpgraded() throws Exception {
assertThat(browser.image("info_icon").exists(), Is.is(false));
}
public void verifyAgentUpgradeMessageExists() throws Exception {
Assertions.waitUntil(Timeout.TWENTY_SECONDS, new Predicate() {
public boolean call() {
reloadPage();
return agentUpgradeMessageExists();
}
});
}
private boolean agentUpgradeMessageExists() {
return browser.div("bootstraper_version_warning").exists();
}
public void verifyThatAgent2_4NeedsToBeUpgraded() throws Exception {
Assertions.waitUntil(Timeout.TWENTY_SECONDS, new Predicate() {
public boolean call() {
reloadPage();
ElementStub cell = browser.cell("hostname").in(
browser.row(Regex.wholeWord("agent_2_4")));
ElementStub spanWithInfoIcon = browser.span(
Regex.wholeWord("bootstrap-old")).in(cell);
return spanWithInfoIcon.exists();
}
});
}
public void verifyThatAgentWithResourceHasStatus(String resource,
String status) throws Exception {
ElementStub row = browser.row(Regex.wholeWord(resource));
ElementStub cell = browser.cell("status").in(row);
assertThat(browser.span(status).in(cell).exists(), Is.is(true));
}
public void waitForAgentsToShowUp() throws Exception {
Thread.sleep(Timeout.TWENTY_SECONDS.inMillis());
}
@com.thoughtworks.gauge.Step("Verify agents <expectedAgents> show up in results")
public void verifyAgentsShowUpInResults(String expectedAgents)
throws Exception {
String[] expected = expectedAgents.split(",");
for (String expectedAgent : expected) {
ElementStub cell = browser.cell("hostname").in(
browser.row(Regex.wholeWord(expectedAgent.trim())));
assertThat(cell.exists(), Is.is(true));
}
int actualNoOfAgents = browser.row(Regex.startsWith("agent_instance"))
.countSimilar();
assertThat(actualNoOfAgents, Is.is(expected.length));
}
@com.thoughtworks.gauge.Step("Verify data in agent listing api is same as data on agents tab")
public void verifyDataInAgentListingApiIsSameAsDataOnAgentsTab()
        throws Exception {
    // agents listing on UI, sorted by uuid so order differences don't matter
    List<AgentInformation> agentListOnUI = Arrays.asList(getAllAgentInformationOnUI());
    Collections.sort(agentListOnUI, new AgentSorterBasedOnUUID());
    // agents listing in API, sorted the same way
    List<AgentInformation> agentListInAPI = Arrays.asList(agentsApi.listInformationOfAllAgents());
    Collections.sort(agentListInAPI, new AgentSorterBasedOnUUID());
    // assertEquals (rather than assertTrue(a.equals(b))) reports the differing
    // elements on failure; the pass/fail condition is unchanged.
    Assert.assertEquals(agentListInAPI, agentListOnUI);
}
@com.thoughtworks.gauge.Step("Verify delete error message <errorMessage>")
public void verifyDeleteErrorMessage(String errorMessage) throws Exception {
Assert.assertThat(browser.div("message_pane").getText(),
Is.is(errorMessage));
}
class AgentSorterBasedOnUUID implements Comparator<AgentInformation> {
@Override
public int compare(AgentInformation o1, AgentInformation o2) {
return o1.getUuid().compareTo(o2.getUuid());
}
}
// Prints a formatted summary table of every agent visible on the UI.
public void printSummary() {
    // BUG FIX: the header was written to System.out while the data rows went
    // to System.err, so the two streams interleaved unpredictably; write the
    // whole table to stdout with a single shared row format.
    String rowFormat = "%20s %20s %20s %20s %20s %20s";
    System.out.println(String.format(rowFormat, "Agent Name", "Status",
            "Environments", "Free Space", "Resources", "Sandbox"));
    for (AgentInformation info : getAllAgentInformationOnUI()) {
        System.out.println(String.format(rowFormat,
                info.getAgent_name(),
                info.getAgent_state(),
                info.getEnvironments(),
                info.getFree_space(),
                info.getResources(),
                info.getSandbox()));
    }
}
}
| |
// Copyright (c) 2013-2014 Intel Corporation. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.xwalk.core.internal;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ApplicationErrorReport;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Paint;
import android.graphics.Rect;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.os.Looper;
import android.provider.MediaStore;
import android.util.AttributeSet;
import android.util.Log;
import android.view.KeyEvent;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputConnection;
import android.webkit.ValueCallback;
import android.widget.FrameLayout;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.ref.WeakReference;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import org.chromium.base.ActivityState;
import org.chromium.base.ApplicationStatus;
import org.chromium.base.ApplicationStatus.ActivityStateListener;
import org.chromium.base.ApplicationStatusManager;
import org.chromium.base.CommandLine;
import org.chromium.content.browser.ContentViewCore;
import org.chromium.net.NetworkChangeNotifier;
import org.xwalk.core.internal.extension.BuiltinXWalkExtensions;
/**
* <p>XWalkViewInternal represents an Android view for web apps/pages. Thus most of attributes
* for Android view are valid for this class. Since it internally uses
* <a href="http://developer.android.com/reference/android/view/SurfaceView.html">
* android.view.SurfaceView</a> for rendering web pages by default, it can't be resized,
* rotated, transformed and animated due to the limitations of SurfaceView.
* Alternatively, if the preference key {@link XWalkPreferencesInternal#ANIMATABLE_XWALK_VIEW}
* is set to True, XWalkViewInternal can be transformed and animated because
* <a href="http://developer.android.com/reference/android/view/TextureView.html">
* TextureView</a> is intentionally used to render web pages for animation support.
* Besides, XWalkViewInternal won't be rendered if it's invisible.</p>
*
* <p>XWalkViewInternal needs hardware acceleration to render web pages. As a result, the
* AndroidManifest.xml of the caller's app must be appended with the attribute
* "android:hardwareAccelerated" and its value must be set as "true".</p>
* <pre>
* <application android:name="android.app.Application" android:label="XWalkUsers"
* android:hardwareAccelerated="true">
* </pre>
*
* <p>Crosswalk provides 2 major callback classes, namely {@link XWalkResourceClientInternal} and
* {@link XWalkUIClientInternal} for listening to the events related to resource loading and UI.
* By default, Crosswalk has a default implementation. Callers can override them if needed.</p>
*
* <p>Unlike other Android views, this class has to listen to system events like intents and activity result.
* The web engine inside this view need to get and handle them.
 * With container activity's lifecycle change, XWalkViewInternal will pause all timers and other
* components like videos when activity paused, resume back them when activity resumed.
* When activity is about to destroy, XWalkViewInternal will destroy itself as well.
* Embedders can also call onHide() and pauseTimers() to explicitly pause XWalkViewInternal.
 * Similarly with onShow(), resumeTimers() and onDestroy().
*
* For example:</p>
*
* <pre>
* import android.app.Activity;
* import android.os.Bundle;
*
* import org.xwalk.core.internal.XWalkResourceClientInternal;
* import org.xwalk.core.internal.XWalkUIClientInternal;
* import org.xwalk.core.internal.XWalkViewInternal;
*
* public class MyActivity extends Activity {
* XWalkViewInternal mXwalkView;
*
* class MyResourceClient extends XWalkResourceClientInternal {
* MyResourceClient(XWalkViewInternal view) {
* super(view);
* }
*
* @Override
* WebResourceResponse shouldInterceptLoadRequest(XWalkViewInternal view, String url) {
* // Handle it here.
* ...
* }
* }
*
* class MyUIClient extends XWalkUIClientInternal {
* MyUIClient(XWalkViewInternal view) {
* super(view);
* }
*
* @Override
* void onFullscreenToggled(XWalkViewInternal view, String url) {
* // Handle it here.
* ...
* }
* }
*
* @Override
* protected void onCreate(Bundle savedInstanceState) {
* mXwalkView = new XWalkViewInternal(this);
* setContentView(mXwalkView);
* mXwalkView.setResourceClient(new MyResourceClient(mXwalkView));
* mXwalkView.setUIClient(new MyUIClient(mXwalkView));
* mXwalkView.load("http://www.crosswalk-project.org", null);
* }
*
* @Override
* protected void onActivityResult(int requestCode, int resultCode, Intent data) {
* if (mXwalkView != null) {
* mXwalkView.onActivityResult(requestCode, resultCode, data);
* }
* }
*
* @Override
* protected void onNewIntent(Intent intent) {
* if (mXwalkView != null) {
* mXwalkView.onNewIntent(intent);
* }
* }
* }
* </pre>
*/
@XWalkAPI(extendClass = FrameLayout.class, createExternally = true)
public class XWalkViewInternal extends android.widget.FrameLayout {
// Forwards activity lifecycle state changes to the owning XWalkViewInternal.
// Holds only a weak reference so this listener registration cannot keep a
// destroyed view (and its Activity) alive.
private class XWalkActivityStateListener implements ActivityStateListener {
    WeakReference<XWalkViewInternal> mXWalkViewRef;

    XWalkActivityStateListener(XWalkViewInternal view) {
        mXWalkViewRef = new WeakReference<XWalkViewInternal>(view);
    }

    @Override
    public void onActivityStateChange(Activity activity, int newState) {
        final XWalkViewInternal target = mXWalkViewRef.get();
        if (target != null) {
            target.onActivityStateChange(activity, newState);
        }
    }
}
static final String PLAYSTORE_DETAIL_URI = "market://details?id=";
public static final int INPUT_FILE_REQUEST_CODE = 1;
private static final String TAG = XWalkViewInternal.class.getSimpleName();
private static final String PATH_PREFIX = "file:";
private static boolean sInitialized = false;
private XWalkContent mContent;
private Activity mActivity;
private Context mContext;
private boolean mIsHidden;
private XWalkActivityStateListener mActivityStateListener;
private ValueCallback<Uri> mFilePathCallback;
private String mCameraPhotoPath;
/**
* Normal reload mode as default.
* @since 1.0
*/
@XWalkAPI
public static final int RELOAD_NORMAL = 0;
/**
* Reload mode with bypassing the cache.
* @since 1.0
*/
@XWalkAPI
public static final int RELOAD_IGNORE_CACHE = 1;
/**
* Constructs a new XWalkView with a Context object.
* @param context a Context object used to access application assets.
* @since 6.0
*/
@XWalkAPI(preWrapperLines = {
" super(${param1}, null);"},
postWrapperLines = {
" addView((FrameLayout)bridge, new FrameLayout.LayoutParams(",
" FrameLayout.LayoutParams.MATCH_PARENT,",
" FrameLayout.LayoutParams.MATCH_PARENT));"})
public XWalkViewInternal(Context context) {
super(context, null);
checkThreadSafety();
mActivity = (Activity) context;
mContext = getContext();
init(getContext(), getActivity());
initXWalkContent(mContext, null);
}
/**
* Constructor for inflating via XML.
* @param context a Context object used to access application assets.
* @param attrs an AttributeSet passed to our parent.
* @since 1.0
*/
@XWalkAPI(preWrapperLines = {
" super(${param1}, ${param2});"},
postWrapperLines = {
" addView((FrameLayout)bridge, new FrameLayout.LayoutParams(",
" FrameLayout.LayoutParams.MATCH_PARENT,",
" FrameLayout.LayoutParams.MATCH_PARENT));"})
public XWalkViewInternal(Context context, AttributeSet attrs) {
super(context, attrs);
checkThreadSafety();
mActivity = (Activity) context;
mContext = getContext();
init(getContext(), getActivity());
initXWalkContent(mContext, attrs);
}
/**
 * Constructor for Crosswalk runtime. In shared mode, context is
* different from activity. In embedded mode, they're same.
* @param context a Context object used to access application assets
* @param activity the activity for this XWalkViewInternal.
* @since 1.0
*/
@XWalkAPI(preWrapperLines = {
" super(${param1}, null);"},
postWrapperLines = {
" addView((FrameLayout)bridge, new FrameLayout.LayoutParams(",
" FrameLayout.LayoutParams.MATCH_PARENT,",
" FrameLayout.LayoutParams.MATCH_PARENT));"})
public XWalkViewInternal(Context context, Activity activity) {
super(context, null);
checkThreadSafety();
// Make sure mActivity is initialized before calling 'init' method.
mActivity = activity;
mContext = getContext();
init(getContext(), getActivity());
initXWalkContent(mContext, null);
}
// One-time, process-wide bootstrap of the Crosswalk runtime, guarded by
// sInitialized so repeated XWalkViewInternal construction is cheap.
// The call order below is significant (delegate init -> app status ->
// network notifier -> simulated activity-started callback).
// NOTE(review): not synchronized — assumes all views are created on the UI
// thread (constructors call checkThreadSafety()); confirm before relying on it.
private static void init(Context context, Activity activity) {
    if (sInitialized) return;
    XWalkViewDelegate.init(null, activity);
    // Initialize the ActivityStatus. This is needed and used by many internal
    // features such as location provider to listen to activity status.
    ApplicationStatusManager.init(activity.getApplication());
    // Auto detect network connectivity state.
    // setAutoDetectConnectivityState() need to be called before activity started.
    NetworkChangeNotifier.init(activity);
    NetworkChangeNotifier.setAutoDetectConnectivityState(true);
    // We will miss activity onCreate() status in ApplicationStatusManager,
    // informActivityStarted() will simulate these callbacks.
    ApplicationStatusManager.informActivityStarted(activity);
    sInitialized = true;
}
/**
 * Get the current activity passed from callers. It's never null.
 * @return the activity instance passed from callers.
 *
 * @hide
 */
public Activity getActivity() {
    if (mActivity != null) {
        return mActivity;
    }
    Context context = getContext();
    if (context instanceof Activity) {
        return (Activity) context;
    }
    // Never achieve here.
    assert(false);
    return null;
}
// TODO(yongsheng): we should remove this since we have getContext()?
/**
* @hide
*/
public Context getViewContext() {
return mContext;
}
public void completeWindowCreation(XWalkViewInternal newXWalkView) {
mContent.supplyContentsForPopup(newXWalkView == null ? null : newXWalkView.mContent);
}
private void initXWalkContent(Context context, AttributeSet attrs) {
mActivityStateListener = new XWalkActivityStateListener(this);
ApplicationStatus.registerStateListenerForActivity(
mActivityStateListener, getActivity());
mIsHidden = false;
mContent = new XWalkContent(context, attrs, this);
// If XWalkView was created in onXWalkReady(), and the activity which owns
// XWalkView was destroyed, pauseTimers() will be invoked. Reentry the activity,
// resumeTimers() will not be invoked since onResume() was invoked before
// XWalkView creation. So to invoke resumeTimers() explicitly here.
mContent.resumeTimers();
// Set default XWalkClientImpl.
setXWalkClient(new XWalkClient(this));
// Set default XWalkWebChromeClient and DownloadListener. The default actions
// are provided via the following clients if special actions are not needed.
setXWalkWebChromeClient(new XWalkWebChromeClient(this));
// Set with internal implementation. Could be overwritten by embedders'
// setting.
setUIClient(new XWalkUIClientInternal(this));
setResourceClient(new XWalkResourceClientInternal(this));
setDownloadListener(new XWalkDownloadListenerImpl(context));
setNavigationHandler(new XWalkNavigationHandlerImpl(context));
setNotificationService(new XWalkNotificationServiceImpl(context, this));
if (!CommandLine.getInstance().hasSwitch("disable-xwalk-extensions")) {
BuiltinXWalkExtensions.load(context, getActivity());
} else {
XWalkPreferencesInternal.setValue(XWalkPreferencesInternal.ENABLE_EXTENSIONS, false);
}
XWalkPathHelper.initialize();
XWalkPathHelper.setCacheDirectory(
mContext.getApplicationContext().getCacheDir().getPath());
String state = Environment.getExternalStorageState();
if (Environment.MEDIA_MOUNTED.equals(state) ||
Environment.MEDIA_MOUNTED_READ_ONLY.equals(state)) {
File extCacheDir = mContext.getApplicationContext().getExternalCacheDir();
if (null != extCacheDir) {
XWalkPathHelper.setExternalCacheDirectory(extCacheDir.getPath());
}
}
}
/**
* Load a web page/app from a given base URL or a content.
* If url is null or empty and content is null or empty, then this function
* will do nothing.
* If content is not null, load the web page/app from the content.
 * If content is not null and the url is not set, return "about:blank" if
* calling {@link XWalkViewInternal#getUrl()}.
* If content is null, try to load the content from the url.
*
* It supports URL schemes like 'http:', 'https:' and 'file:'.
* It can also load files from Android assets, e.g. 'file:///android_asset/'.
* @param url the url for web page/app.
* @param content the content for the web page/app. Could be empty.
* @since 1.0
*/
@XWalkAPI
public void load(String url, String content) {
if (mContent == null) return;
checkThreadSafety();
mContent.loadUrl(url, content);
}
/**
* Load a web app from a given manifest.json file. If content is not null,
* load the manifest.json from the content. If content is null, try to load
* the manifest.json from the url. Note that url should not be null if the
* launched path defined in manifest.json is relative.
*
* It supports URL schemes like 'http:', 'https:' and 'file:'.
* It can also load files from Android assets, e.g. 'file:///android_asset/'.
* @param url the url for manifest.json.
* @param content the content for manifest.json.
* @since 1.0
*/
@XWalkAPI
public void loadAppFromManifest(String url, String content) {
if (mContent == null) return;
checkThreadSafety();
mContent.loadAppFromManifest(url, content);
}
/**
* Reload a web app with a given mode.
* @param mode the reload mode.
* @since 1.0
*/
@XWalkAPI
public void reload(int mode) {
if (mContent == null) return;
checkThreadSafety();
mContent.reload(mode);
}
/**
* Stop current loading progress.
* @since 1.0
*/
@XWalkAPI
public void stopLoading() {
if (mContent == null) return;
checkThreadSafety();
mContent.stopLoading();
}
/**
* Get the url of current web page/app. This may be different from what's passed
* by caller.
* @return the url for current web page/app.
* @since 1.0
*/
@XWalkAPI
public String getUrl() {
if (mContent == null) return null;
checkThreadSafety();
return mContent.getUrl();
}
/**
* Get the title of current web page/app. This may be different from what's passed
* by caller.
* @return the title for current web page/app.
* @since 1.0
*/
@XWalkAPI
public String getTitle() {
if (mContent == null) return null;
checkThreadSafety();
return mContent.getTitle();
}
/**
* Get the original url specified by caller.
* @return the original url.
* @since 1.0
*/
@XWalkAPI
public String getOriginalUrl() {
if (mContent == null) return null;
checkThreadSafety();
return mContent.getOriginalUrl();
}
/**
* Get the navigation history for current XWalkViewInternal. It's synchronized with
* this XWalkViewInternal if any backward/forward and navigation operations.
* @return the navigation history.
* @since 1.0
*/
@XWalkAPI
public XWalkNavigationHistoryInternal getNavigationHistory() {
if (mContent == null) return null;
checkThreadSafety();
return mContent.getNavigationHistory();
}
/**
* Injects the supplied Java object into this XWalkViewInternal.
* Each method defined in the class of the object should be
* marked with {@link JavascriptInterface} if it's called by JavaScript.
* @param object the supplied Java object, called by JavaScript.
* @param name the name injected in JavaScript.
* @since 1.0
*/
@XWalkAPI
public void addJavascriptInterface(Object object, String name) {
if (mContent == null) return;
checkThreadSafety();
mContent.addJavascriptInterface(object, name);
}
/**
* Evaluate a fragment of JavaScript code and get the result via callback.
* @param script the JavaScript string.
* @param callback the callback to handle the evaluated result.
* @since 1.0
*/
@XWalkAPI
public void evaluateJavascript(String script, ValueCallback<String> callback) {
if (mContent == null) return;
checkThreadSafety();
mContent.evaluateJavascript(script, callback);
}
/**
* Clear the resource cache. Note that the cache is per-application, so this
* will clear the cache for all XWalkViews used.
* @param includeDiskFiles indicate whether to clear disk files for cache.
* @since 1.0
*/
@XWalkAPI
public void clearCache(boolean includeDiskFiles) {
if (mContent == null) return;
checkThreadSafety();
mContent.clearCache(includeDiskFiles);
}
/**
* Indicate that a HTML element is occupying the whole screen.
* @return true if any HTML element is occupying the whole screen.
* @since 1.0
*/
@XWalkAPI
public boolean hasEnteredFullscreen() {
if (mContent == null) return false;
checkThreadSafety();
return mContent.hasEnteredFullscreen();
}
/**
* Leave fullscreen mode if it's. Do nothing if it's not
* in fullscreen.
* @since 1.0
*/
@XWalkAPI
public void leaveFullscreen() {
if (mContent == null) return;
checkThreadSafety();
mContent.exitFullscreen();
}
/**
* Pause all layout, parsing and JavaScript timers for all XWalkViewInternal instances.
* It will be called when the container Activity get paused. It can also be explicitly
* called to pause timers.
*
* Note that it will globally impact all XWalkViewInternal instances, not limited to
* just this XWalkViewInternal.
*
* @since 1.0
*/
@XWalkAPI
public void pauseTimers() {
if (mContent == null) return;
checkThreadSafety();
mContent.pauseTimers();
}
/**
* Resume all layout, parsing and JavaScript timers for all XWalkViewInternal instances.
* It will be called when the container Activity get resumed. It can also be explicitly
* called to resume timers.
*
* Note that it will globally impact all XWalkViewInternal instances, not limited to
* just this XWalkViewInternal.
*
* @since 1.0
*/
@XWalkAPI
public void resumeTimers() {
if (mContent == null) return;
checkThreadSafety();
mContent.resumeTimers();
}
/**
* Pause many other things except JavaScript timers inside rendering engine,
* like video player, modal dialogs, etc. See {@link #pauseTimers} about pausing
* JavaScript timers.
* It will be called when the container Activity get paused. It can also be explicitly
* called to pause above things.
* @since 1.0
*/
@XWalkAPI
public void onHide() {
if (mContent == null || mIsHidden) return;
mContent.onPause();
mIsHidden = true;
}
/**
* Resume video player, modal dialogs. Embedders are in charge of calling
* this during resuming this activity if they call onHide.
* Typically it should be called when the activity for this view is resumed.
* It will be called when the container Activity get resumed. It can also be explicitly
* called to resume above things.
* @since 1.0
*/
@XWalkAPI
public void onShow() {
if (mContent == null || !mIsHidden ) return;
mContent.onResume();
mIsHidden = false;
}
/**
* Release internal resources occupied by this XWalkViewInternal.
* It will be called when the container Activity get destroyed. It can also be explicitly
* called to release resources.
* @since 1.0
*/
@XWalkAPI
public void onDestroy() {
destroy();
}
/**
 * Pass through activity result to XWalkViewInternal. Many internal facilities need this
 * to handle activity result like JavaScript dialog, Crosswalk extensions, etc.
 * See <a href="http://developer.android.com/reference/android/app/Activity.html">
 * android.app.Activity.onActivityResult()</a>.
 * @param requestCode passed from android.app.Activity.onActivityResult().
 * @param resultCode passed from android.app.Activity.onActivityResult().
 * @param data passed from android.app.Activity.onActivityResult().
 * @since 1.0
 */
@XWalkAPI
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (mContent == null) return;
    // File-chooser round trip: resolve the Uri the user picked (or the photo
    // captured via mCameraPhotoPath) and hand it back through the pending
    // callback stored by showFileChooser().
    if(requestCode == INPUT_FILE_REQUEST_CODE && mFilePathCallback != null) {
        Uri results = null;
        // Check that the response is a good one
        if(Activity.RESULT_OK == resultCode) {
            if(data == null) {
                // If there is not data, then we may have taken a photo
                if(mCameraPhotoPath != null) {
                    results = Uri.parse(mCameraPhotoPath);
                }
            } else {
                String dataString = data.getDataString();
                if (dataString != null) {
                    results = Uri.parse(dataString);
                }
                // An existing file was chosen; the scratch camera file
                // (if one was created) is no longer needed.
                deleteImageFile();
            }
        } else if (Activity.RESULT_CANCELED == resultCode) {
            deleteImageFile();
        }
        // The callback MUST be invoked in all cases (even with null),
        // otherwise all subsequent file-chooser requests would be blocked.
        mFilePathCallback.onReceiveValue(results);
        mFilePathCallback = null;
        return;
    }
    // Not ours — forward to the content layer (extensions, dialogs, etc.).
    mContent.onActivityResult(requestCode, resultCode, data);
}
/**
* Pass through intents to XWalkViewInternal. Many internal facilities need this
* to receive the intents like web notification. See
* <a href="http://developer.android.com/reference/android/app/Activity.html">
* android.app.Activity.onNewIntent()</a>.
* @param intent passed from android.app.Activity.onNewIntent().
* @since 1.0
*/
@XWalkAPI
public boolean onNewIntent(Intent intent) {
if (mContent == null) return false;
return mContent.onNewIntent(intent);
}
/**
* Save current internal state of this XWalkViewInternal. This can help restore this state
* afterwards restoring.
* @param outState the saved state for restoring.
* @since 1.0
*/
@XWalkAPI
public boolean saveState(Bundle outState) {
if (mContent == null) return false;
mContent.saveState(outState);
return true;
}
/**
 * Restore the state from the saved bundle data.
 * @param inState the state saved from saveState().
 * @return true if it can restore the state.
 * @since 1.0
 */
@XWalkAPI
public boolean restoreState(Bundle inState) {
    if (mContent == null) return false;
    // mContent.restoreState() returns the restored navigation history, or
    // null on failure; collapse the former if/return-true/return-false pair
    // into a single boolean expression.
    return mContent.restoreState(inState) != null;
}
/**
* Get the API version of Crosswalk embedding API.
* @return the string of API level.
* @since 1.0
*/
// TODO(yongsheng): make it static?
@XWalkAPI
public String getAPIVersion() {
return "5.0";
}
/**
* Get the Crosswalk version.
* @return the string of Crosswalk.
* @since 1.0
*/
// TODO(yongsheng): make it static?
@XWalkAPI
public String getXWalkVersion() {
if (mContent == null) return null;
return mContent.getXWalkVersion();
}
/**
* Embedders use this to customize their handlers to events/callbacks related
* to UI.
* @param client the XWalkUIClientInternal defined by callers.
* @since 1.0
*/
@XWalkAPI(reservable = true)
public void setUIClient(XWalkUIClientInternal client) {
if (mContent == null) return;
checkThreadSafety();
mContent.setUIClient(client);
}
/**
* Embedders use this to customize their handlers to events/callbacks related
* to resource loading.
* @param client the XWalkResourceClientInternal defined by callers.
* @since 1.0
*/
@XWalkAPI(reservable = true)
public void setResourceClient(XWalkResourceClientInternal client) {
if (mContent == null) return;
checkThreadSafety();
mContent.setResourceClient(client);
}
/**
* Set Background color of the view
*/
@Override
@XWalkAPI
public void setBackgroundColor(int color) {
if (mContent == null) return;
checkThreadSafety();
mContent.setBackgroundColor(color);
}
/**
* override setLayerType
*/
@Override
@XWalkAPI
public void setLayerType(int layerType, Paint paint) {
if (layerType != LAYER_TYPE_SOFTWARE) {
super.setLayerType(layerType, paint);
} else {
Log.w(TAG, "LAYER_TYPE_SOFTWARE is not supported by XwalkView");
}
}
/**
* Set the user agent of web page/app.
* @param userAgent the user agent string passed from client.
* @since 5.0
*/
@XWalkAPI
public void setUserAgentString(String userAgent) {
XWalkSettings settings = getSettings();
if (settings == null) return;
checkThreadSafety();
settings.setUserAgentString(userAgent);
}
/**
* Set the accept languages of XWalkView.
* @param acceptLanguages the accept languages string passed from client.
* @since 5.0
*/
@XWalkAPI
public void setAcceptLanguages(final String acceptLanguages) {
XWalkSettings settings = getSettings();
if (settings == null) return;
checkThreadSafety();
settings.setAcceptLanguages(acceptLanguages);
}
// TODO(yongsheng): this is not public.
/**
* @hide
*/
public XWalkSettings getSettings() {
if (mContent == null) return null;
checkThreadSafety();
return mContent.getSettings();
}
/**
* This method is used by Cordova for hacking.
* TODO(yongsheng): remove this and related test cases?
*/
@XWalkAPI
public void setNetworkAvailable(boolean networkUp) {
if (mContent == null) return;
checkThreadSafety();
mContent.setNetworkAvailable(networkUp);
}
/**
* Enables remote debugging and returns the URL at which the dev tools
* server is listening for commands.
*/
public void enableRemoteDebugging() {
if (mContent == null) return;
checkThreadSafety();
mContent.enableRemoteDebugging();
}
/**
* Get the websocket url for remote debugging.
* @return the web socket url to remote debug this xwalk view.
* null will be returned if remote debugging is not enabled.
* @since 4.1
*/
@XWalkAPI
public Uri getRemoteDebuggingUrl() {
if (mContent == null) return null;
checkThreadSafety();
String wsUrl = mContent.getRemoteDebuggingUrl();
if (wsUrl == null || wsUrl.isEmpty()) return null;
return Uri.parse(wsUrl);
}
/**
* Performs zoom in in this XWalkView.
* @return true if zoom in succeeds, false if no zoom changes
* @since 5.0
*/
@XWalkAPI
public boolean zoomIn() {
if (mContent == null) return false;
checkThreadSafety();
return mContent.zoomIn();
}
/**
* Performs zoom out in this XWalkView.
* @return true if zoom out succeeds, false if no zoom changes
* @since 5.0
*/
@XWalkAPI
public boolean zoomOut() {
if (mContent == null) return false;
checkThreadSafety();
return mContent.zoomOut();
}
/**
* Performs a zoom operation in this XWalkView.
* @param factor the zoom factor to apply.
* The zoom factor will be clamped to the XWalkView's zoom limits.
* This value must be in the range 0.01 to 100.0 inclusive.
* @since 5.0
*/
@XWalkAPI
public void zoomBy(float factor) {
if (mContent == null) return;
checkThreadSafety();
mContent.zoomBy(factor);
}
/**
* Gets whether this XWalkView can be zoomed in.
* @return true if this XWalkView can be zoomed in
* @since 5.0
*/
@XWalkAPI
public boolean canZoomIn() {
if (mContent == null) return false;
checkThreadSafety();
return mContent.canZoomIn();
}
/**
* Gets whether this XWalkView can be zoomed out.
* @return true if this XWalkView can be zoomed out
* @since 5.0
*/
@XWalkAPI
public boolean canZoomOut() {
if (mContent == null) return false;
checkThreadSafety();
return mContent.canZoomOut();
}
/**
* Create a new InputConnection for and InputMethod to interact with the view.
* The default implementation returns the InputConnection created by ContentView
* @param outAttrs Fill in with attribute information about the connection
* @return the new InputConnection
* @since 5.0
*/
@XWalkAPI
public InputConnection onCreateInputConnection(EditorInfo outAttrs) {
return mContent.onCreateInputConnection(outAttrs);
}
/**
* It's used for Presentation API.
* @hide
*/
public int getContentID() {
if (mContent == null) return -1;
return mContent.getRoutingID();
}
boolean canGoBack() {
if (mContent == null) return false;
checkThreadSafety();
return mContent.canGoBack();
}
void goBack() {
if (mContent == null) return;
checkThreadSafety();
mContent.goBack();
}
boolean canGoForward() {
if (mContent == null) return false;
checkThreadSafety();
return mContent.canGoForward();
}
void goForward() {
if (mContent == null) return;
checkThreadSafety();
mContent.goForward();
}
void clearHistory() {
if (mContent == null) return;
checkThreadSafety();
mContent.clearHistory();
}
// Tears down this view: unregisters the activity-state listener, stops the
// remote-debugging endpoint, then destroys the underlying XWalkContent.
// BUG FIX: disableRemoteDebugging() was previously invoked AFTER
// mContent.destroy(), i.e. on already-destroyed content; disable it first
// while the content object is still valid.
void destroy() {
    if (mContent == null) return;
    ApplicationStatus.unregisterActivityStateListener(mActivityStateListener);
    mActivityStateListener = null;
    disableRemoteDebugging();
    mContent.destroy();
}
void disableRemoteDebugging() {
if (mContent == null) return;
checkThreadSafety();
mContent.disableRemoteDebugging();
}
// Fail fast when any XWalkViewInternal API is invoked off the UI thread.
private static void checkThreadSafety() {
    if (Looper.myLooper() == Looper.getMainLooper()) {
        return;
    }
    // Wrap the explanatory message in a Throwable so the offending call
    // site's stack trace is preserved in the thrown RuntimeException.
    String message =
            "Warning: A XWalkViewInternal method was called on thread '" +
            Thread.currentThread().getName() + "'. " +
            "All XWalkViewInternal methods must be called on the UI thread. ";
    throw new RuntimeException(new Throwable(message));
}
boolean isOwnerActivityRunning() {
int status = ApplicationStatus.getStateForActivity(getActivity());
if (status == ActivityState.DESTROYED) return false;
return true;
}
void navigateTo(int offset) {
if (mContent == null) return;
mContent.navigateTo(offset);
}
void setOverlayVideoMode(boolean enabled) {
mContent.setOverlayVideoMode(enabled);
}
/**
* Control whether the XWalkView's surface is placed on top of its window.
* Note this only works when XWalkPreferences.ANIMATABLE_XWALK_VIEW is false.
* @param onTop true for on top.
* @since 5.0
*/
@XWalkAPI
public void setZOrderOnTop(boolean onTop) {
if (mContent == null) return;
mContent.setZOrderOnTop(onTop);
}
/**
* Removes the autocomplete popup from the currently focused form field, if present.
* Note this only affects the display of the autocomplete popup, it does not remove
* any saved form data from this WebView's store.
* This is a poorly named method, but we keep it for historical reasons.
* @since 6.0
*/
@XWalkAPI
public void clearFormData() {
if (mContent == null) return;
checkThreadSafety();
mContent.hideAutofillPopup();
}
// Below methods are for test shell and instrumentation tests.
/**
* @hide
*/
public void setXWalkClient(XWalkClient client) {
if (mContent == null) return;
checkThreadSafety();
mContent.setXWalkClient(client);
}
/**
* @hide
*/
public void setXWalkWebChromeClient(XWalkWebChromeClient client) {
if (mContent == null) return;
checkThreadSafety();
mContent.setXWalkWebChromeClient(client);
}
/**
* Registers the interface to be used when content can not be handled by
* the rendering engine, and should be downloaded instead. This will replace
* the current handler.
* @param listener an implementation of XWalkDownloadListenerInternal
* @since 5.0
*/
@XWalkAPI
public void setDownloadListener(XWalkDownloadListenerInternal listener) {
if (mContent == null) return;
checkThreadSafety();
mContent.setDownloadListener(listener);
}
/**
* @hide
*/
public void setNavigationHandler(XWalkNavigationHandler handler) {
if (mContent == null) return;
checkThreadSafety();
mContent.setNavigationHandler(handler);
}
/**
* @hide
*/
public void setNotificationService(XWalkNotificationService service) {
if (mContent == null) return;
checkThreadSafety();
mContent.setNotificationService(service);
}
/**
 * Intercepts the BACK key to implement in-page back behavior.
 * @hide
 */
@Override
public boolean dispatchKeyEvent(KeyEvent event) {
    // On BACK key-up: first leave fullscreen if active, then go back in the
    // page's navigation history; only when neither applies does the event
    // fall through to the default view handling (e.g. finishing the activity).
    if (event.getAction() == KeyEvent.ACTION_UP &&
        event.getKeyCode() == KeyEvent.KEYCODE_BACK) {
        // If there's navigation happens when app is fullscreen,
        // the content will still be fullscreen after navigation.
        // In such case, the back key will exit fullscreen first.
        if (hasEnteredFullscreen()) {
            leaveFullscreen();
            return true;
        } else if (canGoBack()) {
            goBack();
            return true;
        }
    }
    return super.dispatchKeyEvent(event);
}
// Maps container-activity lifecycle transitions onto this view's own
// show/hide/pause/resume/destroy operations. Invoked via
// XWalkActivityStateListener registered in initXWalkContent().
private void onActivityStateChange(Activity activity, int newState) {
    assert(getActivity() == activity);
    if (newState == ActivityState.STARTED) {
        onShow();
    } else if (newState == ActivityState.PAUSED) {
        pauseTimers();
    } else if (newState == ActivityState.RESUMED) {
        resumeTimers();
    } else if (newState == ActivityState.DESTROYED) {
        onDestroy();
    } else if (newState == ActivityState.STOPPED) {
        onHide();
    }
}
/**
* Tell the client to show a file chooser.
* @param uploadFile the callback class to handle the result from caller. It MUST
* be invoked in all cases. Leave it not invoked will block all following
* requests to open file chooser.
* @param acceptType value of the 'accept' attribute of the input tag associated
* with this file picker.
* @param capture value of the 'capture' attribute of the input tag associated
* with this file picker
*/
public boolean showFileChooser(ValueCallback<Uri> uploadFile, String acceptType,
String capture) {
mFilePathCallback = uploadFile;
Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
if (takePictureIntent.resolveActivity(getActivity().getPackageManager()) != null) {
// Create the File where the photo should go
File photoFile = createImageFile();
// Continue only if the File was successfully created
if (photoFile != null) {
mCameraPhotoPath = PATH_PREFIX + photoFile.getAbsolutePath();
takePictureIntent.putExtra("PhotoPath", mCameraPhotoPath);
takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT,
Uri.fromFile(photoFile));
} else {
takePictureIntent = null;
}
}
Intent contentSelectionIntent = new Intent(Intent.ACTION_GET_CONTENT);
contentSelectionIntent.addCategory(Intent.CATEGORY_OPENABLE);
contentSelectionIntent.setType("*/*");
Intent camcorder = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
Intent soundRecorder = new Intent(
MediaStore.Audio.Media.RECORD_SOUND_ACTION);
ArrayList<Intent> extraIntents = new ArrayList<Intent>();
if (takePictureIntent != null) extraIntents.add(takePictureIntent);
extraIntents.add(camcorder);
extraIntents.add(soundRecorder);
Intent chooserIntent = new Intent(Intent.ACTION_CHOOSER);
chooserIntent.putExtra(Intent.EXTRA_INTENT, contentSelectionIntent);
chooserIntent.putExtra(Intent.EXTRA_TITLE, "Choose an action");
chooserIntent.putExtra(Intent.EXTRA_INITIAL_INTENTS,
extraIntents.toArray(new Intent[] { }));
getActivity().startActivityForResult(chooserIntent, INPUT_FILE_REQUEST_CODE);
return true;
}
/**
 * Creates an empty, uniquely named JPEG file in the public Pictures
 * directory for a camera capture to write into.
 *
 * @return the created file, or {@code null} if external storage is not
 *         mounted, the directory cannot be created, or file creation fails
 */
private File createImageFile() {
    // FIXME: If the external storage state is not "MEDIA_MOUNTED", we need to get
    // other volume paths by "getVolumePaths()" when it was exposed.
    String state = Environment.getExternalStorageState();
    // Constant-first equals() is null-safe.
    if (!Environment.MEDIA_MOUNTED.equals(state)) {
        Log.e(TAG, "External storage is not mounted.");
        return null;
    }
    // Create an image file name. Locale.US keeps the timestamp ASCII-only;
    // the default locale may render digits with non-Latin glyphs, producing
    // unexpected file names.
    String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss", java.util.Locale.US)
            .format(new Date());
    String imageFileName = "JPEG_" + timeStamp + "_";
    File storageDir = Environment.getExternalStoragePublicDirectory(
            Environment.DIRECTORY_PICTURES);
    // Check the mkdirs() result instead of ignoring it; createTempFile would
    // otherwise fail with a less specific IOException.
    if (!storageDir.exists() && !storageDir.mkdirs()) {
        Log.e(TAG, "Unable to create Image File");
        return null;
    }
    try {
        return File.createTempFile(imageFileName, ".jpg", storageDir);
    } catch (IOException ex) {
        // Error occurred while creating the File
        Log.e(TAG, "Unable to create Image File", ex);
    }
    return null;
}
/**
 * Deletes the last camera capture file recorded in {@code mCameraPhotoPath}.
 *
 * @return {@code true} if a file was deleted, {@code false} if no capture
 *         path is recorded or deletion failed
 */
private boolean deleteImageFile() {
    if (mCameraPhotoPath == null || !mCameraPhotoPath.contains(PATH_PREFIX)) {
        return false;
    }
    // Use indexOf/substring rather than split(): split() interprets
    // PATH_PREFIX as a regex, and when the stored path equals the prefix it
    // yields a single-element array, so indexing [1] would throw
    // ArrayIndexOutOfBoundsException.
    int start = mCameraPhotoPath.indexOf(PATH_PREFIX) + PATH_PREFIX.length();
    String filePath = mCameraPhotoPath.substring(start);
    return new File(filePath).delete();
}
// For instrumentation test. Exposes the underlying ContentViewCore so
// tests can inspect the content layer directly; not for production use.
public ContentViewCore getXWalkContentForTest() {
    return mContent.getContentViewCoreForTest();
}
// This is used to call back to XWalkView's performLongClick() so that developer can
// override performLongClick() or setOnLongClickListener to disable copy/paste
// action bar.
// NOTE: the generated wrapper (see preWrapperLines) delegates straight to
// performLongClick(); this body is only reached when no wrapper is in play,
// and returning false leaves the default long-click handling in place.
@XWalkAPI(delegate = true,
        preWrapperLines = {
            "return performLongClick();"})
public boolean onPerformLongClick(){
    return false;
}
}
| |
package nl.esi.metis.aisparser.impl;
import nl.esi.metis.aisparser.AISMessage21;
import nl.esi.metis.aisparser.Sixbit;
import nl.esi.metis.aisparser.UtilsLatitude27;
import nl.esi.metis.aisparser.UtilsLongitude28;
import nl.esi.metis.aisparser.UtilsPositionInfo;
import nl.esi.metis.aisparser.UtilsSpare;
import nl.esi.metis.aisparser.UtilsString;
import nl.esi.metis.aisparser.UtilsTwosComplement;
import nl.esi.metis.aisparser.annotations.AISIllegalValueAnnotation;
import nl.esi.metis.aisparser.provenance.VDMMessageProvenance;
import cern.colt.bitvector.BitVector;
/** ESI AIS Parser
*
* Copyright 2011/2012 by Pierre van de Laar (Embedded Systems Institute)
* Copyright 2008 by Brian C. Lane <bcl@brianlane.com>
* All Rights Reserved
*
* @author Pierre van de Laar
* @author Brian C. Lane
*/
/** AIS Message 21
* Aids-to-Navigation Report (AtoN)
*
* Field Nr Field Name NrOf Bits (from, to )
* ------------------------------------------------------------------------
* 1 messageID 6 ( 1, 6)
* 2 repeatIndicator 2 ( 7, 8)
* 3 userID 30 ( 9, 38)
* 4 typeOfAtoN 5 ( 39, 43)
* 5 nameOfAtoN 120 ( 44, 163)
* 6 positionAccuracy 1 ( 164, 164)
* 7 longitude 28 ( 165, 192)
* 8 latitude 27 ( 193, 219)
* 9 dimension 30 ( 220, 249)
* 10 typeOfElectronicPositionFixingDevice 4 ( 250, 253)
* 11 timeStamp 6 ( 254, 259)
* 12 offPositionIndicator 1 ( 260, 260)
* 13 statusAtoN 8 ( 261, 268)
* 14 raimFlag 1 ( 269, 269)
* 15 virtualAtoNFlag 1 ( 270, 270)
* 16 assignedModeFlag 1 ( 271, 271)
* 17 spare1 1 ( 272, 272)
* 18 nameOfAtoNExtension 84 ( 273, 356)
* 19 spare2 4 ( 357, 360)
* ---- +
* (maximum) number of bits 360
*/
class AISMessage21Impl extends AISMessageImpl implements AISMessage21 {
    /** Minimum message length in bits: all fixed fields, no name extension. */
    public static final int MINLENGTH = 272;
    /** Maximum message length in bits: fixed fields + 84-bit extension + 4 spare. */
    public static final int MAXLENGTH = 360;

    /**
     * Checks whether a payload of the given bit length can be a message 21.
     * @param length payload length in bits
     * @return true iff MINLENGTH &lt;= length &lt;= MAXLENGTH
     */
    public static boolean validLength(int length)
    {
        //TODO: also check that length is aligned with (8?) byte boundary
        return (MINLENGTH <= length && length <= MAXLENGTH);
    }

    /**
     * Return the difference in available and needed bits to parse this sixbit as an AISMessage
     * A positive difference indicates that there are more bits available than needed by the standard.
     * A negative difference means the payload is short; zero means the length is acceptable.
     * @param sb the sixbit payload to measure
     * @return surplus (positive), shortfall (negative), or 0 when within [MINLENGTH, MAXLENGTH]
     */
    public static int differenceInBits(Sixbit sb)
    {
        final int available = sb.available();
        if (available > MAXLENGTH)
        {
            return (available - MAXLENGTH);
        }
        else if (available < MINLENGTH)
        {
            return available - MINLENGTH;
        }
        else
        {
            return 0;
        }
    }

    private static final int TYPEOFATON_FROM = 39;
    private static final int TYPEOFATON_TO = 43;
    private int typeOfAtoN;
    /** typeOfAtoN
     * @return int value of typeOfAtoN (5 bits [39,43])
     */
    public int getTypeOfAtoN() { return typeOfAtoN; }

    private static final int NAMEOFATON_FROM = 44;
    private static final int NAMEOFATON_TO = 163;
    private String nameOfAtoN;
    /** nameOfAtoN
     * @return String value of nameOfAtoN (120 bits [44,163])
     */
    public String getNameOfAtoN() { return nameOfAtoN; }

    private static final int POSITIONACCURACY_BITINDEX = 164;
    private boolean positionAccuracy;
    /** positionAccuracy
     * @return boolean value of positionAccuracy (bit 164)
     */
    public boolean getPositionAccuracy() { return positionAccuracy; }

    private static final int LONGITUDE_FROM = 165;
    private static final int LONGITUDE_TO = 192;
    private double longitude;
    /** longitude
     * @return double value of longitude in degrees (28 bits [165,192])
     */
    public double getLongitudeInDegrees() { return longitude; }

    private static final int LATITUDE_FROM = 193;
    private static final int LATITUDE_TO = 219;
    private double latitude;
    /** latitude
     * @return double value of latitude in degrees (27 bits [193,219])
     */
    public double getLatitudeInDegrees() { return latitude; }

    private static final int DIMENSION_FROM = 220;
    private static final int DIMENSION_TO = 249;
    private BitVector dimension;
    /** dimension
     * @return BitVector value of dimension (30 bits [220,249])
     */
    public BitVector getDimension() { return dimension; }

    private static final int TYPEOFELECTRONICPOSITIONFIXINGDEVICE_FROM = 250;
    private static final int TYPEOFELECTRONICPOSITIONFIXINGDEVICE_TO = 253;
    private int typeOfElectronicPositionFixingDevice;
    /** typeOfElectronicPositionFixingDevice
     * @return int value of typeOfElectronicPositionFixingDevice (4 bits [250,253])
     */
    public int getTypeOfElectronicPositionFixingDevice() { return typeOfElectronicPositionFixingDevice; }

    private static final int TIMESTAMP_FROM = 254;
    private static final int TIMESTAMP_TO = 259;
    private int timeStamp;
    /** timeStamp
     * @return int value of timeStamp (6 bits [254,259])
     */
    public int getTimeStamp() { return timeStamp; }

    private static final int OFFPOSITIONINDICATOR_BITINDEX = 260;
    private boolean offPositionIndicator;
    /** offPositionIndicator
     * @return boolean value of offPositionIndicator (bit 260)
     */
    public boolean getOffPositionIndicator() { return offPositionIndicator; }

    private static final int STATUSATON_FROM = 261;
    private static final int STATUSATON_TO = 268;
    private int statusAtoN;
    /** statusAtoN
     * @return int value of statusAtoN (8 bits [261,268])
     */
    public int getStatusAtoN() { return statusAtoN; }

    private static final int RAIMFLAG_BITINDEX = 269;
    private boolean raimFlag;
    /** raimFlag
     * @return boolean value of raimFlag (bit 269)
     */
    public boolean getRaimFlag() { return raimFlag; }

    private static final int VIRTUALATONFLAG_BITINDEX = 270;
    private boolean virtualAtoNFlag;
    /** virtualAtoNFlag
     * @return boolean value of virtualAtoNFlag (bit 270)
     */
    public boolean getVirtualAtoNFlag() { return virtualAtoNFlag; }

    private static final int ASSIGNEDMODEFLAG_BITINDEX = 271;
    private boolean assignedModeFlag;
    /** assignedModeFlag
     * @return boolean value of assignedModeFlag (bit 271)
     */
    public boolean getAssignedModeFlag() { return assignedModeFlag; }

    private static final int SPARE1_FROM = 272;
    private static final int SPARE1_TO = 272;
    private int spare1;
    /** spare1
     * @return int value of spare1 (1 bits [272,272])
     */
    public int getSpare1() { return spare1; }

    private static final int NAMEOFATONEXTENSION_FROM = 273;
    private String nameOfAtoNExtension;
    /** nameOfAtoNExtension
     * @return String value of nameOfAtoNExtension (84 maximally bits [273,356])
     */
    public String getNameOfAtoNExtension() { return nameOfAtoNExtension; }

    private int spare2;
    /** spare2
     * @return int value of spare2 (0,2,4 or 6 bits)
     */
    public int getSpare2() { return spare2; }

    /** AISMessage 21 constructor
     * Decodes every field from the sixbit payload at the fixed bit offsets
     * documented in the class header, recording AISIllegalValueAnnotations
     * for semantically out-of-range longitude, latitude, and spare values
     * rather than rejecting the message.
     * @param content AIS content
     * @param prov the provenance of the message
     * @precondition validLength(content.length()) && AISMessageBase.getMessageId(content)==21
     */
    public AISMessage21Impl(Sixbit content, VDMMessageProvenance prov)
    {
        super(content, prov);
        assert(validLength(content.length()));
        assert(getMessageID() == 21);
        typeOfAtoN = content.getIntFromTo(TYPEOFATON_FROM,TYPEOFATON_TO);
        nameOfAtoN = UtilsString.stripAtSigns(content.getStringFromTo(NAMEOFATON_FROM,NAMEOFATON_TO));
        positionAccuracy = content.getBoolean(POSITIONACCURACY_BITINDEX);
        // Longitude/latitude are two's-complement fixed-point; out-of-range
        // values are kept but annotated.
        longitude = UtilsLongitude28.toDegrees( UtilsTwosComplement.convertFrom28Bits( content.getIntFromTo(LONGITUDE_FROM,LONGITUDE_TO) ) );
        if (!UtilsPositionInfo.isLongitudeSemanticallyCorrect(longitude))
        {
            annotations.add (new AISIllegalValueAnnotation("getLongitudeInDegrees", longitude, UtilsPositionInfo.rangeLongitude));
        }
        latitude = UtilsLatitude27.toDegrees( UtilsTwosComplement.convertFrom27Bits( content.getIntFromTo(LATITUDE_FROM,LATITUDE_TO) ) );
        if (!UtilsPositionInfo.isLatitudeSemanticallyCorrect(latitude))
        {
            annotations.add (new AISIllegalValueAnnotation("getLatitudeInDegrees", latitude, UtilsPositionInfo.rangeLatitude));
        }
        dimension = content.getBitVectorFromTo(DIMENSION_FROM,DIMENSION_TO);
        typeOfElectronicPositionFixingDevice = content.getIntFromTo(TYPEOFELECTRONICPOSITIONFIXINGDEVICE_FROM,TYPEOFELECTRONICPOSITIONFIXINGDEVICE_TO);
        timeStamp = content.getIntFromTo(TIMESTAMP_FROM,TIMESTAMP_TO);
        offPositionIndicator = content.getBoolean(OFFPOSITIONINDICATOR_BITINDEX);
        statusAtoN = content.getIntFromTo(STATUSATON_FROM,STATUSATON_TO);
        raimFlag = content.getBoolean(RAIMFLAG_BITINDEX);
        virtualAtoNFlag = content.getBoolean(VIRTUALATONFLAG_BITINDEX);
        assignedModeFlag = content.getBoolean(ASSIGNEDMODEFLAG_BITINDEX);
        spare1 = content.getIntFromTo(SPARE1_FROM,SPARE1_TO);
        if (!UtilsSpare.isSpareSemanticallyCorrect(spare1))
        {
            annotations.add(new AISIllegalValueAnnotation("getSpare1", spare1, UtilsSpare.range));
        }
        /*TODO: on page 125 of ITU-R M.1371-4 is written
         * "That spare can be 0,2,4, or 6 in order to observe byte boundaries"
         * So I assume that bytes are 8 bits and NOT 6.
         * Also is written "No @-character should be used", so when 6 bits are all zero they are spare and not a character
         * This seems to be the only distinction between six character bits and six spare bits.
         */
        // Bits past the 272-bit minimum are 6-bit extension characters plus
        // 0-6 trailing spare bits, so the remainder mod 6 is taken as the
        // spare-bit count. A 6-bit spare block (remainder 0) decodes as a
        // trailing '@' character instead and is stripped below.
        int length = content.length();
        int nrof_spare = (length-MINLENGTH)%6;
        nameOfAtoNExtension = content.getStringFromTo(NAMEOFATONEXTENSION_FROM, length-nrof_spare);
        if((nameOfAtoNExtension.length() > 0) && nameOfAtoNExtension.charAt(nameOfAtoNExtension.length()-1) == '@')
        {
            //remove spare bits
            nameOfAtoNExtension = nameOfAtoNExtension.substring(0, nameOfAtoNExtension.length()-1 );
        }
        // NOTE(review): when nrof_spare is 0 this requests the empty range
        // [length+1, length]; presumably Sixbit yields 0 for an empty range --
        // confirm against the Sixbit implementation.
        spare2 = content.getIntFromTo(length-nrof_spare+1,length);
        if (!UtilsSpare.isSpareSemanticallyCorrect(spare2))
        {
            annotations.add(new AISIllegalValueAnnotation("getSpare2", spare2, UtilsSpare.range));
        }
    }
}
| |
/*
* Copyright 2012 GitHub Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.mobile.ui.commit;
import static android.app.Activity.RESULT_OK;
import static android.content.DialogInterface.BUTTON_NEGATIVE;
import static android.graphics.Paint.UNDERLINE_TEXT_FLAG;
import static com.github.mobile.Intents.EXTRA_BASE;
import static com.github.mobile.Intents.EXTRA_COMMENT;
import static com.github.mobile.Intents.EXTRA_REPOSITORY;
import static com.github.mobile.RequestCodes.COMMENT_CREATE;
import android.accounts.Account;
import android.annotation.SuppressLint;
import android.app.AlertDialog;
import android.content.ClipData;
import android.content.ClipboardManager;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Build;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;
import com.github.kevinsawicki.wishlist.ViewFinder;
import com.github.kevinsawicki.wishlist.ViewUtils;
import com.github.mobile.R;
import com.github.mobile.core.commit.CommitStore;
import com.github.mobile.core.commit.CommitUtils;
import com.github.mobile.core.commit.FullCommit;
import com.github.mobile.core.commit.FullCommitFile;
import com.github.mobile.core.commit.RefreshCommitTask;
import com.github.mobile.ui.DialogFragment;
import com.github.mobile.ui.HeaderFooterListAdapter;
import com.github.mobile.ui.LightAlertDialog;
import com.github.mobile.ui.StyledText;
import com.github.mobile.util.AvatarLoader;
import com.github.mobile.util.HttpImageGetter;
import com.github.mobile.util.ShareUtils;
import com.github.mobile.util.ToastUtils;
import com.google.inject.Inject;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import org.eclipse.egit.github.core.Commit;
import org.eclipse.egit.github.core.CommitComment;
import org.eclipse.egit.github.core.CommitFile;
import org.eclipse.egit.github.core.Repository;
import org.eclipse.egit.github.core.RepositoryCommit;
/**
 * Fragment to display commit details with diff output: the commit header
 * (message, author, committer), diff statistics, parent-commit links, the
 * per-file diffs, and any commit comments. List item clicks open files,
 * parent commits, or a per-line dialog for viewing/commenting.
 */
public class CommitDiffListFragment extends DialogFragment implements
        OnItemClickListener {

    // Renders/colors raw diff lines for display.
    private DiffStyler diffStyler;

    private ListView list;

    private ProgressBar progress;

    private Repository repository;

    // SHA of the commit being displayed (from EXTRA_BASE).
    private String base;

    private RepositoryCommit commit;

    private List<CommitComment> comments;

    private List<FullCommitFile> files;

    @Inject
    private AvatarLoader avatars;

    @Inject
    private CommitStore store;

    // Footer shown while files/comments are loading.
    private View loadingView;

    // Header views inflated in onViewCreated and bound in addCommitDetails.
    private View commitHeader;

    private TextView commitMessage;

    private View authorArea;

    private ImageView authorAvatar;

    private TextView authorName;

    private TextView authorDate;

    private View committerArea;

    private ImageView committerAvatar;

    private TextView committerName;

    private TextView committerDate;

    private HeaderFooterListAdapter<CommitFileListAdapter> adapter;

    @Inject
    private HttpImageGetter commentImageGetter;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Read the target commit SHA and repository from the fragment args.
        Bundle args = getArguments();
        base = args.getString(EXTRA_BASE);
        repository = (Repository) args.getSerializable(EXTRA_REPOSITORY);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);

        commit = store.getCommit(repository, base);

        ((TextView) loadingView.findViewById(R.id.tv_loading))
                .setText(R.string.loading_files_and_comments);

        // Show the loading footer until both files and (when expected)
        // comments have arrived.
        if (files == null
                || (commit != null && commit.getCommit().getCommentCount() > 0 && comments == null))
            adapter.addFooter(loadingView);

        if (commit != null && comments != null && files != null)
            updateList(commit, comments, files);
        else {
            // Render what we have so far, then fetch the rest.
            if (commit != null)
                updateHeader(commit);
            refreshCommit();
        }
    }

    /** Appends a newly created comment, updating counts, or refreshes if data is incomplete. */
    private void addComment(final CommitComment comment) {
        if (comments != null && files != null) {
            comments.add(comment);
            Commit rawCommit = commit.getCommit();
            if (rawCommit != null)
                rawCommit.setCommentCount(rawCommit.getCommentCount() + 1);
            // Pre-render the comment body HTML into the image-getter cache.
            commentImageGetter.encode(comment, comment.getBodyHtml());
            updateItems(comments, files);
        } else
            refreshCommit();
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Handle the result of the create-comment activity.
        if (RESULT_OK == resultCode && COMMENT_CREATE == requestCode
                && data != null) {
            CommitComment comment = (CommitComment) data
                    .getSerializableExtra(EXTRA_COMMENT);
            addComment(comment);
            return;
        }

        super.onActivityResult(requestCode, resultCode, data);
    }

    @Override
    public void onCreateOptionsMenu(final Menu optionsMenu,
            final MenuInflater inflater) {
        inflater.inflate(R.menu.commit_view, optionsMenu);
    }

    @Override
    public boolean onOptionsItemSelected(final MenuItem item) {
        if (!isUsable())
            return false;

        switch (item.getItemId()) {
        case R.id.m_refresh:
            refreshCommit();
            return true;
        case R.id.m_copy_hash:
            copyHashToClipboard();
            return true;
        case R.id.m_comment:
            startActivityForResult(
                    CreateCommentActivity.createIntent(repository, base),
                    COMMENT_CREATE);
            return true;
        case R.id.m_share:
            shareCommit();
            return true;
        default:
            return super.onOptionsItemSelected(item);
        }
    }

    /** Copies the commit SHA to the clipboard using the API appropriate for the platform level. */
    @SuppressWarnings("deprecation")
    @SuppressLint("NewApi")
    private void copyHashToClipboard() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            ClipboardManager manager = (ClipboardManager) getActivity().getSystemService(Context.CLIPBOARD_SERVICE);
            ClipData clip = ClipData.newPlainText("hash", commit.getSha());
            manager.setPrimaryClip(clip);
        } else {
            // Pre-Honeycomb fallback: the deprecated text-only clipboard API.
            android.text.ClipboardManager manager = (android.text.ClipboardManager) getActivity().getSystemService
                    (Context.CLIPBOARD_SERVICE);
            manager.setText(commit.getSha());
        }
        Toast.makeText(getActivity(), R.string.toast_msg_copied, Toast.LENGTH_SHORT).show();
    }

    /** Launches a share intent with a link to this commit on github.com. */
    private void shareCommit() {
        String id = repository.generateId();
        startActivity(ShareUtils.create(
                "Commit " + CommitUtils.abbreviate(base) + " on " + id,
                "https://github.com/" + id + "/commit/" + base));
    }

    /** Fetches the full commit (files + comments) in the background and updates the list. */
    private void refreshCommit() {
        new RefreshCommitTask(getActivity(), repository, base,
                commentImageGetter) {

            @Override
            protected FullCommit run(Account account) throws Exception {
                FullCommit full = super.run(account);

                // Hand the files to the styler and sort them before display.
                List<CommitFile> files = full.getCommit().getFiles();
                diffStyler.setFiles(files);
                if (files != null)
                    Collections.sort(files, new CommitFileComparator());
                return full;
            }

            @Override
            protected void onSuccess(FullCommit commit) throws Exception {
                super.onSuccess(commit);

                updateList(commit.getCommit(), commit, commit.getFiles());
            }

            @Override
            protected void onException(Exception e) throws RuntimeException {
                super.onException(e);

                ToastUtils.show(getActivity(), e, R.string.error_commit_load);
                ViewUtils.setGone(progress, true);
            }
        }.execute();
    }

    /** True when a committer is present and differs from the author. */
    private boolean isDifferentCommitter(final String author,
            final String committer) {
        return committer != null && !committer.equals(author);
    }

    /** Binds the commit message, author, and (when distinct) committer into the header views. */
    private void addCommitDetails(RepositoryCommit commit) {
        adapter.addHeader(commitHeader);

        commitMessage.setText(commit.getCommit().getMessage());

        String commitAuthor = CommitUtils.getAuthor(commit);
        String commitCommitter = CommitUtils.getCommitter(commit);

        if (commitAuthor != null) {
            CommitUtils.bindAuthor(commit, avatars, authorAvatar);
            authorName.setText(commitAuthor);
            StyledText styledAuthor = new StyledText();
            styledAuthor.append(getString(R.string.authored));

            Date commitAuthorDate = CommitUtils.getAuthorDate(commit);
            if (commitAuthorDate != null)
                styledAuthor.append(' ').append(commitAuthorDate);

            authorDate.setText(styledAuthor);
            ViewUtils.setGone(authorArea, false);
        } else
            ViewUtils.setGone(authorArea, true);

        // Only show the committer row when it adds information beyond the author.
        if (isDifferentCommitter(commitAuthor, commitCommitter)) {
            CommitUtils.bindCommitter(commit, avatars, committerAvatar);
            committerName.setText(commitCommitter);
            StyledText styledCommitter = new StyledText();
            styledCommitter.append(getString(R.string.committed));

            Date commitCommitterDate = CommitUtils.getCommitterDate(commit);
            if (commitCommitterDate != null)
                styledCommitter.append(' ').append(commitCommitterDate);

            committerDate.setText(styledCommitter);
            ViewUtils.setGone(committerArea, false);
        } else
            ViewUtils.setGone(committerArea, true);
    }

    /** Adds a header row summarizing additions/deletions across the commit's files. */
    private void addDiffStats(RepositoryCommit commit, LayoutInflater inflater) {
        View fileHeader = inflater.inflate(R.layout.commit_file_details_header,
                null);
        ((TextView) fileHeader.findViewById(R.id.tv_commit_file_summary))
                .setText(CommitUtils.formatStats(commit.getFiles()));
        adapter.addHeader(fileHeader);
    }

    /** Adds one clickable header row per parent commit. */
    private void addCommitParents(RepositoryCommit commit,
            LayoutInflater inflater) {
        List<Commit> parents = commit.getParents();
        if (parents == null || parents.isEmpty())
            return;

        for (Commit parent : parents) {
            View parentView = inflater.inflate(R.layout.commit_parent_item, null);
            TextView parentIdText = (TextView) parentView
                    .findViewById(R.id.tv_commit_id);
            // Underline to signal the parent SHA is tappable.
            parentIdText.setPaintFlags(parentIdText.getPaintFlags()
                    | UNDERLINE_TEXT_FLAG);
            StyledText parentText = new StyledText();
            parentText.append(getString(R.string.parent_prefix));
            parentText.monospace(CommitUtils.abbreviate(parent));
            parentIdText.setText(parentText);
            adapter.addHeader(parentView, parent, true);
        }
    }

    /** Hides the progress spinner and populates the header rows. */
    private void updateHeader(RepositoryCommit commit) {
        ViewUtils.setGone(progress, true);
        ViewUtils.setGone(list, false);

        addCommitDetails(commit);
        addCommitParents(commit, getActivity().getLayoutInflater());
    }

    /** Rebuilds the whole list (headers, stats, files, comments) from fresh data. */
    private void updateList(RepositoryCommit commit,
            List<CommitComment> comments, List<FullCommitFile> files) {
        if (!isUsable())
            return;

        this.commit = commit;
        this.comments = comments;
        this.files = files;

        adapter.clearHeaders();
        adapter.clearFooters();
        updateHeader(commit);
        addDiffStats(commit, getActivity().getLayoutInflater());
        updateItems(comments, files);
    }

    /** Replaces the adapter contents with the given files followed by global comments. */
    private void updateItems(List<CommitComment> comments,
            List<FullCommitFile> files) {
        CommitFileListAdapter rootAdapter = adapter.getWrappedAdapter();
        rootAdapter.clear();
        for (FullCommitFile file : files)
            rootAdapter.addItem(file);
        for (CommitComment comment : comments)
            rootAdapter.addComment(comment);
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);

        // NOTE(review): 'finder' is not declared here; presumably it is
        // provided by the DialogFragment base class -- confirm.
        list = finder.find(android.R.id.list);
        progress = finder.find(R.id.pb_loading);

        diffStyler = new DiffStyler(getResources());

        list.setOnItemClickListener(this);

        LayoutInflater inflater = getActivity().getLayoutInflater();

        adapter = new HeaderFooterListAdapter<CommitFileListAdapter>(list,
                new CommitFileListAdapter(inflater, diffStyler, avatars,
                        commentImageGetter));
        adapter.addFooter(inflater.inflate(R.layout.footer_separator, null));
        list.setAdapter(adapter);

        // Inflate and cache the header views; they are bound later in
        // addCommitDetails once commit data is available.
        commitHeader = inflater.inflate(R.layout.commit_header, null);
        commitMessage = (TextView) commitHeader
                .findViewById(R.id.tv_commit_message);

        authorArea = commitHeader.findViewById(R.id.ll_author);
        authorAvatar = (ImageView) commitHeader.findViewById(R.id.iv_author);
        authorName = (TextView) commitHeader.findViewById(R.id.tv_author);
        authorDate = (TextView) commitHeader.findViewById(R.id.tv_author_date);

        committerArea = commitHeader.findViewById(R.id.ll_committer);
        committerAvatar = (ImageView) commitHeader
                .findViewById(R.id.iv_committer);
        committerName = (TextView) commitHeader.findViewById(R.id.tv_committer);
        committerDate = (TextView) commitHeader.findViewById(R.id.tv_commit_date);

        loadingView = inflater.inflate(R.layout.loading_item, null);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        return inflater.inflate(R.layout.commit_diff_list, null);
    }

    /**
     * Shows a dialog for a tapped diff line offering to view the full file
     * or create a comment anchored at that line.
     */
    private void showFileOptions(CharSequence line, final int position,
            final CommitFile file) {
        final AlertDialog dialog = LightAlertDialog.create(getActivity());
        dialog.setTitle(CommitUtils.getName(file));
        dialog.setCanceledOnTouchOutside(true);

        View view = getActivity().getLayoutInflater().inflate(
                R.layout.diff_line_dialog, null);

        TextView diff = finder.textView(R.id.tv_diff);
        diff.setText(line);
        diffStyler.updateColors(line, diff);

        finder.setText(R.id.tv_commit, getString(R.string.commit_prefix)
                + CommitUtils.abbreviate(commit));

        finder.find(R.id.ll_view_area).setOnClickListener(new OnClickListener() {

            public void onClick(View v) {
                dialog.dismiss();
                openFile(file);
            }
        });

        finder.find(R.id.ll_comment_area).setOnClickListener(
                new OnClickListener() {

                    public void onClick(View v) {
                        dialog.dismiss();
                        startActivityForResult(CreateCommentActivity
                                .createIntent(repository, commit.getSha(),
                                        file.getFilename(), position),
                                COMMENT_CREATE);
                    }
                });

        dialog.setView(view);
        dialog.setButton(BUTTON_NEGATIVE, getString(R.string.cancel),
                new DialogInterface.OnClickListener() {

                    public void onClick(DialogInterface dialog, int which) {
                        dialog.dismiss();
                    }
                });
        dialog.show();
    }

    /** Opens the full contents of a file at this commit, if it has a name and blob SHA. */
    private void openFile(CommitFile file) {
        if (!TextUtils.isEmpty(file.getFilename())
                && !TextUtils.isEmpty(file.getSha()))
            startActivity(CommitFileViewActivity.createIntent(repository, base,
                    file));
    }

    /**
     * Select previous file by scanning backwards from the current position
     *
     * @param position
     * @param item
     * @param parent
     */
    private void selectPreviousFile(int position, Object item,
            AdapterView<?> parent) {
        CharSequence line;
        if (item instanceof CharSequence)
            line = (CharSequence) item;
        else
            line = null;

        // Walk backwards counting diff lines until the owning file is found,
        // so the dialog knows the line's offset within that file.
        int linePosition = 0;
        while (--position >= 0) {
            item = parent.getItemAtPosition(position);

            if (item instanceof CommitFile) {
                if (line != null)
                    showFileOptions(line, linePosition, (CommitFile) item);
                break;
            } else if (item instanceof CharSequence)
                if (line != null)
                    linePosition++;
                else
                    line = (CharSequence) item;
        }
    }

    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position,
            long id) {
        Object item = parent.getItemAtPosition(position);
        if (item instanceof Commit)
            startActivity(CommitViewActivity.createIntent(repository,
                    ((Commit) item).getSha()));
        else if (item instanceof CommitFile)
            openFile((CommitFile) item);
        else if (item instanceof CharSequence)
            selectPreviousFile(position, item, parent);
        else if (item instanceof CommitComment)
            if (!TextUtils.isEmpty(((CommitComment) item).getPath()))
                selectPreviousFile(position, item, parent);
    }
}
| |
package org.codehaus.mojo.exec;
/*
* Copyright 2005-2006 The Codehaus.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.metadata.ArtifactMetadataSource;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.ArtifactResolutionResult;
import org.apache.maven.artifact.resolver.ArtifactResolver;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectBuilder;
import org.apache.maven.project.artifact.MavenMetadataSource;
import java.io.File;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.*;
/**
* Executes the supplied java class in the current VM with the enclosing project's
* dependencies as classpath.
*
* @author <a href="mailto:kaare.nilsen@gmail.com">Kaare Nilsen</a>, <a href="mailto:dsmiley@mitre.org">David Smiley</a>
* @goal java
* @requiresDependencyResolution test
* @execute phase="validate"
* @since 1.0
*/
public class ExecJavaMojo
extends AbstractExecMojo
{
/**
* @component
*/
private ArtifactResolver artifactResolver;
/**
* @component
*/
private ArtifactFactory artifactFactory;
/**
* @component
*/
private ArtifactMetadataSource metadataSource;
/**
* @parameter expression="${localRepository}"
* @required
* @readonly
* @since 1.0
*/
private ArtifactRepository localRepository;
/**
* @parameter expression="${project.remoteArtifactRepositories}"
* @required
* @readonly
* @since 1.1-beta-1
*/
private List remoteRepositories;
/**
* @component
* @since 1.0
*/
private MavenProjectBuilder projectBuilder;
/**
* @parameter expression="${plugin.artifacts}"
* @readonly
* @since 1.1-beta-1
*/
private List pluginDependencies;
/**
* The main class to execute.
*
* @since 1.0
*/
private final String mainClass = "vanadis.main.Main";
/**
* The class arguments.
*
* @parameter expression="${exec.arguments}"
* @since 1.0
*/
private String[] arguments;
/**
* A list of system properties to be passed. Note: as the execution is not forked, some system properties
* required by the JVM cannot be passed here. Use MAVEN_OPTS or the exec:exec instead. See the user guide for
* more information.
*
* @parameter
* @since 1.0
*/
private Property[] systemProperties;
/**
* Indicates if the mojo should be kept running after the main class terminates.
* Useful for server-like apps with daemon threads.
*
* @parameter expression="${exec.keepAlive}" default-value="false"
* @deprecated since 1.1-alpha-1
* @since 1.0
*/
private boolean keepAlive;
/**
* Indicates if the project dependencies should be used when executing
* the main class.
*
* @since 1.1-beta-1
*/
private boolean includeProjectDependencies = true;
/**
* Indicates if this plugin's dependencies should be used when executing
* the main class.
* <p/>
* This is useful when project dependencies are not appropriate. Using only
* the plugin dependencies can be particularly useful when the project is
* not a java project. For example a mvn project using the csharp plugins
* only expects to see dotnet libraries as dependencies.
*/
private boolean includePluginDependencies = true;
/**
* If provided the ExecutableDependency identifies which of the plugin dependencies
* contains the executable class. This will have the affect of only including
* plugin dependencies required by the identified ExecutableDependency.
* <p/>
* If includeProjectDependencies is set to <code>true</code>, all of the project dependencies
* will be included on the executable's classpath. Whether a particular project
* dependency is a dependency of the identified ExecutableDependency will be
* irrelevant to its inclusion in the classpath.
*
* @parameter
* @optional
* @since 1.1-beta-1
*/
private ExecutableDependency executableDependency;
/**
* Whether to interrupt/join and possibly stop the daemon threads upon quitting. <br/> If this is <code>false</code>,
* maven does nothing about the daemon threads. When maven has no more work to do, the VM will normally terminate
* any remaining daemon threads.
* <p>
* In certain cases (in particular if maven is embedded),
* you might need to keep this enabled to make sure threads are properly cleaned up to ensure they don't interfere
* with subsequent activity.
* In that case, see {@link #daemonThreadJoinTimeout} and
* {@link #stopUnresponsiveDaemonThreads} for further tuning.
* </p>
* @parameter expression="${exec.cleanupDaemonThreads}" default-value="true"
* @since 1.1-beta-1
*/
private boolean cleanupDaemonThreads;
/**
 * This defines the number of milliseconds to wait for daemon threads to quit following their interruption.<br/>
 * This is only taken into account if {@link #cleanupDaemonThreads} is <code>true</code>.
 * A value &lt;= 0 means to not timeout (i.e. wait indefinitely for threads to finish). Following a timeout, a
 * warning will be logged.
 * <p>Note: properly coded threads <i>should</i> terminate upon interruption but some threads may prove
 * problematic: as the VM does interrupt daemon threads, some code may not have been written to handle
 * interruption properly. For example java.util.Timer is known to not handle interruptions in JDK &lt;= 1.6.
 * So it is not possible for us to infinitely wait by default otherwise maven could hang. A sensible default
 * value has been chosen, but this default value <i>may change</i> in the future based on user feedback.</p>
 * @parameter expression="${exec.daemonThreadJoinTimeout}" default-value="15000"
 * @since 1.1-beta-1
 */
private long daemonThreadJoinTimeout;
/**
 * Whether to call {@link Thread#stop()} following a timing out of waiting for an interrupted thread to finish.
 * This is only taken into account if {@link #cleanupDaemonThreads} is <code>true</code>
 * and the {@link #daemonThreadJoinTimeout} threshold has been reached for an uncooperative thread.
 * If this is <code>false</code>, or if {@link Thread#stop()} fails to get the thread to stop, then
 * a warning is logged and Maven will continue on while the affected threads (and related objects in memory)
 * linger on. Consider setting this to <code>true</code> if you are invoking problematic code that you can't fix.
 * An example is {@link java.util.Timer} which doesn't respond to interruption. To have <code>Timer</code>
 * fixed, vote for <a href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6336543">this bug</a>.
 * @parameter expression="${exec.stopUnresponsiveDaemonThreads}" default-value="false"
 * @since 1.1-beta-1
 */
private boolean stopUnresponsiveDaemonThreads;
/**
 * Deprecated; this is not needed anymore and has no effect beyond a warning.
 *
 * @parameter expression="${exec.killAfter}" default-value="-1"
 * @deprecated since 1.1-alpha-1
 * @since 1.0
 */
private long killAfter;
// Snapshot of the JVM system properties taken by setSystemProperties() before the
// user-supplied properties are applied; restored at the end of execute().
// Stays null when no systemProperties were configured.
private Properties originalSystemProperties;
/**
 * Execute goal: loads the configured main class through an isolated classloader,
 * invokes its static <code>main(String[])</code> on a bootstrap thread inside an
 * {@link IsolatedThreadGroup}, waits for all non-daemon threads it spawns, then
 * optionally terminates daemon threads and restores the original system properties.
 *
 * @throws MojoExecutionException execution of the main class or one of the threads it generated failed.
 * @throws MojoFailureException something bad happened...
 */
public void execute()
    throws MojoExecutionException, MojoFailureException
{
    if ( killAfter != -1 )
    {
        getLog().warn( "Warning: killAfter is now deprecated. Do you need it ? Please comment on MEXEC-6." );
    }
    if ( null == arguments )
    {
        arguments = new String[0];
    }
    if ( getLog().isDebugEnabled() )
    {
        // StringBuilder: the buffer is purely local, so the synchronized StringBuffer is unnecessary
        StringBuilder msg = new StringBuilder( "Invoking : " );
        msg.append( mainClass );
        msg.append( ".main(" );
        for ( int i = 0; i < arguments.length; i++ )
        {
            if ( i > 0 )
            {
                msg.append( ", " );
            }
            msg.append( arguments[i] );
        }
        msg.append( ")" );
        getLog().debug( msg );
    }
    IsolatedThreadGroup threadGroup = new IsolatedThreadGroup( mainClass /*name*/ );
    Thread bootstrapThread = new Thread( threadGroup, new Runnable()
    {
        public void run()
        {
            try
            {
                Method main = Thread.currentThread().getContextClassLoader().loadClass( mainClass )
                    .getMethod( "main", new Class[]{ String[].class } );
                if ( ! main.isAccessible() )
                {
                    getLog().debug( "Setting accessibility to true in order to invoke main()." );
                    main.setAccessible( true );
                }
                // main() is static, so the target instance is null. (The previous code passed the
                // Method object itself, which only worked because the target is ignored for statics.)
                main.invoke( null, new Object[]{ arguments } );
            }
            catch ( NoSuchMethodException e )
            { // just pass it on
                Thread.currentThread().getThreadGroup().uncaughtException( Thread.currentThread(),
                      new Exception(
                           "The specified mainClass doesn't contain a main method with appropriate signature.", e
                      )
                );
            }
            catch ( Exception e )
            { // just pass it on
                Thread.currentThread().getThreadGroup().uncaughtException( Thread.currentThread(), e );
            }
        }
    }, mainClass + ".main()" );
    bootstrapThread.setContextClassLoader( getClassLoader() );
    setSystemProperties();
    bootstrapThread.start();
    joinNonDaemonThreads( threadGroup );
    // It's plausible that spontaneously a non-daemon thread might be created as we try and shut down,
    // but it's too late since the termination condition (only daemon threads) has been triggered.
    if ( keepAlive )
    {
        getLog().warn(
            "Warning: keepAlive is now deprecated and obsolete. Do you need it? Please comment on MEXEC-6." );
        waitFor( 0 );
    }
    if ( cleanupDaemonThreads )
    {
        terminateThreads( threadGroup );
        try
        {
            threadGroup.destroy();
        }
        catch ( IllegalThreadStateException e )
        {
            getLog().warn( "Couldn't destroy threadgroup " + threadGroup, e );
        }
    }
    if ( originalSystemProperties != null )
    {
        System.setProperties( originalSystemProperties );
    }
    synchronized ( threadGroup )
    {
        if ( threadGroup.uncaughtException != null )
        {
            throw new MojoExecutionException( "An exception occured while executing the Java class. "
                + threadGroup.uncaughtException.getMessage(),
                threadGroup.uncaughtException );
        }
    }
    registerSourceRoots();
}
/**
 * A ThreadGroup that isolates execution of the launched application and records
 * the first uncaught exception raised by any thread in the group; additional
 * exceptions are logged as warnings.
 */
class IsolatedThreadGroup extends ThreadGroup
{
    Throwable uncaughtException; // guarded by synchronized (this)

    public IsolatedThreadGroup( String name )
    {
        super( name );
    }

    public void uncaughtException( Thread thread, Throwable throwable )
    {
        if ( throwable instanceof ThreadDeath )
        {
            return; // harmless: the thread was deliberately stopped
        }
        boolean alreadyRecorded;
        synchronized ( this )
        {
            alreadyRecorded = ( uncaughtException != null );
            if ( ! alreadyRecorded ) // only the first exception is kept for later reporting
            {
                uncaughtException = throwable;
            }
        }
        if ( alreadyRecorded )
        {
            getLog().warn( "an additional exception was thrown", throwable );
        }
    }
}
/**
 * Joins every live non-daemon thread in the group, re-scanning after each pass
 * because a joined thread may have spawned new non-daemon threads in the meantime.
 * Returns once a full scan finds only daemon threads.
 */
private void joinNonDaemonThreads( ThreadGroup threadGroup )
{
    boolean joinedAny = true;
    while ( joinedAny )
    {
        joinedAny = false;
        for ( Iterator iter = getActiveThreads( threadGroup ).iterator(); iter.hasNext(); )
        {
            Thread thread = (Thread) iter.next();
            if ( ! thread.isDaemon() )
            {
                joinedAny = true; // force another scan; more threads may have appeared while we waited
                joinThread( thread, 0 );
            }
        }
    }
}
/**
 * Joins the given thread, waiting at most timeoutMsecs (0 = wait forever).
 * Logs a warning when the thread is still alive afterwards or when this
 * thread is itself interrupted while waiting.
 */
private void joinThread( Thread thread, long timeoutMsecs )
{
    getLog().debug( "joining on thread " + thread );
    try
    {
        thread.join( timeoutMsecs );
    }
    catch ( InterruptedException e )
    {
        Thread.currentThread().interrupt(); // good practice if don't throw
        getLog().warn( "interrupted while joining against thread " + thread, e ); // not expected!
    }
    if ( thread.isAlive() ) // generally abnormal
    {
        getLog().warn( "thread " + thread + " was interrupted but is still alive after waiting at least "
            + timeoutMsecs + "msecs" );
    }
}
/**
 * Interrupts every thread remaining in the group and waits for them to die, sharing
 * a single {@link #daemonThreadJoinTimeout} budget across all threads. Threads that
 * survive the timeout are either Thread.stop()'ed (when
 * {@link #stopUnresponsiveDaemonThreads} is set) or left to linger with a warning.
 */
private void terminateThreads( ThreadGroup threadGroup )
{
long startTime = System.currentTimeMillis();
Set uncooperativeThreads = new HashSet(); // these were not responsive to interruption
// Loop until a scan finds no live threads. Each pass re-snapshots the group (dying threads
// may spawn new ones) and removes threads already proven uncooperative so each one is
// stop()'ed / warned about at most once.
for ( Collection threads = getActiveThreads( threadGroup ); !threads.isEmpty();
threads = getActiveThreads( threadGroup ), threads.removeAll( uncooperativeThreads ) )
{
// Interrupt all threads we know about as of this instant (harmless if spuriously went dead (! isAlive())
// or if something else interrupted it ( isInterrupted() ).
for ( Iterator iter = threads.iterator(); iter.hasNext(); )
{
Thread thread = (Thread) iter.next();
getLog().debug( "interrupting thread " + thread );
thread.interrupt();
}
// Now join with a timeout and call stop() (assuming flags are set right)
for ( Iterator iter = threads.iterator(); iter.hasNext(); )
{
Thread thread = (Thread) iter.next();
if ( ! thread.isAlive() )
{
continue; //and, presumably it won't show up in getActiveThreads() next iteration
}
if ( daemonThreadJoinTimeout <= 0 )
{
joinThread( thread, 0 ); //waits until not alive; no timeout
continue;
}
// The timeout budget is shared across ALL threads: subtract the time already spent.
long timeout = daemonThreadJoinTimeout
- ( System.currentTimeMillis() - startTime );
if ( timeout > 0 )
{
joinThread( thread, timeout );
}
if ( ! thread.isAlive() )
{
continue;
}
uncooperativeThreads.add( thread ); // ensure we don't process again
if ( stopUnresponsiveDaemonThreads )
{
getLog().warn( "thread " + thread + " will be Thread.stop()'ed" );
thread.stop();
}
else
{
getLog().warn( "thread " + thread + " will linger despite being asked to die via interruption" );
}
}
}
if ( ! uncooperativeThreads.isEmpty() )
{
getLog().warn( "NOTE: " + uncooperativeThreads.size() + " thread(s) did not finish despite being asked to "
+ " via interruption. This is not a problem with exec:java, it is a problem with the running code."
+ " Although not serious, it should be remedied." );
}
else
{
int activeCount = threadGroup.activeCount();
if ( activeCount != 0 )
{
// TODO this may be nothing; continue on anyway; perhaps don't even log in future
Thread[] threadsArray = new Thread[1];
threadGroup.enumerate( threadsArray );
getLog().debug( "strange; " + activeCount
+ " thread(s) still active in the group " + threadGroup + " such as " + threadsArray[0] );
}
}
}
/**
 * Snapshots the live threads of the given group into a new, modifiable collection.
 * The group's population can change between activeCount() and enumerate(), so only
 * the slots actually filled by enumerate() are copied.
 */
private Collection getActiveThreads( ThreadGroup threadGroup )
{
    Thread[] snapshot = new Thread[ threadGroup.activeCount() ];
    int filled = threadGroup.enumerate( snapshot );
    Collection alive = new ArrayList( filled );
    for ( int i = 0; i < filled; i++ )
    {
        alive.add( snapshot[i] );
    }
    return alive; // note: callers rely on this being modifiable
}
/**
 * Pass any given system properties to the java system properties, first saving
 * a snapshot of the current system properties so execute() can restore them
 * after the launched class finishes.
 */
private void setSystemProperties()
{
    if ( systemProperties != null )
    {
        // Clone the current properties: System.getProperties() returns the LIVE Properties
        // object, so without a copy the setProperty() calls below would mutate our "backup"
        // and the System.setProperties(originalSystemProperties) restore in execute()
        // would silently be a no-op.
        originalSystemProperties = (Properties) System.getProperties().clone();
        for ( int i = 0; i < systemProperties.length; i++ )
        {
            Property systemProperty = systemProperties[i];
            String value = systemProperty.getValue();
            // a null value is normalized to the empty string; System.setProperty rejects null
            System.setProperty( systemProperty.getKey(), value == null ? "" : value );
        }
    }
}
/**
 * Set up a classloader for the execution of the main class.
 *
 * @return a URLClassLoader over the relevant plugin and project dependency URLs
 * @throws MojoExecutionException if a problem happens
 */
private ClassLoader getClassLoader()
    throws MojoExecutionException
{
    List urls = new ArrayList();
    addRelevantPluginDependenciesToClasspath( urls );
    addRelevantProjectDependenciesToClasspath( urls );
    URL[] classpath = (URL[]) urls.toArray( new URL[ urls.size() ] );
    return new URLClassLoader( classpath );
}
/**
 * Add any relevant plugin dependencies to the classpath.
 * Indirectly takes includePluginDependencies and ExecutableDependency into consideration.
 *
 * @param path classpath of {@link java.net.URL} objects
 * @throws MojoExecutionException if a problem happens
 */
private void addRelevantPluginDependenciesToClasspath( List path )
throws MojoExecutionException
{
// NOTE(review): command-line argument parsing looks unrelated to classpath setup;
// presumably placed here because this runs before the main class is invoked — confirm.
if ( hasCommandlineArgs() )
{
arguments = parseCommandlineArgs();
}
try
{
// determineRelevantPluginDependencies() honors includePluginDependencies/executableDependency
Iterator iter = this.determineRelevantPluginDependencies().iterator();
while ( iter.hasNext() )
{
Artifact classPathElement = (Artifact) iter.next();
getLog().debug(
"Adding plugin dependency artifact: " + classPathElement.getArtifactId() + " to classpath" );
path.add( classPathElement.getFile().toURL() );
}
}
catch ( MalformedURLException e )
{
throw new MojoExecutionException( "Error during setting up classpath", e );
}
}
/**
 * Add any relevant project dependencies to the classpath.
 * Takes includeProjectDependencies into consideration.
 *
 * @param path classpath of {@link java.net.URL} objects
 * @throws MojoExecutionException if a problem happens
 */
private void addRelevantProjectDependenciesToClasspath( List path )
    throws MojoExecutionException
{
    if ( ! this.includeProjectDependencies )
    {
        getLog().debug( "Project Dependencies will be excluded." );
        return;
    }
    getLog().debug( "Project Dependencies will be included." );
    try
    {
        List artifacts = new ArrayList();
        List theClasspathFiles = new ArrayList();
        collectProjectArtifactsAndClasspath( artifacts, theClasspathFiles );
        // first the raw classpath entries (e.g. output directories) ...
        for ( Iterator it = theClasspathFiles.iterator(); it.hasNext(); )
        {
            URL url = ( (File) it.next() ).toURL();
            getLog().debug( "Adding to classpath : " + url );
            path.add( url );
        }
        // ... then every resolved dependency artifact
        for ( Iterator iter = artifacts.iterator(); iter.hasNext(); )
        {
            Artifact classPathElement = (Artifact) iter.next();
            getLog().debug(
                "Adding project dependency artifact: " + classPathElement.getArtifactId() + " to classpath" );
            path.add( classPathElement.getFile().toURL() );
        }
    }
    catch ( MalformedURLException e )
    {
        throw new MojoExecutionException( "Error during setting up classpath", e );
    }
}
/**
 * Determine all plugin dependencies relevant to the executable.
 * Takes includePluginDependencies, and the executableDependency into consideration.
 *
 * @return a set of Artifact objects.
 * (Empty set is returned if there are no relevant plugin dependencies.)
 * @throws MojoExecutionException if a problem happens resolving the plugin dependencies
 */
private Set determineRelevantPluginDependencies()
    throws MojoExecutionException
{
    if ( ! this.includePluginDependencies )
    {
        getLog().debug( "Plugin Dependencies will be excluded." );
        return Collections.EMPTY_SET;
    }
    if ( this.executableDependency == null )
    {
        getLog().debug( "All Plugin Dependencies will be included." );
        return new HashSet( this.pluginDependencies );
    }
    getLog().debug( "Selected plugin Dependencies will be included." );
    Artifact executableArtifact = this.findExecutableArtifact();
    Artifact executablePomArtifact = this.getExecutablePomArtifact( executableArtifact );
    return this.resolveExecutableDependencies( executablePomArtifact );
}
/**
 * Get the artifact which refers to the POM of the executable artifact.
 *
 * @param executableArtifact this artifact refers to the actual assembly.
 * @return an artifact which refers to the POM of the executable artifact.
 */
private Artifact getExecutablePomArtifact( Artifact executableArtifact )
{
    String groupId = executableArtifact.getGroupId();
    String artifactId = executableArtifact.getArtifactId();
    String version = executableArtifact.getVersion();
    return this.artifactFactory.createBuildArtifact( groupId, artifactId, version, "pom" );
}
/**
 * Examine the plugin dependencies to find the executable artifact.
 *
 * @return an artifact which refers to the actual executable tool (not a POM)
 * @throws MojoExecutionException if no executable artifact was found
 */
private Artifact findExecutableArtifact()
    throws MojoExecutionException
{
    // return the first plugin dependency matching the configured executableDependency
    for ( Iterator iter = this.pluginDependencies.iterator(); iter.hasNext(); )
    {
        Artifact candidate = (Artifact) iter.next();
        if ( this.executableDependency.matches( candidate ) )
        {
            return candidate;
        }
    }
    throw new MojoExecutionException(
        "No dependency of the plugin matches the specified executableDependency."
            + " Specified executableToolAssembly is: " + executableDependency.toString() );
}
/**
 * Resolve the executable dependencies for the specified project.
 *
 * @param executablePomArtifact the project's POM
 * @return a set of Artifacts: the transitive dependencies of the executable plus its own artifact
 * @throws MojoExecutionException if a failure happens
 */
private Set resolveExecutableDependencies( Artifact executablePomArtifact )
throws MojoExecutionException
{
Set executableDependencies;
try
{
// Build the executable's MavenProject from its POM so its dependency list can be read.
MavenProject executableProject = this.projectBuilder.buildFromRepository( executablePomArtifact,
this.remoteRepositories,
this.localRepository );
//get all of the dependencies for the executable project
List dependencies = executableProject.getDependencies();
//make Artifacts of all the dependencies
Set dependencyArtifacts =
MavenMetadataSource.createArtifacts( this.artifactFactory, dependencies, null, null, null );
//not forgetting the Artifact of the project itself
dependencyArtifacts.add( executableProject.getArtifact() );
//resolve all dependencies transitively to obtain a comprehensive list of assemblies
ArtifactResolutionResult result = artifactResolver.resolveTransitively( dependencyArtifacts,
executablePomArtifact,
Collections.EMPTY_MAP,
this.localRepository,
this.remoteRepositories,
metadataSource, null,
Collections.EMPTY_LIST );
executableDependencies = result.getArtifacts();
}
catch ( Exception ex )
{
// Broad catch at the mojo boundary: any build/resolution failure is wrapped,
// with the original exception preserved as the cause.
throw new MojoExecutionException(
"Encountered problems resolving dependencies of the executable " + "in preparation for its execution.",
ex );
}
return executableDependencies;
}
/**
 * Stop program execution for nn millis.
 * Unlike a bare {@link Object#wait(long)}, this loops so that spurious wakeups
 * (which the JLS explicitly permits) do not end the wait early — in particular
 * <code>waitFor(0)</code> really does block forever until interrupted.
 *
 * @param millis the number of millis-seconds to wait for,
 * <code>0</code> stops program forever.
 */
private void waitFor( long millis )
{
    Object lock = new Object();
    synchronized ( lock )
    {
        try
        {
            if ( millis == 0 )
            {
                // wait forever; the loop guards against spurious wakeups
                while ( true )
                {
                    lock.wait();
                }
            }
            else
            {
                long deadline = System.currentTimeMillis() + millis;
                long remaining = millis;
                while ( remaining > 0 )
                {
                    lock.wait( remaining );
                    remaining = deadline - System.currentTimeMillis();
                }
            }
        }
        catch ( InterruptedException e )
        {
            Thread.currentThread().interrupt(); // good practice if don't throw
            getLog().warn( "Spuriously interrupted while waiting for " + millis + "ms", e );
        }
    }
}
}
| |
/*
* RED5 Open Source Flash Server - http://code.google.com/p/red5/
*
* Copyright 2006-2012 by respective authors (see below). All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.red5.logging;
import java.lang.reflect.Method;
import org.red5.server.adapter.StatefulScopeWrappingAdapter;
import org.red5.server.api.scope.IScope;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.impl.StaticLoggerBinder;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.selector.ContextSelector;
/**
 * LoggerFactory to simplify requests for Logger instances within
 * Red5 applications. This class is expected to be run only once per
 * logger request and is optimized as such.
 *
 * @author Paul Gregoire (mondain@gmail.com)
 */
public class Red5LoggerFactory {

	/**
	 * Returns a logger for the given class, deriving the logging context name
	 * from the Red5 application scope (for scope-aware adapters) or from the
	 * Launcher thread name.
	 */
	@SuppressWarnings({ "unchecked", "rawtypes" })
	public static Logger getLogger(Class<?> clazz) {
		//determine the red5 app name or servlet context name
		String contextName = null;
		//if the incoming class extends StatefulScopeWrappingAdapter we lookup the context
		//by scope name
		boolean scopeAware = StatefulScopeWrappingAdapter.class.isAssignableFrom(clazz);
		if (scopeAware) {
			try {
				Class wrapper = null;
				if ((wrapper = clazz.asSubclass(StatefulScopeWrappingAdapter.class)) != null) {
					Method getScope = wrapper.getMethod("getScope", new Class[0]);
					//NPE will occur here if the scope is not yet set on the application adapter
					IScope scope = (IScope) getScope.invoke(null, new Object[0]);
					contextName = scope.getName();
				}
			} catch (Exception cce) {
				// the adapter could not provide a scope name yet; fall back to the default context
			}
		} else {
			//route the Launcher entries to the correct context
			String[] parts = Thread.currentThread().getName().split("Launcher:/");
			if (parts.length > 1) {
				contextName = parts[1];
			}
		}
		return getLogger(clazz, contextName);
	}

	/**
	 * Returns a logger for the given class from the logback context matching
	 * contextName; falls back to plain SLF4J when logback is unavailable.
	 */
	@SuppressWarnings({ "rawtypes" })
	public static Logger getLogger(Class clazz, String contextName) {
		LoggerContext ctx = resolveLoggerContext(contextName);
		//no logback (or lookup failed): use whatever logger is in-place
		return ctx != null ? ctx.getLogger(clazz) : LoggerFactory.getLogger(clazz);
	}

	/**
	 * Returns a logger for the given name from the logback context matching
	 * contextName; falls back to plain SLF4J when logback is unavailable.
	 */
	public static Logger getLogger(String name, String contextName) {
		LoggerContext ctx = resolveLoggerContext(contextName);
		//no logback (or lookup failed): use whatever logger is in-place
		return ctx != null ? ctx.getLogger(name) : LoggerFactory.getLogger(name);
	}

	/**
	 * Locates the logback LoggerContext for the given context name via the static
	 * binder's ContextSelector. Shared by both getLogger overloads (this lookup was
	 * previously duplicated verbatim in each). Returns null when logback is not on
	 * the classpath or the lookup fails for any reason.
	 */
	@SuppressWarnings({ "rawtypes" })
	private static LoggerContext resolveLoggerContext(String contextName) {
		try {
			//check for logback
			Class cs = Class.forName("ch.qos.logback.classic.selector.ContextSelector");
			//trigger an exception if the class doesn't actually exist
			cs.getDeclaredMethods();
			// get the class for static binding
			cs = Class.forName("org.slf4j.impl.StaticLoggerBinder");
			// ensure the binder actually exposes getContextSelector before using it
			for (Method method : cs.getDeclaredMethods()) {
				if (method.getName().equals("getContextSelector")) {
					ContextSelector selector = getContextSelector();
					if (selector == null) {
						break;
					}
					//get the context for the given context name or default if null
					LoggerContext ctx = null;
					if (contextName != null && contextName.length() > 0) {
						ctx = selector.getLoggerContext(contextName);
					}
					// and if we get here, fall back to the default context
					if (ctx == null) {
						ctx = selector.getLoggerContext();
					}
					return ctx != null ? ctx : selector.getDefaultLoggerContext();
				}
			}
		} catch (Exception e) {
			// print the full exception: getMessage() alone may be null, and add a newline
			System.err.printf("Exception %s%n", e);
		}
		return null;
	}

	public static ContextSelector getContextSelector() {
		ContextSelector selector = null;
		StaticLoggerBinder binder = StaticLoggerBinder.getSingleton();
		try {
			Method m1 = binder.getClass().getMethod("getContextSelector", (Class[]) null);
			selector = (ContextSelector) m1.invoke(binder, (Object[]) null);
		} catch (Exception e) {
			// print the full exception: getMessage() alone may be null, and add a newline
			System.err.printf("Exception %s%n", e);
		}
		return selector;
	}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
/**
 * Result of a DescribeTags call: the matching tag descriptions plus an
 * optional pagination token for retrieving the next page.
 */
public class DescribeTagsResult implements Serializable, Cloneable {

    /** A list of tags. */
    private com.amazonaws.internal.ListWithAutoConstructFlag<TagDescription> tags;

    /**
     * The token to use to retrieve the next page of results. This value is
     * <code>null</code> when there are no more results to return.
     */
    private String nextToken;

    /**
     * A list of tags.
     *
     * @return A list of tags (lazily created, so never <code>null</code>).
     */
    public java.util.List<TagDescription> getTags() {
        if (tags == null) {
            tags = new com.amazonaws.internal.ListWithAutoConstructFlag<TagDescription>();
            tags.setAutoConstruct(true);
        }
        return tags;
    }

    /**
     * A list of tags.
     *
     * @param tags A list of tags; <code>null</code> clears the field.
     */
    public void setTags(java.util.Collection<TagDescription> tags) {
        if (tags == null) {
            this.tags = null;
        } else {
            // defensive copy so later mutation of the caller's collection has no effect
            com.amazonaws.internal.ListWithAutoConstructFlag<TagDescription> copy =
                    new com.amazonaws.internal.ListWithAutoConstructFlag<TagDescription>(tags.size());
            copy.addAll(tags);
            this.tags = copy;
        }
    }

    /**
     * A list of tags.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setTags(java.util.Collection)} or
     * {@link #withTags(java.util.Collection)} if you want to override the
     * existing values.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param tags A list of tags.
     * @return This object, for method chaining.
     */
    public DescribeTagsResult withTags(TagDescription... tags) {
        if (getTags() == null) {
            setTags(new java.util.ArrayList<TagDescription>(tags.length));
        }
        java.util.List<TagDescription> current = getTags();
        for (TagDescription value : tags) {
            current.add(value);
        }
        return this;
    }

    /**
     * A list of tags.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param tags A list of tags.
     * @return This object, for method chaining.
     */
    public DescribeTagsResult withTags(java.util.Collection<TagDescription> tags) {
        setTags(tags); // same replace-with-copy semantics as the setter
        return this;
    }

    /**
     * The token to use to retrieve the next page of results. This value is
     * <code>null</code> when there are no more results to return.
     *
     * @return The next-page token, or <code>null</code> on the last page.
     */
    public String getNextToken() {
        return nextToken;
    }

    /**
     * The token to use to retrieve the next page of results. This value is
     * <code>null</code> when there are no more results to return.
     *
     * @param nextToken The next-page token, or <code>null</code> on the last page.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * The token to use to retrieve the next page of results. This value is
     * <code>null</code> when there are no more results to return.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param nextToken The next-page token, or <code>null</code> on the last page.
     * @return This object, for method chaining.
     */
    public DescribeTagsResult withNextToken(String nextToken) {
        this.nextToken = nextToken;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getTags() != null) {
            sb.append("Tags: ").append(getTags()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((getTags() == null) ? 0 : getTags().hashCode());
        result = prime * result + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DescribeTagsResult)) {
            return false; // also covers obj == null
        }
        DescribeTagsResult that = (DescribeTagsResult) obj;
        if ((that.getTags() == null) != (this.getTags() == null)) {
            return false;
        }
        if (that.getTags() != null && !that.getTags().equals(this.getTags())) {
            return false;
        }
        if ((that.getNextToken() == null) != (this.getNextToken() == null)) {
            return false;
        }
        if (that.getNextToken() != null && !that.getNextToken().equals(this.getNextToken())) {
            return false;
        }
        return true;
    }

    @Override
    public DescribeTagsResult clone() {
        try {
            return (DescribeTagsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.lang;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.ByteBuffer;
import java.security.ProtectionDomain;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
/*-[
#import "java/util/ArrayList.h"
#import "java/util/Collections.h"
]-*/
/**
* Loads classes and resources from a repository. One or more class loaders are
* installed at runtime. These are consulted whenever the runtime system needs a
* specific class that is not yet available in-memory. Typically, class loaders
* are grouped into a tree where child class loaders delegate all requests to
* parent class loaders. Only if the parent class loader cannot satisfy the
* request, the child class loader itself tries to handle it.
* <p>
* {@code ClassLoader} is an abstract class that implements the common
* infrastructure required by all class loaders. J2ObjC provides a native
* implementation of the class, SystemClassLoader, which is the one typically
* used. Other applications may implement subclasses of {@code ClassLoader}
* to provide special ways for loading classes.
* </p><p>
* Note: since classes cannot be dynamically created in iOS or OS X, class
* loaders have a much more limited utility than with JVM-based systems like
* Java and Android.
* </p>
* @see Class
*/
public abstract class ClassLoader {
/**
 * The parent ClassLoader; set once at construction (the no-arg constructor
 * passes the system class loader).
 */
private final ClassLoader parent;
/**
 * The packages known to the class loader. Presumably keyed by package name —
 * the code that populates this map is outside this chunk; confirm before relying on it.
 */
private final Map<String, Package> packages = new HashMap<String, Package>();
/**
 * Returns the system class loader. This is the parent for new
 * {@code ClassLoader} instances and is typically the class loader used to
 * start the application.
 *
 * @return the shared loader instance held by SystemClassLoader
 */
public static ClassLoader getSystemClassLoader() {
return SystemClassLoader.loader;
}
/**
* Finds the URL of the resource with the specified name. The system class
* loader's resource lookup algorithm is used to find the resource.
*
* @return the {@code URL} object for the requested resource or {@code null}
* if the resource can not be found.
* @param resName
* the name of the resource to find.
* @see Class#getResource
*/
public static URL getSystemResource(String resName) {
return SystemClassLoader.loader.getResource(resName);
}
/**
* Returns an enumeration of URLs for the resource with the specified name.
* The system class loader's resource lookup algorithm is used to find the
* resource.
*
* @return an enumeration of {@code URL} objects containing the requested
* resources.
* @param resName
* the name of the resource to find.
* @throws IOException
* if an I/O error occurs.
*/
public static Enumeration<URL> getSystemResources(String resName) throws IOException {
return SystemClassLoader.loader.getResources(resName);
}
/**
* Returns a stream for the resource with the specified name. The system
* class loader's resource lookup algorithm is used to find the resource.
* Basically, the contents of the java.class.path are searched in order,
* looking for a path which matches the specified resource.
*
* @return a stream for the resource or {@code null}.
* @param resName
* the name of the resource to find.
* @see Class#getResourceAsStream
*/
public static InputStream getSystemResourceAsStream(String resName) {
return SystemClassLoader.loader.getResourceAsStream(resName);
}
/**
* Constructs a new instance of this class with the system class loader as
* its parent.
*/
protected ClassLoader() {
this(getSystemClassLoader(), false);
}
/**
* Constructs a new instance of this class with the specified class loader
* as its parent.
*
* @param parentLoader
* The {@code ClassLoader} to use as the new class loader's
* parent.
*/
protected ClassLoader(ClassLoader parentLoader) {
this(parentLoader, false);
}
/*
* constructor for the BootClassLoader which needs parent to be null.
*/
ClassLoader(ClassLoader parentLoader, boolean nullAllowed) {
if (parentLoader == null && !nullAllowed) {
throw new NullPointerException("parentLoader == null && !nullAllowed");
}
parent = parentLoader;
}
/**
* Constructs a new class from an array of bytes containing a class
* definition in class file format.
*
* @param classRep
* the memory image of a class file.
* @param offset
* the offset into {@code classRep}.
* @param length
* the length of the class file.
* @return the {@code Class} object created from the specified subset of
* data in {@code classRep}.
* @throws ClassFormatError
* if {@code classRep} does not contain a valid class.
* @throws IndexOutOfBoundsException
* if {@code offset < 0}, {@code length < 0} or if
* {@code offset + length} is greater than the length of
* {@code classRep}.
* @deprecated Use {@link #defineClass(String, byte[], int, int)}
*/
@Deprecated
protected final Class<?> defineClass(byte[] classRep, int offset, int length)
throws ClassFormatError {
throw new UnsupportedOperationException("can't load this type of class file");
}
/**
* Constructs a new class from an array of bytes containing a class
* definition in class file format.
*
* @param className
* the expected name of the new class, may be {@code null} if not
* known.
* @param classRep
* the memory image of a class file.
* @param offset
* the offset into {@code classRep}.
* @param length
* the length of the class file.
* @return the {@code Class} object created from the specified subset of
* data in {@code classRep}.
* @throws ClassFormatError
* if {@code classRep} does not contain a valid class.
* @throws IndexOutOfBoundsException
* if {@code offset < 0}, {@code length < 0} or if
* {@code offset + length} is greater than the length of
* {@code classRep}.
*/
protected final Class<?> defineClass(String className, byte[] classRep, int offset, int length)
throws ClassFormatError {
throw new UnsupportedOperationException("can't load this type of class file");
}
/**
* Constructs a new class from an array of bytes containing a class
* definition in class file format and assigns the specified protection
* domain to the new class. If the provided protection domain is
* {@code null} then a default protection domain is assigned to the class.
*
* @param className
* the expected name of the new class, may be {@code null} if not
* known.
* @param classRep
* the memory image of a class file.
* @param offset
* the offset into {@code classRep}.
* @param length
* the length of the class file.
* @param protectionDomain
* the protection domain to assign to the loaded class, may be
* {@code null}.
* @return the {@code Class} object created from the specified subset of
* data in {@code classRep}.
* @throws ClassFormatError
* if {@code classRep} does not contain a valid class.
* @throws IndexOutOfBoundsException
* if {@code offset < 0}, {@code length < 0} or if
* {@code offset + length} is greater than the length of
* {@code classRep}.
* @throws NoClassDefFoundError
* if {@code className} is not equal to the name of the class
* contained in {@code classRep}.
*/
protected final Class<?> defineClass(String className, byte[] classRep, int offset, int length,
ProtectionDomain protectionDomain) throws java.lang.ClassFormatError {
throw new UnsupportedOperationException("can't load this type of class file");
}
/**
* Defines a new class with the specified name, byte code from the byte
* buffer and the optional protection domain. If the provided protection
* domain is {@code null} then a default protection domain is assigned to
* the class.
*
* @param name
* the expected name of the new class, may be {@code null} if not
* known.
* @param b
* the byte buffer containing the byte code of the new class.
* @param protectionDomain
* the protection domain to assign to the loaded class, may be
* {@code null}.
* @return the {@code Class} object created from the data in {@code b}.
* @throws ClassFormatError
* if {@code b} does not contain a valid class.
* @throws NoClassDefFoundError
* if {@code className} is not equal to the name of the class
* contained in {@code b}.
*/
protected final Class<?> defineClass(String name, ByteBuffer b,
ProtectionDomain protectionDomain) throws ClassFormatError {
throw new UnsupportedOperationException("can't load this type of class file");
}
/**
* Overridden by subclasses, throws a {@code ClassNotFoundException} by
* default. This method is called by {@code loadClass} after the parent
* {@code ClassLoader} has failed to find a loaded class of the same name.
*
* @param className
* the name of the class to look for.
* @return the {@code Class} object that is found.
* @throws ClassNotFoundException
* if the class cannot be found.
*/
protected Class<?> findClass(String className) throws ClassNotFoundException {
throw new ClassNotFoundException(className);
}
/**
* Returns the class with the specified name if it has already been loaded
* by the VM or {@code null} if it has not yet been loaded.
*
* @param className
* the name of the class to look for.
* @return the {@code Class} object or {@code null} if the requested class
* has not been loaded.
*/
protected final Class<?> findLoadedClass(String className) {
try {
return SystemClassLoader.loader.findClass(className);
} catch (ClassNotFoundException e) {
return null;
}
}
/**
* Finds the class with the specified name, loading it using the system
* class loader if necessary.
*
* @param className
* the name of the class to look for.
* @return the {@code Class} object with the requested {@code className}.
* @throws ClassNotFoundException
* if the class can not be found.
*/
protected final Class<?> findSystemClass(String className) throws ClassNotFoundException {
return Class.forName(className, false, getSystemClassLoader());
}
/**
* Returns this class loader's parent.
*
* @return this class loader's parent or {@code null}.
*/
public final ClassLoader getParent() {
return parent;
}
/**
* Returns the URL of the resource with the specified name. This
* implementation first tries to use the parent class loader to find the
* resource; if this fails then {@link #findResource(String)} is called to
* find the requested resource.
*
* @param resName
* the name of the resource to find.
* @return the {@code URL} object for the requested resource or {@code null}
* if the resource can not be found
* @see Class#getResource
*/
public URL getResource(String resName) {
URL resource = parent.getResource(resName);
if (resource == null) {
resource = findResource(resName);
}
return resource;
}
/**
* Returns an enumeration of URLs for the resource with the specified name.
* This implementation first uses this class loader's parent to find the
* resource, then it calls {@link #findResources(String)} to get additional
* URLs. The returned enumeration contains the {@code URL} objects of both
* find operations.
*
* @return an enumeration of {@code URL} objects for the requested resource.
* @param resName
* the name of the resource to find.
* @throws IOException
* if an I/O error occurs.
*/
public Enumeration<URL> getResources(String resName) throws IOException {
Enumeration<URL> first = parent.getResources(resName);
Enumeration<URL> second = findResources(resName);
return new TwoEnumerationsInOne(first, second);
}
/**
* Returns a stream for the resource with the specified name. See
* {@link #getResource(String)} for a description of the lookup algorithm
* used to find the resource.
*
* @return a stream for the resource or {@code null} if the resource can not be found
* @param resName
* the name of the resource to find.
* @see Class#getResourceAsStream
*/
public InputStream getResourceAsStream(String resName) {
try {
URL url = getResource(resName);
if (url != null) {
return url.openStream();
}
} catch (IOException ex) {
// Don't want to see the exception.
}
return null;
}
/**
* Loads the class with the specified name. Invoking this method is
* equivalent to calling {@code loadClass(className, false)}.
* <p>
* <strong>Note:</strong> In the Android reference implementation, the
* second parameter of {@link #loadClass(String, boolean)} is ignored
* anyway.
* </p>
*
* @return the {@code Class} object.
* @param className
* the name of the class to look for.
* @throws ClassNotFoundException
* if the class can not be found.
*/
public Class<?> loadClass(String className) throws ClassNotFoundException {
return loadClass(className, false);
}
/**
* Loads the class with the specified name, optionally linking it after
* loading. The following steps are performed:
* <ol>
* <li> Call {@link #findLoadedClass(String)} to determine if the requested
* class has already been loaded.</li>
* <li>If the class has not yet been loaded: Invoke this method on the
* parent class loader.</li>
* <li>If the class has still not been loaded: Call
* {@link #findClass(String)} to find the class.</li>
* </ol>
* <p>
* <strong>Note:</strong> In the Android reference implementation, the
* {@code resolve} parameter is ignored; classes are never linked.
* </p>
*
* @return the {@code Class} object.
* @param className
* the name of the class to look for.
* @param resolve
* Indicates if the class should be resolved after loading. This
* parameter is ignored on the Android reference implementation;
* classes are not resolved.
* @throws ClassNotFoundException
* if the class can not be found.
*/
protected Class<?> loadClass(String className, boolean resolve) throws ClassNotFoundException {
Class<?> clazz = findLoadedClass(className);
if (clazz == null) {
try {
clazz = parent.loadClass(className, false);
} catch (ClassNotFoundException e) {
// Don't want to see this.
}
if (clazz == null) {
clazz = findClass(className);
}
}
return clazz;
}
/**
* Forces a class to be linked (initialized). If the class has already been
* linked this operation has no effect.
* <p>
* <strong>Note:</strong> In the Android reference implementation, this
* method has no effect.
* </p>
*
* @param clazz
* the class to link.
*/
protected final void resolveClass(Class<?> clazz) {
// no-op, doesn't make sense on android.
}
/**
* Finds the URL of the resource with the specified name. This
* implementation just returns {@code null}; it should be overridden in
* subclasses.
*
* @param resName
* the name of the resource to find.
* @return the {@code URL} object for the requested resource.
*/
protected URL findResource(String resName) {
return null;
}
/**
* Finds an enumeration of URLs for the resource with the specified name.
* This implementation just returns an empty {@code Enumeration}; it should
* be overridden in subclasses.
*
* @param resName
* the name of the resource to find.
* @return an enumeration of {@code URL} objects for the requested resource.
* @throws IOException
* if an I/O error occurs.
*/
protected Enumeration<URL> findResources(String resName) throws IOException {
return Collections.emptyEnumeration();
}
/**
* Returns the absolute path of the native library with the specified name,
* or {@code null}. If this method returns {@code null} then the virtual
* machine searches the directories specified by the system property
* "java.library.path".
* <p>
* This implementation always returns {@code null}.
* </p>
*
* @param libName
* the name of the library to find.
* @return the absolute path of the library.
*/
protected String findLibrary(String libName) {
return null;
}
/**
* Returns the package with the specified name. Package information is
* searched in this class loader.
*
* @param name
* the name of the package to find.
* @return the package with the requested name; {@code null} if the package
* can not be found.
*/
protected Package getPackage(String name) {
synchronized (packages) {
return packages.get(name);
}
}
/**
* Returns all the packages known to this class loader.
*
* @return an array with all packages known to this class loader.
*/
protected Package[] getPackages() {
synchronized (packages) {
Collection<Package> col = packages.values();
Package[] result = new Package[col.size()];
col.toArray(result);
return result;
}
}
/**
* Defines and returns a new {@code Package} using the specified
* information. If {@code sealBase} is {@code null}, the package is left
* unsealed. Otherwise, the package is sealed using this URL.
*
* @param name
* the name of the package.
* @param specTitle
* the title of the specification.
* @param specVersion
* the version of the specification.
* @param specVendor
* the vendor of the specification.
* @param implTitle
* the implementation title.
* @param implVersion
* the implementation version.
* @param implVendor
* the specification vendor.
* @param sealBase
* the URL used to seal this package or {@code null} to leave the
* package unsealed.
* @return the {@code Package} object that has been created.
* @throws IllegalArgumentException
* if a package with the specified name already exists.
*/
protected Package definePackage(String name, String specTitle, String specVersion,
String specVendor, String implTitle, String implVersion, String implVendor, URL sealBase)
throws IllegalArgumentException {
throw new UnsupportedOperationException("new package definitions unsupported");
}
/**
* Sets the signers of the specified class. This implementation does
* nothing.
*
* @param c
* the {@code Class} object for which to set the signers.
* @param signers
* the signers for {@code c}.
*/
protected final void setSigners(Class<?> c, Object[] signers) {
}
/**
* Sets the assertion status of the class with the specified name.
* <p>
* <strong>Note: </strong>This method does nothing in the Android reference
* implementation.
* </p>
*
* @param cname
* the name of the class for which to set the assertion status.
* @param enable
* the new assertion status.
*/
public void setClassAssertionStatus(String cname, boolean enable) {
}
/**
* Sets the assertion status of the package with the specified name.
* <p>
* <strong>Note: </strong>This method does nothing in the Android reference
* implementation.
* </p>
*
* @param pname
* the name of the package for which to set the assertion status.
* @param enable
* the new assertion status.
*/
public void setPackageAssertionStatus(String pname, boolean enable) {
}
/**
* Sets the default assertion status for this class loader.
*
* @param enable
* the new assertion status.
*/
public void setDefaultAssertionStatus(boolean enable) {
}
/**
* Sets the default assertion status for this class loader to {@code false}
* and removes any package default and class assertion status settings.
* <p>
* <strong>Note:</strong> This method does nothing in the Android reference
* implementation.
* </p>
*/
public void clearAssertionStatus() {
}
}
/*
* Provides a helper class that combines two existing URL enumerations into one.
* It is required for the getResources() methods. Items are fetched from the
* first enumeration until it's empty, then from the second one.
*/
class TwoEnumerationsInOne implements Enumeration<URL> {
private Enumeration<URL> first;
private Enumeration<URL> second;
public TwoEnumerationsInOne(Enumeration<URL> first, Enumeration<URL> second) {
this.first = first;
this.second = second;
}
public boolean hasMoreElements() {
return first.hasMoreElements() || second.hasMoreElements();
}
public URL nextElement() {
if (first.hasMoreElements()) {
return first.nextElement();
} else {
return second.nextElement();
}
}
}
/**
 * ClassLoader for iOS and OS X. This is the singleton returned by
 * {@code ClassLoader.getSystemClassLoader()}; class and resource lookup is
 * implemented natively via OCNI blocks (the {@code /}{@code *-[ ... ]-*}{@code /}
 * comments are compiled as Objective-C by the J2ObjC transpiler and must not
 * be edited as ordinary comments).
 */
class SystemClassLoader extends ClassLoader {
  // Shared singleton instance used throughout the ClassLoader emulation.
  static ClassLoader loader = new SystemClassLoader();
  SystemClassLoader() {
    // null parent is allowed only here: this is the root loader.
    super(null, true);
  }
  // Resolves a class name through the iOS runtime (IOSClass).
  @Override
  protected native Class<?> findClass(String name) throws ClassNotFoundException /*-[
  nil_chk(name);
  return [IOSClass forName:name initialize:YES classLoader:self];
  ]-*/;
  // Looks the resource up in the app's main NSBundle; returns null when absent.
  @Override
  protected native URL findResource(String name) /*-[
  if (!name) {
  return nil;
  }
  NSBundle *bundle = [NSBundle mainBundle];
  NSURL *nativeURL = [bundle URLForResource:name withExtension:nil];
  return nativeURL ? AUTORELEASE([[JavaNetURL alloc] initWithNSString:[nativeURL description]])
  : nil;
  ]-*/;
  // Collects matching resources from every loaded NSBundle.
  @Override
  protected native Enumeration<URL> findResources(String name) throws IOException /*-[
  if (!name) {
  return [super findResourcesWithNSString:name];
  }
  JavaUtilArrayList *urls = AUTORELEASE([[JavaUtilArrayList alloc] init]);
  for (NSBundle *bundle in [NSBundle allBundles]) {
  NSURL *nativeURL = [bundle URLForResource:name withExtension:nil];
  if (nativeURL) {
  JavaNetURL *url =
  AUTORELEASE([[JavaNetURL alloc] initWithNSString:[nativeURL description]]);
  [urls addWithId:url];
  }
  }
  return JavaUtilCollections_enumerationWithJavaUtilCollection_(urls);
  ]-*/;
  @Override
  protected synchronized Class<?> loadClass(String name, boolean resolve)
      throws ClassNotFoundException {
    // All iOS classes are resolved.
    return findClass(name);
  }
  // Root loader: no parent to delegate to, so go straight to findResource.
  @Override
  public URL getResource(String resName) {
    return findResource(resName);
  }
  // Root loader: no parent to delegate to, so go straight to findResources.
  @Override
  public Enumeration<URL> getResources(String resName) throws IOException {
    return findResources(resName);
  }
}
| |
package com.google.javascript.gents;
import com.google.common.base.Preconditions;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.javascript.jscomp.AbstractCompiler;
import com.google.javascript.jscomp.CompilerPass;
import com.google.javascript.jscomp.JSError;
import com.google.javascript.jscomp.NodeTraversal;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.NodeUtil;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.JSDocInfo.Visibility;
import com.google.javascript.rhino.JSTypeExpression;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
/**
* Converts ES5 JavaScript classes and interfaces into ES6 JavaScript classes and TypeScript
* interfaces. Prototype declarations are converted into the new class definitions of ES6.
*/
public final class TypeConversionPass implements CompilerPass {
  // Compiler instance used to report code changes and conversion errors.
  private final AbstractCompiler compiler;
  // Carries source comments across node replacements so they are not lost.
  private final NodeComments nodeComments;
  // Per-file registry mapping a type name to its converted node; reset for
  // each script in process() so files are converted independently.
  private Map<String, Node> types;

  /**
   * Creates the pass.
   *
   * @param compiler the compiler driving the traversals
   * @param nodeComments comment tracker shared with the other gents passes
   */
  public TypeConversionPass(AbstractCompiler compiler, NodeComments nodeComments) {
    this.compiler = compiler;
    this.nodeComments = nodeComments;
    this.types = new LinkedHashMap<>();
  }
@Override
public void process(Node externs, Node root) {
for (Node child : root.children()) {
// We convert each file independently to avoid merging class methods from different files.
if (child.isScript()) {
this.types = new LinkedHashMap<>();
NodeTraversal.traverseEs6(compiler, child, new TypeConverter());
NodeTraversal.traverseEs6(compiler, child, new TypeMemberConverter());
NodeTraversal.traverseEs6(compiler, child, new FieldOnThisConverter());
NodeTraversal.traverseEs6(compiler, child, new InheritanceConverter());
}
}
}
  /** Converts @constructor annotated functions into classes. */
  private class TypeConverter extends AbstractPostOrderCallback {
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      switch (n.getToken()) {
        case FUNCTION:
          // Functions annotated @constructor or @interface become class /
          // interface definitions.
          JSDocInfo bestJSDocInfo = NodeUtil.getBestJSDocInfo(n);
          if (bestJSDocInfo != null
              && (bestJSDocInfo.isConstructor() || bestJSDocInfo.isInterface())) {
            convertConstructorToClass(n, bestJSDocInfo);
          }
          break;
        case CALL:
          // goog.defineClass(...) calls are rewritten into class literals.
          if ("goog.defineClass".equals(n.getFirstChild().getQualifiedName())) {
            convertDefineClassToClass(n);
          }
          break;
        case GETPROP:
        case NAME:
        case VAR:
        case LET:
        case CONST:
          // Converts a typedef into an interface, which then later has its members converted in
          // TypeAnnotationPass.
          JSDocInfo jsdoc = NodeUtil.getBestJSDocInfo(n);
          if (jsdoc != null && jsdoc.hasTypedefType()) {
            // Extract the alias name from whichever node shape declared it:
            // a bare NAME, a GETPROP (last component), or a declaration whose
            // first child is the NAME.
            String name;
            if (n.getToken() == Token.NAME) {
              name = n.getString();
            } else if (n.getToken() == Token.GETPROP) {
              name = n.getSecondChild().getString();
            } else {
              name = n.getFirstChild().getString();
            }
            Node typeDef = Node.newString(Token.TYPE_ALIAS, name);
            types.put(name, typeDef);
            typeDef.setJSDocInfo(jsdoc);
            // Replace the whole expression statement when the typedef was a
            // statement; otherwise replace just the declaring node.
            if (parent.getToken() == Token.EXPR_RESULT) {
              parent.getParent().replaceChild(parent, typeDef);
            } else {
              parent.replaceChild(n, typeDef);
            }
          }
          break;
        default:
          break;
      }
    }
  }
/** Converts class prototype methods and static methods. */
private class TypeMemberConverter extends AbstractPostOrderCallback {
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
switch (n.getToken()) {
case CLASS:
addClassToScope(n);
break;
case EXPR_RESULT:
ClassMemberDeclaration declaration = ClassMemberDeclaration.newDeclaration(n, types);
if (declaration == null) {
break;
}
if (declaration.rhs != null && declaration.rhs.isFunction()) {
moveMethodsIntoClasses(declaration);
} else {
// Ignore field declarations without a type annotation
if (declaration.jsDoc != null && declaration.jsDoc.getType() != null) {
moveFieldsIntoClasses(declaration);
}
}
break;
default:
break;
}
}
}
  /** Converts fields declared internally inside a class using the "this" keyword. */
  private class FieldOnThisConverter extends AbstractPostOrderCallback {
    /** Map from class node to its field names. */
    private final Multimap<Node, String> classFieldMap = HashMultimap.create();
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      if (n.isExprResult()) {
        ClassMemberDeclaration declaration = ClassMemberDeclaration.newDeclarationOnThis(n);
        // Skip non-field expressions and fields already registered for this class.
        if (declaration == null || declarationHasBeenAdded(declaration)) {
          return;
        }
        Node fnNode = NodeUtil.getEnclosingFunction(n);
        String fnName = getEnclosingFunctionName(fnNode);
        // TODO(gmoothart): in many cases we should be able to infer the type from the rhs if there
        // is no jsDoc
        // Convert fields to parameter properties when we are in the constructor and have a
        // declaration of the form this.name = name;
        if ("constructor".equals(fnName)
            && declaration.jsDoc != null
            && declaration.rhsEqualToField()) {
          JSTypeExpression declarationType = declaration.jsDoc.getType();
          Node params = fnNode.getSecondChild();
          @Nullable JSDocInfo constructorJsDoc = NodeUtil.getBestJSDocInfo(fnNode);
          for (Node param : params.children()) {
            // Default-valued parameters store the NAME as their first child.
            String paramName =
                param.isDefaultValue() ? param.getFirstChild().getString() : param.getString();
            @Nullable
            JSTypeExpression paramType =
                constructorJsDoc == null ? null : constructorJsDoc.getParameterType(paramName);
            // Names must be equal. Types must be equal, or if the declaration has no type it is
            // assumed to be the type of the parameter.
            if (declaration.memberName.equals(paramName)
                && (declarationType == null || declarationType.equals(paramType))) {
              // Add visibility directly to param if possible
              moveAccessModifier(declaration, param);
              markAsConst(declaration, param);
              // The assignment becomes redundant once the parameter property
              // exists, so drop the whole statement.
              n.detachFromParent();
              compiler.reportCodeChange();
              return;
            }
          }
        }
        // Not promotable to a parameter property: emit a regular member field.
        moveFieldsIntoClasses(declaration);
        registerDeclaration(declaration);
      }
    }
    // Records the field so later duplicate declarations are skipped.
    private void registerDeclaration(ClassMemberDeclaration declaration) {
      classFieldMap.put(declaration.classNode, declaration.memberName);
    }
    private boolean declarationHasBeenAdded(ClassMemberDeclaration declaration) {
      Collection<String> classMembers = classFieldMap.get(declaration.classNode);
      return classMembers != null && classMembers.contains(declaration.memberName);
    }
    /** Moves the access modifier from the original declaration to the constructor parameter */
    void moveAccessModifier(ClassMemberDeclaration declaration, Node param) {
      if (Visibility.PRIVATE.equals(declaration.jsDoc.getVisibility())) {
        param.putProp(Node.ACCESS_MODIFIER, Visibility.PRIVATE);
      } else if (Visibility.PROTECTED.equals(declaration.jsDoc.getVisibility())) {
        param.putProp(Node.ACCESS_MODIFIER, Visibility.PROTECTED);
      } else {
        param.putProp(Node.ACCESS_MODIFIER, Visibility.PUBLIC);
      }
    }
    /** Mark constructor parameter as constant, so it can be annotated readonly */
    void markAsConst(ClassMemberDeclaration declaration, Node param) {
      if (declaration.jsDoc != null && declaration.jsDoc.isConstant()) {
        param.putBooleanProp(Node.IS_CONSTANT_NAME, true);
      }
    }
  }
/** Converts inheritance and superclass calls. */
private class InheritanceConverter extends AbstractPostOrderCallback {
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
switch (n.getToken()) {
case EXPR_RESULT:
maybeRemoveInherits(n);
break;
case CALL:
maybeReplaceSuperCall(n);
break;
default:
break;
}
}
}
  /** Converts @constructor annotated functions into class definitions. */
  void convertConstructorToClass(Node n, JSDocInfo jsDoc) {
    Preconditions.checkState(n.isFunction());
    Preconditions.checkState(n.getFirstChild().isName());
    Preconditions.checkState(n.getSecondChild().isParamList());
    Preconditions.checkState(n.getLastChild().isNormalBlock());
    String typeName = NodeUtil.getName(n);
    // Break up function
    Node name = n.getFirstChild();
    Node params = n.getSecondChild();
    Node body = n.getLastChild();
    n.detachChildren();
    // The empty name corresponds to anonymous constructors.
    // The name is usually located in the surrounding context.
    // ie. /** @constructor */ var A = function() {};
    // is converted to: var A = class {};
    if (name.getString().isEmpty()) {
      name = IR.empty();
    }
    // Superclass defaults to empty
    Node superClass = IR.empty();
    if (jsDoc.getBaseType() != null) {
      // Fullname of superclass
      // Closure Compiler generates non-nullable base classes:
      // ie. A.B.C is parsed as !A.B.C
      String superClassName =
          jsDoc
              .getBaseType()
              .getRoot()
              .getFirstChild() // ignore the ! node as we always output non nullable types
              .getString();
      superClass = NodeUtil.newQName(compiler, superClassName);
      superClass.useSourceInfoFrom(n);
    }
    Node typeNode;
    if (jsDoc.isInterface()) {
      // Interfaces extend their @implements types rather than a base class.
      List<JSTypeExpression> interfaces = jsDoc.getImplementedInterfaces();
      if (!interfaces.isEmpty()) {
        Node superInterfaces = new Node(Token.INTERFACE_EXTENDS);
        for (JSTypeExpression type : interfaces) {
          superInterfaces.addChildToBack(type.getRoot());
        }
        superClass = superInterfaces;
      }
      typeNode = new Node(Token.INTERFACE, name, superClass, new Node(Token.INTERFACE_MEMBERS));
      typeNode.useSourceInfoFromForTree(n);
      // Must be registered here, as JSCompiler cannot extract names from INTERFACE nodes.
      addTypeToScope(typeNode, typeName);
    } else {
      // Generate new class node with only a constructor method
      Node constructor =
          IR.memberFunctionDef("constructor", IR.function(IR.name(""), params, body));
      constructor.useSourceInfoFrom(n);
      // Sets jsdoc info to preserve type declarations on method
      constructor.setJSDocInfo(jsDoc);
      Node classMembers = new Node(Token.CLASS_MEMBERS, constructor);
      typeNode = new Node(Token.CLASS, name, superClass, classMembers);
    }
    // Preserve the original function's JSDoc and comments on the new node.
    typeNode.setJSDocInfo(n.getJSDocInfo());
    nodeComments.replaceWithComment(n, typeNode);
    compiler.reportCodeChange();
  }
  /** Converts goog.defineClass calls into class definitions. */
  void convertDefineClassToClass(Node n) {
    Preconditions.checkState(n.isCall());
    // Second argument of goog.defineClass is the superclass (or null).
    Node superClass = n.getSecondChild();
    if (superClass.isNull()) {
      superClass = IR.empty();
    } else {
      superClass.detachFromParent();
    }
    Node classMembers = new Node(Token.CLASS_MEMBERS);
    classMembers.useSourceInfoFrom(n);
    // Last argument is the object literal describing the class body.
    for (Node child : n.getLastChild().children()) {
      if (child.isStringKey() || child.isMemberFunctionDef()) {
        // Handle static methods
        if ("statics".equals(child.getString())) {
          for (Node child2 : child.getFirstChild().children()) {
            convertObjectLiteral(classMembers, child2, true);
          }
        } else { // prototype methods
          convertObjectLiteral(classMembers, child, false);
        }
      } else {
        // Add all other members, such as EMPTY comment nodes, as is.
        child.detachFromParent();
        classMembers.addChildToBack(child);
      }
    }
    // goog.defineClass produces an anonymous class expression, so the name
    // slot is left empty.
    Node classNode = new Node(Token.CLASS, IR.empty(), superClass, classMembers);
    classNode.useSourceInfoFrom(n);
    nodeComments.replaceWithComment(n, classNode);
    compiler.reportCodeChange();
  }
  /**
   * Converts functions and variables declared in object literals into member method and field
   * definitions
   *
   * @param classMembers the CLASS_MEMBERS node receiving the converted member
   * @param objectLiteralMember a STRING_KEY or MEMBER_FUNCTION_DEF entry of the literal
   * @param isStatic whether the member came from the "statics" section
   */
  void convertObjectLiteral(Node classMembers, Node objectLiteralMember, boolean isStatic) {
    Preconditions.checkState(
        objectLiteralMember.isStringKey() || objectLiteralMember.isMemberFunctionDef());
    Node value = objectLiteralMember.getFirstChild();
    value.detachFromParent();
    if (value.isFunction()) {
      Node n = IR.memberFunctionDef(objectLiteralMember.getString(), value);
      n.setJSDocInfo(objectLiteralMember.getJSDocInfo());
      n.setStaticMember(isStatic);
      // Methods added to back
      classMembers.addChildToBack(n);
      nodeComments.moveComment(objectLiteralMember, n);
    } else {
      Node n = Node.newString(Token.MEMBER_VARIABLE_DEF, objectLiteralMember.getString());
      n.addChildToBack(value);
      n.setJSDocInfo(objectLiteralMember.getJSDocInfo());
      n.setStaticMember(isStatic);
      // Fields added to front
      addFieldToClassMembers(classMembers, n);
      nodeComments.moveComment(objectLiteralMember, n);
    }
  }
  /**
   * Attempts to move a method declaration into a class definition. This generates a new
   * MEMBER_FUNCTION_DEF Node while removing the old function node from the AST.
   */
  void moveMethodsIntoClasses(ClassMemberDeclaration declaration) {
    Node classMembers = declaration.classNode.getLastChild();
    String fieldName = declaration.memberName;
    // Detach nodes in order to move them around in the AST.
    declaration.exprRoot.detachFromParent();
    declaration.rhs.detachFromParent();
    Node memberFunc = IR.memberFunctionDef(fieldName, declaration.rhs);
    memberFunc.setStaticMember(declaration.isStatic);
    memberFunc.setJSDocInfo(declaration.jsDoc);
    if (declaration.classNode.getToken() == Token.INTERFACE) {
      // Interface methods carry no implementation: report non-empty bodies
      // and replace the body with EMPTY.
      Node body = declaration.rhs.getLastChild();
      Preconditions.checkState(body.isNormalBlock());
      if (body.getChildCount() != 0) {
        compiler.report(
            JSError.make(
                declaration.rhs,
                GentsErrorManager.GENTS_CLASS_PASS_ERROR,
                String.format("Interface method %s should be empty.", declaration.memberName)));
      }
      declaration.rhs.replaceChild(body, new Node(Token.EMPTY));
    }
    // Append the new method to the class
    classMembers.addChildToBack(memberFunc);
    nodeComments.moveComment(declaration.exprRoot, memberFunc);
    compiler.reportCodeChange();
  }
  /**
   * Attempts to move a field declaration into a class definition. This generates a new
   * MEMBER_VARIABLE_DEF Node while persisting the old node in the AST.
   */
  void moveFieldsIntoClasses(ClassMemberDeclaration declaration) {
    Node classMembers = declaration.classNode.getLastChild();
    String fieldName = declaration.memberName;
    Node fieldNode = Node.newString(Token.MEMBER_VARIABLE_DEF, fieldName);
    fieldNode.setJSDocInfo(declaration.jsDoc);
    fieldNode.setStaticMember(declaration.isStatic);
    nodeComments.moveComment(declaration.exprRoot, fieldNode);
    if (declaration.rhs == null) {
      // Pure declaration with no value: the original statement can go away.
      declaration.exprRoot.detachFromParent();
    } else if (canPromoteFieldInitializer(declaration)) {
      // Safe literal initializer: move it onto the class field itself.
      declaration.exprRoot.detachFromParent();
      declaration.rhs.detachFromParent();
      fieldNode.addChildToBack(declaration.rhs);
    } else {
      // Initializer stays where it was; only its comment moves to the field.
      nodeComments.clearComment(declaration.exprRoot);
    }
    addFieldToClassMembers(classMembers, fieldNode);
    compiler.reportCodeChange();
  }
/**
 * Returns whether the initializer of a field declaration may be promoted onto the class
 * member itself. Promotion only happens when the right-hand side is a literal and the
 * assignment occurs either outside any function or inside the constructor.
 */
private boolean canPromoteFieldInitializer(ClassMemberDeclaration declaration) {
  // Non-literal initializers may depend on runtime state, so they stay where they are.
  if (!NodeUtil.isLiteralValue(declaration.rhs, false)) {
    return false;
  }
  Node enclosingFn = NodeUtil.getEnclosingFunction(declaration.exprRoot);
  if (enclosingFn == null) {
    // Top-level assignment, outside any function: safe to promote.
    return true;
  }
  // Inside a function: only assignments made in the constructor may be promoted.
  return "constructor".equals(getEnclosingFunctionName(enclosingFn));
}
/**
 * Attempts to remove an inheritance statement. ex. goog.inherits(base, super)
 *
 * <p>This returns without any modification if the node is not an inheritance statement. This
 * fails by reporting an error when the node is an invalid inheritance statement.
 */
void maybeRemoveInherits(Node exprNode) {
Preconditions.checkState(exprNode.isExprResult());
if (exprNode.getFirstChild().isCall()) {
Node callNode = exprNode.getFirstChild();
// Remove goog.inherits calls
if (!"goog.inherits".equals(callNode.getFirstChild().getQualifiedName())) {
return;
}
// goog.inherits(child, parent): second argument is the subclass, last the superclass.
String className = callNode.getSecondChild().getQualifiedName();
String superClassName = callNode.getLastChild().getQualifiedName();
// Check that class exists
if (!types.containsKey(className)) {
compiler.report(
JSError.make(
exprNode,
GentsErrorManager.GENTS_CLASS_PASS_ERROR,
String.format("Class %s could not be found.", className)));
return;
}
// Check that superclass is consistent
Node classNode = types.get(className);
// NOTE(review): if the extends clause is non-empty but not a qualified name,
// storedSuperClassName could be null and the equals() below would NPE — confirm the
// class pass guarantees a qualified-name superclass here.
String storedSuperClassName = classNode.getSecondChild().getQualifiedName();
if (classNode.getSecondChild().isEmpty() || !storedSuperClassName.equals(superClassName)) {
compiler.report(
JSError.make(
exprNode,
GentsErrorManager.GENTS_CLASS_PASS_ERROR,
String.format("Invalid superclass for %s", className)));
return;
}
// Redundant goog.inherits confirmed: drop the whole statement.
exprNode.detachFromParent();
compiler.reportCodeChange();
} else if (exprNode.getFirstChild().isAssign()) {
Node assignNode = exprNode.getFirstChild();
// Report error if trying to assign to prototype directly
Node lhs = assignNode.getFirstChild();
if (lhs.isGetProp() && "prototype".equals(lhs.getLastChild().getString())) {
compiler.report(
JSError.make(
exprNode,
GentsErrorManager.GENTS_CLASS_PASS_ERROR,
String.format(
"Cannot directly assign to prototype for %s",
lhs.getFirstChild().getQualifiedName())));
}
}
}
/**
 * Attempts to convert a ES5 superclass call into a ES6 super() call.
 *
 * <p>Examples:
 *
 * <pre>
 * B.call(this, args) -> super(args);
 * B.prototype.foo.call(this, args) ->super.foo(args);
 * A.base(this, 'constructor', args) -> super(args);
 * A.base(this, 'foo', args) -> super.foo(args);
 * </pre>
 *
 * <p>This returns without any modification if the node is not an superclass call statement.
 */
void maybeReplaceSuperCall(Node callNode) {
Preconditions.checkState(callNode.isCall());
String callName = callNode.getFirstChild().getQualifiedName();
// First validate that we are inside a constructor call that extends another class
Node classNode = NodeUtil.getEnclosingClass(callNode);
if (callName == null || classNode == null) {
return;
}
String className = NodeUtil.getName(classNode);
// Translate super constructor or super method calls as follows:
// A.base(this, 'constructor', args) -> super(args);
// A.base(this, 'foo', args) -> super.foo(args);
if (callName.equals(className + ".base") && callNode.getSecondChild().isThis()) {
// Super calls for root classes are not converted
if (classNode.getSecondChild().isEmpty()) {
compiler.report(
JSError.make(
callNode,
GentsErrorManager.GENTS_CLASS_PASS_ERROR,
String.format("Cannot call superclass in root class %s", className)));
return;
}
// Third argument of A.base(...) names the method being invoked on the superclass.
String methodName = callNode.getChildAtIndex(2).getString();
if ("constructor".equals(methodName)) {
nodeComments.replaceWithComment(callNode.getFirstChild(), IR.superNode());
} else {
nodeComments.replaceWithComment(
callNode.getFirstChild(), NodeUtil.newQName(compiler, "super." + methodName));
}
// Remove twice to get rid of "this" and the method name
callNode.removeChild(callNode.getSecondChild());
callNode.removeChild(callNode.getSecondChild());
compiler.reportCodeChange();
return;
}
String superClassName = classNode.getSecondChild().getQualifiedName();
// B.call(this, args) -> super(args);
if (callName.equals(superClassName + ".call") && callNode.getSecondChild().isThis()) {
nodeComments.replaceWithComment(callNode.getFirstChild(), IR.superNode());
callNode.removeChild(callNode.getSecondChild());
compiler.reportCodeChange();
return;
}
// B.prototype.foo.call(this, args) -> super.foo(args);
if (callName.startsWith(superClassName + ".prototype.") && callName.endsWith(".call")) {
if (callNode.getSecondChild().isThis()) {
// Determine name of method being called
Node nameNode = callNode.getFirstChild().getFirstChild();
Node n = nameNode;
// Walk down the GETPROP chain until the node whose last child is "prototype",
// then replace that prefix (B.prototype) with the `super` keyword.
while (!n.getLastChild().getString().equals("prototype")) {
n = n.getFirstChild();
}
nameNode.detachFromParent();
nodeComments.replaceWithComment(n, IR.superNode());
nodeComments.replaceWithComment(callNode.getFirstChild(), nameNode);
callNode.removeChild(callNode.getSecondChild());
compiler.reportCodeChange();
return;
}
}
}
/**
 * Inserts {@code field} into {@code classMembers} ahead of the first MEMBER_FUNCTION_DEF,
 * so fields always precede methods. If the class has no methods yet, the field is appended.
 */
void addFieldToClassMembers(Node classMembers, Node field) {
  Node firstMethod = null;
  for (Node member : classMembers.children()) {
    if (member.isMemberFunctionDef()) {
      firstMethod = member;
      break;
    }
  }
  if (firstMethod == null) {
    classMembers.addChildToBack(field);
  } else {
    classMembers.addChildBefore(field, firstMethod);
  }
}
/**
 * Registers a CLASS node in the top-level type scope, keyed by its nearest available
 * name. Anonymous classes (which have no resolvable name) are silently skipped.
 */
void addClassToScope(Node n) {
  Preconditions.checkState(n.isClass());
  String className = NodeUtil.getName(n);
  // Anonymous class expressions legitimately lack a name; nothing to register, no error.
  if (className != null) {
    addTypeToScope(n, className);
  }
}
/**
 * Records {@code typeName -> n} in the type table. A duplicate definition is reported as
 * an error and the first registration is kept untouched.
 */
private void addTypeToScope(Node n, String typeName) {
  if (!types.containsKey(typeName)) {
    types.put(typeName, n);
    return;
  }
  // Duplicate definition: surface an error and keep the existing entry.
  compiler.report(
      JSError.make(
          n,
          GentsErrorManager.GENTS_CLASS_PASS_ERROR,
          String.format("Type %s has been defined multiple times.", typeName)));
}
/**
 * Returns the name of the given function node, or {@code null} for arrow functions,
 * which never carry a name of their own.
 */
private String getEnclosingFunctionName(Node fnNode) {
  // Arrow functions are anonymous by construction.
  if (fnNode.isArrowFunction()) {
    return null;
  }
  // Use the QualifiedName if the function is on an object/namespace: `foo.moreFoo()`;
  // otherwise, use the string on the node: `foo` for `function foo()`
  Node fnParent = fnNode.getParent();
  boolean onQualifiedName = fnParent.isGetProp() || fnParent.isCall();
  return onQualifiedName ? NodeUtil.getName(fnNode) : fnParent.getString();
}
/** Represents a declaration of a class member. */
private static class ClassMemberDeclaration {
// The full statement node (EXPR_RESULT) that declares the member.
Node exprRoot;
// Right-hand side of the assignment, or null for a bare declaration.
Node rhs;
// Best JSDoc found for the declaration (may be null).
JSDocInfo jsDoc;
// Whether the member is declared on the class itself rather than its prototype.
boolean isStatic;
// The CLASS node the member belongs to.
Node classNode;
// Simple (unqualified) name of the member.
String memberName;
private ClassMemberDeclaration(Node n, boolean isStatic, Node classNode, String memberName) {
this.exprRoot = n;
this.rhs = getRhs(n);
this.jsDoc = NodeUtil.getBestJSDocInfo(n);
this.isStatic = isStatic;
this.classNode = classNode;
this.memberName = memberName;
}
/** Returns whether the rhs is the same as the method name being declared eg. this.a = a; */
boolean rhsEqualToField() {
return rhs != null && memberName.equals(rhs.getQualifiedName());
}
/**
 * Factory method for creating a new ClassMemberDeclaration on a declaration external to a
 * class.
 *
 * <ul>
 *   <li><code>A.prototype.foo = function() {...}</code>
 *   <li><code>A.prototype.w = 4</code>
 *   <li><code>A.prototype.x</code>
 *   <li><code>A.bar = function() {...}</code>
 *   <li><code>A.y = 6</code>
 *   <li><code>A.z</code>
 * </ul>
 *
 * Returns null if the expression node is an invalid member declaration.
 */
@Nullable
static ClassMemberDeclaration newDeclaration(Node n, Map<String, Node> classes) {
Node fullName = getFullName(n);
// Node MUST NOT start with "this."
if (!fullName.isGetProp() || containsThis(fullName)) {
return null;
}
boolean isStatic = isStatic(fullName);
// Static member: class is the direct receiver (A.y). Prototype member: the class is
// one level deeper (A.prototype.w -> A).
String className =
isStatic
? fullName.getFirstChild().getQualifiedName()
: fullName.getFirstFirstChild().getQualifiedName();
// Class must exist in scope
if (!classes.containsKey(className)) {
return null;
}
Node classNode = classes.get(className);
String memberName = fullName.getLastChild().getString();
return new ClassMemberDeclaration(n, isStatic, classNode, memberName);
}
/**
 * Factory method for creating a new ClassMemberDeclarationOnThis on a declaration internal to a
 * class via the "this" keyword.
 *
 * <ul>
 *   <li>{@code this.a = 5}
 *   <li>{@code this.b}
 * </ul>
 *
 * Returns null if the expression node is an invalid member declaration.
 */
@Nullable
static ClassMemberDeclaration newDeclarationOnThis(Node n) {
Node fullName = getFullName(n);
// Node MUST start with "this." and be shallow, i.e. "this.foo".
// "this.foo.bar" and other nestings are not declarations and are ignored.
// fullName is a binary tree and multiple parts are represented by GETPROP
// nodes recursively on the left (first) child, so a first child of THIS is
// sufficient to ensure the name is of the form "this.foo".
if (!fullName.isGetProp() || !fullName.getFirstChild().isThis()) {
return null;
}
Node classNode = NodeUtil.getEnclosingClass(n);
String memberName = fullName.getLastChild().getString();
// Declarations on "this" outside any class body cannot be attributed to a class.
if (classNode == null) {
return null;
}
return new ClassMemberDeclaration(n, false, classNode, memberName);
}
/** Returns the full name of the class member being declared. */
static Node getFullName(Node n) {
return n.getFirstChild().isAssign() ? n.getFirstFirstChild() : n.getFirstChild();
}
/** Returns the right hand side of the member declaration. */
static Node getRhs(Node n) {
return n.getFirstChild().isAssign() ? n.getFirstChild().getLastChild() : null;
}
/** Returns whether a name starts with "this." */
static boolean containsThis(Node fullName) {
return fullName.isThis() || (fullName.isGetProp() && containsThis(fullName.getFirstChild()));
}
/** Returns if a name refers to a static member of a class. */
static boolean isStatic(Node fullName) {
// Anything not of the form <qualifier>.prototype.<name> is treated as static.
return !(fullName.getFirstChild().isGetProp()
&& "prototype".equals(fullName.getFirstChild().getLastChild().getString()));
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.http;
import org.apache.http.message.BasicHeader;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.ActionFilter;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Response;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.GeoShapeQueryBuilder;
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder;
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.indices.TermsLookup;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import static java.util.Collections.singletonList;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
/**
 * Integration test verifying that the HTTP header registered by {@link CustomHeadersPlugin}
 * is propagated into the thread context and is therefore visible on every transport-level
 * sub-request (get, search, multi-termvectors, ...) that a single client call fans out into.
 * Requests are captured by {@link LoggingFilter}.
 */
@ClusterScope(scope = SUITE)
public class ContextAndHeaderTransportIT extends HttpSmokeTestCase {
// Every request observed by LoggingFilter together with the thread-context headers active
// when it executed. Static because the filter is instantiated by the node's injector, not
// by this test instance; CopyOnWriteArrayList because filters may run concurrently.
private static final List<RequestAndHeaders> requests = new CopyOnWriteArrayList<>();
private static final String CUSTOM_HEADER = "SomeCustomHeader";
// Fresh random header value per test instance so assertions can't match stale state.
private String randomHeaderValue = randomAlphaOfLength(20);
private String queryIndex = "query-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
private String lookupIndex = "lookup-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
@Override
protected Settings nodeSettings(int nodeOrdinal) {
// Enable HTTP so the REST-level test can reach the node.
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
.put(NetworkModule.HTTP_ENABLED.getKey(), true)
.build();
}
@Override
protected boolean ignoreExternalCluster() {
return true;
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
// Install the request-capturing filter and the plugin that registers CUSTOM_HEADER.
ArrayList<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
plugins.add(ActionLoggingPlugin.class);
plugins.add(CustomHeadersPlugin.class);
return plugins;
}
// Creates the query and lookup indices with a geo_shape + text mapping and clears the
// captured-request log so each test starts from a clean slate.
@Before
public void createIndices() throws Exception {
String mapping = jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("location").field("type", "geo_shape").endObject()
.startObject("name").field("type", "text").endObject()
.endObject()
.endObject().endObject().string();
Settings settings = Settings.builder()
.put(indexSettings())
.put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable.
.build();
assertAcked(transportClient().admin().indices().prepareCreate(lookupIndex)
.setSettings(settings).addMapping("type", mapping));
assertAcked(transportClient().admin().indices().prepareCreate(queryIndex)
.setSettings(settings).addMapping("type", mapping));
ensureGreen(queryIndex, lookupIndex);
requests.clear();
}
// Index and refresh happen in every test, so verify their headers globally afterwards.
@After
public void checkAllRequestsContainHeaders() {
assertRequestsContainHeader(IndexRequest.class);
assertRequestsContainHeader(RefreshRequest.class);
}
// A terms-lookup query triggers an internal GetRequest; it must carry the custom header.
public void testThatTermsLookupGetRequestContainsContextAndHeaders() throws Exception {
transportClient().prepareIndex(lookupIndex, "type", "1")
.setSource(jsonBuilder().startObject().array("followers", "foo", "bar", "baz").endObject()).get();
transportClient().prepareIndex(queryIndex, "type", "1")
.setSource(jsonBuilder().startObject().field("username", "foo").endObject()).get();
transportClient().admin().indices().prepareRefresh(queryIndex, lookupIndex).get();
TermsLookup termsLookup = new TermsLookup(lookupIndex, "type", "1", "followers");
TermsQueryBuilder termsLookupFilterBuilder = QueryBuilders.termsLookupQuery("username", termsLookup);
BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).must(termsLookupFilterBuilder);
SearchResponse searchResponse = transportClient()
.prepareSearch(queryIndex)
.setQuery(queryBuilder)
.get();
assertNoFailures(searchResponse);
assertHitCount(searchResponse, 1);
assertGetRequestsContainHeaders();
}
// An indexed-shape geo query fetches the shape via an internal GetRequest; verify headers.
public void testThatGeoShapeQueryGetRequestContainsContextAndHeaders() throws Exception {
transportClient().prepareIndex(lookupIndex, "type", "1").setSource(jsonBuilder().startObject()
.field("name", "Munich Suburban Area")
.startObject("location")
.field("type", "polygon")
.startArray("coordinates").startArray()
.startArray().value(11.34).value(48.25).endArray()
.startArray().value(11.68).value(48.25).endArray()
.startArray().value(11.65).value(48.06).endArray()
.startArray().value(11.37).value(48.13).endArray()
.startArray().value(11.34).value(48.25).endArray() // close the polygon
.endArray().endArray()
.endObject()
.endObject())
.get();
// second document
transportClient().prepareIndex(queryIndex, "type", "1").setSource(jsonBuilder().startObject()
.field("name", "Munich Center")
.startObject("location")
.field("type", "point")
.startArray("coordinates").value(11.57).value(48.13).endArray()
.endObject()
.endObject())
.get();
transportClient().admin().indices().prepareRefresh(lookupIndex, queryIndex).get();
GeoShapeQueryBuilder queryBuilder = QueryBuilders.geoShapeQuery("location", "1", "type")
.indexedShapeIndex(lookupIndex)
.indexedShapePath("location");
SearchResponse searchResponse = transportClient()
.prepareSearch(queryIndex)
.setQuery(queryBuilder)
.get();
assertNoFailures(searchResponse);
assertHitCount(searchResponse, 1);
assertThat(requests, hasSize(greaterThan(0)));
assertGetRequestsContainHeaders();
}
// A more-like-this query with item docs spawns a MultiTermVectorsRequest; verify headers.
public void testThatMoreLikeThisQueryMultiTermVectorRequestContainsContextAndHeaders() throws Exception {
transportClient().prepareIndex(lookupIndex, "type", "1")
.setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject())
.get();
transportClient().prepareIndex(queryIndex, "type", "1")
.setSource(jsonBuilder().startObject().field("name", "Jar Jar Binks - A horrible mistake").endObject())
.get();
transportClient().prepareIndex(queryIndex, "type", "2")
.setSource(jsonBuilder().startObject().field("name", "Star Wars - Return of the jedi").endObject())
.get();
transportClient().admin().indices().prepareRefresh(lookupIndex, queryIndex).get();
MoreLikeThisQueryBuilder moreLikeThisQueryBuilder = QueryBuilders.moreLikeThisQuery(new String[]{"name"}, null,
new Item[]{new Item(lookupIndex, "type", "1")})
.minTermFreq(1)
.minDocFreq(1);
SearchResponse searchResponse = transportClient()
.prepareSearch(queryIndex)
.setQuery(moreLikeThisQueryBuilder)
.get();
assertNoFailures(searchResponse);
assertHitCount(searchResponse, 1);
assertRequestsContainHeader(MultiTermVectorsRequest.class);
}
// Only headers registered via getRestHeaders() should cross from HTTP into the request
// context; an arbitrary header must be dropped.
public void testThatRelevantHttpHeadersBecomeRequestHeaders() throws IOException {
final String IRRELEVANT_HEADER = "SomeIrrelevantHeader";
Response response = getRestClient().performRequest("GET", "/" + queryIndex + "/_search",
new BasicHeader(CUSTOM_HEADER, randomHeaderValue), new BasicHeader(IRRELEVANT_HEADER, randomHeaderValue));
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
List<RequestAndHeaders> searchRequests = getRequests(SearchRequest.class);
assertThat(searchRequests, hasSize(greaterThan(0)));
for (RequestAndHeaders requestAndHeaders : searchRequests) {
assertThat(requestAndHeaders.headers.containsKey(CUSTOM_HEADER), is(true));
// was not specified, thus is not included
assertThat(requestAndHeaders.headers.containsKey(IRRELEVANT_HEADER), is(false));
}
}
// Returns all captured requests of exactly the given class.
private List<RequestAndHeaders> getRequests(Class<?> clazz) {
List<RequestAndHeaders> results = new ArrayList<>();
for (RequestAndHeaders request : requests) {
if (request.request.getClass().equals(clazz)) {
results.add(request);
}
}
return results;
}
private void assertRequestsContainHeader(Class<? extends ActionRequest> clazz) {
List<RequestAndHeaders> classRequests = getRequests(clazz);
for (RequestAndHeaders request : classRequests) {
assertRequestContainsHeader(request.request, request.headers);
}
}
private void assertGetRequestsContainHeaders() {
assertGetRequestsContainHeaders(this.lookupIndex);
}
// Asserts the custom header on every captured GetRequest that targeted the given index.
private void assertGetRequestsContainHeaders(String index) {
List<RequestAndHeaders> getRequests = getRequests(GetRequest.class);
assertThat(getRequests, hasSize(greaterThan(0)));
for (RequestAndHeaders request : getRequests) {
if (!((GetRequest)request.request).index().equals(index)) {
continue;
}
assertRequestContainsHeader(request.request, request.headers);
}
}
private void assertRequestContainsHeader(ActionRequest request, Map<String, String> context) {
String msg = String.format(Locale.ROOT, "Expected header %s to be in request %s", CUSTOM_HEADER, request.getClass().getName());
if (request instanceof IndexRequest) {
// Richer failure message for index requests: include index/type/id coordinates.
IndexRequest indexRequest = (IndexRequest) request;
msg = String.format(Locale.ROOT, "Expected header %s to be in index request %s/%s/%s", CUSTOM_HEADER,
indexRequest.index(), indexRequest.type(), indexRequest.id());
}
assertThat(msg, context.containsKey(CUSTOM_HEADER), is(true));
assertThat(context.get(CUSTOM_HEADER).toString(), is(randomHeaderValue));
}
/**
 * a transport client that adds our random header
 */
private Client transportClient() {
return internalCluster().transportClient().filterWithHeader(Collections.singletonMap(CUSTOM_HEADER, randomHeaderValue));
}
// Plugin that installs LoggingFilter on every node so requests can be captured.
public static class ActionLoggingPlugin extends Plugin implements ActionPlugin {
@Override
public Collection<Module> createGuiceModules() {
return Collections.<Module>singletonList(new ActionLoggingModule());
}
@Override
public List<Class<? extends ActionFilter>> getActionFilters() {
return singletonList(LoggingFilter.class);
}
}
public static class ActionLoggingModule extends AbstractModule {
@Override
protected void configure() {
bind(LoggingFilter.class).asEagerSingleton();
}
}
// Records each action request plus the thread-context headers active at execution time.
public static class LoggingFilter extends ActionFilter.Simple {
private final ThreadPool threadPool;
@Inject
public LoggingFilter(Settings settings, ThreadPool pool) {
super(settings);
this.threadPool = pool;
}
@Override
public int order() {
// Run late so headers applied by earlier filters are visible in the snapshot.
return 999;
}
@Override
protected boolean apply(String action, ActionRequest request, ActionListener<?> listener) {
requests.add(new RequestAndHeaders(threadPool.getThreadContext().getHeaders(), request));
return true;
}
}
// Immutable pairing of a captured request with its thread-context headers.
private static class RequestAndHeaders {
final Map<String, String> headers;
final ActionRequest request;
private RequestAndHeaders(Map<String, String> headers, ActionRequest request) {
this.headers = headers;
this.request = request;
}
}
// Registers CUSTOM_HEADER as a REST header that must be copied into the request context.
public static class CustomHeadersPlugin extends Plugin implements ActionPlugin {
public Collection<String> getRestHeaders() {
return Collections.singleton(CUSTOM_HEADER);
}
}
}
| |
import java.util.Scanner;
/**
* CIS20 Asg4
* ArrayUtil.java
* Purpose: Utilities to use on arrays.
*
* @version 1.1 3/13/05
* @author Ed Parrish
*/
public class ArrayUtil {
    // Inclusive lower bound and exclusive upper bound for generated random values.
    public static final int LOW = 0;
    public static final int HIGH = 100;
    // Number of comparisons performed by the most recent sort/search call.
    private static int comparisons = 0;

    /**
     * The main method begins execution of Java application.
     *
     * @param args not used
     */
    public static void main(String[] args) {
        Scanner input = new Scanner(System.in);
        boolean valid = false;
        int size = 0;
        System.out.println(
            "\nThis program requests an array size from the user,\n"
            + "creates an array of that size filled with random numbers,\n"
            + "and tests the array.\n");
        while (!valid) {
            System.out.print("Enter the array size to test (0 to exit): ");
            size = input.nextInt();
            if (size == 0) {
                System.exit(0);
            } else if (size > 0) {
                valid = true;
            } else {
                System.out.println("Size cannot be a negative number!");
            }
        }
        int[] array = makeArray(size);
        runTests(array);
    }

    /**
     * Accessor method for comparisons.
     *
     * @return Current count of comparisons.
     */
    public static int getComparisons() {
        return comparisons;
    }

    /**
     * Runs tests on the int array: sorts it two ways, then searches for a
     * randomly chosen element, reporting comparison counts for each step.
     *
     * @param array The array to test.
     */
    public static void runTests(int[] array) {
        System.out.println("Original array elements are:");
        showArray(array);
        int[] arrayCopy = copyArray(array);
        bubbleSort(arrayCopy);
        System.out.println("\nBubble Sort comparisons needed: "
            + comparisons);
        System.out.println("Sorted array elements are:");
        showArray(arrayCopy);
        arrayCopy = copyArray(array);
        bubbleSortPlus(arrayCopy);
        System.out.println("Enhanced Bubble Sort comparisons needed: "
            + comparisons);
        showArray(arrayCopy, 0);
        // FIX: guard against an empty array before picking a random element to search for.
        if (array.length == 0) {
            return;
        }
        // FIX: multiply by array.length (not length - 1) so the last element can
        // also be chosen; the cast still keeps the index within [0, length - 1].
        int item = (int) (Math.random() * array.length);
        System.out.println("\nSearching for: " + array[item]);
        linearSearch(arrayCopy, array[item]);
        System.out.println("Linear search Comparisons needed: "
            + comparisons);
        binarySearch(arrayCopy, array[item]);
        System.out.println("Binary search Comparisons needed: "
            + comparisons);
    }

    /**
     * Creates an array of size and initializes the array with random
     * values between LOW (inclusive) and HIGH (exclusive).
     *
     * @param size The number of elements to create in the array.
     *
     * @return An array initialized with random values between LOW and HIGH - 1.
     */
    public static int[] makeArray(int size) {
        int[] array = new int[size];
        for (int i = 0; i < array.length; i++) {
            array[i] = (int) (Math.random() * (HIGH - LOW) + LOW);
        }
        return array;
    }

    /**
     * Creates a copy of an array and returns the copy.
     *
     * @param array The original array to copy from.
     *
     * @return A new array initialized with the elements of the original array.
     */
    public static int[] copyArray(int[] array) {
        int[] newArray = new int[array.length];
        for (int i = 0; i < array.length; i++) {
            newArray[i] = array[i];
        }
        return newArray;
    }

    /**
     * Sorts an array in ascending order using bubble sort.
     * Updates the comparisons counter as a side effect.
     *
     * @param array The array to sort.
     */
    public static void bubbleSort(int[] array) {
        comparisons = 0;
        for (int i = 0; i < array.length; i++) {
            for (int j = 0; j < array.length - 1; j++) {
                comparisons++;
                if (array[j] > array[j + 1]) {
                    swap(array, j, j + 1);
                }
            }
        }
    }

    /**
     * Sorts an array using an enhanced bubble sort that shrinks the scanned
     * range each pass and stops early once a pass makes no swaps.
     *
     * @param array The array to sort.
     */
    public static void bubbleSortPlus(int[] array) {
        boolean didSwap; // boolean indicating if a swap took place during pass
        comparisons = 0;
        for (int pass = 1; pass < array.length; pass++) {
            didSwap = false;
            // Elements past (length - pass) are already in final position.
            for (int element = 0; element < array.length - pass; element++) {
                comparisons++;
                if (array[element] > array[element + 1]) {
                    swap(array, element, element + 1);
                    didSwap = true;
                }
            }
            // if no swaps, terminate bubble sort
            if (!didSwap) {
                return;
            }
        }
    }

    /**
     * Swap two elements in an array
     *
     * @param array -- array of integers being sorted
     * @param first -- first position
     * @param second -- second position
     */
    static void swap(int array[], int first, int second) {
        int temp = array[first];
        array[first] = array[second];
        array[second] = temp;
    }

    /**
     * Searches an array for a specified key value using linear search.
     * Updates the comparisons counter as a side effect.
     *
     * @param array The array to search.
     * @param key The value for which to search.
     *
     * @return The index of the array element containing the key value,
     *         or -1 if the key was not found.
     */
    public static int linearSearch(int[] array, int key) {
        comparisons = 0;
        for (int counter = 0; counter < array.length; counter++) {
            comparisons++;
            // if array element equals key value, return location
            if (array[counter] == key) {
                return counter;
            }
        }
        return -1; // key not found
    }

    /**
     * Searches a sorted array for a specified key value using binary search.
     * Updates the comparisons counter as a side effect.
     *
     * @param array The array to search; must be sorted in ascending order.
     * @param key The value for which to search.
     *
     * @return The index of the array element containing the key value,
     *         or -1 if the key was not found.
     */
    public static int binarySearch(int[] array, int key) {
        int start = 0; // start element subscript
        int end = array.length - 1; // end element subscript
        int middle; // middle element subscript
        comparisons = 0;
        while (start <= end) {
            comparisons++;
            // FIX: unsigned shift avoids the int overflow of (start + end) / 2
            // when both subscripts are large.
            middle = (start + end) >>> 1;
            // if key matches middle element, return middle location
            if (key == array[middle]) {
                return middle;
            // if key less than middle element, set new end element
            } else if (key < array[middle]) {
                end = middle - 1;
            // key greater than middle element, set new start element
            } else {
                start = middle + 1;
            }
        }
        return -1; // key not found
    }

    /**
     * Iteratively prints an array from start to the end, comma-separated,
     * with a trailing newline after the last element.
     *
     * @param array The array to print.
     */
    public static void showArray(int[] array) {
        for (int i = 0; i < array.length - 1; i++) {
            System.out.print(array[i] + ", ");
        }
        if (array.length > 0) {
            System.out.println(array[array.length - 1]);
        }
    }

    /**
     * Recursively prints an array from start to the end.
     *
     * @param array The array to print.
     * @param start The starting position in the array to print.
     */
    public static void showArray(int[] array, int start) {
        if (start >= array.length - 1) {
            if (array.length > 0) {
                System.out.println(array[array.length - 1]);
            }
        } else {
            System.out.print(array[start] + ", ");
            showArray(array, ++start);
        }
    }
}
| |
/*
* Abora-Gold
* Part of the Abora hypertext project: http://www.abora.org
* Copyright 2003, 2005 David G Jones
*
* Translated from Udanax-Gold source code: http://www.udanax.com
* Copyright 1979-1999 Udanax.com. All rights reserved
*/
package info.dgjones.abora.gold.spaces.cross;
import info.dgjones.abora.gold.collection.basic.Int32Array;
import info.dgjones.abora.gold.collection.basic.PrimIntArray;
import info.dgjones.abora.gold.collection.basic.PtrArray;
import info.dgjones.abora.gold.cross.CrossMapping;
import info.dgjones.abora.gold.cross.CrossOrderSpec;
import info.dgjones.abora.gold.java.AboraSupport;
import info.dgjones.abora.gold.java.HashHelper;
import info.dgjones.abora.gold.java.exception.PasseException;
import info.dgjones.abora.gold.java.exception.SubclassResponsibilityException;
import info.dgjones.abora.gold.java.missing.smalltalk.Set;
import info.dgjones.abora.gold.spaces.basic.CoordinateSpace;
import info.dgjones.abora.gold.spaces.basic.Mapping;
import info.dgjones.abora.gold.spaces.basic.XnRegion;
import info.dgjones.abora.gold.spaces.cross.CrossRegion;
import info.dgjones.abora.gold.spaces.cross.CrossSpace;
import info.dgjones.abora.gold.spaces.cross.GenericCrossSpace;
import info.dgjones.abora.gold.spaces.cross.Tuple;
import info.dgjones.abora.gold.x.PrimSpec;
import info.dgjones.abora.gold.xcvr.Rcvr;
import info.dgjones.abora.gold.xpp.basic.Heaper;
/**
* Represents the cross of several coordinate spaces.
*/
public class CrossSpace extends CoordinateSpace {
// The component coordinate spaces, one per dimension. Treated as read-only
// after construction (see secretSubSpaces: "DO NOT MODIFY"); axes() hands out a copy.
protected PtrArray mySubSpaces;
/*
udanax-top.st:14573:
CoordinateSpace subclass: #CrossSpace
instanceVariableNames: 'mySubSpaces {PtrArray of: CoordinateSpace}'
classVariableNames: ''
poolDictionaries: ''
category: 'Xanadu-Spaces-Cross'!
*/
/*
udanax-top.st:14577:
CrossSpace comment:
'Represents the cross of several coordinate spaces. '!
*/
/*
udanax-top.st:14579:
(CrossSpace getOrMakeCxxClassDescription)
friends:
'friend class BoxAccumulator;
friend class BoxStepper;
friend class GenericCrossSpace;
friend class GenericCrossRegion;
friend class BoxProjectionStepper;';
attributes: ((Set new) add: #ON.CLIENT; add: #DEFERRED; yourself)!
*/
/*
udanax-top.st:14724:
CrossSpace class
instanceVariableNames: ''!
*/
/*
udanax-top.st:14727:
(CrossSpace getOrMakeCxxClassDescription)
friends:
'friend class BoxAccumulator;
friend class BoxStepper;
friend class GenericCrossSpace;
friend class GenericCrossRegion;
friend class BoxProjectionStepper;';
attributes: ((Set new) add: #ON.CLIENT; add: #DEFERRED; yourself)!
*/
/**
 * Registers this class's translator attributes (ONCLIENT, DEFERRED) with the
 * Abora class-description registry.
 */
public static void initializeClassAttributes() {
AboraSupport.findAboraClass(CrossSpace.class).setAttributes( new Set().add("ONCLIENT").add("DEFERRED"));
/*
Generated during transformation: AddMethod
*/
}
/**
 * Essential. The base spaces that I am a cross of.
 */
public PtrArray axes() {
// Return a copy so callers cannot mutate the internal subspace array.
return (PtrArray) mySubSpaces.copy();
/*
udanax-top.st:14590:CrossSpace methodsFor: 'accessing'!
{PtrArray CLIENT of: CoordinateSpace} axes
"Essential. The base spaces that I am a cross of."
^mySubSpaces copy cast: PtrArray!
*/
}
/**
 * The sub coordinate space on the given axis
 */
public CoordinateSpace axis(int dimension) {
return (CoordinateSpace) (mySubSpaces.fetch(dimension));
/*
udanax-top.st:14595:CrossSpace methodsFor: 'accessing'!
{CoordinateSpace CLIENT} axis: dimension {Int32}
"The sub coordinate space on the given axis"
^(mySubSpaces fetch: dimension) cast: CoordinateSpace!
*/
}
/**
 * The number of dimensions in this space
 */
public int axisCount() {
return mySubSpaces.count();
/*
udanax-top.st:14600:CrossSpace methodsFor: 'accessing'!
{Int32 CLIENT INLINE} axisCount
"The number of dimensions in this space"
^mySubSpaces count!
*/
}
/**
 * Hash consistent with isEqual: the contents hash of the subspace array
 * combined with this class's hash.
 */
public int actualHashForEqual() {
return mySubSpaces.contentsHash() ^ HashHelper.hashForEqual(this.getClass());
/*
udanax-top.st:14607:CrossSpace methodsFor: 'testing'!
{UInt32} actualHashForEqual
^mySubSpaces contentsHash bitXor: #cat.U.CrossSpace hashForEqual!
*/
}
/**
 * Two cross spaces are equal exactly when their subspace arrays have equal contents.
 */
public boolean isEqual(Heaper other) {
if (other instanceof CrossSpace) {
CrossSpace cross = (CrossSpace) other;
return cross.secretSubSpaces().contentsEqual(mySubSpaces);
}
else {
return false;
}
/*
udanax-top.st:14611:CrossSpace methodsFor: 'testing'!
{BooleanVar} isEqual: other {Heaper}
other
cast: CrossSpace into: [:cross |
^cross secretSubSpaces contentsEqual: mySubSpaces]
others: [^false].
^ false "compiler fodder"!
*/
}
/**
 * Essential. Map each coordinate according to the mapping from its space. NULLs mean 'use
 * the identity mapping'
 */
public Mapping crossOfMappings(PtrArray subMappings) {
// Deferred: concrete subclasses (e.g. GenericCrossSpace) must implement.
throw new SubclassResponsibilityException();
/*
udanax-top.st:14621:CrossSpace methodsFor: 'making'!
{Mapping CLIENT} crossOfMappings: subMappings {(PtrArray of: Mapping | NULL) default: NULL}
"Essential. Map each coordinate according to the mapping from its space. NULLs mean 'use the identity mapping'"
self subclassResponsibility!
*/
}
/**
 * Essential. Make a lexical ordering of all elements in the space, using the given ordering
 * for each sub space. If no sub space ordering is given, then it is in the order they are in
 * the array.
 * subSpaceOrdering lists the lexicographic order in which each dimension should be
 * processed. Every dimension should be listed exactly one, from most significant (at index
 * 0) to least significant.
 * subOrderings are indexed by *dimension*, not by lexicographic order. In order to index by
 * lex order, look up the dimension in subSpaceOrdering, and then look up the resulting
 * dimension number in subOrderings.
 */
public CrossOrderSpec crossOfOrderSpecs(PtrArray subOrderings, PrimIntArray subSpaceOrdering) {
// Deferred: concrete subclasses must implement.
throw new SubclassResponsibilityException();
/*
udanax-top.st:14626:CrossSpace methodsFor: 'making'!
{CrossOrderSpec CLIENT} crossOfOrderSpecs: subOrderings {(PtrArray of: OrderSpec | NULL) default: NULL}
with: subSpaceOrdering {PrimIntArray default: NULL}
"Essential. Make a lexical ordering of all elements in the space, using the given ordering for each sub space. If no sub space ordering is given, then it is in the order they are in the array.
subSpaceOrdering lists the lexicographic order in which each dimension should be processed. Every dimension should be listed exactly one, from most significant (at index 0) to least significant.
subOrderings are indexed by *dimension*, not by lexicographic order. In order to index by lex order, look up the dimension in subSpaceOrdering, and then look up the resulting dimension number in subOrderings."
self subclassResponsibility!
*/
}
/**
 * Essential. Make an individual position
 */
public Tuple crossOfPositions(PtrArray coordinates) {
// Deferred: concrete subclasses must implement.
throw new SubclassResponsibilityException();
/*
udanax-top.st:14636:CrossSpace methodsFor: 'making'!
{Tuple CLIENT} crossOfPositions: coordinates {PtrArray of: Position}
"Essential. Make an individual position"
self subclassResponsibility!
*/
}
/**
 * Essential. Make a 'rectangular' region as a cross of all the given regions
 */
public CrossRegion crossOfRegions(PtrArray subRegions) {
// Deferred: concrete subclasses must implement.
throw new SubclassResponsibilityException();
/*
udanax-top.st:14641:CrossSpace methodsFor: 'making'!
{CrossRegion CLIENT} crossOfRegions: subRegions {PtrArray of: XnRegion | NULL}
"Essential. Make a 'rectangular' region as a cross of all the given regions"
self subclassResponsibility!
*/
}
/**
 * Return a region whose projection is 'subRegion' along 'dimension', but is full on all
 * other dimensions
 */
public CrossRegion extrusion(int dimension, XnRegion subRegion) {
// Deferred: concrete subclasses must implement.
throw new SubclassResponsibilityException();
/*
udanax-top.st:14646:CrossSpace methodsFor: 'making'!
{CrossRegion CLIENT} extrusion: dimension {Int32} with: subRegion {XnRegion}
"Return a region whose projection is 'subRegion' along 'dimension', but is full on all other dimensions"
self subclassResponsibility!
*/
}
/**
 * Passe (retired) API; use axisCount instead.
 * @deprecated
 */
public int count() {
throw new PasseException();
/*
udanax-top.st:14653:CrossSpace methodsFor: 'smalltalk: passe'!
{IntegerVar} count
self passe "axisCount"!
*/
}
/**
 * Passe (retired) API; use axisCount instead.
 * @deprecated
 */
public int intCount() {
throw new PasseException();
/*
udanax-top.st:14657:CrossSpace methodsFor: 'smalltalk: passe'!
{Int32} intCount
self passe "axisCount"!
*/
}
/**
 * Passe (retired) API; use crossOfMappings instead.
 * @deprecated
 */
public CrossMapping makeCrossMapping(PtrArray subMappings) {
throw new PasseException();
/*
udanax-top.st:14661:CrossSpace methodsFor: 'smalltalk: passe'!
{CrossMapping} makeCrossMapping: subMappings {PtrArray of: Mapping}
self passe!
*/
}
/**
 * Make a lexical ordering of all elements in the space, using the given ordering for each
 * sub space. If no sub space ordering is given, then it is in the order they are in the
 * array
 * @deprecated
 */
public CrossOrderSpec makeCrossOrderSpec(PtrArray subOrderings, Int32Array subSpaceOrdering) {
// Passe (retired) API; use crossOfOrderSpecs instead.
throw new PasseException();
/*
udanax-top.st:14665:CrossSpace methodsFor: 'smalltalk: passe'!
{CrossOrderSpec} makeCrossOrderSpec: subOrderings {PtrArray of: OrderSpec | NULL}
with: subSpaceOrdering {Int32Array default: NULL}
"Make a lexical ordering of all elements in the space, using the given ordering for each sub space. If no sub space ordering is given, then it is in the order they are in the array"
self passe!
*/
}
/**
 * Make a 'rectangular' region as a cross of all the given regions
 * @deprecated
 */
public CrossRegion makeCrossRegion(PtrArray subRegions) {
// Passe (retired) API; use crossOfRegions instead.
throw new PasseException();
/*
udanax-top.st:14671:CrossSpace methodsFor: 'smalltalk: passe'!
{CrossRegion} makeCrossRegion: subRegions {PtrArray of: XnRegion | NULL}
"Make a 'rectangular' region as a cross of all the given regions"
self passe!
*/
}
/**
 * Make an individual position
 * @deprecated
 */
public Tuple makeTuple(PtrArray coordinates) {
// Passe (retired) API; use crossOfPositions instead.
throw new PasseException();
/*
udanax-top.st:14676:CrossSpace methodsFor: 'smalltalk: passe'!
{Tuple} makeTuple: coordinates {PtrArray of: Position}
"Make an individual position"
self passe!
*/
}
/**
 * Passe (retired) API; use axis instead.
 * @deprecated
 */
public CoordinateSpace subSpace(int dimension) {
throw new PasseException();
/*
udanax-top.st:14681:CrossSpace methodsFor: 'smalltalk: passe'!
{CoordinateSpace} subSpace: dimension {Int32}
self passe "axis"!
*/
}
/**
 * Passe (retired) API; use axes instead.
 * @deprecated
 */
public PtrArray subSpaces() {
throw new PasseException();
/*
udanax-top.st:14685:CrossSpace methodsFor: 'smalltalk: passe'!
{PtrArray of: CoordinateSpace} subSpaces
self passe "axes"!
*/
}
/**
 * Default: crossOfMappings with NULL subMappings, i.e. identity mappings on all axes.
 */
public Mapping crossOfMappings() {
return crossOfMappings(null);
/*
udanax-top.st:14691:CrossSpace methodsFor: 'smalltalk: defaults'!
{Mapping CLIENT} crossOfMappings
^self crossOfMappings: NULL!
*/
}
/**
 * Default: crossOfOrderSpecs with default sub-orderings and default dimension ordering.
 */
public CrossOrderSpec crossOfOrderSpecs() {
return crossOfOrderSpecs(null, null);
/*
udanax-top.st:14695:CrossSpace methodsFor: 'smalltalk: defaults'!
{CrossOrderSpec CLIENT} crossOfOrderSpecs
^self crossOfOrderSpecs: NULL with: NULL!
*/
}
/**
 * Default: crossOfOrderSpecs with the default dimension ordering.
 */
public CrossOrderSpec crossOfOrderSpecs(PtrArray subOrderings) {
return crossOfOrderSpecs(subOrderings, null);
/*
udanax-top.st:14699:CrossSpace methodsFor: 'smalltalk: defaults'!
{CrossOrderSpec CLIENT} crossOfOrderSpecs: subOrderings {(PtrArray of: OrderSpec | NULL) default: NULL}
^self crossOfOrderSpecs: subOrderings with: NULL!
*/
}
/**
 * The actual array of sub spaces. DO NOT MODIFY
 */
public PtrArray secretSubSpaces() {
return mySubSpaces;
/*
udanax-top.st:14706:CrossSpace methodsFor: 'protected: accessing'!
{PtrArray INLINE of: CoordinateSpace} secretSubSpaces
"The actual array of sub spaces. DO NOT MODIFY"
^mySubSpaces!
*/
}
/**
 * Construct with no subspace array (mySubSpaces left null).
 */
public CrossSpace() {
super();
mySubSpaces = null;
/*
udanax-top.st:14713:CrossSpace methodsFor: 'protected: creation'!
create
super create.
mySubSpaces := NULL.!
*/
}
/**
 * Construct from the given array of subspaces; the array is stored without copying.
 */
public CrossSpace(PtrArray subSpaces) {
super();
mySubSpaces = subSpaces;
/*
udanax-top.st:14718:CrossSpace methodsFor: 'protected: creation'!
create: subSpaces {PtrArray of: CoordinateSpace}
super create.
mySubSpaces := subSpaces.!
*/
}
/**
 * Make a cross space with the given list of subspaces
 */
public static CrossSpace make(PtrArray subSpaces) {
/* Should use middlemen. Just hard code special cases for now */
// Copy the caller's array before handing it to the concrete implementation.
return GenericCrossSpace.make(((PtrArray) subSpaces.copy()));
/*
udanax-top.st:14738:CrossSpace class methodsFor: 'creation'!
{CrossSpace CLIENT} make: subSpaces {PtrArray of: CoordinateSpace}
"Make a cross space with the given list of subspaces"
"Should use middlemen. Just hard code special cases for now"
^GenericCrossSpace make: (subSpaces copy cast: PtrArray)!
*/
}
/**
 * Cross two sub spaces
 */
public static CrossSpace make(CoordinateSpace zeroSpace, CoordinateSpace oneSpace) {
return new GenericCrossSpace(((PtrArray) (PrimSpec.pointer().arrayWithTwo(zeroSpace, oneSpace))));
/*
udanax-top.st:14744:CrossSpace class methodsFor: 'creation'!
make: zeroSpace {CoordinateSpace} with: oneSpace {CoordinateSpace}
"Cross two sub spaces"
^GenericCrossSpace create: ((PrimSpec pointer
arrayWithTwo: zeroSpace with: oneSpace) cast: PtrArray)!
*/
}
/**
 * {PtrArray CLIENT of: CoordinateSpace} axes
 * {CoordinateSpace CLIENT} axis: dimension {Int32}
 * {Int32 CLIENT} axisCount
 * {Mapping CLIENT} crossOfMappings
 * {Mapping CLIENT} crossOfMappings: subMappings {(PtrArray of: Mapping | NULL)
 * default: NULL}
 * {CrossOrderSpec CLIENT} crossOfOrderSpecs
 * {CrossOrderSpec CLIENT} crossOfOrderSpecs: subOrderings {(PtrArray of: OrderSpec | NULL)
 * default: NULL}
 * {CrossOrderSpec CLIENT} crossOfOrderSpecs: subOrderings {(PtrArray of: OrderSpec | NULL)
 * default: NULL} with: subSpaceOrdering {Int32Array default: NULL}
 * {Tuple CLIENT} crossOfPositions: coordinates {PtrArray of: Position}
 * {CrossRegion CLIENT} crossOfRegions: subRegions {PtrArray of: XuRegion | NULL}
 * {CrossRegion CLIENT} extrusion: dimension {Int32} with: subRegion {XuRegion}
 */
public static void infostProtocol() {
// Protocol listing retained from the Smalltalk source; intentionally a no-op here.
/*
udanax-top.st:14752:CrossSpace class methodsFor: 'smalltalk: system'!
info.stProtocol
"{PtrArray CLIENT of: CoordinateSpace} axes
{CoordinateSpace CLIENT} axis: dimension {Int32}
{Int32 CLIENT} axisCount
{Mapping CLIENT} crossOfMappings
{Mapping CLIENT} crossOfMappings: subMappings {(PtrArray of: Mapping | NULL)
default: NULL}
{CrossOrderSpec CLIENT} crossOfOrderSpecs
{CrossOrderSpec CLIENT} crossOfOrderSpecs: subOrderings {(PtrArray of: OrderSpec | NULL)
default: NULL}
{CrossOrderSpec CLIENT} crossOfOrderSpecs: subOrderings {(PtrArray of: OrderSpec | NULL)
default: NULL} with: subSpaceOrdering {Int32Array default: NULL}
{Tuple CLIENT} crossOfPositions: coordinates {PtrArray of: Position}
{CrossRegion CLIENT} crossOfRegions: subRegions {PtrArray of: XuRegion | NULL}
{CrossRegion CLIENT} extrusion: dimension {Int32} with: subRegion {XuRegion}
"!
*/
}
/**
 * Reconstruct from a receiver stream (deserialization constructor added by the transformation).
 */
public CrossSpace(Rcvr receiver) {
super(receiver);
/*
Generated during transformation
*/
}
}
| |
/*
* Copyright 2008-2009 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package voldemort.store.readonly.mr;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.log4j.Logger;
import voldemort.VoldemortException;
import voldemort.store.readonly.ReadOnlyUtils;
import voldemort.store.readonly.checksum.CheckSum;
import voldemort.store.readonly.checksum.CheckSum.CheckSumType;
import voldemort.utils.ByteUtils;
/**
* Take key md5s and value bytes and build a read-only store from these values
*/
@SuppressWarnings("deprecation")
public class HadoopStoreBuilderReducer extends AbstractStoreBuilderConfigurable implements
        Reducer<BytesWritable, BytesWritable, Text, Text> {

    private static final Logger logger = Logger.getLogger(HadoopStoreBuilderReducer.class);

    // Per-task streams for the index (key md5 + position) and data (values) files.
    private DataOutputStream indexFileStream = null;
    private DataOutputStream valueFileStream = null;
    // Byte offset into the value file of the next value to be written.
    private int position;
    private String taskId = null;

    // All of these are lazily discovered from the first reduced value; -1 means "not seen yet".
    private int nodeId = -1;
    private int partitionId = -1;
    private int chunkId = -1;
    private int replicaType = -1;

    private Path taskIndexFileName;
    private Path taskValueFileName;

    private JobConf conf;
    private CheckSumType checkSumType;
    private CheckSum checkSumDigestIndex;
    private CheckSum checkSumDigestValue;

    private String outputDir;

    private FileSystem fs;

    protected static enum CollisionCounter {
        NUM_COLLISIONS,
        MAX_COLLISIONS;
    }

    /**
     * Reduce should get sorted MD5 of Voldemort key ( either 16 bytes if saving
     * keys is disabled, else 8 bytes ) as key and for value (a) node-id,
     * partition-id, value - if saving keys is disabled (b) node-id,
     * partition-id, replica-type, [key-size, value-size, key, value]* if saving
     * keys is enabled
     */
    public void reduce(BytesWritable key,
                       Iterator<BytesWritable> iterator,
                       OutputCollector<Text, Text> output,
                       Reporter reporter) throws IOException {

        // Write key and position
        this.indexFileStream.write(key.get(), 0, key.getSize());
        this.indexFileStream.writeInt(this.position);

        // Run key through checksum digest
        if(this.checkSumDigestIndex != null) {
            this.checkSumDigestIndex.update(key.get(), 0, key.getSize());
            this.checkSumDigestIndex.update(this.position);
        }

        short numTuples = 0;
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        DataOutputStream valueStream = new DataOutputStream(stream);

        while(iterator.hasNext()) {
            BytesWritable writable = iterator.next();
            byte[] valueBytes = writable.get();
            int offsetTillNow = 0;

            // Read node Id
            if(this.nodeId == -1)
                this.nodeId = ByteUtils.readInt(valueBytes, offsetTillNow);
            offsetTillNow += ByteUtils.SIZE_OF_INT;

            // Read partition id
            if(this.partitionId == -1)
                this.partitionId = ByteUtils.readInt(valueBytes, offsetTillNow);
            offsetTillNow += ByteUtils.SIZE_OF_INT;

            // Read chunk id
            if(this.chunkId == -1)
                this.chunkId = ReadOnlyUtils.chunk(key.get(), getNumChunks());

            // Read replica type
            if(getSaveKeys()) {
                if(this.replicaType == -1)
                    this.replicaType = (int) ByteUtils.readBytes(valueBytes,
                                                                 offsetTillNow,
                                                                 ByteUtils.SIZE_OF_BYTE);
                offsetTillNow += ByteUtils.SIZE_OF_BYTE;
            }

            int valueLength = writable.getSize() - offsetTillNow;
            if(getSaveKeys()) {
                // Write ( key_length, value_length, key, value ) — the lengths
                // are already embedded in the mapper output payload.
                valueStream.write(valueBytes, offsetTillNow, valueLength);
            } else {
                // Write (value_length + value)
                valueStream.writeInt(valueLength);
                valueStream.write(valueBytes, offsetTillNow, valueLength);
            }

            numTuples++;

            // If we have multiple values for this md5 that is a collision,
            // throw an exception--either the data itself has duplicates, there
            // are trillions of keys, or someone is attempting something
            // malicious ( We obviously expect collisions when we save keys )
            if(!getSaveKeys() && numTuples > 1)
                throw new VoldemortException("Duplicate keys detected for md5 sum "
                                             + ByteUtils.toHexString(ByteUtils.copy(key.get(),
                                                                                    0,
                                                                                    key.getSize())));
        }

        if(numTuples < 0) {
            // Short counter wrapped around: more than Short.MAX_VALUE collisions.
            throw new VoldemortException("Found too many collisions: chunk " + chunkId
                                         + " has exceeded " + Short.MAX_VALUE + " collisions.");
        } else if(numTuples > 1) {
            // Update number of collisions + max keys per collision
            reporter.incrCounter(CollisionCounter.NUM_COLLISIONS, 1);

            long numCollisions = reporter.getCounter(CollisionCounter.MAX_COLLISIONS).getCounter();
            if(numTuples > numCollisions) {
                reporter.incrCounter(CollisionCounter.MAX_COLLISIONS, numTuples - numCollisions);
            }
        }

        // Flush the value
        valueStream.flush();
        byte[] value = stream.toByteArray();

        // Start writing to file now
        // First, if save keys flag set the number of keys
        if(getSaveKeys()) {
            this.valueFileStream.writeShort(numTuples);
            this.position += ByteUtils.SIZE_OF_SHORT;

            if(this.checkSumDigestValue != null) {
                this.checkSumDigestValue.update(numTuples);
            }
        }

        this.valueFileStream.write(value);
        this.position += value.length;

        if(this.checkSumDigestValue != null) {
            this.checkSumDigestValue.update(value);
        }

        if(this.position < 0)
            throw new VoldemortException("Chunk overflow exception: chunk " + chunkId
                                         + " has exceeded " + Integer.MAX_VALUE + " bytes.");
    }

    /**
     * Opens the per-task index and data streams and initializes checksum
     * digests from the job configuration.
     */
    @Override
    public void configure(JobConf job) {
        super.configure(job);
        try {
            this.conf = job;
            this.position = 0;
            this.outputDir = job.get("final.output.dir");
            this.taskId = job.get("mapred.task.id");
            this.checkSumType = CheckSum.fromString(job.get("checksum.type"));

            this.checkSumDigestIndex = CheckSum.getInstance(checkSumType);
            this.checkSumDigestValue = CheckSum.getInstance(checkSumType);
            this.taskIndexFileName = new Path(FileOutputFormat.getOutputPath(job), getStoreName()
                                                                                   + "."
                                                                                   + this.taskId
                                                                                   + ".index");
            this.taskValueFileName = new Path(FileOutputFormat.getOutputPath(job), getStoreName()
                                                                                   + "."
                                                                                   + this.taskId
                                                                                   + ".data");

            if(this.fs == null)
                this.fs = this.taskIndexFileName.getFileSystem(job);

            this.indexFileStream = fs.create(this.taskIndexFileName);
            this.valueFileStream = fs.create(this.taskValueFileName);

            logger.info("Opening " + this.taskIndexFileName + " and " + this.taskValueFileName
                        + " for writing.");

        } catch(IOException e) {
            throw new RuntimeException("Failed to open Input/OutputStream", e);
        }
    }

    /**
     * Closes the task streams, writes checksum files if checksumming is
     * enabled, and moves the task files into their final per-node chunk
     * locations.
     *
     * @throws IOException if closing, checksum writing, or directory creation fails
     */
    @Override
    public void close() throws IOException {

        this.indexFileStream.close();
        this.valueFileStream.close();

        if(this.nodeId == -1 || this.chunkId == -1 || this.partitionId == -1) {
            // Issue 258 - No data was read in the reduce phase, do not create
            // any output
            return;
        }

        // If the replica type read was not valid, shout out
        if(getSaveKeys() && this.replicaType == -1) {
            throw new RuntimeException("Could not read the replica type correctly for node "
                                       + nodeId + " ( partition - " + this.partitionId + " )");
        }

        // Chunk file prefix: partition[_replica]_chunk
        String fileNamePrefix;
        if(getSaveKeys()) {
            fileNamePrefix = this.partitionId + "_" + this.replicaType + "_" + this.chunkId;
        } else {
            fileNamePrefix = this.partitionId + "_" + this.chunkId;
        }

        // Initialize the node directory
        Path nodeDir = new Path(this.outputDir, "node-" + this.nodeId);

        // Create output directory, if it doesn't exist
        FileSystem outputFs = nodeDir.getFileSystem(this.conf);
        outputFs.mkdirs(nodeDir);

        // Write the checksum and output files
        if(this.checkSumType != CheckSumType.NONE) {

            if(this.checkSumDigestIndex != null && this.checkSumDigestValue != null) {
                Path checkSumIndexFile = new Path(nodeDir, fileNamePrefix + ".index.checksum");
                Path checkSumValueFile = new Path(nodeDir, fileNamePrefix + ".data.checksum");

                FSDataOutputStream output = outputFs.create(checkSumIndexFile);
                output.write(this.checkSumDigestIndex.getCheckSum());
                output.close();

                output = outputFs.create(checkSumValueFile);
                output.write(this.checkSumDigestValue.getCheckSum());
                output.close();
            } else {
                throw new RuntimeException("Failed to open checksum digest for node " + nodeId
                                           + " ( partition - " + this.partitionId + ", chunk - "
                                           + chunkId + " )");
            }
        }

        // Generate the final chunk files. FileSystem.rename returns false on
        // failure instead of throwing, so the results must be checked or a
        // failed move would silently drop this chunk's data.
        Path indexFile = new Path(nodeDir, fileNamePrefix + ".index");
        Path valueFile = new Path(nodeDir, fileNamePrefix + ".data");

        logger.info("Moving " + this.taskIndexFileName + " to " + indexFile);
        if(!outputFs.rename(taskIndexFileName, indexFile)) {
            throw new VoldemortException("Failed to rename " + this.taskIndexFileName + " to "
                                         + indexFile);
        }
        logger.info("Moving " + this.taskValueFileName + " to " + valueFile);
        if(!outputFs.rename(this.taskValueFileName, valueFile)) {
            throw new VoldemortException("Failed to rename " + this.taskValueFileName + " to "
                                         + valueFile);
        }
    }
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2017 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Observable;
import java.util.Observer;
import java.util.Set;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.casemodule.Case;
import static org.sleuthkit.autopsy.core.UserPreferences.hideKnownFilesInViewsTree;
import static org.sleuthkit.autopsy.core.UserPreferences.hideSlackFilesInViewsTree;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.FileTypes.FileTypesKey;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
/**
* Class which contains the Nodes for the 'By Mime Type' view located in the
* File Types view, shows all files with a mime type. Will initially be empty
* until file type identification has been performed. Contains a Property Change
* Listener which is checking for changes in IngestJobEvent Completed or
* Canceled and IngestModuleEvent Content Changed.
*/
public final class FileTypesByMimeType extends Observable implements AutopsyVisitableItem {
private final static Logger logger = Logger.getLogger(FileTypesByMimeType.class.getName());
private final SleuthkitCase skCase;
/**
* The nodes of this tree will be determined dynamically by the mimetypes
* which exist in the database. This hashmap will store them with the media
* type as the key and a Map, from media subtype to count, as the value.
*/
private final HashMap<String, Map<String, Long>> existingMimeTypeCounts = new HashMap<>();
/**
* Root of the File Types tree. Used to provide single answer to question:
* Should the child counts be shown next to the nodes?
*/
private final FileTypes typesRoot;
/**
* The pcl is in the class because it has the easiest mechanisms to add and
* remove itself during its life cycles.
*/
private final PropertyChangeListener pcl;
private static final Set<Case.Events> CASE_EVENTS_OF_INTEREST = EnumSet.of(Case.Events.DATA_SOURCE_ADDED, Case.Events.CURRENT_CASE);
/**
 * Create the base expression used as the where clause in the queries for
 * files by mime type. Filters out certain kinds of files and directories,
 * and known/slack files based on user preferences.
 *
 * @return The base expression to be used in the where clause of queries for
 *         files by mime type.
 */
static private String createBaseWhereExpr() {
    StringBuilder whereExpr = new StringBuilder();
    // Regular files only (no directories, unallocated space, etc.).
    whereExpr.append("(dir_type = ").append(TskData.TSK_FS_NAME_TYPE_ENUM.REG.getValue()).append(")");
    // Restrict to the file-system, carved, derived, and local file types;
    // slack files are included only when the user has not hidden them.
    whereExpr.append(" AND (type IN (");
    whereExpr.append(TskData.TSK_DB_FILES_TYPE_ENUM.FS.ordinal()).append(",");
    whereExpr.append(TskData.TSK_DB_FILES_TYPE_ENUM.CARVED.ordinal()).append(",");
    whereExpr.append(TskData.TSK_DB_FILES_TYPE_ENUM.DERIVED.ordinal()).append(",");
    whereExpr.append(TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL.ordinal());
    if (!hideSlackFilesInViewsTree()) {
        whereExpr.append(",").append(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK.ordinal());
    }
    whereExpr.append("))");
    // Optionally exclude known files per user preference.
    if (hideKnownFilesInViewsTree()) {
        whereExpr.append(" AND (known IS NULL OR known != ").append(TskData.FileKnown.KNOWN.getFileKnownValue()).append(")");
    }
    return whereExpr.toString();
}
/**
 * Detach this view model from all event sources: drops tree observers and
 * unsubscribes the property change listener from ingest-job and case
 * events. Invoked by the listener itself when the current case is closed.
 */
private void removeListeners() {
deleteObservers();
IngestManager.getInstance().removeIngestJobEventListener(pcl);
Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, pcl);
}
/**
 * Performs the query on the database to get all distinct MIME types of
 * files in it, and populate the hashmap with those results.
 */
private void populateHashMap() {
// Count files per MIME type, restricted by the same base filter the
// per-type child queries use (slack/known files per user preference).
String query = "SELECT mime_type, count(*) AS count FROM tsk_files "
+ " WHERE mime_type IS NOT null "
+ " AND " + createBaseWhereExpr()
+ " GROUP BY mime_type";
synchronized (existingMimeTypeCounts) {
existingMimeTypeCounts.clear();
if (skCase == null) {
// No open case database; leave the map empty. Note observers are
// NOT notified on this path.
return;
}
try (SleuthkitCase.CaseDbQuery dbQuery = skCase.executeQuery(query)) {
ResultSet resultSet = dbQuery.getResultSet();
while (resultSet.next()) {
final String mime_type = resultSet.getString("mime_type"); //NON-NLS
if (!mime_type.isEmpty()) {
//if the mime_type contained multiple slashes then everything after the first slash will become the subtype
final String mediaType = StringUtils.substringBefore(mime_type, "/");
final String subType = StringUtils.removeStart(mime_type, mediaType + "/");
if (!mediaType.isEmpty() && !subType.isEmpty()) {
final long count = resultSet.getLong("count");
// Nested map: media type -> (subtype -> file count).
existingMimeTypeCounts.computeIfAbsent(mediaType, t -> new HashMap<>())
.put(subType, count);
}
}
}
} catch (TskCoreException | SQLException ex) {
logger.log(Level.SEVERE, "Unable to populate File Types by MIME Type tree view from DB: ", ex); //NON-NLS
}
}
// Tell observers (the tree's child factories) to refresh their keys.
setChanged();
notifyObservers();
}
/**
 * Construct the "By MIME Type" view model. Subscribes to ingest-job and
 * case events so the MIME-type counts are refreshed as analysis runs, then
 * performs the initial population from the case database.
 *
 * @param typesRoot the parent File Types node; supplies the case database
 *                  and the child-count display setting.
 */
FileTypesByMimeType(FileTypes typesRoot) {
this.skCase = typesRoot.getSleuthkitCase();
this.typesRoot = typesRoot;
this.pcl = (PropertyChangeEvent evt) -> {
String eventType = evt.getPropertyName();
// Repopulate on ingest progress/completion and when a data source is added.
if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())
|| eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())
|| eventType.equals(Case.Events.DATA_SOURCE_ADDED.toString())) {
/**
 * Checking for a current case is a stop gap measure until a
 * different way of handling the closing of cases is worked out.
 * Currently, remote events may be received for a case that is
 * already closed.
 */
try {
Case.getCurrentCase();
typesRoot.updateShowCounts();
populateHashMap();
} catch (IllegalStateException notUsed) {
/**
 * Case is closed, do nothing.
 */
}
} else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
if (evt.getNewValue() == null) {
// Current case closed: detach all listeners/observers.
removeListeners();
}
}
};
IngestManager.getInstance().addIngestJobEventListener(pcl);
Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, pcl);
populateHashMap();
}
@Override
public <T> T accept(AutopsyItemVisitor<T> v) {
// Visitor-pattern dispatch: hand this item to the visitor's matching visit overload.
return v.visit(this);
}
/**
 * Method to check if the node in question is a ByMimeTypeNode which is
 * empty.
 *
 * @param node the Node which you wish to check.
 *
 * @return True if originNode is an instance of ByMimeTypeNode and is empty,
 *         false otherwise.
 */
public static boolean isEmptyMimeTypeNode(Node node) {
    // Short-circuit: the cast is only evaluated when the instanceof check passes.
    return node instanceof FileTypesByMimeType.ByMimeTypeNode
            && ((FileTypesByMimeType.ByMimeTypeNode) node).isEmpty();
}
/**
 * Class which represents the root node of the "By MIME Type" tree, will
 * have children of each media type present in the database or no children
 * when the file detection module has not been run and MIME type is
 * currently unknown.
 */
class ByMimeTypeNode extends DisplayableItemNode {
@NbBundle.Messages({"FileTypesByMimeType.name.text=By MIME Type"})
final String NAME = Bundle.FileTypesByMimeType_name_text();
ByMimeTypeNode() {
// Children are the distinct media types; created lazily on a background thread.
super(Children.create(new ByMimeTypeNodeChildren(), true), Lookups.singleton(Bundle.FileTypesByMimeType_name_text()));
super.setName(NAME);
super.setDisplayName(NAME);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file_types.png");
}
// Never a leaf: this node groups media-type children.
@Override
public boolean isLeafTypeNode() {
return false;
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> v) {
return v.visit(this);
}
@Override
public String getItemType() {
return getClass().getName();
}
/**
 * @return true if no MIME types have been recorded for the case yet.
 */
boolean isEmpty() {
synchronized (existingMimeTypeCounts) {
return existingMimeTypeCounts.isEmpty();
}
}
}
/**
 * Creates the children for the "By MIME Type" node; each child represents a
 * distinct media type present in the case database, in natural sort order.
 */
private class ByMimeTypeNodeChildren extends ChildFactory<String> implements Observer {

    private ByMimeTypeNodeChildren() {
        super();
        // Refresh the keys whenever the MIME-type counts are repopulated.
        addObserver(this);
    }

    @Override
    protected boolean createKeys(List<String> mediaTypeNodes) {
        // Snapshot and sort the media types while holding the lock, then
        // publish the sorted list as this factory's keys.
        final List<String> sortedMediaTypes;
        synchronized (existingMimeTypeCounts) {
            sortedMediaTypes = existingMimeTypeCounts.keySet().stream()
                    .sorted()
                    .collect(Collectors.toList());
        }
        mediaTypeNodes.addAll(sortedMediaTypes);
        return true;
    }

    @Override
    protected Node createNodeForKey(String key) {
        return new MediaTypeNode(key);
    }

    @Override
    public void update(Observable o, Object arg) {
        refresh(true);
    }
}
/**
 * The Media type node created by the ByMimeTypeNodeChildren and contains
 * one of the unique media types present in the database for this case.
 */
class MediaTypeNode extends DisplayableItemNode {
@NbBundle.Messages({"FileTypesByMimeTypeNode.createSheet.mediaType.name=Type",
"FileTypesByMimeTypeNode.createSheet.mediaType.displayName=Type",
"FileTypesByMimeTypeNode.createSheet.mediaType.desc=no description"})
MediaTypeNode(String name) {
// Children are the subtypes of this media type; created lazily on a background thread.
super(Children.create(new MediaTypeNodeChildren(name), true), Lookups.singleton(name));
setName(name);
setDisplayName(name);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file_types.png");
}
// Never a leaf: subtypes hang below this node.
@Override
public boolean isLeafTypeNode() {
return false;
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> v) {
return v.visit(this);
}
@Override
protected Sheet createSheet() {
// Add a single "Type" property showing the media type name.
Sheet s = super.createSheet();
Sheet.Set ss = s.get(Sheet.PROPERTIES);
if (ss == null) {
ss = Sheet.createPropertiesSet();
s.put(ss);
}
ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "FileTypesByMimeTypeNode.createSheet.mediaType.name"), NbBundle.getMessage(this.getClass(), "FileTypesByMimeTypeNode.createSheet.mediaType.displayName"), NbBundle.getMessage(this.getClass(), "FileTypesByMimeTypeNode.createSheet.mediaType.desc"), getDisplayName()));
return s;
}
@Override
public String getItemType() {
return getClass().getName();
}
}
/**
* Creates children for media type nodes, children will be MediaSubTypeNodes
* and represent one of the subtypes which are present in the database of
* their media type.
*/
private class MediaTypeNodeChildren extends ChildFactory<String> implements Observer {

    // Media type (the portion before the '/') whose subtypes this factory lists.
    final String mediaType;

    MediaTypeNodeChildren(String name) {
        addObserver(this);
        this.mediaType = name;
    }

    @Override
    protected boolean createKeys(List<String> mediaTypeNodes) {
        // Read the shared counts map under its monitor, consistent with the
        // top-level factory, and tolerate the media type having disappeared
        // between refreshes (the unguarded get() could previously NPE).
        synchronized (existingMimeTypeCounts) {
            if (existingMimeTypeCounts.containsKey(mediaType)) {
                mediaTypeNodes.addAll(existingMimeTypeCounts.get(mediaType).keySet());
            }
        }
        return true;
    }

    @Override
    protected Node createNodeForKey(String subtype) {
        // Recombine into the full MIME type, e.g. "image" + "/" + "png".
        String mimeType = mediaType + "/" + subtype;
        return new MediaSubTypeNode(mimeType);
    }

    @Override
    public void update(Observable o, Object arg) {
        // Counts changed; recompute this factory's keys.
        refresh(true);
    }
}
/**
* Node which represents the media sub type in the By MIME type tree, the
* media subtype is the portion of the MIME type following the /.
*/
class MediaSubTypeNode extends FileTypes.BGCountUpdatingNode {

    @NbBundle.Messages({"FileTypesByMimeTypeNode.createSheet.mediaSubtype.name=Subtype",
        "FileTypesByMimeTypeNode.createSheet.mediaSubtype.displayName=Subtype",
        "FileTypesByMimeTypeNode.createSheet.mediaSubtype.desc=no description"})
    private final String mimeType;    // full MIME type, e.g. "image/png"
    private final String subType;     // portion after the '/', e.g. "png"

    private MediaSubTypeNode(String mimeType) {
        super(typesRoot, Children.create(new MediaSubTypeNodeChildren(mimeType), true), Lookups.singleton(mimeType));
        this.mimeType = mimeType;
        this.subType = StringUtils.substringAfter(mimeType, "/");
        super.setName(mimeType);
        super.setDisplayName(subType);
        updateDisplayName();
        this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-filter-icon.png"); //NON-NLS
        addObserver(this);
    }

    /**
     * This returns true because any MediaSubTypeNode that exists is going
     * to be a bottom level node in the Tree view on the left of Autopsy.
     *
     * @return true
     */
    @Override
    public boolean isLeafTypeNode() {
        return true;
    }

    @Override
    public <T> T accept(DisplayableItemNodeVisitor<T> v) {
        return v.visit(this);
    }

    @Override
    protected Sheet createSheet() {
        // Expose the subtype as a single "Subtype" property on the sheet.
        Sheet s = super.createSheet();
        Sheet.Set ss = s.get(Sheet.PROPERTIES);
        if (ss == null) {
            ss = Sheet.createPropertiesSet();
            s.put(ss);
        }
        ss.put(new NodeProperty<>(
                NbBundle.getMessage(this.getClass(), "FileTypesByMimeTypeNode.createSheet.mediaSubtype.name"),
                NbBundle.getMessage(this.getClass(), "FileTypesByMimeTypeNode.createSheet.mediaSubtype.displayName"),
                NbBundle.getMessage(this.getClass(), "FileTypesByMimeTypeNode.createSheet.mediaSubtype.desc"),
                getDisplayName()));
        return s;
    }

    @Override
    public String getItemType() {
        return getClass().getName();
    }

    @Override
    public void update(Observable o, Object arg) {
        // The counts changed; refresh the "(n)" suffix in the display name.
        updateDisplayName();
    }

    @Override
    String getDisplayNameBase() {
        return subType;
    }

    @Override
    long calculateChildCount() {
        // Guard against the type/subtype entry having been removed from the
        // shared counts map (the unguarded chained get() could previously
        // NPE); report zero in that case. Access is synchronized for
        // consistency with the other readers of existingMimeTypeCounts.
        final String mediaType = StringUtils.substringBefore(mimeType, "/");
        synchronized (existingMimeTypeCounts) {
            if (existingMimeTypeCounts.containsKey(mediaType)
                    && existingMimeTypeCounts.get(mediaType).containsKey(subType)) {
                return existingMimeTypeCounts.get(mediaType).get(subType);
            }
        }
        return 0;
    }
}
/**
* Factory for populating the contents of the Media Sub Type Node with the
* files that match MimeType which is represented by this position in the
* tree.
*/
private class MediaSubTypeNodeChildren extends ChildFactory.Detachable<FileTypesKey> implements Observer {

    // Full MIME type (e.g. "image/png") whose matching files are listed.
    private final String mimeType;

    private MediaSubTypeNodeChildren(String mimeType) {
        super();
        addObserver(this);
        this.mimeType = mimeType;
    }

    @Override
    protected boolean createKeys(List<FileTypesKey> list) {
        // Double any embedded single quote so the value cannot break out of
        // the SQL string literal (MIME types come from the database, but be
        // defensive when splicing them into a WHERE clause).
        final String escapedMimeType = mimeType.replace("'", "''");
        try {
            list.addAll(skCase.findAllFilesWhere(createBaseWhereExpr() + " AND mime_type = '" + escapedMimeType + "'")
                    .stream().map(FileTypesKey::new).collect(Collectors.toList())); //NON-NLS
        } catch (TskCoreException ex) {
            logger.log(Level.SEVERE, "Couldn't get search results", ex); //NON-NLS
        }
        return true;
    }

    @Override
    public void update(Observable o, Object arg) {
        // Counts changed; re-run the file query.
        refresh(true);
    }

    @Override
    protected Node createNodeForKey(FileTypesKey key) {
        // Delegate node creation to the shared file-node visitor.
        return key.accept(new FileTypes.FileNodeCreationVisitor());
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.elasticsearch;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.ssl.SSLContextService;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.After;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.mockito.stubbing.OngoingStubbing;
import okhttp3.Call;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Protocol;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.ResponseBody;
/**
 * Unit tests for the QueryElasticsearchHttp processor. The Elasticsearch HTTP
 * client is mocked (see QueryElasticsearchHttpTestProcessor below) so no live
 * cluster is required, except for the explicitly @Ignore'd integration tests
 * near the bottom of this class.
 */
public class TestQueryElasticsearchHttp {

    private TestRunner runner;

    @After
    public void teardown() {
        runner = null;
    }

    @Test
    public void testQueryElasticsearchOnTrigger_withInput() throws IOException {
        runner = TestRunners.newTestRunner(new QueryElasticsearchHttpTestProcessor());
        runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "http://127.0.0.1:9200");

        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.assertNotValid();
        runner.setProperty(QueryElasticsearchHttp.TYPE, "status");
        runner.assertNotValid();
        runner.setProperty(QueryElasticsearchHttp.QUERY,
                "source:Twitter AND identifier:\"${identifier}\"");
        runner.assertValid();
        runner.setProperty(QueryElasticsearchHttp.PAGE_SIZE, "2");
        runner.assertValid();

        runAndVerifySuccess(true);
    }

    @Test
    public void testQueryElasticsearchOnTrigger_withInput_withQueryInAttrs() throws IOException {
        runner = TestRunners.newTestRunner(new QueryElasticsearchHttpTestProcessor());
        runner.setValidateExpressionUsage(true);
        runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "http://127.0.0.1:9200");

        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.assertNotValid();
        runner.setProperty(QueryElasticsearchHttp.TYPE, "status");
        runner.assertNotValid();
        runner.setProperty(QueryElasticsearchHttp.QUERY,
                "source:Twitter AND identifier:\"${identifier}\"");
        runner.assertValid();
        runner.setProperty(QueryElasticsearchHttp.PAGE_SIZE, "2");
        runner.assertValid();

        runAndVerifySuccess(true);
    }

    @Test
    public void testQueryElasticsearchOnTrigger_withInput_EL() throws IOException {
        // ES URL and connect timeout are supplied via expression language /
        // variable registry rather than literal values.
        runner = TestRunners.newTestRunner(new QueryElasticsearchHttpTestProcessor());
        runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "${es.url}");

        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.assertNotValid();
        runner.setProperty(QueryElasticsearchHttp.TYPE, "status");
        runner.assertNotValid();
        runner.setProperty(QueryElasticsearchHttp.QUERY,
                "source:Twitter AND identifier:\"${identifier}\"");
        runner.assertValid();
        runner.setProperty(QueryElasticsearchHttp.PAGE_SIZE, "2");
        runner.assertValid();
        runner.setProperty(AbstractElasticsearchHttpProcessor.CONNECT_TIMEOUT, "${connect.timeout}");
        runner.assertValid();

        runner.setVariable("es.url", "http://127.0.0.1:9200");

        runAndVerifySuccess(true);
    }

    @Test
    public void testQueryElasticsearchOnTrigger_withInput_attributeTarget() throws IOException {
        // With TARGET_FLOW_FILE_ATTRIBUTES the hit is written into attributes
        // and the original content ("blah") is left untouched.
        runner = TestRunners.newTestRunner(new QueryElasticsearchHttpTestProcessor());
        runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "http://127.0.0.1:9200");

        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.assertNotValid();
        runner.setProperty(QueryElasticsearchHttp.TYPE, "status");
        runner.assertNotValid();
        runner.setProperty(QueryElasticsearchHttp.QUERY,
                "source:Twitter AND identifier:\"${identifier}\"");
        runner.assertValid();
        runner.setProperty(QueryElasticsearchHttp.PAGE_SIZE, "2");
        runner.assertValid();
        runner.setProperty(QueryElasticsearchHttp.TARGET,
                QueryElasticsearchHttp.TARGET_FLOW_FILE_ATTRIBUTES);

        runAndVerifySuccess(false);
        final MockFlowFile out = runner.getFlowFilesForRelationship(
                QueryElasticsearchHttp.REL_SUCCESS).get(0);
        assertNotNull(out);
        assertEquals("blah", new String(out.toByteArray()));
        assertEquals("arrays,are,supported,too", out.getAttribute("es.result.tags"));
        assertEquals("Twitter", out.getAttribute("es.result.source"));
    }

    @Test
    public void testQueryElasticsearchOnTrigger_withNoInput() throws IOException {
        runner = TestRunners.newTestRunner(new QueryElasticsearchHttpTestProcessor());
        runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "http://127.0.0.1:9200");

        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.assertNotValid();
        runner.setProperty(QueryElasticsearchHttp.TYPE, "status");
        runner.assertNotValid();
        runner.setProperty(QueryElasticsearchHttp.QUERY,
                "source:Twitter AND identifier:\"${identifier}\"");
        runner.assertValid();
        runner.setProperty(QueryElasticsearchHttp.PAGE_SIZE, "2");
        runner.assertValid();

        runner.setIncomingConnection(false);
        runAndVerifySuccess(true);
    }

    /**
     * Enqueues a flow file, runs the processor once, and verifies that the
     * expected number of results were routed to success.
     *
     * @param expectedResults number of flow files expected on REL_SUCCESS
     * @param targetIsContent true if results are written to flow file content
     *                        (in which case the filename attribute is checked)
     */
    private void runAndVerifySuccess(int expectedResults, boolean targetIsContent) {
        runner.enqueue("blah".getBytes(), new HashMap<String, String>() {
            {
                put("identifier", "28039652140");
            }
        });

        // Running once should page through all 3 docs
        runner.run(1, true, true);

        runner.assertAllFlowFilesTransferred(QueryElasticsearchHttp.REL_SUCCESS, expectedResults);
        final MockFlowFile out = runner.getFlowFilesForRelationship(
                QueryElasticsearchHttp.REL_SUCCESS).get(0);
        assertNotNull(out);

        if (targetIsContent) {
            out.assertAttributeEquals("filename", "abc-97b-ASVsZu_"
                    + "vShwtGCJpGOObmuSqUJRUC3L_-SEND-S3");
        }
        out.assertAttributeExists("es.query.url");
    }

    // By default, 3 files should go to Success
    private void runAndVerifySuccess(boolean targetIsContent) {
        runAndVerifySuccess(3, targetIsContent);
    }

    @Test
    public void testQueryElasticsearchOnTriggerWithFields() throws IOException {
        runner = TestRunners.newTestRunner(new QueryElasticsearchHttpTestProcessor());
        runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "http://127.0.0.1:9200");

        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.assertNotValid();
        runner.setProperty(QueryElasticsearchHttp.TYPE, "status");
        runner.assertNotValid();
        runner.setProperty(QueryElasticsearchHttp.QUERY, "${doc_id}");
        runner.assertValid();
        // Empty entries in the field list ("id,, userinfo.location") must
        // still validate.
        runner.setProperty(QueryElasticsearchHttp.FIELDS, "id,, userinfo.location");
        runner.assertValid();
        runner.setProperty(QueryElasticsearchHttp.SORT, "timestamp:asc,identifier:desc");
        runner.assertValid();

        runAndVerifySuccess(true);
    }

    @Test
    public void testQueryElasticsearchOnTriggerWithLimit() throws IOException {
        runner = TestRunners.newTestRunner(new QueryElasticsearchHttpTestProcessor());
        runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "http://127.0.0.1:9200");

        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.assertNotValid();
        runner.setProperty(QueryElasticsearchHttp.TYPE, "status");
        runner.assertNotValid();
        runner.setProperty(QueryElasticsearchHttp.QUERY, "${doc_id}");
        runner.assertValid();
        runner.setProperty(QueryElasticsearchHttp.FIELDS, "id,, userinfo.location");
        runner.assertValid();
        runner.setProperty(QueryElasticsearchHttp.SORT, "timestamp:asc,identifier:desc");
        runner.assertValid();
        // LIMIT caps the results at 2 even though 3 docs are available.
        runner.setProperty(QueryElasticsearchHttp.LIMIT, "2");

        runAndVerifySuccess(2, true);
    }

    @Test
    public void testQueryElasticsearchOnTriggerWithServerErrorRetry() throws IOException {
        QueryElasticsearchHttpTestProcessor processor = new QueryElasticsearchHttpTestProcessor();
        processor.setStatus(500, "Server error");
        runner = TestRunners.newTestRunner(processor); // simulate an ES-side server error
        runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "http://127.0.0.1:9200");

        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.setProperty(QueryElasticsearchHttp.TYPE, "status");
        runner.setProperty(QueryElasticsearchHttp.QUERY, "${doc_id}");

        runner.enqueue("".getBytes(), new HashMap<String, String>() {
            {
                put("identifier", "28039652140");
            }
        });
        runner.run(1, true, true);

        // This test generates a HTTP 500 "Server error"
        runner.assertAllFlowFilesTransferred(QueryElasticsearchHttp.REL_RETRY, 1);
        final MockFlowFile out = runner.getFlowFilesForRelationship(
                QueryElasticsearchHttp.REL_RETRY).get(0);
        assertNotNull(out);
    }

    @Test
    public void testQueryElasticsearchOnTriggerWithServerFail() throws IOException {
        QueryElasticsearchHttpTestProcessor processor = new QueryElasticsearchHttpTestProcessor();
        processor.setStatus(100, "Should fail");
        runner = TestRunners.newTestRunner(processor); // simulate an unexpected (non-retryable) status
        runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "http://127.0.0.1:9200");

        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.setProperty(QueryElasticsearchHttp.TYPE, "status");
        runner.setProperty(QueryElasticsearchHttp.QUERY, "${doc_id}");

        runner.enqueue("".getBytes(), new HashMap<String, String>() {
            {
                put("identifier", "28039652140");
            }
        });
        runner.run(1, true, true);

        // This test generates a HTTP 100 "Should fail"
        runner.assertAllFlowFilesTransferred(QueryElasticsearchHttp.REL_FAILURE, 1);
        final MockFlowFile out = runner.getFlowFilesForRelationship(
                QueryElasticsearchHttp.REL_FAILURE).get(0);
        assertNotNull(out);
    }

    @Test
    public void testQueryElasticsearchOnTriggerWithIOException() throws IOException {
        QueryElasticsearchHttpTestProcessor processor = new QueryElasticsearchHttpTestProcessor();
        processor.setExceptionToThrow(new IOException("Error reading from disk"));
        runner = TestRunners.newTestRunner(processor); // simulate an I/O failure on the HTTP call
        runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "http://127.0.0.1:9200");

        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.setProperty(QueryElasticsearchHttp.TYPE, "status");
        runner.setProperty(QueryElasticsearchHttp.QUERY, "${doc_id}");

        runner.enqueue("".getBytes(), new HashMap<String, String>() {
            {
                put("identifier", "28039652140");
            }
        });
        runner.run(1, true, true);

        // The mocked call throws an IOException, which routes the flow file to retry
        runner.assertAllFlowFilesTransferred(QueryElasticsearchHttp.REL_RETRY, 1);
        final MockFlowFile out = runner.getFlowFilesForRelationship(
                QueryElasticsearchHttp.REL_RETRY).get(0);
        assertNotNull(out);
    }

    @Test
    public void testQueryElasticsearchOnTriggerWithServerFailAfterSuccess() throws IOException {
        QueryElasticsearchHttpTestProcessor processor = new QueryElasticsearchHttpTestProcessor();
        // Pages 1 succeeds; page 2 returns the bad status.
        processor.setStatus(100, "Should fail", 2);
        runner = TestRunners.newTestRunner(processor); // simulate a mid-pagination failure
        runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "http://127.0.0.1:9200");

        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.setProperty(QueryElasticsearchHttp.TYPE, "status");
        runner.setProperty(QueryElasticsearchHttp.QUERY, "${doc_id}");

        runner.enqueue("".getBytes(), new HashMap<String, String>() {
            {
                put("identifier", "28039652140");
            }
        });
        runner.run(1, true, true);

        // This test generates a HTTP 100 "Should fail"
        runner.assertTransferCount(QueryElasticsearchHttp.REL_SUCCESS, 2);
        runner.assertTransferCount(QueryElasticsearchHttp.REL_FAILURE, 1);
        final MockFlowFile out = runner.getFlowFilesForRelationship(
                QueryElasticsearchHttp.REL_FAILURE).get(0);
        assertNotNull(out);
    }

    @Test
    public void testQueryElasticsearchOnTriggerWithServerFailNoIncomingFlowFile() throws IOException {
        QueryElasticsearchHttpTestProcessor processor = new QueryElasticsearchHttpTestProcessor();
        processor.setStatus(100, "Should fail", 1);
        runner = TestRunners.newTestRunner(processor); // simulate a failure with no input flow file
        runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "http://127.0.0.1:9200");

        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.setProperty(QueryElasticsearchHttp.TYPE, "status");
        runner.setProperty(QueryElasticsearchHttp.QUERY, "${doc_id}");

        runner.setIncomingConnection(false);
        runner.run(1, true, true);

        // This test generates a HTTP 100 with no incoming flow file, so nothing should be transferred
        processor.getRelationships().forEach(relationship -> runner.assertTransferCount(relationship, 0));
        runner.assertTransferCount(QueryElasticsearchHttp.REL_FAILURE, 0);
    }

    @Test
    public void testSetupSecureClient() throws Exception {
        // Verifies the processor can be configured with an SSL context service.
        QueryElasticsearchHttpTestProcessor processor = new QueryElasticsearchHttpTestProcessor();
        runner = TestRunners.newTestRunner(processor);
        SSLContextService sslService = mock(SSLContextService.class);
        when(sslService.getIdentifier()).thenReturn("ssl-context");
        runner.addControllerService("ssl-context", sslService);
        runner.enableControllerService(sslService);
        runner.setProperty(QueryElasticsearchHttp.PROP_SSL_CONTEXT_SERVICE, "ssl-context");
        runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "http://127.0.0.1:9200");
        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.setProperty(QueryElasticsearchHttp.QUERY, "${doc_id}");

        // Allow time for the controller service to fully initialize
        Thread.sleep(500);

        runner.enqueue("".getBytes(), new HashMap<String, String>() {
            {
                put("doc_id", "28039652140");
            }
        });
        runner.run(1, true, true);
    }

    /////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Integration test section below
    //
    // The tests below are meant to run on real ES instances, and are thus @Ignored during normal test execution.
    // However if you wish to execute them as part of a test phase, comment out the @Ignored line for each
    // desired test.
    /////////////////////////////////////////////////////////////////////////////////////////////////////////////

    @Test
    @Ignore("Un-authenticated proxy : Comment this out if you want to run against local proxied ES.")
    public void testQueryElasticsearchBasicBehindProxy() {
        System.out.println("Starting test " + new Object() {
        }.getClass().getEnclosingMethod().getName());
        final TestRunner runner = TestRunners.newTestRunner(new QueryElasticsearchHttp());

        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.setProperty(QueryElasticsearchHttp.TYPE, "status");
        runner.setProperty(QueryElasticsearchHttp.QUERY, "${doc_id}");
        runner.setProperty(QueryElasticsearchHttp.FIELDS, "id,, userinfo.location");

        runner.setProperty(QueryElasticsearchHttp.PROXY_HOST, "localhost");
        runner.setProperty(QueryElasticsearchHttp.PROXY_PORT, "3228");
        runner.setProperty(QueryElasticsearchHttp.ES_URL, "http://172.18.0.2:9200");

        runner.enqueue("".getBytes(), new HashMap<String, String>() {{
            put("doc_id", "28039652140");
        }});
        runner.run(1, true, true);

        runner.assertAllFlowFilesTransferred(QueryElasticsearchHttp.REL_SUCCESS, 1);
    }

    @Test
    @Ignore("Authenticated Proxy : Comment this out if you want to run against local proxied ES.")
    public void testQueryElasticsearchBasicBehindAuthenticatedProxy() {
        System.out.println("Starting test " + new Object() {
        }.getClass().getEnclosingMethod().getName());
        final TestRunner runner = TestRunners.newTestRunner(new QueryElasticsearchHttp());
        runner.setValidateExpressionUsage(true);

        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.setProperty(QueryElasticsearchHttp.TYPE, "status");
        runner.setProperty(QueryElasticsearchHttp.QUERY, "${doc_id}");
        runner.setProperty(QueryElasticsearchHttp.FIELDS, "id,, userinfo.location");

        runner.setProperty(QueryElasticsearchHttp.PROXY_HOST, "localhost");
        runner.setProperty(QueryElasticsearchHttp.PROXY_PORT, "3328");
        runner.setProperty(QueryElasticsearchHttp.PROXY_USERNAME, "squid");
        runner.setProperty(QueryElasticsearchHttp.PROXY_PASSWORD, "changeme");
        runner.setProperty(QueryElasticsearchHttp.ES_URL, "http://172.18.0.2:9200");

        runner.enqueue("".getBytes(), new HashMap<String, String>() {{
            put("doc_id", "28039652140");
        }});
        runner.run(1, true, true);

        runner.assertAllFlowFilesTransferred(QueryElasticsearchHttp.REL_SUCCESS, 1);
    }

    @Test
    public void testQueryElasticsearchOnTrigger_withQueryParameters() throws IOException {
        QueryElasticsearchHttpTestProcessor p = new QueryElasticsearchHttpTestProcessor();
        p.setExpectedParam("myparam=myvalue");
        runner = TestRunners.newTestRunner(p);
        runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "http://127.0.0.1:9200");

        runner.setProperty(QueryElasticsearchHttp.INDEX, "doc");
        runner.setProperty(QueryElasticsearchHttp.TYPE, "status");
        runner.setProperty(QueryElasticsearchHttp.QUERY, "source:Twitter");

        // Set dynamic property, to be added to the URL as a query parameter
        runner.setProperty("myparam", "myvalue");

        runAndVerifySuccess(true);
    }

    /**
     * A Test class that extends the processor in order to inject/mock behavior.
     * The OkHttp client is replaced by a Mockito mock whose successive calls
     * return the three canned result pages; one run (identified by runNumber)
     * can be configured to return a bad status or throw an exception instead.
     */
    private static class QueryElasticsearchHttpTestProcessor extends QueryElasticsearchHttp {

        // Optional exception thrown by Call.execute() instead of responding.
        Exception exceptionToThrow = null;
        OkHttpClient client;
        int goodStatusCode = 200;
        String goodStatusMessage = "OK";
        int badStatusCode;
        String badStatusMessage;
        // 1-based index of the page request that should get the bad status.
        int runNumber;

        // Canned ES responses, one per page of results.
        List<String> pages = Arrays.asList(getDoc("query-page1.json"), getDoc("query-page2.json"),
                getDoc("query-page3.json"));

        String expectedParam = null;

        public void setExceptionToThrow(Exception exceptionToThrow) {
            this.exceptionToThrow = exceptionToThrow;
        }

        /**
         * Sets the status code and message for the 1st query
         *
         * @param code
         *            The status code to return
         * @param message
         *            The status message
         */
        void setStatus(int code, String message) {
            this.setStatus(code, message, 1);
        }

        /**
         * Sets an query parameter (name=value) expected to be at the end of the URL for the query operation
         *
         * @param param
         *            The parameter to expect
         */
        void setExpectedParam(String param) {
            expectedParam = param;
        }

        /**
         * Sets the status code and message for the runNumber-th query
         *
         * @param code
         *            The status code to return
         * @param message
         *            The status message
         * @param runNumber
         *            The run number for which to set this status
         */
        void setStatus(int code, String message, int runNumber) {
            badStatusCode = code;
            badStatusMessage = message;
            this.runNumber = runNumber;
        }

        @Override
        protected void createElasticsearchClient(ProcessContext context) throws ProcessException {
            // Chain one stubbed answer per canned page; the runNumber-th page
            // (if configured) gets the bad status instead of the good one.
            client = mock(OkHttpClient.class);

            OngoingStubbing<Call> stub = when(client.newCall(any(Request.class)));

            for (int i = 0; i < pages.size(); i++) {
                String page = pages.get(i);
                if (runNumber == i + 1) {
                    stub = mockReturnDocument(stub, page, badStatusCode, badStatusMessage);
                } else {
                    stub = mockReturnDocument(stub, page, goodStatusCode, goodStatusMessage);
                }
            }
        }

        private OngoingStubbing<Call> mockReturnDocument(OngoingStubbing<Call> stub,
                final String document, int statusCode, String statusMessage) {
            return stub.thenAnswer(new Answer<Call>() {

                @Override
                public Call answer(InvocationOnMock invocationOnMock) throws Throwable {
                    Request realRequest = (Request) invocationOnMock.getArguments()[0];
                    // If an expected query parameter was configured, the real
                    // request URL must end with it.
                    assertTrue((expectedParam == null) || (realRequest.url().toString().endsWith(expectedParam)));
                    Response mockResponse = new Response.Builder()
                            .request(realRequest)
                            .protocol(Protocol.HTTP_1_1)
                            .code(statusCode)
                            .message(statusMessage)
                            .body(ResponseBody.create(MediaType.parse("application/json"), document))
                            .build();
                    final Call call = mock(Call.class);
                    if (exceptionToThrow != null) {
                        when(call.execute()).thenThrow(exceptionToThrow);
                    } else {
                        when(call.execute()).thenReturn(mockResponse);
                    }
                    return call;
                }
            });
        }

        @Override
        protected OkHttpClient getClient() {
            return client;
        }
    }

    // Loads a canned JSON response from the test classpath; returns "" on error.
    private static String getDoc(String filename) {
        try {
            return IOUtils.toString(QueryElasticsearchHttp.class.getClassLoader().getResourceAsStream(filename), StandardCharsets.UTF_8);
        } catch (IOException e) {
            System.out.println("Error reading document " + filename);
            return "";
        }
    }
}
| |
package shedar.mods.ic2.nuclearcontrol.gui;
import ic2.api.network.NetworkHelper;
import ic2.core.IC2;
import ic2.core.network.NetworkManager;
import java.util.List;
import net.minecraft.client.gui.GuiButton;
import net.minecraft.client.gui.GuiScreen;
import net.minecraft.client.gui.GuiTextField;
import net.minecraft.inventory.Container;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.StatCollector;
import org.lwjgl.opengl.GL11;
import shedar.mods.ic2.nuclearcontrol.IC2NuclearControl;
import shedar.mods.ic2.nuclearcontrol.api.IAdvancedCardSettings;
import shedar.mods.ic2.nuclearcontrol.api.ICardGui;
import shedar.mods.ic2.nuclearcontrol.api.ICardSettingsWrapper;
import shedar.mods.ic2.nuclearcontrol.api.ICardWrapper;
import shedar.mods.ic2.nuclearcontrol.api.IPanelDataSource;
import shedar.mods.ic2.nuclearcontrol.api.IPanelMultiCard;
import shedar.mods.ic2.nuclearcontrol.api.PanelSetting;
import shedar.mods.ic2.nuclearcontrol.gui.controls.GuiInfoPanelCheckBox;
import shedar.mods.ic2.nuclearcontrol.gui.controls.IconButton;
import shedar.mods.ic2.nuclearcontrol.panel.CardSettingsWrapperImpl;
import shedar.mods.ic2.nuclearcontrol.panel.CardWrapperImpl;
import shedar.mods.ic2.nuclearcontrol.tileentities.TileEntityAdvancedInfoPanel;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
@SideOnly(Side.CLIENT)
public class GuiAdvancedInfoPanel extends GuiInfoPanel {

    private static final String TEXTURE_FILE = "nuclearcontrol:textures/gui/GUIAdvancedInfoPanel.png";
    private static final ResourceLocation TEXTURE_LOCATION = new ResourceLocation(TEXTURE_FILE);

    // Button ids dispatched in actionPerformed.
    private static final int ID_LABELS = 1;
    private static final int ID_SLOPE = 2;
    private static final int ID_COLORS = 3;
    private static final int ID_POWER = 4;
    private static final int ID_SETTINGS = 5;

    // Index (0-based) of the currently selected card tab on the left edge.
    private byte activeTab;
    // Set once initControls() has run at least once for the current card.
    private boolean initialized;

    public GuiAdvancedInfoPanel(Container container) {
        super(container);
        ySize = 212;
        activeTab = 0;
        initialized = false;
        name = StatCollector.translateToLocal("tile.blockAdvancedInfoPanel.name");
        isColored = this.container.panel.getColored();
    }

    @Override
    protected void drawGuiContainerBackgroundLayer(float var1, int var2, int var3) {
        GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
        mc.renderEngine.bindTexture(TEXTURE_LOCATION);
        int left = (width - xSize) / 2;
        int top = (height - ySize) / 2;
        drawTexturedModalRect(left, top, 0, 0, xSize, ySize);
        // Highlight marker next to the active tab (tabs are 14 px tall).
        drawTexturedModalRect(left + 24, top + 62 + activeTab * 14, 182, 0, 1, 15);
    }

    @Override
    protected void drawGuiContainerForegroundLayer(int par1, int par2)
    {
        super.drawGuiContainerForegroundLayer(par1, par2);
    }

    /**
     * (Re)builds the button list and per-card setting checkboxes whenever the
     * active card changes; also prepares the title text box for editing.
     */
    @SuppressWarnings("unchecked")
    @Override
    protected void initControls()
    {
        ItemStack card = getActiveCard();
        // Skip the rebuild when the visible card has not changed.
        if ((card == null && prevCard == null && initialized) || (card != null && card.equals(prevCard)))
            return;
        initialized = true;
        int h = fontRendererObj.FONT_HEIGHT + 1;
        buttonList.clear();
        prevCard = card;
        //labels
        buttonList.add(new IconButton(ID_LABELS, guiLeft + 83, guiTop + 42, 16, 16, TEXTURE_LOCATION, 192 - 16, getIconLabelsTopOffset(container.panel.getShowLabels())));
        //slope
        buttonList.add(new IconButton(ID_SLOPE, guiLeft + 83 + 17 * 1, guiTop + 42, 16, 16, TEXTURE_LOCATION, 192, 15));
        //colors
        buttonList.add(new IconButton(ID_COLORS, guiLeft + 83 + 17 * 2, guiTop + 42, 16, 16, TEXTURE_LOCATION, 192, 15 + 16));
        //power
        buttonList.add(new IconButton(ID_POWER, guiLeft + 83 + 17 * 3, guiTop + 42, 16, 16, TEXTURE_LOCATION, 192 - 16,
                getIconPowerTopOffset(((TileEntityAdvancedInfoPanel) container.panel).getPowerMode())));
        if (card != null && card.getItem() instanceof IPanelDataSource)
        {
            byte slot = container.panel.getIndexOfCard(card);
            IPanelDataSource source = (IPanelDataSource) card.getItem();
            if (source instanceof IAdvancedCardSettings)
            {
                //settings
                buttonList.add(new IconButton(ID_SETTINGS, guiLeft + 83 + 17 * 4, guiTop + 42, 16, 16, TEXTURE_LOCATION, 192, 15 + 16 * 2));
            }
            int row = 0;
            List<PanelSetting> settingsList = null;
            // Multi-cards expose a per-tab settings list; plain cards a single one.
            if (card.getItem() instanceof IPanelMultiCard)
            {
                settingsList = ((IPanelMultiCard) source).getSettingsList(new CardWrapperImpl(card, activeTab));
            }
            else
            {
                settingsList = source.getSettingsList();
            }
            if (settingsList != null)
                for (PanelSetting panelSetting : settingsList)
                {
                    buttonList.add(new GuiInfoPanelCheckBox(0, guiLeft + 32, guiTop + 60 + h * row, panelSetting, container.panel, slot, fontRendererObj));
                    row++;
                }
            if (!modified)
            {
                // Fresh title box pre-filled with the card's current title.
                textboxTitle = new GuiTextField(fontRendererObj, 7, 16, 162, 18);
                textboxTitle.setFocused(true);
                textboxTitle.setText(new CardWrapperImpl(card, activeTab).getTitle());
            }
        }
        else
        {
            modified = false;
            textboxTitle = null;
        }
    }

    @Override
    protected ItemStack getActiveCard()
    {
        // The advanced panel holds one card per tab.
        return container.panel.getCards().get(activeTab);
    }

    @Override
    public void setWorldAndResolution(net.minecraft.client.Minecraft par1Minecraft, int par2, int par3)
    {
        // Force a full control rebuild after a resolution change.
        initialized = false;
        super.setWorldAndResolution(par1Minecraft, par2, par3);
    }

    // Texture v-offset for the "show labels" icon (checked vs. unchecked).
    private int getIconLabelsTopOffset(boolean checked)
    {
        return checked ? 15 : 31;
    }

    // Texture v-offset for the power-mode icon of the given mode.
    private int getIconPowerTopOffset(byte mode)
    {
        switch (mode)
        {
            case TileEntityAdvancedInfoPanel.POWER_REDSTONE:
                return 15 + 16 * 2;
            case TileEntityAdvancedInfoPanel.POWER_INVERTED:
                return 15 + 16 * 3;
            case TileEntityAdvancedInfoPanel.POWER_ON:
                return 15 + 16 * 4;
            case TileEntityAdvancedInfoPanel.POWER_OFF:
                return 15 + 16 * 5;
        }
        // Unknown mode: fall back to the redstone icon.
        return 15 + 16 * 2;
    }

    /**
     * Dispatches clicks on the icon buttons: opens sub-screens (colors, slope,
     * card settings) or toggles panel state (labels, power mode) and notifies
     * the server of the change.
     */
    @Override
    protected void actionPerformed(GuiButton button) {
        switch (button.id) {
            case ID_COLORS:
                GuiScreen colorGui = new GuiScreenColor(this, container.panel);
                mc.displayGuiScreen(colorGui);
                break;
            case ID_SETTINGS:
                ItemStack card = getActiveCard();
                if (card == null) {
                    return;
                }
                // The early return above guarantees card is non-null here
                // (the original code re-checked it redundantly).
                if (card.getItem() instanceof IAdvancedCardSettings) {
                    ICardWrapper helper = new CardWrapperImpl(card, activeTab);
                    Object guiObject = ((IAdvancedCardSettings) card.getItem()).getSettingsScreen(helper);
                    if (!(guiObject instanceof GuiScreen)) {
                        IC2NuclearControl.logger.warn("Invalid card, getSettingsScreen method should return GuiScreen object");
                        return;
                    }
                    GuiScreen gui = (GuiScreen) guiObject;
                    ICardSettingsWrapper wrapper = new CardSettingsWrapperImpl(card, container.panel, this, activeTab);
                    ((ICardGui) gui).setCardSettingsHelper(wrapper);
                    mc.displayGuiScreen(gui);
                }
                break;
            case ID_LABELS:
                boolean checked = !container.panel.getShowLabels();
                if (button instanceof IconButton) {
                    IconButton iButton = (IconButton) button;
                    iButton.textureTop = getIconLabelsTopOffset(checked);
                }
                // -1/-2 encode the show-labels toggle in the tile entity event.
                int value = checked ? -1 : -2;
                container.panel.setShowLabels(checked);
                ((NetworkManager) IC2.network.get()).initiateClientTileEntityEvent(container.panel, value);
                break;
            case ID_POWER:
                byte mode = ((TileEntityAdvancedInfoPanel) container.panel).getNextPowerMode();
                if (button instanceof IconButton) {
                    IconButton iButton = (IconButton) button;
                    iButton.textureTop = getIconPowerTopOffset(mode);
                }
                ((NetworkManager) IC2.network.get()).initiateClientTileEntityEvent(container.panel, mode);
                break;
            case ID_SLOPE:
                GuiPanelSlope slopeGui = new GuiPanelSlope(this, (TileEntityAdvancedInfoPanel) container.panel);
                mc.displayGuiScreen(slopeGui);
                break;
        }
    }

    @Override
    protected void mouseClicked(int x, int y, int par3)
    {
        super.mouseClicked(x, y, par3);
        // Tab strip: x in [guiLeft+7, guiLeft+24], y in [guiTop+62, guiTop+104],
        // one 14-px tall tab per card (max 3 tabs).
        if (x >= guiLeft + 7 && x <= guiLeft + 24 && y >= guiTop + 62 && y <= guiTop + 104)
        {
            byte newTab = (byte) ((y - guiTop - 62) / 14);
            if (newTab > 2)
                newTab = 2;
            // Persist a pending title edit before switching tabs.
            if (newTab != activeTab && modified)
                updateTitle();
            activeTab = newTab;
        }
    }
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.hashdatabase;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableColumn;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
/**
* Ingest job settings panel for hash lookup file ingest modules.
*/
/**
 * Ingest job settings panel for hash lookup file ingest modules. Presents two
 * tables of hash sets (known and known-bad) with per-set enable checkboxes,
 * plus a checkbox to always calculate hashes even when no set is enabled.
 */
public final class HashLookupModuleSettingsPanel extends IngestModuleIngestJobSettingsPanel implements PropertyChangeListener {

    private final HashDbManager hashDbManager = HashDbManager.getInstance();
    // Row models backing the "known" hash sets table; the table model is a live view.
    private final List<HashSetModel> knownHashSetModels = new ArrayList<>();
    private final HashSetsTableModel knownHashSetsTableModel = new HashSetsTableModel(knownHashSetModels);
    // Row models backing the "known bad" hash sets table.
    private final List<HashSetModel> knownBadHashSetModels = new ArrayList<>();
    private final HashSetsTableModel knownBadHashSetsTableModel = new HashSetsTableModel(knownBadHashSetModels);

    HashLookupModuleSettingsPanel(HashLookupModuleSettings settings) {
        initializeHashSetModels(settings);
        initComponents();
        customizeComponents(settings);
    }

    /**
     * Populates both model lists from the hash databases currently known to
     * the manager, applying the enabled state carried by the settings.
     */
    private void initializeHashSetModels(HashLookupModuleSettings settings) {
        initializeHashSetModels(settings, hashDbManager.getKnownFileHashSets(), knownHashSetModels);
        initializeHashSetModels(settings, hashDbManager.getKnownBadFileHashSets(), knownBadHashSetModels);
    }

    private void initializeHashSetModels(HashLookupModuleSettings settings, List<HashDb> hashDbs, List<HashSetModel> hashSetModels) {
        hashSetModels.clear();
        for (HashDb db : hashDbs) {
            String name = db.getHashSetName();
            hashSetModels.add(new HashSetModel(name, settings.isHashSetEnabled(name), isHashDbIndexed(db)));
        }
    }

    private void customizeComponents(HashLookupModuleSettings settings) {
        customizeHashSetsTable(jScrollPane1, knownHashTable, knownHashSetsTableModel);
        customizeHashSetsTable(jScrollPane2, knownBadHashTable, knownBadHashSetsTableModel);
        alwaysCalcHashesCheckbox.setSelected(settings.shouldCalculateHashes());
        // Listen for hash set add/delete/index events so the tables stay current.
        hashDbManager.addPropertyChangeListener(this);
        alwaysCalcHashesCheckbox.setText("<html>" + org.openide.util.NbBundle.getMessage(HashLookupModuleSettingsPanel.class, "HashLookupModuleSettingsPanel.alwaysCalcHashesCheckbox.text") + "</html>"); // NOI18N NON-NLS
    }

    /**
     * Installs the given table model and applies shared cosmetic settings:
     * no header, no row selection, a narrow checkbox column and a wide name
     * column sized relative to the enclosing scroll pane.
     */
    private void customizeHashSetsTable(JScrollPane scrollPane, JTable table, HashSetsTableModel tableModel) {
        table.setModel(tableModel);
        table.setTableHeader(null);
        table.setRowSelectionAllowed(false);
        final int width1 = scrollPane.getPreferredSize().width;
        // BUG FIX: configure the table passed in. The original code always
        // called knownHashTable.setAutoResizeMode(...), so the known-bad
        // table was left at its default resize mode.
        table.setAutoResizeMode(JTable.AUTO_RESIZE_NEXT_COLUMN);
        TableColumn column;
        for (int i = 0; i < table.getColumnCount(); i++) {
            column = table.getColumnModel().getColumn(i);
            if (i == 0) {
                // Narrow column for the enabled checkbox.
                column.setPreferredWidth(((int) (width1 * 0.07)));
            } else {
                // Wide column for the hash set name.
                column.setPreferredWidth(((int) (width1 * 0.92)));
            }
        }
    }

    @Override
    public void propertyChange(PropertyChangeEvent event) {
        // Refresh both tables whenever hash sets are added, deleted or indexed.
        if (event.getPropertyName().equals(HashDbManager.SetEvt.DB_ADDED.name())
                || event.getPropertyName().equals(HashDbManager.SetEvt.DB_DELETED.name())
                || event.getPropertyName().equals(HashDbManager.SetEvt.DB_INDEXED.name())) {
            update();
        }
    }

    @Override
    public IngestModuleIngestJobSettings getSettings() {
        List<String> enabledKnownHashSetNames = new ArrayList<>();
        List<String> disabledKnownHashSetNames = new ArrayList<>();
        List<String> enabledKnownBadHashSetNames = new ArrayList<>();
        List<String> disabledKnownBadHashSetNames = new ArrayList<>();
        getHashSetNames(knownHashSetModels, enabledKnownHashSetNames, disabledKnownHashSetNames);
        getHashSetNames(knownBadHashSetModels, enabledKnownBadHashSetNames, disabledKnownBadHashSetNames);
        return new HashLookupModuleSettings(alwaysCalcHashesCheckbox.isSelected(),
                enabledKnownHashSetNames, enabledKnownBadHashSetNames,
                disabledKnownHashSetNames, disabledKnownBadHashSetNames);
    }

    /**
     * Partitions the models into enabled and disabled name lists. A hash set
     * is only reported as enabled if it is both checked and indexed.
     */
    private void getHashSetNames(List<HashSetModel> hashSetModels, List<String> enabledHashSetNames, List<String> disabledHashSetNames) {
        for (HashSetModel model : hashSetModels) {
            if (model.isEnabled() && model.isIndexed()) {
                enabledHashSetNames.add(model.getName());
            } else {
                disabledHashSetNames.add(model.getName());
            }
        }
    }

    /**
     * Re-reads the hash set databases and repaints both tables.
     */
    void update() {
        updateHashSetModels();
        knownHashSetsTableModel.fireTableDataChanged();
        knownBadHashSetsTableModel.fireTableDataChanged();
    }

    private void updateHashSetModels() {
        updateHashSetModels(hashDbManager.getKnownFileHashSets(), knownHashSetModels);
        updateHashSetModels(hashDbManager.getKnownBadFileHashSets(), knownBadHashSetModels);
    }

    /**
     * Reconciles a model list with the current hash databases: refreshes the
     * indexed flag of surviving sets, drops deleted sets, and adds new ones
     * (new sets are enabled by default).
     */
    void updateHashSetModels(List<HashDb> hashDbs, List<HashSetModel> hashSetModels) {
        Map<String, HashDb> hashSetDbs = new HashMap<>();
        for (HashDb db : hashDbs) {
            hashSetDbs.put(db.getHashSetName(), db);
        }
        // Update the hash sets and detect deletions.
        List<HashSetModel> deletedHashSetModels = new ArrayList<>();
        for (HashSetModel model : hashSetModels) {
            String hashSetName = model.getName();
            if (hashSetDbs.containsKey(hashSetName)) {
                HashDb db = hashSetDbs.get(hashSetName);
                model.setIndexed(isHashDbIndexed(db));
                hashSetDbs.remove(hashSetName);
            } else {
                deletedHashSetModels.add(model);
            }
        }
        // Remove the deleted hash sets.
        for (HashSetModel model : deletedHashSetModels) {
            hashSetModels.remove(model);
        }
        // Add any new hash sets. All new sets are enabled by default.
        for (HashDb db : hashSetDbs.values()) {
            String name = db.getHashSetName();
            hashSetModels.add(new HashSetModel(name, true, isHashDbIndexed(db)));
        }
    }

    /**
     * Reinitializes the panel from new settings and repaints both tables.
     */
    void reset(HashLookupModuleSettings newSettings) {
        initializeHashSetModels(newSettings);
        knownHashSetsTableModel.fireTableDataChanged();
        knownBadHashSetsTableModel.fireTableDataChanged();
    }

    /**
     * Returns whether the given hash database has an index; logs and returns
     * false if the status cannot be determined.
     */
    private boolean isHashDbIndexed(HashDb hashDb) {
        boolean indexed = false;
        try {
            indexed = hashDb.hasIndex();
        } catch (TskCoreException ex) {
            Logger.getLogger(HashLookupModuleSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting indexed status info for hash set (name = " + hashDb.getHashSetName() + ")", ex); //NON-NLS
        }
        return indexed;
    }

    /**
     * Row model for a single hash set: immutable name plus mutable indexed
     * and user-controlled enabled flags.
     */
    private static final class HashSetModel {

        private final String name;
        private boolean indexed;
        private boolean enabled;

        HashSetModel(String name, boolean enabled, boolean indexed) {
            this.name = name;
            this.enabled = enabled;
            this.indexed = indexed;
        }

        String getName() {
            return name;
        }

        void setEnabled(boolean enabled) {
            this.enabled = enabled;
        }

        boolean isEnabled() {
            return enabled;
        }

        void setIndexed(boolean indexed) {
            this.indexed = indexed;
        }

        boolean isIndexed() {
            return indexed;
        }
    }

    /**
     * Two-column table model over a list of HashSetModel rows: column 0 is
     * the enabled checkbox (editable only for indexed sets), column 1 is the
     * hash set name (read-only).
     */
    private static final class HashSetsTableModel extends AbstractTableModel {

        private final List<HashSetModel> hashSets;

        HashSetsTableModel(List<HashSetModel> hashSets) {
            this.hashSets = hashSets;
        }

        @Override
        public int getRowCount() {
            return hashSets.size();
        }

        @Override
        public int getColumnCount() {
            return 2;
        }

        @Override
        public Object getValueAt(int rowIndex, int columnIndex) {
            if (columnIndex == 0) {
                return hashSets.get(rowIndex).isEnabled();
            } else {
                return hashSets.get(rowIndex).getName();
            }
        }

        @Override
        public boolean isCellEditable(int rowIndex, int columnIndex) {
            return (columnIndex == 0 && hashSets.get(rowIndex).isIndexed());
        }

        @Override
        public void setValueAt(Object aValue, int rowIndex, int columnIndex) {
            if (columnIndex == 0) {
                hashSets.get(rowIndex).setEnabled((Boolean) aValue);
            }
        }

        @Override
        public Class<?> getColumnClass(int c) {
            // Fixed per-column types. The previous getValueAt(0, c).getClass()
            // threw IndexOutOfBoundsException when the table was empty.
            return (c == 0) ? Boolean.class : String.class;
        }
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        jScrollPane1 = new javax.swing.JScrollPane();
        knownHashTable = new javax.swing.JTable();
        knownBadHashDbsLabel = new javax.swing.JLabel();
        knownHashDbsLabel = new javax.swing.JLabel();
        alwaysCalcHashesCheckbox = new javax.swing.JCheckBox();
        jScrollPane2 = new javax.swing.JScrollPane();
        knownBadHashTable = new javax.swing.JTable();
        setPreferredSize(new java.awt.Dimension(292, 150));
        jScrollPane1.setBorder(javax.swing.BorderFactory.createEtchedBorder());
        knownHashTable.setBackground(new java.awt.Color(240, 240, 240));
        knownHashTable.setShowHorizontalLines(false);
        knownHashTable.setShowVerticalLines(false);
        jScrollPane1.setViewportView(knownHashTable);
        knownBadHashDbsLabel.setText(org.openide.util.NbBundle.getMessage(HashLookupModuleSettingsPanel.class, "HashLookupModuleSettingsPanel.knownBadHashDbsLabel.text")); // NOI18N
        knownHashDbsLabel.setText(org.openide.util.NbBundle.getMessage(HashLookupModuleSettingsPanel.class, "HashLookupModuleSettingsPanel.knownHashDbsLabel.text")); // NOI18N
        alwaysCalcHashesCheckbox.setText(org.openide.util.NbBundle.getMessage(HashLookupModuleSettingsPanel.class, "HashLookupModuleSettingsPanel.alwaysCalcHashesCheckbox.text")); // NOI18N
        alwaysCalcHashesCheckbox.setToolTipText(org.openide.util.NbBundle.getMessage(HashLookupModuleSettingsPanel.class, "HashLookupModuleSettingsPanel.alwaysCalcHashesCheckbox.toolTipText")); // NOI18N
        alwaysCalcHashesCheckbox.setMaximumSize(new java.awt.Dimension(290, 35));
        alwaysCalcHashesCheckbox.setMinimumSize(new java.awt.Dimension(290, 35));
        alwaysCalcHashesCheckbox.setPreferredSize(new java.awt.Dimension(271, 35));
        alwaysCalcHashesCheckbox.setVerticalAlignment(javax.swing.SwingConstants.TOP);
        alwaysCalcHashesCheckbox.setVerticalTextPosition(javax.swing.SwingConstants.TOP);
        jScrollPane2.setBorder(javax.swing.BorderFactory.createEtchedBorder());
        knownBadHashTable.setBackground(new java.awt.Color(240, 240, 240));
        knownBadHashTable.setModel(new javax.swing.table.DefaultTableModel(
            new Object [][] {
            },
            new String [] {
            }
        ));
        knownBadHashTable.setShowHorizontalLines(false);
        knownBadHashTable.setShowVerticalLines(false);
        jScrollPane2.setViewportView(knownBadHashTable);
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
        this.setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(knownHashDbsLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 272, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addGap(0, 0, Short.MAX_VALUE))
                    .addComponent(knownBadHashDbsLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addGroup(layout.createSequentialGroup()
                        .addGap(10, 10, 10)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE)
                            .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE)))
                    .addComponent(alwaysCalcHashesCheckbox, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGap(2, 2, 2)
                .addComponent(knownHashDbsLabel)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 58, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(knownBadHashDbsLabel)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 58, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                .addComponent(alwaysCalcHashesCheckbox, javax.swing.GroupLayout.PREFERRED_SIZE, 35, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap())
        );
    }// </editor-fold>//GEN-END:initComponents
    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JCheckBox alwaysCalcHashesCheckbox;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JScrollPane jScrollPane2;
    private javax.swing.JLabel knownBadHashDbsLabel;
    private javax.swing.JTable knownBadHashTable;
    private javax.swing.JLabel knownHashDbsLabel;
    private javax.swing.JTable knownHashTable;
    // End of variables declaration//GEN-END:variables
}
| |
/*****************************************************************
<copyright>
Morozko Java Library
Copyright (c) 2007 Morozko
All rights reserved. This program and the accompanying materials
are made available under the terms of the Apache License v2.0
which accompanies this distribution, and is available at
http://www.apache.org/licenses/
(txt version : http://www.apache.org/licenses/LICENSE-2.0.txt
html version : http://www.apache.org/licenses/LICENSE-2.0.html)
This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).
</copyright>
*****************************************************************/
/*
* @(#)StreamIO.java
*
* @project : org.morozko.java.core
* @package : org.morozko.java.core.io
* @creation : 13/lug/07
* @release : xxxx.xx.xx
*/
package org.morozko.java.core.io;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import org.morozko.java.core.lang.helpers.ClassHelper;
/**
 * <p>Utility class for resolving, reading and piping byte and character streams.</p>
 *
 * @author Morozko
 *
 */
public class StreamIO {

    // Path prefix selecting class-loader resource resolution in resolveStream().
    public static final String PATH_CLASSLOADER = "cl://";

    // Path prefix reserved for JNDI resolution.
    // NOTE(review): resolveStream() does not currently handle this prefix.
    public static final String PATH_JNDI = "jndi://";

    // Path prefix selecting file-system resolution (also the default).
    public static final String PATH_FILE = "file://";

    /**
     * <p>Resolves a path into an InputStream, with no base path and
     * class-loader resources resolved relative to this class.</p>
     *
     * @param path the path to resolve (may start with "cl://" or "file://")
     * @return the resolved input stream
     * @throws Exception if the path cannot be resolved
     */
    public static InputStream resolveStream( String path ) throws Exception {
        return resolveStream( path, null );
    }

    /**
     * <p>Resolves a path into an InputStream, trying the given base path as a
     * fallback parent directory for relative file paths.</p>
     *
     * @param path the path to resolve (may start with "cl://" or "file://")
     * @param basePath fallback parent directory for file resolution (may be null)
     * @return the resolved input stream
     * @throws Exception if the path cannot be resolved
     */
    public static InputStream resolveStream( String path, String basePath ) throws Exception {
        return resolveStream( path, basePath, StreamIO.class );
    }

    /**
     * <p>Resolves a path into an InputStream. A "cl://" prefix is resolved
     * through the class loader of the given class; anything else is treated
     * as a file path (an optional "file://" prefix is stripped), tried first
     * as given and then relative to the base path.</p>
     *
     * @param path the path to resolve
     * @param basePath fallback parent directory for file resolution (may be null)
     * @param c class whose class loader is used for "cl://" paths
     * @return the resolved input stream
     * @throws FileNotFoundException if neither candidate file exists
     * @throws Exception if the path cannot be resolved
     */
    public static InputStream resolveStream( String path, String basePath, Class c ) throws Exception {
        InputStream is = null;
        if ( path.indexOf( PATH_CLASSLOADER ) == 0 ) {
            // class loader
            path = path.substring( PATH_CLASSLOADER.length() );
            is = ClassHelper.getResourceStream(path, c);
        } else {
            // default : file
            if ( path.indexOf( PATH_FILE ) == 0 ) {
                path = path.substring( PATH_FILE.length() );
            }
            File f = new File( path );
            if ( !f.exists() ) {
                // fall back to resolving the path against the base directory
                f = new File( basePath, path );
            }
            if ( !f.exists() ) {
                throw ( new FileNotFoundException( f.getAbsolutePath() ) );
            } else {
                is = new FileInputStream( f );
            }
        }
        return is;
    }

    /**
     * <p>Reads the whole InputStream into a String (platform default
     * charset); both streams are closed afterwards.</p>
     *
     * @param is the stream to read
     * @return the stream content as a String
     * @throws IOException if reading fails
     */
    public static String readString( InputStream is ) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        pipeStream( is , baos, MODE_CLOSE_BOTH);
        return baos.toString();
    }

    /**
     * <p>Reads the whole Reader into a byte array (platform default
     * charset); both streams are closed afterwards.</p>
     *
     * @param r the reader to drain
     * @return the content as bytes
     * @throws IOException if reading fails
     */
    public static byte[] readBytes( Reader r ) throws IOException {
        StringWriter w = new StringWriter();
        pipeChar( r , w, MODE_CLOSE_BOTH);
        return w.toString().getBytes();
    }

    /**
     * <p>Reads the whole Reader into a String; both streams are closed
     * afterwards.</p>
     *
     * @param r the reader to drain
     * @return the content as a String
     * @throws IOException if reading fails
     */
    public static String readString( Reader r ) throws IOException {
        StringWriter w = new StringWriter();
        pipeChar( r , w, MODE_CLOSE_BOTH);
        return w.toString();
    }

    /**
     * <p>Reads the whole InputStream into a byte array; both streams are
     * closed afterwards.</p>
     *
     * @param is the stream to drain
     * @return the content as bytes
     * @throws IOException if reading fails
     */
    public static byte[] readBytes( InputStream is ) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        pipeStream( is , baos, MODE_CLOSE_BOTH);
        return baos.toByteArray();
    }

    /**
     * <p>Pipes a Reader into a Writer.</p>
     *
     * @param src the source
     * @param dst the destination
     * @param mode the close mode (one of the MODE_CLOSE_* constants)
     * @param buffer the buffer size
     * @return the number of chars actually piped
     * @throws IOException if a problem occurs while writing
     */
    public static int pipeChar(Reader src, Writer dst, int mode, int buffer) throws IOException {
        return ((new StreamIO(mode, buffer)).pipe(src, dst));
    }

    /**
     * <p>Pipes a Reader into a Writer (the default buffer size is used).</p>
     *
     * @param src the source
     * @param dst the destination
     * @param mode the close mode (one of the MODE_CLOSE_* constants)
     * @return the number of chars actually piped
     * @throws IOException if a problem occurs while writing
     */
    public static int pipeChar(Reader src, Writer dst, int mode) throws IOException {
        return pipeChar(src, dst, mode, BUFFERSIZE_DEFAULT);
    }

    /**
     * <p>Pipes a Reader into a Writer (the default buffer size is used and
     * neither stream is closed).</p>
     *
     * @param src the source
     * @param dst the destination
     * @return the number of chars actually piped
     * @throws IOException if a problem occurs while writing
     */
    public static int pipeChar(Reader src, Writer dst) throws IOException {
        return pipeChar(src, dst, MODE_CLOSE_NONE, BUFFERSIZE_DEFAULT);
    }

    /**
     * <p>Pipes an InputStream into an OutputStream.</p>
     *
     * @param src the source
     * @param dst the destination
     * @param mode the close mode (one of the MODE_CLOSE_* constants)
     * @param buffer the buffer size
     * @return the number of bytes actually piped
     * @throws IOException if a problem occurs while writing
     */
    public static int pipeStream(InputStream src, OutputStream dst, int mode, int buffer) throws IOException {
        return ((new StreamIO(mode, buffer)).pipe(src, dst));
    }

    /**
     * <p>Pipes an InputStream into an OutputStream (the default buffer size
     * is used).</p>
     *
     * @param src the source
     * @param dst the destination
     * @param mode the close mode (one of the MODE_CLOSE_* constants)
     * @return the number of bytes actually piped
     * @throws IOException if a problem occurs while writing
     */
    public static int pipeStream(InputStream src, OutputStream dst, int mode) throws IOException {
        return pipeStream(src, dst, mode, BUFFERSIZE_DEFAULT);
    }

    /**
     * <p>Pipes an InputStream into an OutputStream (the default buffer size
     * is used and neither stream is closed).</p>
     *
     * @param src the source
     * @param dst the destination
     * @return the number of bytes actually piped
     * @throws IOException if a problem occurs while writing
     */
    public static int pipeStream(InputStream src, OutputStream dst) throws IOException {
        return pipeStream(src, dst, MODE_CLOSE_NONE, BUFFERSIZE_DEFAULT);
    }

    /**
     * <p>Mode that leaves both streams open at the end of the operation.</p>
     */
    public static final int MODE_CLOSE_NONE = 0;

    /**
     * <p>Mode that closes both streams at the end of the operation.</p>
     */
    public static final int MODE_CLOSE_BOTH = 4;

    /**
     * <p>Mode that closes only the input stream at the end of the operation.</p>
     */
    public static final int MODE_CLOSE_IN_ONLY = 1;

    /**
     * <p>Mode that closes only the output stream at the end of the operation.</p>
     */
    public static final int MODE_CLOSE_OUT_ONLY = 2;

    /**
     * <p>Small buffer size.</p>
     */
    public static final int BUFFERSIZE_LOW = 512;

    /**
     * <p>Medium buffer size.</p>
     */
    public static final int BUFFERSIZE_MEDIUM = 1024;

    /**
     * <p>Large buffer size.</p>
     */
    public static final int BUFFERSIZE_HIGH = 2048;

    /**
     * <p>Default buffer size.</p>
     */
    public static final int BUFFERSIZE_DEFAULT = BUFFERSIZE_MEDIUM;

    /**
     * <p>No buffering (a single-element buffer).</p>
     */
    public static final int BUFFERSIZE_NOBUFFER = 1;

    public int mode; // the close mode applied after piping (MODE_CLOSE_* constant)
    public int bufferSize; // the size of the read buffer used

    /**
     * <p>Returns the value of mode.</p>
     *
     * @return the value of mode.
     */
    public int getMode() {
        return mode;
    }

    /**
     * <p>Returns the value of bufferSize.</p>
     *
     * @return the value of bufferSize.
     */
    public int getBufferSize() {
        return bufferSize;
    }

    /**
     * <p>Creates a new StreamIO.</p>
     *
     * @param mode the close mode (one of the MODE_CLOSE_* constants)
     * @param bufferSize the size of the read buffer
     */
    public StreamIO(int mode, int bufferSize) {
        this.mode = mode;
        this.bufferSize = bufferSize;
    }

    /**
     * <p>Pipes the contents of an InputStream into an OutputStream.</p>
     *
     * @param src the source
     * @param dst the destination
     * @return the number of bytes actually piped
     * @throws IOException if a problem occurs while writing
     */
    public int pipe(InputStream src, OutputStream dst) throws IOException {
        int result = 0;
        byte[] buffer = new byte[bufferSize];
        int read = src.read(buffer);
        while (read>0) {
            dst.write(buffer, 0, read);
            result+=read;
            read = src.read(buffer);
        }
        // close the streams according to the configured mode
        if (this.isModeCloseIn()) {
            src.close();
        }
        if (this.isModeCloseOut()) {
            dst.close();
        }
        return result;
    }

    /**
     * <p>Pipes the contents of a Reader into a Writer.</p>
     *
     * @param src the source
     * @param dst the destination
     * @return the number of chars actually piped
     * @throws IOException if a problem occurs while writing
     */
    public int pipe(Reader src, Writer dst) throws IOException {
        int result = 0;
        char[] buffer = new char[bufferSize];
        int read = src.read(buffer);
        while (read>0) {
            dst.write(buffer, 0, read);
            result+=read;
            read = src.read(buffer);
        }
        // close the streams according to the configured mode
        if (this.isModeCloseIn()) {
            src.close();
        }
        if (this.isModeCloseOut()) {
            dst.close();
        }
        return result;
    }

    /** @return true if the mode closes neither stream */
    public boolean isModeCloseNone() {
        return this.mode==MODE_CLOSE_NONE;
    }

    /** @return true if the mode closes both streams */
    public boolean isModeCloseBoth() {
        return this.mode==MODE_CLOSE_BOTH;
    }

    /** @return true if the mode closes the output stream */
    public boolean isModeCloseOut() {
        return this.mode==MODE_CLOSE_OUT_ONLY || this.mode==MODE_CLOSE_BOTH;
    }

    /** @return true if the mode closes the input stream */
    public boolean isModeCloseIn() {
        return this.mode==MODE_CLOSE_IN_ONLY || this.mode==MODE_CLOSE_BOTH;
    }
}
| |
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onlab.util;
import com.google.common.base.Objects;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.lang.String.format;
import static org.apache.commons.lang3.ArrayUtils.reverse;
/**
* Immutable sequence of bytes, assumed to represent a value in
* {@link ByteOrder#BIG_ENDIAN BIG_ENDIAN} order.
* <p>
* Sequences can be created copying from an already existing representation of a
* sequence of bytes, such as {@link ByteBuffer} or {@code byte[]}; or by
* copying bytes from a primitive data type, such as {@code long}, {@code int}
* or {@code short}. In the first case, bytes are assumed to be already given in
* big-endian order, while in the second case big-endianness is enforced by this
* class.
*/
/**
 * Immutable sequence of bytes, assumed to represent a value in
 * {@link ByteOrder#BIG_ENDIAN BIG_ENDIAN} order.
 * <p>
 * Sequences can be created copying from an already existing representation of a
 * sequence of bytes, such as {@link ByteBuffer} or {@code byte[]}; or by
 * copying bytes from a primitive data type, such as {@code long}, {@code int}
 * or {@code short}. In the first case, bytes are assumed to be already given in
 * big-endian order, while in the second case big-endianness is enforced by this
 * class.
 */
public final class ImmutableByteSequence {
    /*
    Actual bytes are backed by a byte buffer.
    The order of a newly-created byte buffer is always BIG_ENDIAN.
    */
    private ByteBuffer value;

    /**
     * Private constructor.
     * Creates a new byte sequence object backed by the passed ByteBuffer.
     *
     * @param value a byte buffer
     */
    private ImmutableByteSequence(ByteBuffer value) {
        this.value = value;
        // Rewind buffer so it's ready to be read.
        // No write operation should be performed on it from now on.
        this.value.rewind();
    }

    /**
     * Creates a new immutable byte sequence with the same content and order of
     * the passed byte array.
     *
     * @param original a byte array value
     * @return a new immutable byte sequence
     */
    public static ImmutableByteSequence copyFrom(byte[] original) {
        checkArgument(original != null && original.length > 0,
                      "Cannot copy from an empty or null array");
        return new ImmutableByteSequence(
                ByteBuffer.allocate(original.length).put(original));
    }

    /**
     * Creates a new immutable byte sequence with the same content and order of
     * the passed byte array, from/to the given indexes (inclusive).
     *
     * @param original a byte array value
     * @param fromIdx starting index
     * @param toIdx ending index
     * @return a new immutable byte sequence
     */
    public static ImmutableByteSequence copyFrom(byte[] original, int fromIdx, int toIdx) {
        checkArgument(original != null && original.length > 0,
                      "Cannot copy from an empty or null array");
        checkArgument(toIdx >= fromIdx && toIdx < original.length, "invalid indexes");
        ByteBuffer buffer = ByteBuffer.allocate((toIdx - fromIdx) + 1);
        for (int i = fromIdx; i <= toIdx; i++) {
            buffer.put(original[i]);
        }
        return new ImmutableByteSequence(buffer);
    }

    /**
     * Creates a new immutable byte sequence copying bytes from the given
     * {@link ByteBuffer}. If the byte buffer order is not big-endian bytes
     * will be copied in reverse order.
     *
     * @param original a byte buffer
     * @return a new byte buffer object
     */
    public static ImmutableByteSequence copyFrom(ByteBuffer original) {
        checkArgument(original != null && original.capacity() > 0,
                      "Cannot copy from an empty or null byte buffer");
        byte[] bytes = new byte[original.capacity()];
        // copy bytes from original buffer
        original.rewind();
        original.get(bytes);
        if (original.order() == ByteOrder.LITTLE_ENDIAN) {
            // FIXME: this can be improved, e.g. read bytes in reverse order from original
            reverse(bytes);
        }
        return new ImmutableByteSequence(ByteBuffer.wrap(bytes));
    }

    /**
     * Creates a new byte sequence of 8 bytes containing the given long value.
     *
     * @param original a long value
     * @return a new immutable byte sequence
     */
    public static ImmutableByteSequence copyFrom(long original) {
        return new ImmutableByteSequence(
                ByteBuffer.allocate(Long.BYTES).putLong(original));
    }

    /**
     * Creates a new byte sequence of 4 bytes containing the given int value.
     *
     * @param original an int value
     * @return a new immutable byte sequence
     */
    public static ImmutableByteSequence copyFrom(int original) {
        return new ImmutableByteSequence(
                ByteBuffer.allocate(Integer.BYTES).putInt(original));
    }

    /**
     * Creates a new byte sequence of 2 bytes containing the given short value.
     *
     * @param original a short value
     * @return a new immutable byte sequence
     */
    public static ImmutableByteSequence copyFrom(short original) {
        return new ImmutableByteSequence(
                ByteBuffer.allocate(Short.BYTES).putShort(original));
    }

    /**
     * Creates a new byte sequence of 1 byte containing the given value.
     *
     * @param original a byte value
     * @return a new immutable byte sequence
     */
    public static ImmutableByteSequence copyFrom(byte original) {
        return new ImmutableByteSequence(
                ByteBuffer.allocate(Byte.BYTES).put(original));
    }

    /**
     * Creates a new byte sequence of the given size where all bits are 0.
     *
     * @param size number of bytes
     * @return a new immutable byte sequence
     */
    public static ImmutableByteSequence ofZeros(int size) {
        // A freshly-allocated array is already zero-filled; the explicit fill
        // is kept for symmetry with ofOnes().
        byte[] bytes = new byte[size];
        Arrays.fill(bytes, (byte) 0);
        return new ImmutableByteSequence(ByteBuffer.wrap(bytes));
    }

    /**
     * Creates a new byte sequence of the given size where all bits are 1.
     *
     * @param size number of bytes
     * @return a new immutable byte sequence
     */
    public static ImmutableByteSequence ofOnes(int size) {
        byte[] bytes = new byte[size];
        Arrays.fill(bytes, (byte) 0xFF);
        return new ImmutableByteSequence(ByteBuffer.wrap(bytes));
    }

    /**
     * Returns a view of this sequence as a read-only {@link ByteBuffer}.
     * <p>
     * The returned buffer will have position 0, while limit and capacity will
     * be set to this sequence {@link #size()}. The buffer order will be
     * big-endian.
     *
     * @return a read-only byte buffer
     */
    public ByteBuffer asReadOnlyBuffer() {
        // position, limit and capacity set rewind at constructor
        return value.asReadOnlyBuffer();
    }

    /**
     * Gets the number of bytes in this sequence.
     *
     * @return an integer value
     */
    public int size() {
        return this.value.capacity();
    }

    /**
     * Creates a new byte array view of this sequence.
     *
     * @return a new byte array
     */
    public byte[] asArray() {
        ByteBuffer bb = asReadOnlyBuffer();
        byte[] bytes = new byte[size()];
        bb.get(bytes);
        return bytes;
    }

    @Override
    public int hashCode() {
        return value.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final ImmutableByteSequence other = (ImmutableByteSequence) obj;
        return Objects.equal(this.value, other.value);
    }

    @Override
    public String toString() {
        return HexString.toHexString(value.array());
    }

    /**
     * Trims or expands the given byte sequence so to fit a given bit-width. When trimming, the
     * operations is deemed to be safe only if the trimmed bits are zero, otherwise an exception
     * will be thrown. When expanding, the sequence will be padded with zeros. The returned byte
     * sequence will have minimum size to contain the given bit-width.
     *
     * @param original a byte sequence
     * @param bitWidth a non-zero positive integer
     * @return a new byte sequence
     * @throws ByteSequenceTrimException if the byte sequence cannot be fitted
     */
    public static ImmutableByteSequence fit(ImmutableByteSequence original, int bitWidth)
            throws ByteSequenceTrimException {
        checkNotNull(original, "byte sequence cannot be null");
        checkArgument(bitWidth > 0, "bit-width must be a non-zero positive integer");
        int newByteWidth = (int) Math.ceil((double) bitWidth / 8);
        byte[] originalBytes = original.asArray();
        if (newByteWidth > original.size()) {
            // pad missing bytes with zeros
            return ImmutableByteSequence.copyFrom(Arrays.copyOf(originalBytes, newByteWidth));
        }
        byte[] newBytes = new byte[newByteWidth];
        // ImmutableByteSequence is always big-endian, hence check the array in reverse order
        int diff = originalBytes.length - newByteWidth;
        for (int i = originalBytes.length - 1; i >= 0; i--) {
            byte ob = originalBytes[i]; // original byte
            byte nb; // new byte
            if (i > diff) {
                // no need to truncate, copy as is
                nb = ob;
            } else if (i == diff) {
                // Truncate this byte: only the low (bitWidth % 8) bits may
                // survive; a byte-aligned width (remainder 0) keeps the byte.
                // BUG FIX: the original mask was (1 >> ((bitWidth % 8) + 1)) - 1,
                // and 1 >> n is 0 for n >= 1, giving a mask of 0xFF — so the
                // truncation check never fired and over-wide values were
                // silently accepted instead of raising a trim exception.
                int rem = bitWidth % 8;
                byte mask = (rem == 0) ? (byte) 0xFF : (byte) ((1 << rem) - 1);
                if ((ob & ~mask) != 0) {
                    throw new ByteSequenceTrimException(originalBytes, bitWidth);
                } else {
                    nb = (byte) (ob & mask);
                }
            } else {
                // drop this byte, check if we're losing something
                if (originalBytes[i] != 0) {
                    throw new ByteSequenceTrimException(originalBytes, bitWidth);
                } else {
                    continue;
                }
            }
            newBytes[i - diff] = nb;
        }
        return ImmutableByteSequence.copyFrom(newBytes);
    }

    /**
     * Signals that a byte sequence cannot be trimmed.
     */
    public static class ByteSequenceTrimException extends Exception {
        ByteSequenceTrimException(byte[] bytes, int bitWidth) {
            super(format("cannot trim %s into a %d long bits value",
                         HexString.toHexString(bytes), bitWidth));
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.mongodb;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.Mongo;
import com.mongodb.ReadPreference;
import com.mongodb.WriteConcern;
import com.mongodb.WriteResult;
import org.apache.camel.Consumer;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.impl.DefaultEndpoint;
import org.apache.camel.impl.DefaultExchange;
import org.apache.camel.impl.DefaultMessage;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@UriEndpoint(scheme = "mongodb", syntax = "mongodb:connectionBean", consumerClass = MongoDbTailableCursorConsumer.class, label = "database,nosql")
public class MongoDbEndpoint extends DefaultEndpoint {
private static final Logger LOG = LoggerFactory.getLogger(MongoDbEndpoint.class);
private Mongo mongoConnection;
@UriPath @Metadata(required = "true")
private String connectionBean;
@UriParam
private String database;
@UriParam
private String collection;
@UriParam
private String collectionIndex;
@UriParam
private MongoDbOperation operation;
@UriParam(defaultValue = "true")
private boolean createCollection = true;
@UriParam
private boolean invokeGetLastError;
@UriParam
private WriteConcern writeConcern;
private WriteConcern writeConcernRef;
@UriParam
private ReadPreference readPreference;
@UriParam
private boolean dynamicity;
@UriParam
private boolean writeResultAsHeader;
// tailable cursor consumer by default
private MongoDbConsumerType consumerType;
@UriParam(defaultValue = "1000")
private long cursorRegenerationDelay = 1000L;
@UriParam
private String tailTrackIncreasingField;
// persistent tail tracking
@UriParam
private boolean persistentTailTracking;
@UriParam
private String persistentId;
@UriParam
private String tailTrackDb;
@UriParam
private String tailTrackCollection;
@UriParam
private String tailTrackField;
private MongoDbTailTrackingConfig tailTrackingConfig;
private DBCollection dbCollection;
private DB db;
// ======= Constructors ===============================================
/** No-arg constructor. */
public MongoDbEndpoint() {
}
/** Creates the endpoint with its URI and owning component. */
public MongoDbEndpoint(String uri, MongoDbComponent component) {
super(uri, component);
}
/** Creates the endpoint from a URI only (delegates to a deprecated super constructor). */
@SuppressWarnings("deprecation")
public MongoDbEndpoint(String endpointUri) {
super(endpointUri);
}
// ======= Implementation methods =====================================

/**
 * Creates a producer for this endpoint after rejecting consumer-only options
 * and making sure the MongoDB connection is usable.
 */
public Producer createProducer() throws Exception {
    validateOptions('P');
    initializeConnection();
    return new MongoDbProducer(this);
}
/**
 * Creates a consumer for this endpoint. Consumers work against existing
 * collections only, so collection auto-creation is disabled first. Only the
 * tailable cursor consumer type is supported at present.
 */
public Consumer createConsumer(Processor processor) throws Exception {
    validateOptions('C');
    // we never create the collection
    createCollection = false;
    initializeConnection();
    // Default to the tailable cursor consumer when no type was configured.
    MongoDbConsumerType type = consumerType == null ? MongoDbConsumerType.tailable : consumerType;
    consumerType = type;
    if (type != MongoDbConsumerType.tailable) {
        throw new CamelMongoDbException("Consumer type not supported: " + type);
    }
    Consumer answer = new MongoDbTailableCursorConsumer(this, processor);
    configureConsumer(answer);
    return answer;
}
/**
 * Rejects options that do not belong to the given endpoint role.
 * Best-effort validation: options that have defaults are compared against those
 * defaults, which does not prove they were explicitly set, but is good enough.
 *
 * @param role 'P' for producer, 'C' for consumer
 */
private void validateOptions(char role) throws IllegalArgumentException {
    switch (role) {
    case 'P':
        if (!ObjectHelper.isEmpty(consumerType) || persistentTailTracking
                || !ObjectHelper.isEmpty(tailTrackDb) || !ObjectHelper.isEmpty(tailTrackCollection)
                || !ObjectHelper.isEmpty(tailTrackField) || cursorRegenerationDelay != 1000L) {
            throw new IllegalArgumentException("consumerType, tailTracking, cursorRegenerationDelay options cannot appear on a producer endpoint");
        }
        break;
    case 'C':
        if (!ObjectHelper.isEmpty(operation) || !ObjectHelper.isEmpty(writeConcern) || writeConcernRef != null
                || dynamicity || invokeGetLastError) {
            throw new IllegalArgumentException("operation, writeConcern, writeConcernRef, dynamicity, invokeGetLastError "
                    + "options cannot appear on a consumer endpoint");
        }
        if (consumerType == MongoDbConsumerType.tailable) {
            if (tailTrackIncreasingField == null) {
                throw new IllegalArgumentException("tailTrackIncreasingField option must be set for tailable cursor MongoDB consumer endpoint");
            }
            if (persistentTailTracking && ObjectHelper.isEmpty(persistentId)) {
                throw new IllegalArgumentException("persistentId is compulsory for persistent tail tracking");
            }
        }
        break;
    default:
        throw new IllegalArgumentException("Unknown endpoint role");
    }
}
/** This endpoint holds shared connection state, so one instance is reused per URI. */
public boolean isSingleton() {
return true;
}
/**
 * Initialises the MongoDB connection using the Mongo object provided to the endpoint.
 *
 * @throws CamelMongoDbException if the connection bean or required options are missing,
 *             the database cannot be obtained, the collection does not exist while
 *             createCollection is false, or index creation fails
 */
public void initializeConnection() throws CamelMongoDbException {
    LOG.info("Initialising MongoDb endpoint: {}", this.toString());
    // Fail fast with a clear message instead of an NPE if no connection bean was injected.
    if (mongoConnection == null) {
        throw new CamelMongoDbException("Missing required endpoint configuration: mongoConnection (connection bean)");
    }
    // A collection is not required for database-level operations (getDbStats, command).
    if (database == null || (collection == null && !(MongoDbOperation.getDbStats.equals(operation) || MongoDbOperation.command.equals(operation)))) {
        throw new CamelMongoDbException("Missing required endpoint configuration: database and/or collection");
    }
    db = mongoConnection.getDB(database);
    if (db == null) {
        throw new CamelMongoDbException("Could not initialise MongoDbComponent. Database " + database + " does not exist.");
    }
    if (collection != null) {
        if (!createCollection && !db.collectionExists(collection)) {
            // Message previously omitted "does not exist", producing a garbled sentence.
            throw new CamelMongoDbException("Could not initialise MongoDbComponent. Collection " + collection + " does not exist and createCollection is false.");
        }
        dbCollection = db.getCollection(collection);
        LOG.debug("MongoDb component initialised and endpoint bound to MongoDB collection with the following parameters. Address list: {}, Db: {}, Collection: {}",
                new Object[]{mongoConnection.getAllAddress().toString(), db.getName(), dbCollection.getName()});
        try {
            if (ObjectHelper.isNotEmpty(collectionIndex)) {
                ensureIndex(dbCollection, createIndex());
            }
        } catch (Exception e) {
            throw new CamelMongoDbException("Error creating index", e);
        }
    }
}
/**
 * Creates each of the supplied index definitions on the given collection.
 *
 * @param collection the collection to index
 * @param dynamicIndex index definitions to create; may be null or empty, in which case nothing happens
 */
public void ensureIndex(DBCollection collection, List<DBObject> dynamicIndex) {
    if (dynamicIndex == null || dynamicIndex.isEmpty()) {
        return;
    }
    for (DBObject indexSpec : dynamicIndex) {
        LOG.debug("create BDObject Index {}", indexSpec);
        collection.createIndex(indexSpec);
    }
}
/**
 * Builds the list of index definitions from the collectionIndex endpoint option.
 * <p/>
 * The option is JSON of the form { "field1" : order1, "field2" : order2 }.
 *
 * @return index definitions; empty when no collectionIndex is configured
 * @throws Exception if the collectionIndex option cannot be parsed as JSON
 */
@SuppressWarnings("unchecked")
public List<DBObject> createIndex() throws Exception {
    List<DBObject> indexList = new ArrayList<DBObject>();
    if (ObjectHelper.isNotEmpty(collectionIndex)) {
        // Jackson deserialises JSON numbers into Integer, so the map values must be
        // declared as Object; the previous HashMap<String, String> declaration lied
        // about the runtime value type.
        Map<String, Object> indexMap = new ObjectMapper().readValue(collectionIndex, HashMap.class);
        for (Map.Entry<String, Object> set : indexMap.entrySet()) {
            DBObject index = new BasicDBObject();
            // MongoDB 2.4 upwards is restrictive about the type of the 'single field index' being
            // in use below (set.getValue()) as only an integer value type is accepted, otherwise
            // server will throw an exception, see more details:
            // http://docs.mongodb.org/manual/release-notes/2.4/#improved-validation-of-index-types
            index.put(set.getKey(), set.getValue());
            indexList.add(index);
        }
    }
    return indexList;
}
/**
 * Applies validation logic specific to this endpoint type. If everything succeeds,
 * continues initialization.
 */
@Override
protected void doStart() throws Exception {
    // writeConcern and writeConcernRef are mutually exclusive ways to configure the same thing.
    if (writeConcern != null && writeConcernRef != null) {
        throw new IllegalArgumentException("Cannot set both writeConcern and writeConcernRef at the same time. Respective values: "
                + writeConcern + ", " + writeConcernRef + ". Aborting initialization.");
    }
    setWriteReadOptionsOnConnection();
    super.doStart();
}
/**
 * Builds an Exchange carrying a tailed DBObject as the IN body, flagged with the
 * endpoint's database/collection headers and the FROM_TAILABLE marker.
 */
public Exchange createMongoDbExchange(DBObject dbObj) {
    Message in = new DefaultMessage();
    in.setHeader(MongoDbConstants.DATABASE, database);
    in.setHeader(MongoDbConstants.COLLECTION, collection);
    in.setHeader(MongoDbConstants.FROM_TAILABLE, true);
    in.setBody(dbObj);
    Exchange exchange = new DefaultExchange(getCamelContext(), getExchangePattern());
    exchange.setIn(in);
    return exchange;
}
/**
 * Propagates the configured write concern (explicit standard concern wins over a
 * registry reference) and read preference onto the Mongo connection.
 */
private void setWriteReadOptionsOnConnection() {
    WriteConcern concern = writeConcern != null ? writeConcern : writeConcernRef;
    if (concern != null) {
        mongoConnection.setWriteConcern(concern);
    }
    if (readPreference != null) {
        mongoConnection.setReadPreference(readPreference);
    }
}
// ======= Getters and setters ===============================================
/** Returns the configured connection bean name. */
public String getConnectionBean() {
return connectionBean;
}
/**
 * Name of {@link com.mongodb.Mongo} to use.
 */
public void setConnectionBean(String connectionBean) {
this.connectionBean = connectionBean;
}
/**
 * Sets the name of the MongoDB collection to bind to this endpoint
 *
 * @param collection collection name
 */
public void setCollection(String collection) {
this.collection = collection;
}
/** Returns the bound collection name, or null if none was configured. */
public String getCollection() {
return collection;
}
/**
 * Sets the collection index (JSON FORMAT : { "field1" : order1, "field2" : order2})
 */
public void setCollectionIndex(String collectionIndex) {
this.collectionIndex = collectionIndex;
}
/** Returns the raw collection index JSON, or null if none was configured. */
public String getCollectionIndex() {
return collectionIndex;
}
/**
 * Sets the operation this endpoint will execute against MongoDB. For possible values, see {@link MongoDbOperation}.
 *
 * @param operation name of the operation as per catalogued values
 * @throws CamelMongoDbException if the operation name is null or not a catalogued value
 */
public void setOperation(String operation) throws CamelMongoDbException {
    // valueOf(null) would surface as a NullPointerException; map both the null and the
    // unknown-name cases to the component's own exception type, and include the
    // offending value to ease diagnosis.
    if (operation == null) {
        throw new CamelMongoDbException("Operation not supported: null");
    }
    try {
        this.operation = MongoDbOperation.valueOf(operation);
    } catch (IllegalArgumentException e) {
        throw new CamelMongoDbException("Operation not supported: " + operation, e);
    }
}

/** Returns the configured operation, or null if none was set. */
public MongoDbOperation getOperation() {
    return operation;
}
/**
 * Sets the name of the MongoDB database to target
 *
 * @param database name of the MongoDB database
 */
public void setDatabase(String database) {
this.database = database;
}
/** Returns the target database name. */
public String getDatabase() {
return database;
}
/**
 * Create collection during initialisation if it doesn't exist. Default is true.
 *
 * @param createCollection true or false
 */
public void setCreateCollection(boolean createCollection) {
this.createCollection = createCollection;
}
/** Returns whether a missing collection is created during initialisation. */
public boolean isCreateCollection() {
return createCollection;
}
/** Returns the DB handle resolved by {@link #initializeConnection()}; null before initialisation. */
public DB getDb() {
return db;
}
/** Returns the collection handle resolved by {@link #initializeConnection()}; null before initialisation. */
public DBCollection getDbCollection() {
return dbCollection;
}
/**
 * Sets the Mongo instance that represents the backing connection
 *
 * @param mongoConnection the connection to the database
 */
public void setMongoConnection(Mongo mongoConnection) {
this.mongoConnection = mongoConnection;
}
/** Returns the backing Mongo connection instance. */
public Mongo getMongoConnection() {
return mongoConnection;
}
/**
 * Set the {@link WriteConcern} for write operations on MongoDB using the standard ones.
 * Resolved from the fields of the WriteConcern class by calling the {@link WriteConcern#valueOf(String)} method.
 *
 * @param writeConcern the standard name of the WriteConcern
 * @see <a href="http://api.mongodb.org/java/current/com/mongodb/WriteConcern.html#valueOf(java.lang.String)">possible options</a>
 */
public void setWriteConcern(String writeConcern) {
this.writeConcern = WriteConcern.valueOf(writeConcern);
}
/** Returns the resolved WriteConcern, or null if none was configured. */
public WriteConcern getWriteConcern() {
return writeConcern;
}
/**
 * Instructs this endpoint to invoke {@link WriteResult#getLastError()} with every operation. By default, MongoDB does not wait
 * for the write operation to occur before returning. If set to true, each exchange will only return after the write operation
 * has actually occurred in MongoDB.
 *
 * @param invokeGetLastError true or false
 */
public void setInvokeGetLastError(boolean invokeGetLastError) {
this.invokeGetLastError = invokeGetLastError;
}
/** Returns whether getLastError() is invoked after every write operation. */
public boolean isInvokeGetLastError() {
return invokeGetLastError;
}
/**
 * Set the {@link WriteConcern} for write operations on MongoDB, passing in the bean ref to a custom WriteConcern which exists in the Registry.
 * You can also use standard WriteConcerns by passing in their key. See the {@link #setWriteConcern(String) setWriteConcern} method.
 *
 * @param writeConcernRef the name of the bean in the registry that represents the WriteConcern to use
 * @throws IllegalArgumentException if no WriteConcern bean with that name is found in the registry
 */
public void setWriteConcernRef(String writeConcernRef) {
// NOTE(review): requires the CamelContext to be set already, otherwise this
// dereference throws a NullPointerException — confirm call ordering with the component.
WriteConcern wc = this.getCamelContext().getRegistry().lookupByNameAndType(writeConcernRef, WriteConcern.class);
if (wc == null) {
String msg = "Camel MongoDB component could not find the WriteConcern in the Registry. Verify that the "
+ "provided bean name (" + writeConcernRef + ") is correct. Aborting initialization.";
throw new IllegalArgumentException(msg);
}
this.writeConcernRef = wc;
}
/** Returns the registry-resolved WriteConcern, or null if none was configured. */
public WriteConcern getWriteConcernRef() {
return writeConcernRef;
}
/**
 * Sets a MongoDB {@link ReadPreference} on the Mongo connection. Read preferences set directly on the connection will be
 * overridden by this setting.
 * <p/>
 * The {@link com.mongodb.ReadPreference#valueOf(String)} utility method is used to resolve the passed {@code readPreference}
 * value. Some examples for the possible values are {@code nearest}, {@code primary} or {@code secondary} etc.
 *
 * @param readPreference the name of the read preference to set
 */
public void setReadPreference(String readPreference) {
this.readPreference = ReadPreference.valueOf(readPreference);
}
/** Returns the resolved ReadPreference, or null if none was configured. */
public ReadPreference getReadPreference() {
return readPreference;
}
/**
 * Sets whether this endpoint will attempt to dynamically resolve the target database and collection from the incoming Exchange properties.
 * Can be used to override at runtime the database and collection specified on the otherwise static endpoint URI.
 * It is disabled by default to boost performance. Enabling it will take a minimal performance hit.
 *
 * @see MongoDbConstants#DATABASE
 * @see MongoDbConstants#COLLECTION
 * @param dynamicity true or false indicated whether target database and collection should be calculated dynamically based on Exchange properties.
 */
public void setDynamicity(boolean dynamicity) {
this.dynamicity = dynamicity;
}
/** Returns whether dynamic database/collection resolution is enabled. */
public boolean isDynamicity() {
return dynamicity;
}
/**
 * Reserved for future use, when more consumer types are supported.
 *
 * @param consumerType key of the consumer type
 * @throws CamelMongoDbException if the consumer type is null or not a catalogued value
 */
public void setConsumerType(String consumerType) throws CamelMongoDbException {
    // valueOf(null) would surface as a NullPointerException; map both the null and the
    // unknown-name cases to the component's own exception type, and include the value
    // for diagnosability — consistent with the message thrown in createConsumer().
    if (consumerType == null) {
        throw new CamelMongoDbException("Consumer type not supported: null");
    }
    try {
        this.consumerType = MongoDbConsumerType.valueOf(consumerType);
    } catch (IllegalArgumentException e) {
        throw new CamelMongoDbException("Consumer type not supported: " + consumerType, e);
    }
}

/** Returns the configured consumer type, or null if none was set. */
public MongoDbConsumerType getConsumerType() {
    return consumerType;
}
/** Returns the tail-tracking database name, or null to use the endpoint's database. */
public String getTailTrackDb() {
return tailTrackDb;
}
/**
 * Indicates what database the tail tracking mechanism will persist to. If not specified, the current database will
 * be picked by default. Dynamicity will not be taken into account even if enabled, i.e. the tail tracking database
 * will not vary past endpoint initialisation.
 *
 * @param tailTrackDb database name
 */
public void setTailTrackDb(String tailTrackDb) {
this.tailTrackDb = tailTrackDb;
}
/** Returns the tail-tracking collection name, or null to use the default. */
public String getTailTrackCollection() {
return tailTrackCollection;
}
/**
 * Collection where tail tracking information will be persisted. If not specified, {@link MongoDbTailTrackingConfig#DEFAULT_COLLECTION}
 * will be used by default.
 *
 * @param tailTrackCollection collection name
 */
public void setTailTrackCollection(String tailTrackCollection) {
this.tailTrackCollection = tailTrackCollection;
}
/** Returns the tail-tracking field name, or null to use the default. */
public String getTailTrackField() {
return tailTrackField;
}
/**
 * Field where the last tracked value will be placed. If not specified, {@link MongoDbTailTrackingConfig#DEFAULT_FIELD}
 * will be used by default.
 *
 * @param tailTrackField field name
 */
public void setTailTrackField(String tailTrackField) {
this.tailTrackField = tailTrackField;
}
/**
 * Enable persistent tail tracking, which is a mechanism to keep track of the last consumed message across system restarts.
 * The next time the system is up, the endpoint will recover the cursor from the point where it last stopped slurping records.
 *
 * @param persistentTailTracking true or false
 */
public void setPersistentTailTracking(boolean persistentTailTracking) {
this.persistentTailTracking = persistentTailTracking;
}
/** Returns whether persistent tail tracking is enabled. */
public boolean isPersistentTailTracking() {
return persistentTailTracking;
}
/**
 * Correlation field in the incoming record which is of increasing nature and will be used to position the tailing cursor every
 * time it is generated.
 * The cursor will be (re)created with a query of type: tailTrackIncreasingField > lastValue (possibly recovered from persistent
 * tail tracking).
 * Can be of type Integer, Date, String, etc.
 * NOTE: No support for dot notation at the current time, so the field should be at the top level of the document.
 *
 * @param tailTrackIncreasingField the increasing correlation field name
 */
public void setTailTrackIncreasingField(String tailTrackIncreasingField) {
this.tailTrackIncreasingField = tailTrackIncreasingField;
}
/** Returns the increasing correlation field used to position the tailing cursor. */
public String getTailTrackIncreasingField() {
return tailTrackIncreasingField;
}
/** Lazily builds and caches the tail tracking configuration from the endpoint options. */
public MongoDbTailTrackingConfig getTailTrackingConfig() {
if (tailTrackingConfig == null) {
// Falls back to the endpoint's database when no dedicated tail-tracking db is set.
// NOTE(review): lazy init is unsynchronized — presumably only called from a single
// consumer thread during startup; confirm before relying on it concurrently.
tailTrackingConfig = new MongoDbTailTrackingConfig(persistentTailTracking, tailTrackIncreasingField, tailTrackDb == null ? database : tailTrackDb, tailTrackCollection,
tailTrackField, getPersistentId());
}
return tailTrackingConfig;
}
/**
 * MongoDB tailable cursors will block until new data arrives. If no new data is inserted, after some time the cursor will be automatically
 * freed and closed by the MongoDB server. The client is expected to regenerate the cursor if needed. This value specifies the time to wait
 * before attempting to fetch a new cursor, and if the attempt fails, how long before the next attempt is made. Default value is 1000ms.
 *
 * @param cursorRegenerationDelay delay specified in milliseconds
 */
public void setCursorRegenerationDelay(long cursorRegenerationDelay) {
this.cursorRegenerationDelay = cursorRegenerationDelay;
}
/** Returns the cursor regeneration delay in milliseconds (default 1000). */
public long getCursorRegenerationDelay() {
return cursorRegenerationDelay;
}
/**
 * One tail tracking collection can host many trackers for several tailable consumers.
 * To keep them separate, each tracker should have its own unique persistentId.
 *
 * @param persistentId the value of the persistent ID to use for this tailable consumer
 */
public void setPersistentId(String persistentId) {
this.persistentId = persistentId;
}
/** Returns the persistent ID used by this tailable consumer's tracker. */
public String getPersistentId() {
return persistentId;
}
/** Returns whether the WriteResult is attached as a header instead of replacing the body. */
public boolean isWriteResultAsHeader() {
return writeResultAsHeader;
}
/**
 * In write operations, it determines whether instead of returning {@link WriteResult} as the body of the OUT
 * message, we transfer the IN message to the OUT and attach the WriteResult as a header.
 *
 * @param writeResultAsHeader flag to indicate if this option is enabled
 */
public void setWriteResultAsHeader(boolean writeResultAsHeader) {
this.writeResultAsHeader = writeResultAsHeader;
}
}
| |
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.firebase.firestore.local;
import android.util.SparseArray;
import androidx.annotation.Nullable;
import com.google.firebase.firestore.FirebaseFirestoreSettings;
import com.google.firebase.firestore.core.ListenSequence;
import com.google.firebase.firestore.util.AsyncQueue;
import com.google.firebase.firestore.util.Logger;
import java.util.Comparator;
import java.util.Locale;
import java.util.PriorityQueue;
import java.util.concurrent.TimeUnit;
/** Implements the steps for LRU garbage collection. */
public class LruGarbageCollector {
/** How long we wait to try running LRU GC after SDK initialization. */
private static final long INITIAL_GC_DELAY_MS = TimeUnit.MINUTES.toMillis(1);
/** Minimum amount of time between GC checks, after the first one. */
private static final long REGULAR_GC_DELAY_MS = TimeUnit.MINUTES.toMillis(5);
/** Tuning knobs for the LRU garbage collector. */
public static class Params {
    private static final long COLLECTION_DISABLED = FirebaseFirestoreSettings.CACHE_SIZE_UNLIMITED;
    private static final long DEFAULT_CACHE_SIZE_BYTES = 100 * 1024 * 1024; // 100mb
    /**
     * The following two constants are estimates for how we want to tune the garbage collector. If
     * we encounter a large cache, we don't want to spend a large chunk of time GCing all of it, we
     * would rather make some progress and then try again later. We also don't want to collect
     * everything that we possibly could, as our thesis is that recently used items are more likely
     * to be used again.
     */
    private static final int DEFAULT_COLLECTION_PERCENTILE = 10;
    private static final int DEFAULT_MAX_SEQUENCE_NUMBERS_TO_COLLECT = 1000;

    /** Returns params tuned with the default cache size and collection limits. */
    public static Params Default() {
        return new Params(
            DEFAULT_CACHE_SIZE_BYTES,
            DEFAULT_COLLECTION_PERCENTILE,
            DEFAULT_MAX_SEQUENCE_NUMBERS_TO_COLLECT);
    }

    /** Returns params that disable garbage collection entirely. */
    public static Params Disabled() {
        return new Params(COLLECTION_DISABLED, 0, 0);
    }

    /** Returns default-tuned params with a caller-supplied cache size threshold. */
    public static Params WithCacheSizeBytes(long cacheSizeBytes) {
        // Reuse the shared default constants instead of repeating the literals 10 and
        // 1000, so the factories cannot silently diverge if the defaults are retuned.
        return new Params(
            cacheSizeBytes, DEFAULT_COLLECTION_PERCENTILE, DEFAULT_MAX_SEQUENCE_NUMBERS_TO_COLLECT);
    }

    // Minimum cache size (bytes) before a GC run is considered; COLLECTION_DISABLED turns GC off.
    final long minBytesThreshold;
    // Percentile of sequence numbers to collect per run.
    final int percentileToCollect;
    // Hard cap on the number of sequence numbers collected per run.
    final int maximumSequenceNumbersToCollect;

    Params(long minBytesThreshold, int percentileToCollect, int maximumSequenceNumbersToCollect) {
        this.minBytesThreshold = minBytesThreshold;
        this.percentileToCollect = percentileToCollect;
        this.maximumSequenceNumbersToCollect = maximumSequenceNumbersToCollect;
    }
}
/** Summary of a single garbage-collection run. */
public static class Results {
private final boolean hasRun;
private final int sequenceNumbersCollected;
private final int targetsRemoved;
private final int documentsRemoved;
/** Result used when collection was skipped (disabled, or below the size threshold). */
static Results DidNotRun() {
return new Results(/* hasRun= */ false, 0, 0, 0);
}
Results(
boolean hasRun, int sequenceNumbersCollected, int targetsRemoved, int documentsRemoved) {
this.hasRun = hasRun;
this.sequenceNumbersCollected = sequenceNumbersCollected;
this.targetsRemoved = targetsRemoved;
this.documentsRemoved = documentsRemoved;
}
/** Whether garbage collection actually ran. */
public boolean hasRun() {
return hasRun;
}
/** Number of sequence numbers considered for collection in this run. */
public int getSequenceNumbersCollected() {
return sequenceNumbersCollected;
}
/** Number of targets removed in this run. */
public int getTargetsRemoved() {
return targetsRemoved;
}
/** Number of orphaned documents removed in this run. */
public int getDocumentsRemoved() {
return documentsRemoved;
}
}
/**
 * This class is responsible for the scheduling of LRU garbage collection. It handles checking
 * whether or not GC is enabled, as well as which delay to use before the next run.
 */
public class GCScheduler implements Scheduler {
private final AsyncQueue asyncQueue;
private final LocalStore localStore;
// Tracks whether a GC pass has completed, to choose the initial vs. regular delay.
private boolean hasRun = false;
// The currently pending delayed task, if any; cancelled by stop().
@Nullable private AsyncQueue.DelayedTask gcTask;
public GCScheduler(AsyncQueue asyncQueue, LocalStore localStore) {
this.asyncQueue = asyncQueue;
this.localStore = localStore;
}
/** Starts the GC schedule, unless collection is disabled via the outer params. */
@Override
public void start() {
if (params.minBytesThreshold != Params.COLLECTION_DISABLED) {
scheduleGC();
}
}
/** Cancels the pending GC task, if one is scheduled. */
@Override
public void stop() {
if (gcTask != null) {
gcTask.cancel();
}
}
// Schedules the next GC pass; each pass re-schedules itself after running, so the
// schedule continues indefinitely until stop() cancels the pending task.
private void scheduleGC() {
long delay = hasRun ? REGULAR_GC_DELAY_MS : INITIAL_GC_DELAY_MS;
gcTask =
asyncQueue.enqueueAfterDelay(
AsyncQueue.TimerId.GARBAGE_COLLECTION,
delay,
() -> {
localStore.collectGarbage(LruGarbageCollector.this);
hasRun = true;
scheduleGC();
});
}
}
// Persistence-layer hooks used to enumerate and remove targets/documents.
private final LruDelegate delegate;
// Tuning parameters controlling thresholds and per-run limits.
private final Params params;
LruGarbageCollector(LruDelegate delegate, Params params) {
this.delegate = delegate;
this.params = params;
}
/** A helper method to create a new scheduler. */
public GCScheduler newScheduler(AsyncQueue asyncQueue, LocalStore localStore) {
return new GCScheduler(asyncQueue, localStore);
}
/** Given a percentile of targets to collect, returns the number of targets to collect. */
int calculateQueryCount(int percentile) {
    long sequenceNumberCount = delegate.getSequenceNumberCount();
    float fraction = percentile / 100.0f;
    return (int) (fraction * sequenceNumberCount);
}
/**
 * Used to calculate the nth sequence number. Keeps a rolling buffer of the lowest n values passed
 * to addElement, and finally reports the largest of them in getMaxValue().
 */
private static class RollingSequenceNumberBuffer {
    // Invert the comparison because we want to keep the smallest values.
    private static final Comparator<Long> COMPARATOR = (Long a, Long b) -> b.compareTo(a);
    private final PriorityQueue<Long> queue;
    private final int maxElements;

    RollingSequenceNumberBuffer(int count) {
        this.maxElements = count;
        this.queue = new PriorityQueue<>(count, COMPARATOR);
    }

    /** Adds the value, evicting the largest retained value when the buffer is full. */
    void addElement(Long sequenceNumber) {
        if (queue.size() < maxElements) {
            queue.add(sequenceNumber);
        } else {
            // Only retain the value if it is smaller than the largest value kept so far.
            Long highestValue = queue.peek();
            if (sequenceNumber < highestValue) {
                queue.poll();
                queue.add(sequenceNumber);
            }
        }
    }

    /** Returns the largest of the retained values. */
    long getMaxValue() {
        Long head = queue.peek();
        if (head == null) {
            // Previously an empty buffer auto-unboxed null into a bare
            // NullPointerException; fail with a diagnosable message instead.
            throw new IllegalStateException("getMaxValue() called on an empty buffer");
        }
        return head;
    }
}
/** Returns the nth sequence number, counting in order from the smallest. */
long getNthSequenceNumber(int count) {
if (count == 0) {
return ListenSequence.INVALID;
}
// Feed every target's sequence number plus every orphaned document's sequence number
// through a rolling buffer that retains only the smallest `count` values.
RollingSequenceNumberBuffer buffer = new RollingSequenceNumberBuffer(count);
delegate.forEachTarget((targetData) -> buffer.addElement(targetData.getSequenceNumber()));
delegate.forEachOrphanedDocumentSequenceNumber(buffer::addElement);
// The largest retained value is the nth-smallest overall.
return buffer.getMaxValue();
}
/**
 * Removes targets with a sequence number equal to or less than the given upper bound, and removes
 * document associations with those targets.
 *
 * @param upperBound inclusive sequence-number cutoff
 * @param activeTargetIds targets that must be kept regardless of sequence number
 * @return the number of targets removed, as reported by the delegate
 */
int removeTargets(long upperBound, SparseArray<?> activeTargetIds) {
return delegate.removeTargets(upperBound, activeTargetIds);
}
/**
 * Removes documents that have a sequence number equal to or less than the upper bound and are not
 * otherwise pinned.
 *
 * @param upperBound inclusive sequence-number cutoff
 * @return the number of documents removed, as reported by the delegate
 */
int removeOrphanedDocuments(long upperBound) {
return delegate.removeOrphanedDocuments(upperBound);
}
/**
 * Runs garbage collection if it is enabled and the cache has grown past the
 * configured threshold; otherwise returns a did-not-run result.
 */
Results collect(SparseArray<?> activeTargetIds) {
    if (params.minBytesThreshold == Params.COLLECTION_DISABLED) {
        Logger.debug("LruGarbageCollector", "Garbage collection skipped; disabled");
        return Results.DidNotRun();
    }
    long cacheSize = getByteSize();
    if (cacheSize >= params.minBytesThreshold) {
        return runGarbageCollection(activeTargetIds);
    }
    Logger.debug(
        "LruGarbageCollector",
        "Garbage collection skipped; Cache size "
            + cacheSize
            + " is lower than threshold "
            + params.minBytesThreshold);
    return Results.DidNotRun();
}
/**
 * Performs one LRU collection pass: picks an upper-bound sequence number covering
 * the configured percentile (capped at the per-run maximum), then removes targets
 * and orphaned documents at or below that bound, logging per-phase timings.
 */
private Results runGarbageCollection(SparseArray<?> liveTargetIds) {
    long startTs = System.currentTimeMillis();
    int sequenceNumbers = calculateQueryCount(params.percentileToCollect);
    // Cap the amount of work performed in a single run at the configured maximum.
    if (sequenceNumbers > params.maximumSequenceNumbersToCollect) {
        Logger.debug(
            "LruGarbageCollector",
            "Capping sequence numbers to collect down to the maximum of "
                + params.maximumSequenceNumbersToCollect
                + " from "
                + sequenceNumbers);
        sequenceNumbers = params.maximumSequenceNumbersToCollect;
    }
    long countedTargetsTs = System.currentTimeMillis();
    long upperBound = getNthSequenceNumber(sequenceNumbers);
    long foundUpperBoundTs = System.currentTimeMillis();
    int numTargetsRemoved = removeTargets(upperBound, liveTargetIds);
    long removedTargetsTs = System.currentTimeMillis();
    int numDocumentsRemoved = removeOrphanedDocuments(upperBound);
    long removedDocumentsTs = System.currentTimeMillis();
    if (Logger.isDebugEnabled()) {
        StringBuilder desc = new StringBuilder("LRU Garbage Collection:\n");
        desc.append("\tCounted targets in ").append(countedTargetsTs - startTs).append("ms\n");
        desc.append(
            String.format(
                Locale.ROOT,
                "\tDetermined least recently used %d sequence numbers in %dms\n",
                sequenceNumbers,
                (foundUpperBoundTs - countedTargetsTs)));
        desc.append(
            String.format(
                Locale.ROOT,
                "\tRemoved %d targets in %dms\n",
                numTargetsRemoved,
                (removedTargetsTs - foundUpperBoundTs)));
        desc.append(
            String.format(
                Locale.ROOT,
                "\tRemoved %d documents in %dms\n",
                numDocumentsRemoved,
                (removedDocumentsTs - removedTargetsTs)));
        desc.append(String.format(Locale.ROOT, "Total Duration: %dms", (removedDocumentsTs - startTs)));
        Logger.debug("LruGarbageCollector", desc.toString());
    }
    return new Results(/* hasRun= */ true, sequenceNumbers, numTargetsRemoved, numDocumentsRemoved);
}
/** Returns the current cache size in bytes, as reported by the delegate. */
long getByteSize() {
return delegate.getByteSize();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.profile;
import org.apache.calcite.materialize.Lattice;
import org.apache.calcite.util.ImmutableBitSet;
import org.apache.calcite.util.JsonBuilder;
import org.apache.calcite.util.Util;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.SortedSet;
import javax.annotation.Nonnull;
/**
* Analyzes data sets.
*/
public interface Profiler {
/** Creates a profile of a data set.
 *
 * @param rows List of rows. Can be iterated over more than once (maybe not
 * cheaply)
 * @param columns Column definitions
 * @param initialGroups List of combinations of columns that should be
 * profiled early, because they may be interesting
 *
 * @return A profile describing relationships within the data set
 */
Profile profile(Iterable<List<Comparable>> rows, List<Column> columns,
Collection<ImmutableBitSet> initialGroups);
/** Column. */
class Column implements Comparable<Column> {
    public final int ordinal;
    public final String name;

    /** Creates a Column.
     *
     * @param ordinal Unique and contiguous within a particular data set
     * @param name Name of the column
     */
    public Column(int ordinal, String name) {
        this.ordinal = ordinal;
        this.name = name;
    }

    /** Returns a bit set containing the ordinal of each given column. */
    static ImmutableBitSet toOrdinals(Iterable<Column> columns) {
        final ImmutableBitSet.Builder bits = ImmutableBitSet.builder();
        for (Column c : columns) {
            bits.set(c.ordinal);
        }
        return bits.build();
    }

    @Override public int hashCode() {
        return ordinal;
    }

    @Override public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        return o instanceof Column && ordinal == ((Column) o).ordinal;
    }

    @Override public int compareTo(@Nonnull Column column) {
        return Integer.compare(ordinal, column.ordinal);
    }

    @Override public String toString() {
        return name;
    }
}
/** Statistic produced by the profiler. */
interface Statistic {
/** Renders this statistic as a JSON-compatible structure via the given builder. */
Object toMap(JsonBuilder jsonBuilder);
}
/** Whole data set. */
class RowCount implements Statistic {
// Total number of rows in the data set.
final int rowCount;
public RowCount(int rowCount) {
this.rowCount = rowCount;
}
public Object toMap(JsonBuilder jsonBuilder) {
final Map<String, Object> map = jsonBuilder.map();
map.put("type", "rowCount");
map.put("rowCount", rowCount);
return map;
}
}
/** Unique key. */
class Unique implements Statistic {
// Columns that together form a unique key (stored as an immutable sorted copy).
final NavigableSet<Column> columns;
public Unique(SortedSet<Column> columns) {
this.columns = ImmutableSortedSet.copyOf(columns);
}
public Object toMap(JsonBuilder jsonBuilder) {
final Map<String, Object> map = jsonBuilder.map();
map.put("type", "unique");
map.put("columns", FunctionalDependency.getObjects(jsonBuilder, columns));
return map;
}
}
/** Functional dependency. */
class FunctionalDependency implements Statistic {
    // Determining columns (immutable sorted copy) and the column they determine.
    final NavigableSet<Column> columns;
    final Column dependentColumn;

    FunctionalDependency(SortedSet<Column> columns, Column dependentColumn) {
        this.columns = ImmutableSortedSet.copyOf(columns);
        this.dependentColumn = dependentColumn;
    }

    public Object toMap(JsonBuilder jsonBuilder) {
        final Map<String, Object> map = jsonBuilder.map();
        map.put("type", "fd");
        map.put("columns", getObjects(jsonBuilder, columns));
        map.put("dependentColumn", dependentColumn.name);
        return map;
    }

    /** Returns the column names as a JSON list, in the set's sorted order. */
    private static List<Object> getObjects(JsonBuilder jsonBuilder,
            NavigableSet<Column> columns) {
        final List<Object> names = jsonBuilder.list();
        for (Column c : columns) {
            names.add(c.name);
        }
        return names;
    }
}
/** Value distribution, including cardinality and optionally values, of a
 * column or set of columns. If the set of columns is empty, it describes
 * the number of rows in the entire data set. */
class Distribution implements Statistic {
  final NavigableSet<Column> columns;
  final NavigableSet<Comparable> values;
  final double cardinality;
  final int nullCount;
  final double expectedCardinality;
  final boolean minimal;

  /** Creates a Distribution.
   *
   * @param columns Column or columns being described
   * @param values Values of columns, or null if there are too many
   * @param cardinality Number of distinct values
   * @param nullCount Number of rows where this column had a null value
   * @param expectedCardinality Expected cardinality
   * @param minimal Whether the distribution is not implied by a unique
   *               or functional dependency
   */
  public Distribution(SortedSet<Column> columns, SortedSet<Comparable> values,
      double cardinality, int nullCount, double expectedCardinality,
      boolean minimal) {
    this.columns = ImmutableSortedSet.copyOf(columns);
    this.values = values == null ? null : ImmutableSortedSet.copyOf(values);
    this.cardinality = cardinality;
    this.nullCount = nullCount;
    this.expectedCardinality = expectedCardinality;
    this.minimal = minimal;
  }

  public Object toMap(JsonBuilder jsonBuilder) {
    final Map<String, Object> result = jsonBuilder.map();
    result.put("type", "distribution");
    result.put("columns", FunctionalDependency.getObjects(jsonBuilder, columns));
    if (values != null) {
      final List<Object> valueList = jsonBuilder.list();
      for (Comparable value : values) {
        // java.sql.Date values are emitted as strings so they stay JSON-friendly.
        valueList.add(value instanceof java.sql.Date ? value.toString() : value);
      }
      result.put("values", valueList);
    }
    result.put("cardinality", cardinality);
    if (nullCount > 0) {
      // Omitted entirely when zero, keeping the JSON output compact.
      result.put("nullCount", nullCount);
    }
    result.put("expectedCardinality", expectedCardinality);
    result.put("surprise", surprise());
    return result;
  }

  /** Returns the ordinals of this distribution's columns as a bit set. */
  ImmutableBitSet columnOrdinals() {
    return Column.toOrdinals(columns);
  }

  /** How much the actual cardinality deviates from the expected one. */
  double surprise() {
    return SimpleProfiler.surprise(expectedCardinality, cardinality);
  }
}
/** The result of profiling, contains various statistics about the
 * data in a table. */
class Profile {
public final RowCount rowCount;
public final List<FunctionalDependency> functionalDependencyList;
public final List<Distribution> distributionList;
public final List<Unique> uniqueList;
// Distribution keyed by the bit set of its column ordinals, for O(1) lookup.
private final Map<ImmutableBitSet, Distribution> distributionMap;
// Element i is the distribution of the single column with ordinal i.
private final List<Distribution> singletonDistributionList;
Profile(List<Column> columns, RowCount rowCount,
Iterable<FunctionalDependency> functionalDependencyList,
Iterable<Distribution> distributionList, Iterable<Unique> uniqueList) {
this.rowCount = rowCount;
this.functionalDependencyList =
ImmutableList.copyOf(functionalDependencyList);
this.distributionList = ImmutableList.copyOf(distributionList);
this.uniqueList = ImmutableList.copyOf(uniqueList);
final ImmutableMap.Builder<ImmutableBitSet, Distribution> m =
ImmutableMap.builder();
for (Distribution distribution : distributionList) {
m.put(distribution.columnOrdinals(), distribution);
}
distributionMap = m.build();
// NOTE(review): assumes every singleton column has a distribution in
// distributionMap; a missing one would put a null into this immutable
// builder and throw - TODO confirm the profiler always emits singletons.
final ImmutableList.Builder<Distribution> b = ImmutableList.builder();
for (int i = 0; i < columns.size(); i++) {
b.add(distributionMap.get(ImmutableBitSet.of(i)));
}
singletonDistributionList = b.build();
}
/** Returns all statistics: the row count first, then functional
 * dependencies, distributions and unique keys. */
public List<Statistic> statistics() {
return ImmutableList.<Statistic>builder()
.add(rowCount)
.addAll(functionalDependencyList)
.addAll(distributionList)
.addAll(uniqueList)
.build();
}
/** Estimates the cardinality of a set of columns.
 *
 * <p>If there is no exact distribution for the given column set, walks to
 * an ancestor (repeatedly clearing the highest ordinal) and scales that
 * ancestor's cardinality by the singleton cardinalities of the columns
 * that were dropped. */
public double cardinality(ImmutableBitSet columnOrdinals) {
final ImmutableBitSet originalOrdinals = columnOrdinals;
for (;;) {
final Distribution distribution = distributionMap.get(columnOrdinals);
if (distribution != null) {
if (columnOrdinals == originalOrdinals) {
// Exact match: use the recorded cardinality directly.
return distribution.cardinality;
} else {
// Ancestor match: combine its cardinality with the singleton
// cardinalities of the columns removed along the way.
final List<Double> cardinalityList = new ArrayList<>();
cardinalityList.add(distribution.cardinality);
for (int ordinal : originalOrdinals.except(columnOrdinals)) {
final Distribution d = singletonDistributionList.get(ordinal);
cardinalityList.add(d.cardinality);
}
return Lattice.getRowCount(rowCount.rowCount, cardinalityList);
}
}
// Clear the last bit and iterate.
// Better would be to combine all of our nearest ancestors.
final List<Integer> list = columnOrdinals.asList();
columnOrdinals = columnOrdinals.clear(Util.last(list));
}
}
}
}
// End Profiler.java
| |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.buffer;
import io.netty.util.internal.PlatformDependent;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.FileChannel;
import java.nio.channels.GatheringByteChannel;
import java.nio.channels.ScatteringByteChannel;
/**
 * A NIO {@link ByteBuffer} based buffer. It is recommended to use {@link Unpooled#directBuffer(int)}
 * and {@link Unpooled#wrappedBuffer(ByteBuffer)} instead of calling the
 * constructor explicitly.
 */
public class UnpooledUnsafeDirectByteBuf extends AbstractReferenceCountedByteBuf {
private final ByteBufAllocator alloc;
// Native address of `buffer`, cached so index arithmetic bypasses the ByteBuffer API.
private long memoryAddress;
private ByteBuffer buffer;
// Lazily created duplicate of `buffer` used for internal bulk transfers;
// reset to null whenever `buffer` is swapped.
private ByteBuffer tmpNioBuf;
private int capacity;
// When true, the next buffer swap/deallocation must NOT free the old
// ByteBuffer, because it was supplied (and is owned) by the caller.
private boolean doNotFree;
/**
 * Creates a new direct buffer.
 *
 * @param initialCapacity the initial capacity of the underlying direct buffer
 * @param maxCapacity the maximum capacity of the underlying direct buffer
 */
protected UnpooledUnsafeDirectByteBuf(ByteBufAllocator alloc, int initialCapacity, int maxCapacity) {
super(maxCapacity);
if (alloc == null) {
throw new NullPointerException("alloc");
}
if (initialCapacity < 0) {
throw new IllegalArgumentException("initialCapacity: " + initialCapacity);
}
if (maxCapacity < 0) {
throw new IllegalArgumentException("maxCapacity: " + maxCapacity);
}
if (initialCapacity > maxCapacity) {
throw new IllegalArgumentException(String.format(
"initialCapacity(%d) > maxCapacity(%d)", initialCapacity, maxCapacity));
}
this.alloc = alloc;
setByteBuffer(allocateDirect(initialCapacity));
}
/**
 * Creates a new direct buffer by wrapping the specified initial buffer.
 *
 * @param maxCapacity the maximum capacity of the underlying direct buffer
 */
protected UnpooledUnsafeDirectByteBuf(ByteBufAllocator alloc, ByteBuffer initialBuffer, int maxCapacity) {
super(maxCapacity);
if (alloc == null) {
throw new NullPointerException("alloc");
}
if (initialBuffer == null) {
throw new NullPointerException("initialBuffer");
}
if (!initialBuffer.isDirect()) {
throw new IllegalArgumentException("initialBuffer is not a direct buffer.");
}
if (initialBuffer.isReadOnly()) {
throw new IllegalArgumentException("initialBuffer is a read-only buffer.");
}
int initialCapacity = initialBuffer.remaining();
if (initialCapacity > maxCapacity) {
throw new IllegalArgumentException(String.format(
"initialCapacity(%d) > maxCapacity(%d)", initialCapacity, maxCapacity));
}
this.alloc = alloc;
// The wrapped buffer belongs to the caller; never free it ourselves.
doNotFree = true;
setByteBuffer(initialBuffer.slice().order(ByteOrder.BIG_ENDIAN));
// The buffer's readable content is whatever the caller had remaining.
writerIndex(initialCapacity);
}
/**
 * Allocate a new direct {@link ByteBuffer} with the given initialCapacity.
 */
protected ByteBuffer allocateDirect(int initialCapacity) {
return ByteBuffer.allocateDirect(initialCapacity);
}
/**
 * Free a direct {@link ByteBuffer}
 */
protected void freeDirect(ByteBuffer buffer) {
PlatformDependent.freeDirectBuffer(buffer);
}
// Installs `buffer` as the backing store, freeing the previous one unless it
// was caller-owned (doNotFree), and refreshes the cached address/capacity.
private void setByteBuffer(ByteBuffer buffer) {
ByteBuffer oldBuffer = this.buffer;
if (oldBuffer != null) {
if (doNotFree) {
// Consume the flag: the caller-owned buffer is released from our care,
// any buffer we allocate afterwards is ours to free.
doNotFree = false;
} else {
freeDirect(oldBuffer);
}
}
this.buffer = buffer;
memoryAddress = PlatformDependent.directBufferAddress(buffer);
// Invalidate the cached duplicate; it still points at the old buffer.
tmpNioBuf = null;
capacity = buffer.remaining();
}
@Override
public boolean isDirect() {
return true;
}
@Override
public int capacity() {
return capacity;
}
// Grows or shrinks the buffer by allocating a new direct buffer and copying
// the relevant bytes; reader/writer indices are clamped when shrinking.
@Override
public ByteBuf capacity(int newCapacity) {
ensureAccessible();
if (newCapacity < 0 || newCapacity > maxCapacity()) {
throw new IllegalArgumentException("newCapacity: " + newCapacity);
}
int readerIndex = readerIndex();
int writerIndex = writerIndex();
int oldCapacity = capacity;
if (newCapacity > oldCapacity) {
// Growing: copy the entire old buffer to the start of the new one.
ByteBuffer oldBuffer = buffer;
ByteBuffer newBuffer = allocateDirect(newCapacity);
oldBuffer.position(0).limit(oldBuffer.capacity());
newBuffer.position(0).limit(oldBuffer.capacity());
newBuffer.put(oldBuffer);
newBuffer.clear();
setByteBuffer(newBuffer);
} else if (newCapacity < oldCapacity) {
// Shrinking: keep only the readable region that still fits.
ByteBuffer oldBuffer = buffer;
ByteBuffer newBuffer = allocateDirect(newCapacity);
if (readerIndex < newCapacity) {
if (writerIndex > newCapacity) {
writerIndex(writerIndex = newCapacity);
}
oldBuffer.position(readerIndex).limit(writerIndex);
newBuffer.position(readerIndex).limit(writerIndex);
newBuffer.put(oldBuffer);
newBuffer.clear();
} else {
// Everything readable lies beyond the new capacity; nothing to copy.
setIndex(newCapacity, newCapacity);
}
setByteBuffer(newBuffer);
}
return this;
}
@Override
public ByteBufAllocator alloc() {
return alloc;
}
@Override
public ByteOrder order() {
return ByteOrder.BIG_ENDIAN;
}
@Override
public boolean hasArray() {
return false;
}
@Override
public byte[] array() {
throw new UnsupportedOperationException("direct buffer");
}
@Override
public int arrayOffset() {
throw new UnsupportedOperationException("direct buffer");
}
@Override
public boolean hasMemoryAddress() {
return true;
}
@Override
public long memoryAddress() {
ensureAccessible();
return memoryAddress;
}
// Raw primitive accessors: read via UnsafeByteBufUtil at memoryAddress + index.
// Bounds/accessibility checks are the responsibility of the public callers.
@Override
protected byte _getByte(int index) {
return UnsafeByteBufUtil.getByte(addr(index));
}
@Override
protected short _getShort(int index) {
return UnsafeByteBufUtil.getShort(addr(index));
}
@Override
protected short _getShortLE(int index) {
return UnsafeByteBufUtil.getShortLE(addr(index));
}
@Override
protected int _getUnsignedMedium(int index) {
return UnsafeByteBufUtil.getUnsignedMedium(addr(index));
}
@Override
protected int _getUnsignedMediumLE(int index) {
return UnsafeByteBufUtil.getUnsignedMediumLE(addr(index));
}
@Override
protected int _getInt(int index) {
return UnsafeByteBufUtil.getInt(addr(index));
}
@Override
protected int _getIntLE(int index) {
return UnsafeByteBufUtil.getIntLE(addr(index));
}
@Override
protected long _getLong(int index) {
return UnsafeByteBufUtil.getLong(addr(index));
}
@Override
protected long _getLongLE(int index) {
return UnsafeByteBufUtil.getLongLE(addr(index));
}
@Override
public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) {
UnsafeByteBufUtil.getBytes(this, addr(index), index, dst, dstIndex, length);
return this;
}
@Override
public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) {
UnsafeByteBufUtil.getBytes(this, addr(index), index, dst, dstIndex, length);
return this;
}
@Override
public ByteBuf getBytes(int index, ByteBuffer dst) {
UnsafeByteBufUtil.getBytes(this, addr(index), index, dst);
return this;
}
@Override
public ByteBuf readBytes(ByteBuffer dst) {
int length = dst.remaining();
checkReadableBytes(length);
getBytes(readerIndex, dst);
readerIndex += length;
return this;
}
// Raw primitive mutators, mirroring the _get* accessors above.
@Override
protected void _setByte(int index, int value) {
UnsafeByteBufUtil.setByte(addr(index), value);
}
@Override
protected void _setShort(int index, int value) {
UnsafeByteBufUtil.setShort(addr(index), value);
}
@Override
protected void _setShortLE(int index, int value) {
UnsafeByteBufUtil.setShortLE(addr(index), value);
}
@Override
protected void _setMedium(int index, int value) {
UnsafeByteBufUtil.setMedium(addr(index), value);
}
@Override
protected void _setMediumLE(int index, int value) {
UnsafeByteBufUtil.setMediumLE(addr(index), value);
}
@Override
protected void _setInt(int index, int value) {
UnsafeByteBufUtil.setInt(addr(index), value);
}
@Override
protected void _setIntLE(int index, int value) {
UnsafeByteBufUtil.setIntLE(addr(index), value);
}
@Override
protected void _setLong(int index, long value) {
UnsafeByteBufUtil.setLong(addr(index), value);
}
@Override
protected void _setLongLE(int index, long value) {
UnsafeByteBufUtil.setLongLE(addr(index), value);
}
@Override
public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
UnsafeByteBufUtil.setBytes(this, addr(index), index, src, srcIndex, length);
return this;
}
@Override
public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) {
UnsafeByteBufUtil.setBytes(this, addr(index), index, src, srcIndex, length);
return this;
}
@Override
public ByteBuf setBytes(int index, ByteBuffer src) {
UnsafeByteBufUtil.setBytes(this, addr(index), index, src);
return this;
}
@Override
public ByteBuf getBytes(int index, OutputStream out, int length) throws IOException {
UnsafeByteBufUtil.getBytes(this, addr(index), index, out, length);
return this;
}
@Override
public int getBytes(int index, GatheringByteChannel out, int length) throws IOException {
return getBytes(index, out, length, false);
}
// `internal == true` reuses the cached duplicate (safe only for read*-style
// callers that own the buffer); `false` duplicates to avoid clobbering it.
private int getBytes(int index, GatheringByteChannel out, int length, boolean internal) throws IOException {
ensureAccessible();
if (length == 0) {
return 0;
}
ByteBuffer tmpBuf;
if (internal) {
tmpBuf = internalNioBuffer();
} else {
tmpBuf = buffer.duplicate();
}
tmpBuf.clear().position(index).limit(index + length);
return out.write(tmpBuf);
}
@Override
public int getBytes(int index, FileChannel out, long position, int length) throws IOException {
return getBytes(index, out, position, length, false);
}
private int getBytes(int index, FileChannel out, long position, int length, boolean internal) throws IOException {
ensureAccessible();
if (length == 0) {
return 0;
}
ByteBuffer tmpBuf = internal ? internalNioBuffer() : buffer.duplicate();
tmpBuf.clear().position(index).limit(index + length);
return out.write(tmpBuf, position);
}
@Override
public int readBytes(GatheringByteChannel out, int length) throws IOException {
checkReadableBytes(length);
int readBytes = getBytes(readerIndex, out, length, true);
readerIndex += readBytes;
return readBytes;
}
@Override
public int readBytes(FileChannel out, long position, int length) throws IOException {
checkReadableBytes(length);
int readBytes = getBytes(readerIndex, out, position, length, true);
readerIndex += readBytes;
return readBytes;
}
@Override
public int setBytes(int index, InputStream in, int length) throws IOException {
return UnsafeByteBufUtil.setBytes(this, addr(index), index, in, length);
}
@Override
public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException {
ensureAccessible();
ByteBuffer tmpBuf = internalNioBuffer();
tmpBuf.clear().position(index).limit(index + length);
try {
return in.read(tmpBuf);
} catch (ClosedChannelException ignored) {
// A closed channel is reported as end-of-stream rather than an error.
return -1;
}
}
@Override
public int setBytes(int index, FileChannel in, long position, int length) throws IOException {
ensureAccessible();
ByteBuffer tmpBuf = internalNioBuffer();
tmpBuf.clear().position(index).limit(index + length);
try {
return in.read(tmpBuf, position);
} catch (ClosedChannelException ignored) {
// A closed channel is reported as end-of-stream rather than an error.
return -1;
}
}
@Override
public int nioBufferCount() {
return 1;
}
@Override
public ByteBuffer[] nioBuffers(int index, int length) {
return new ByteBuffer[] { nioBuffer(index, length) };
}
@Override
public ByteBuf copy(int index, int length) {
return UnsafeByteBufUtil.copy(this, addr(index), index, length);
}
@Override
public ByteBuffer internalNioBuffer(int index, int length) {
checkIndex(index, length);
return (ByteBuffer) internalNioBuffer().clear().position(index).limit(index + length);
}
// Lazily creates and caches a duplicate of the backing buffer for internal use.
private ByteBuffer internalNioBuffer() {
ByteBuffer tmpNioBuf = this.tmpNioBuf;
if (tmpNioBuf == null) {
this.tmpNioBuf = tmpNioBuf = buffer.duplicate();
}
return tmpNioBuf;
}
@Override
public ByteBuffer nioBuffer(int index, int length) {
checkIndex(index, length);
// Duplicate then slice so the returned view is independent of our indices.
return ((ByteBuffer) buffer.duplicate().position(index).limit(index + length)).slice();
}
// Called by the reference-counting base class when the last reference is
// released; frees the direct buffer unless it is caller-owned.
@Override
protected void deallocate() {
ByteBuffer buffer = this.buffer;
if (buffer == null) {
return;
}
this.buffer = null;
if (!doNotFree) {
freeDirect(buffer);
}
}
@Override
public ByteBuf unwrap() {
return null;
}
// Native address of the byte at `index`.
long addr(int index) {
return memoryAddress + index;
}
@Override
protected SwappedByteBuf newSwappedByteBuf() {
if (PlatformDependent.isUnaligned()) {
// Only use if unaligned access is supported otherwise there is no gain.
return new UnsafeDirectSwappedByteBuf(this);
}
return super.newSwappedByteBuf();
}
}
| |
package org.mp4parser.streaming.output.mp4;
import org.mp4parser.Box;
import org.mp4parser.boxes.iso14496.part12.*;
import org.mp4parser.streaming.StreamingSample;
import org.mp4parser.streaming.StreamingTrack;
import org.mp4parser.streaming.extensions.CompositionTimeSampleExtension;
import org.mp4parser.streaming.extensions.CompositionTimeTrackExtension;
import org.mp4parser.streaming.extensions.SampleFlagsSampleExtension;
import org.mp4parser.streaming.extensions.TrackIdTrackExtension;
import org.mp4parser.streaming.output.SampleSink;
import org.mp4parser.tools.Mp4Arrays;
import org.mp4parser.tools.Mp4Math;
import org.mp4parser.tools.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import static org.mp4parser.tools.CastUtils.l2i;
/**
 * Creates an MP4 file with ftyp, mdat+, moov order.
 * A very special property of this variant is that it is written sequentially. You can start transferring the
 * data while the <code>sink</code> receives it. (in contrast to typical implementations which need random
 * access to write length fields at the beginning of the file)
 */
public class StandardMp4Writer extends DefaultBoxes implements SampleSink {
    public static final Object OBJ = new Object();
    // Fixed: the logger was created with FragmentedMp4Writer's name (copy-paste
    // defect), which mislabeled every log line emitted by this class.
    private static final Logger LOG = LoggerFactory.getLogger(StandardMp4Writer.class);

    protected final WritableByteChannel sink;
    protected List<StreamingTrack> source;
    protected Date creationTime = new Date();
    /**
     * Per-track latch that blocks a producer whose chunk queue has grown too large.
     */
    protected Map<StreamingTrack, CountDownLatch> congestionControl = new ConcurrentHashMap<StreamingTrack, CountDownLatch>();
    /**
     * Contains the start time of the next segment in line that will be created.
     */
    protected Map<StreamingTrack, Long> nextChunkCreateStartTime = new ConcurrentHashMap<StreamingTrack, Long>();
    /**
     * Contains the start time of the next segment in line that will be written.
     */
    protected Map<StreamingTrack, Long> nextChunkWriteStartTime = new ConcurrentHashMap<StreamingTrack, Long>();
    /**
     * Contains the next sample's start time.
     */
    protected Map<StreamingTrack, Long> nextSampleStartTime = new HashMap<StreamingTrack, Long>();
    /**
     * Buffers the samples per track until there are enough samples to form a Segment.
     */
    protected Map<StreamingTrack, List<StreamingSample>> sampleBuffers = new HashMap<StreamingTrack, List<StreamingSample>>();
    protected Map<StreamingTrack, TrackBox> trackBoxes = new HashMap<StreamingTrack, TrackBox>();
    /**
     * Buffers segments until it's time for a segment to be written.
     */
    protected Map<StreamingTrack, Queue<ChunkContainer>> chunkBuffers = new ConcurrentHashMap<StreamingTrack, Queue<ChunkContainer>>();
    protected Map<StreamingTrack, Long> chunkNumbers = new HashMap<StreamingTrack, Long>();
    protected Map<StreamingTrack, Long> sampleNumbers = new HashMap<StreamingTrack, Long>();
    long bytesWritten = 0;
    volatile boolean headerWritten = false;

    public StandardMp4Writer(List<StreamingTrack> source, WritableByteChannel sink) {
        this.source = new ArrayList<StreamingTrack>(source);
        this.sink = sink;

        HashSet<Long> trackIds = new HashSet<Long>();
        for (StreamingTrack streamingTrack : source) {
            streamingTrack.setSampleSink(this);
            chunkNumbers.put(streamingTrack, 1L);
            sampleNumbers.put(streamingTrack, 1L);
            nextSampleStartTime.put(streamingTrack, 0L);
            nextChunkCreateStartTime.put(streamingTrack, 0L);
            nextChunkWriteStartTime.put(streamingTrack, 0L);
            congestionControl.put(streamingTrack, new CountDownLatch(0));
            sampleBuffers.put(streamingTrack, new ArrayList<StreamingSample>());
            chunkBuffers.put(streamingTrack, new LinkedList<ChunkContainer>());
            if (streamingTrack.getTrackExtension(TrackIdTrackExtension.class) != null) {
                TrackIdTrackExtension trackIdTrackExtension = streamingTrack.getTrackExtension(TrackIdTrackExtension.class);
                assert trackIdTrackExtension != null;
                if (trackIds.contains(trackIdTrackExtension.getTrackId())) {
                    throw new RuntimeException("There may not be two tracks with the same trackID within one file");
                }
                // Fixed: record the explicit trackID. Previously it was never added,
                // so the duplicate check above could not fire and the auto-assignment
                // below could hand out a colliding ID.
                trackIds.add(trackIdTrackExtension.getTrackId());
            }
        }
        // Second pass: assign fresh IDs to tracks that did not bring their own.
        for (StreamingTrack streamingTrack : source) {
            if (streamingTrack.getTrackExtension(TrackIdTrackExtension.class) == null) {
                long maxTrackId = 0;
                for (Long trackId : trackIds) {
                    maxTrackId = Math.max(trackId, maxTrackId);
                }
                TrackIdTrackExtension tiExt = new TrackIdTrackExtension(maxTrackId + 1);
                trackIds.add(tiExt.getTrackId());
                streamingTrack.addTrackExtension(tiExt);
            }
        }
    }

    /**
     * Flushes all remaining samples as a final chunk per track, closes the
     * tracks and appends the moov box. The sink itself is not closed.
     *
     * @throws IOException if writing to the sink fails
     */
    public void close() throws IOException {
        for (StreamingTrack streamingTrack : source) {
            writeChunkContainer(createChunkContainer(streamingTrack));
            streamingTrack.close();
        }
        write(sink, createMoov());
    }

    /** Builds the moov box from the mvhd and the accumulated per-track boxes. */
    protected Box createMoov() {
        MovieBox movieBox = new MovieBox();
        movieBox.addBox(createMvhd());
        for (StreamingTrack streamingTrack : source) {
            movieBox.addBox(trackBoxes.get(streamingTrack));
        }
        // metadata here
        return movieBox;
    }

    /** Re-sorts {@link #source} so the track whose next chunk starts earliest comes first. */
    private void sortTracks() {
        Collections.sort(source, new Comparator<StreamingTrack>() {
            public int compare(StreamingTrack o1, StreamingTrack o2) {
                // Cross-multiply by the other track's timescale so times in
                // different timescales compare without division.
                long a = nextChunkWriteStartTime.get(o1) * o2.getTimescale();
                long b = nextChunkWriteStartTime.get(o2) * o1.getTimescale();
                // Fixed: Long.compare avoids the overflow that (a - b) inside
                // Math.signum could suffer for large timestamp/timescale products.
                return Long.compare(a, b);
            }
        });
    }

    /** Builds the movie header; the movie timescale is the LCM of all track timescales. */
    protected Box createMvhd() {
        MovieHeaderBox mvhd = new MovieHeaderBox();
        mvhd.setVersion(1);
        mvhd.setCreationTime(creationTime);
        mvhd.setModificationTime(creationTime);

        long[] timescales = new long[0];
        long maxTrackId = 0;
        double duration = 0;
        for (StreamingTrack streamingTrack : source) {
            duration = Math.max((double) nextSampleStartTime.get(streamingTrack) / streamingTrack.getTimescale(), duration);
            timescales = Mp4Arrays.copyOfAndAppend(timescales, streamingTrack.getTimescale());
            maxTrackId = Math.max(streamingTrack.getTrackExtension(TrackIdTrackExtension.class).getTrackId(), maxTrackId);
        }
        mvhd.setTimescale(Mp4Math.lcm(timescales));
        mvhd.setDuration((long) (Mp4Math.lcm(timescales) * duration));
        // find the next available trackId
        mvhd.setNextTrackId(maxTrackId + 1);
        return mvhd;
    }

    /** Writes the given boxes to {@code out} and advances {@link #bytesWritten}. */
    protected void write(WritableByteChannel out, Box... boxes) throws IOException {
        for (Box box1 : boxes) {
            box1.getBox(out);
            bytesWritten += box1.getSize();
        }
    }

    /**
     * Tests if the currently received samples for a given track
     * are already a 'chunk' as we want to have it. The next
     * sample will not be part of the chunk and
     * will be added to the fragment buffer later.
     *
     * @param streamingTrack track to test
     * @param next           the latest sample
     * @return true if a chunk is to be created.
     */
    protected boolean isChunkReady(StreamingTrack streamingTrack, StreamingSample next) {
        long ts = nextSampleStartTime.get(streamingTrack);
        long cfst = nextChunkCreateStartTime.get(streamingTrack);
        return (ts >= cfst + 2 * streamingTrack.getTimescale());
        // chunk interleave of 2 seconds
    }

    /** Records the chunk's file offset in the track's stco box and writes its mdat. */
    protected void writeChunkContainer(ChunkContainer chunkContainer) throws IOException {
        TrackBox tb = trackBoxes.get(chunkContainer.streamingTrack);
        ChunkOffsetBox stco = Path.getPath(tb, "mdia[0]/minf[0]/stbl[0]/stco[0]");
        assert stco != null;
        // +8 skips the mdat box header so the offset points at the payload.
        stco.setChunkOffsets(Mp4Arrays.copyOfAndAppend(stco.getChunkOffsets(), bytesWritten + 8));
        write(sink, chunkContainer.mdat);
    }

    public void acceptSample(StreamingSample streamingSample, StreamingTrack streamingTrack) throws IOException {
        TrackBox tb = trackBoxes.get(streamingTrack);
        if (tb == null) {
            tb = new TrackBox();
            tb.addBox(createTkhd(streamingTrack));
            tb.addBox(createMdia(streamingTrack));
            trackBoxes.put(streamingTrack, tb);
        }

        // We might want to do that when the chunk is created to save memory copy
        synchronized (OBJ) {
            // need to synchronize here - I don't want two headers written under any circumstances
            if (!headerWritten) {
                boolean allTracksAtLeastOneSample = true;
                for (StreamingTrack track : source) {
                    allTracksAtLeastOneSample &= (nextSampleStartTime.get(track) > 0 || track == streamingTrack);
                }
                if (allTracksAtLeastOneSample) {
                    write(sink, createFtyp());
                    headerWritten = true;
                }
            }
        }

        try {
            CountDownLatch cdl = congestionControl.get(streamingTrack);
            if (cdl.getCount() > 0) {
                cdl.await();
            }
        } catch (InterruptedException e) {
            // don't care just move on
        }

        if (isChunkReady(streamingTrack, streamingSample)) {
            ChunkContainer chunkContainer = createChunkContainer(streamingTrack);
            sampleBuffers.get(streamingTrack).clear();
            nextChunkCreateStartTime.put(streamingTrack, nextChunkCreateStartTime.get(streamingTrack) + chunkContainer.duration);
            Queue<ChunkContainer> chunkQueue = chunkBuffers.get(streamingTrack);
            chunkQueue.add(chunkContainer);
            synchronized (OBJ) {
                if (headerWritten && this.source.get(0) == streamingTrack) {
                    Queue<ChunkContainer> tracksFragmentQueue;
                    StreamingTrack currentStreamingTrack;
                    // This will write AT LEAST the currently created fragment and possibly a few more
                    while (!(tracksFragmentQueue = chunkBuffers.get(
                            (currentStreamingTrack = this.source.get(0))
                    )).isEmpty()) {
                        ChunkContainer currentFragmentContainer = tracksFragmentQueue.remove();
                        writeChunkContainer(currentFragmentContainer);
                        // Release a producer that may be blocked on congestion control.
                        congestionControl.get(currentStreamingTrack).countDown();
                        long ts = nextChunkWriteStartTime.get(currentStreamingTrack) + currentFragmentContainer.duration;
                        nextChunkWriteStartTime.put(currentStreamingTrack, ts);
                        if (LOG.isTraceEnabled()) {
                            LOG.trace("{} advanced to {}", currentStreamingTrack, (double) ts / currentStreamingTrack.getTimescale());
                        }
                        sortTracks();
                    }
                } else {
                    if (chunkQueue.size() > 10) {
                        // if there are more than 10 fragments in the queue we don't want more samples of this track
                        congestionControl.put(streamingTrack, new CountDownLatch(chunkQueue.size()));
                    }
                }
            }
        }

        sampleBuffers.get(streamingTrack).add(streamingSample);
        nextSampleStartTime.put(streamingTrack, nextSampleStartTime.get(streamingTrack) + streamingSample.getDuration());
    }

    /**
     * Drains the track's buffered samples into a new chunk, updating the
     * sample-table boxes (stsc, stts, ctts, stss, stsz) along the way.
     */
    private ChunkContainer createChunkContainer(StreamingTrack streamingTrack) {
        List<StreamingSample> samples = sampleBuffers.get(streamingTrack);
        long chunkNumber = chunkNumbers.get(streamingTrack);
        chunkNumbers.put(streamingTrack, chunkNumber + 1);
        ChunkContainer cc = new ChunkContainer();
        cc.streamingTrack = streamingTrack;
        cc.mdat = new Mdat(samples);
        cc.duration = nextSampleStartTime.get(streamingTrack) - nextChunkCreateStartTime.get(streamingTrack);
        TrackBox tb = trackBoxes.get(streamingTrack);
        SampleTableBox stbl = Path.getPath(tb, "mdia[0]/minf[0]/stbl[0]");
        assert stbl != null;
        SampleToChunkBox stsc = Path.getPath(stbl, "stsc[0]");
        assert stsc != null;
        if (stsc.getEntries().isEmpty()) {
            List<SampleToChunkBox.Entry> entries = new ArrayList<SampleToChunkBox.Entry>();
            stsc.setEntries(entries);
            entries.add(new SampleToChunkBox.Entry(chunkNumber, samples.size(), 1));
        } else {
            // Only add a new run when the samples-per-chunk count changed.
            SampleToChunkBox.Entry e = stsc.getEntries().get(stsc.getEntries().size() - 1);
            if (e.getSamplesPerChunk() != samples.size()) {
                stsc.getEntries().add(new SampleToChunkBox.Entry(chunkNumber, samples.size(), 1));
            }
        }
        long sampleNumber = sampleNumbers.get(streamingTrack);

        SampleSizeBox stsz = Path.getPath(stbl, "stsz[0]");
        TimeToSampleBox stts = Path.getPath(stbl, "stts[0]");
        SyncSampleBox stss = Path.getPath(stbl, "stss[0]");
        CompositionTimeToSample ctts = Path.getPath(stbl, "ctts[0]");

        if (streamingTrack.getTrackExtension(CompositionTimeTrackExtension.class) != null) {
            if (ctts == null) {
                ctts = new CompositionTimeToSample();
                ctts.setEntries(new ArrayList<CompositionTimeToSample.Entry>());
                // Insert ctts directly before stts, matching conventional box order.
                ArrayList<Box> bs = new ArrayList<Box>(stbl.getBoxes());
                bs.add(bs.indexOf(stts), ctts);
            }
        }

        long[] sampleSizes = new long[samples.size()];
        int i = 0;
        for (StreamingSample sample : samples) {
            sampleSizes[i++] = sample.getContent().limit();

            if (ctts != null) {
                ctts.getEntries().add(
                        new CompositionTimeToSample.Entry(1, l2i(sample.getSampleExtension(CompositionTimeSampleExtension.class).getCompositionTimeOffset())));
            }

            assert stts != null;
            if (stts.getEntries().isEmpty()) {
                ArrayList<TimeToSampleBox.Entry> entries = new ArrayList<TimeToSampleBox.Entry>(stts.getEntries());
                entries.add(new TimeToSampleBox.Entry(1, sample.getDuration()));
                stts.setEntries(entries);
            } else {
                // Run-length encode consecutive samples with the same duration.
                TimeToSampleBox.Entry sttsEntry = stts.getEntries().get(stts.getEntries().size() - 1);
                if (sttsEntry.getDelta() == sample.getDuration()) {
                    sttsEntry.setCount(sttsEntry.getCount() + 1);
                } else {
                    stts.getEntries().add(new TimeToSampleBox.Entry(1, sample.getDuration()));
                }
            }

            SampleFlagsSampleExtension sampleFlagsSampleExtension = sample.getSampleExtension(SampleFlagsSampleExtension.class);
            if (sampleFlagsSampleExtension != null && sampleFlagsSampleExtension.isSyncSample()) {
                if (stss == null) {
                    // Create stss lazily on the first sync sample.
                    stss = new SyncSampleBox();
                    stbl.addBox(stss);
                }
                stss.setSampleNumber(Mp4Arrays.copyOfAndAppend(stss.getSampleNumber(), sampleNumber));
            }
            sampleNumber++;
        }
        assert stsz != null;
        stsz.setSampleSizes(Mp4Arrays.copyOfAndAppend(stsz.getSampleSizes(), sampleSizes));

        sampleNumbers.put(streamingTrack, sampleNumber);
        samples.clear();
        LOG.debug("CC created. mdat size: {}", cc.mdat.size);
        return cc;
    }

    /** Builds the media header from the track's accumulated duration and metadata. */
    protected Box createMdhd(StreamingTrack streamingTrack) {
        MediaHeaderBox mdhd = new MediaHeaderBox();
        mdhd.setCreationTime(creationTime);
        mdhd.setModificationTime(creationTime);
        mdhd.setDuration(nextSampleStartTime.get(streamingTrack));
        mdhd.setTimescale(streamingTrack.getTimescale());
        mdhd.setLanguage(streamingTrack.getLanguage());
        return mdhd;
    }

    /** An mdat box that streams its buffered samples when written. */
    private class Mdat implements Box {
        ArrayList<StreamingSample> samples;
        long size;

        public Mdat(List<StreamingSample> samples) {
            this.samples = new ArrayList<StreamingSample>(samples);
            size = 8; // box header: 32-bit size + 'mdat' type
            for (StreamingSample sample : samples) {
                size += sample.getContent().limit();
            }
        }

        public String getType() {
            return "mdat";
        }

        public long getSize() {
            return size;
        }

        public void getBox(WritableByteChannel writableByteChannel) throws IOException {
            // 32-bit big-endian size followed by the four type bytes 'mdat'.
            writableByteChannel.write(ByteBuffer.wrap(new byte[]{
                    (byte) ((size & 0xff000000) >> 24),
                    (byte) ((size & 0xff0000) >> 16),
                    (byte) ((size & 0xff00) >> 8),
                    (byte) ((size & 0xff)),
                    109, 100, 97, 116, // mdat
            }));
            for (StreamingSample sample : samples) {
                writableByteChannel.write((ByteBuffer) sample.getContent().rewind());
            }
        }
    }

    /** A not-yet-written chunk: its mdat, owning track and duration in track timescale. */
    private class ChunkContainer {
        Mdat mdat;
        StreamingTrack streamingTrack;
        long duration;
    }
}
| |
package uni.helsinki.sdc_weather;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Scanner;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import uni.helsinki.sdc_weather.model.Measurement;
import uni.helsinki.sdc_weather.model.MeasurementDataService;
import android.content.Context;
import android.location.Criteria;
import android.location.Location;
import android.location.LocationManager;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.util.Log;
import android.view.Menu;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.TileOverlay;
import com.google.android.gms.maps.model.TileOverlayOptions;
import com.google.maps.android.heatmaps.HeatmapTileProvider;
import com.sensorcon.sensordrone.android.Drone;
/**
 * Main screen of the SDC-Weather app.
 * <p>
 * Connects to a Sensordrone device over Bluetooth, reads temperature /
 * humidity / pressure, posts measurements as JSON to the backend, fetches
 * stored measurements, and renders a heat map overlay on a Google Map.
 */
public class Weather extends FragmentActivity {

    private static final String TAG = "SDC-Weather";
    /** Backend endpoint used for both GET (list) and POST (create). */
    private static final String URL = "http://sdc-weather.herokuapp.com/measurement";
    /** Known Sensordrone Bluetooth MAC addresses; index 0 is used by default. */
    private static final String MAC[] = {"00:17:EC:11:C0:0F", "00:17:EC:11:C0:0F"};
    /** Example payload kept for reference / manual testing; not used at runtime. */
    private static final String SAMPLE_JSON = "{ \"latitude\": 60.123, \"longitude\": 24.456, \"timestamp\": \"2014-03-21T12:34:56Z\", \"temperatureCelsius\": -5.0, \"pressureAtmospheres\": 1.0, \"humidity\": 69.96 }";

    Button buttonGet;
    Button buttonPost;
    Button buttonMeasure;
    Drone drone;

    private HeatmapTileProvider mProvider;
    private TileOverlay mOverlay;
    private GoogleMap mMap;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_weather);
        // Connecting to Drone and enabling sensors
        enableDrone(MAC[0]);

        // GET: fetch all stored measurements and show them in the result view.
        buttonGet = (Button) findViewById(R.id.ButtonGet);
        buttonGet.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                new DoGet() {
                    @Override
                    protected void onPostExecute(List<Measurement> result) {
                        TextView resultView = (TextView) findViewById(R.id.textView_result);
                        resultView.setText(result.toString());
                    }
                }.execute();
            }
        });

        final Context thisContext = this;
        // POST: build a sample measurement tagged with the last known device
        // location (or a dummy location when none is available) and upload it.
        buttonPost = (Button) findViewById(R.id.ButtonPost);
        buttonPost.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                LocationManager lm = (LocationManager) thisContext.getSystemService(Context.LOCATION_SERVICE);
                Criteria crit = new Criteria();
                crit.setAccuracy(Criteria.NO_REQUIREMENT);
                crit.setCostAllowed(true);
                crit.setPowerRequirement(Criteria.NO_REQUIREMENT);
                crit.setAltitudeRequired(false);
                crit.setBearingRequired(false);
                crit.setSpeedRequired(false);
                // NOTE(review): 'crit' above is configured but a fresh Criteria
                // is passed here, matching the original behavior — confirm intent.
                String provider = lm.getBestProvider(new Criteria(), false);
                Log.i(TAG, "getBestProvider = " + provider);
                Location loc = null;
                // Using HTTP Post with sample data
                Measurement m = new Measurement();
                m.setTimestamp(new Date());
                if (provider != null && (loc = lm.getLastKnownLocation(provider)) != null) {
                    // loc is guaranteed non-null by the condition above.
                    Log.i(TAG, "location = " + loc.toString());
                    m.setLatitude(loc.getLatitude());
                    m.setLongitude(loc.getLongitude());
                } else {
                    Log.i(TAG, "sending dummy location");
                    m.setLatitude(60.60);
                    m.setLongitude(24.24);
                }
                m.setPressureAtmospheres(0.99);
                m.setTemperatureCelsius(37.0);
                m.setHumidity(42.42);
                new DoPost().execute(m);
            }
        });

        // MEASURE: read the Sensordrone sensors, then upload the reading.
        buttonMeasure = (Button) findViewById(R.id.ButtonMeasure);
        buttonMeasure.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                new DoMeasure() {
                    @Override
                    protected void onPostExecute(Measurement result) {
                        super.onPostExecute(result);
                        new DoPost().execute(result);
                    }
                }.execute();
            }
        });
    }

    @Override
    protected void onPostCreate(Bundle savedInstanceState) {
        super.onPostCreate(savedInstanceState);
        addHeatMap();
    }

    /**
     * Connects to the Sensordrone with the given Bluetooth MAC address and
     * enables the temperature, humidity and pressure sensors.
     */
    private void enableDrone(String mac) {
        drone = new Drone();
        drone.btConnect(mac);
        // For Debug
        Log.i(TAG, "Connected to SensorDrone - " + String.valueOf(drone.isConnected));
        Log.i(TAG, "Sensor: Temperature - " + String.valueOf(drone.enableTemperature()));
        Log.i(TAG, "Sensor: Humidy - " + String.valueOf(drone.enableHumidity()));
        Log.i(TAG, "Sensor: Pressure - " + String.valueOf(drone.enablePressure()));
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.weather, menu);
        return true;
    }

    /**
     * Reads the drone sensors off the UI thread and returns a Measurement
     * stamped with the current time and a fixed (dummy) location.
     */
    private class DoMeasure extends AsyncTask<Void, Void, Measurement> {
        @Override
        protected Measurement doInBackground(Void... arg0) {
            Log.i(TAG, "Measuring Temperature: " + String.valueOf(drone.measureTemperature()));
            Log.i(TAG, "Measuring Humididy: " + String.valueOf(drone.measureHumidity()));
            Log.i(TAG, "Measuring Pressure: " + String.valueOf(drone.measurePressure()));
            Log.i(TAG, "Temperature value: " + String.valueOf(drone.temperature_Celsius));
            Log.i(TAG, "Humidity value: " + String.valueOf(drone.humidity_Percent));
            Log.i(TAG, "Pressure value: " + String.valueOf(drone.pressure_Atmospheres));
            Measurement m = new Measurement();
            m.setHumidity(drone.humidity_Percent);
            m.setPressureAtmospheres(drone.pressure_Atmospheres);
            m.setTemperatureCelsius(drone.temperature_Celsius);
            m.setTimestamp(new Date());
            m.setLatitude(60.60);
            m.setLongitude(24.24);
            return m;
        }
    }

    /**
     * Posts each given Measurement to the backend as JSON.
     * Returns false if any post came back with a non-200 status.
     */
    private class DoPost extends AsyncTask<Measurement, Void, Boolean> {
        @Override
        protected Boolean doInBackground(Measurement... params) {
            Boolean result = true;
            MeasurementDataService mds = new MeasurementDataService();
            DefaultHttpClient httpclient = new DefaultHttpClient();
            // Post all JSON objects
            for (int i = 0; i < params.length; i++) {
                try {
                    HttpPost httpPostRequest = new HttpPost(URL);
                    StringEntity se = new StringEntity(mds.toJson(params[i]));
                    // Set HTTP parameters
                    httpPostRequest.setEntity(se);
                    // It might be that we don't need this - httpPostRequest.setHeader("Accept", "application/json");
                    httpPostRequest.setHeader("Content-type", "application/json");
                    // Handles what is returned from the page and get data
                    HttpResponse response = (HttpResponse) httpclient.execute(httpPostRequest);
                    Log.i(TAG, response.getStatusLine().toString());
                    // Bug fix: compare the HTTP status code, not the StatusLine's
                    // hashCode() (which is never the status code).
                    if (response.getStatusLine().getStatusCode() != 200) {
                        Log.w(TAG, "HTTP Post - returned HTTP Code " + response.getStatusLine().getStatusCode());
                        result = false;
                    }
                } catch (Exception e) {
                    Log.e(TAG, "HTTP post failed: ", e);
                }
            }
            return result;
        }
    }

    /**
     * Fetches all measurements from the backend. On any failure an empty
     * list is returned rather than null.
     */
    private class DoGet extends AsyncTask<Void, Void, List<Measurement>> {
        @Override
        protected List<Measurement> doInBackground(Void... params) {
            String result = "";
            try {
                HttpClient httpclient = new DefaultHttpClient();
                HttpGet httpget = new HttpGet(URL);
                HttpResponse response = httpclient.execute(httpget);
                Log.i(TAG, response.getStatusLine().toString());
                HttpEntity entity = response.getEntity();
                if (entity != null) {
                    InputStream inStream = entity.getContent();
                    result = convertStreamToString(inStream);
                    inStream.close();
                } else {
                    Log.e(TAG, "HTTP Get received null entity");
                }
            } catch (Exception e) {
                Log.e(TAG, "Connection failed: ", e);
            }
            try {
                return new MeasurementDataService().fromResultJson(result);
            } catch (Exception e) {
                return new ArrayList<Measurement>();
            }
        }
    }

    /**
     * Reads an entire stream into a String using the Scanner "\\A" idiom.
     * The scanner (and therefore the wrapped stream) is closed on return,
     * fixing the resource leak in the previous version.
     */
    static String convertStreamToString(java.io.InputStream is) {
        java.util.Scanner s = new java.util.Scanner(is).useDelimiter("\\A");
        try {
            return s.hasNext() ? s.next() : "";
        } finally {
            s.close();
        }
    }

    /** Builds the heat map overlay from the bundled police-station locations. */
    private void addHeatMap() {
        List<LatLng> list = null;
        // Get the data: latitude/longitude positions of police stations.
        try {
            list = readItems(R.raw.police_stations);
        } catch (JSONException e) {
            Toast.makeText(this, "Problem reading list of locations.", Toast.LENGTH_LONG).show();
        }
        if (list == null || list.isEmpty()) {
            // Guard: HeatmapTileProvider.Builder rejects null/empty data;
            // previously a read failure here caused a crash. Nothing to draw.
            return;
        }
        // Create a heat map tile provider, passing it the latlngs of the police stations.
        mProvider = new HeatmapTileProvider.Builder()
                .data(list)
                .build();
        setupMap();
        // Add a tile overlay to the map, using the heat map tile provider.
        mOverlay = mMap.addTileOverlay(new TileOverlayOptions().tileProvider(mProvider));
    }

    /**
     * Get a GoogleMap if needed
     */
    private void setupMap() {
        if (mMap != null) {
            return;
        }
        // NOTE(review): assumes R.id.heatmap is present in the layout — a
        // missing fragment would NPE here; confirm against activity_weather.
        SupportMapFragment fragment = (SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.heatmap);
        mMap = fragment.getMap();
    }

    /**
     * Read items from a raw resource containing a JSON array of
     * {"lat": ..., "lng": ...} objects.
     *
     * @param rawResource the raw resource id to read
     * @return the parsed coordinates
     * @throws JSONException if the resource is not a valid JSON array
     */
    private List<LatLng> readItems(int rawResource) throws JSONException {
        Scanner scanner = new Scanner(getResources().openRawResource(rawResource));
        // StringBuilder instead of += in a loop: O(n) instead of O(n^2).
        StringBuilder contents = new StringBuilder();
        try {
            while (scanner.hasNextLine()) {
                contents.append(scanner.nextLine());
            }
        } finally {
            scanner.close();
        }
        ArrayList<LatLng> list = new ArrayList<LatLng>();
        JSONArray jArray = new JSONArray(contents.toString());
        int len = jArray.length();
        for (int i = 0; i < len; ++i) {
            JSONObject jo = jArray.getJSONObject(i);
            list.add(new LatLng(jo.getDouble("lat"), jo.getDouble("lng")));
        }
        return list;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.io;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Arrays;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.commons.io.testtools.FileBasedTestCase;
import org.apache.commons.io.testtools.YellOnCloseInputStream;
import org.apache.commons.io.testtools.YellOnFlushAndCloseOutputStream;
/**
 * JUnit tests for CopyUtils.
 *
 * @author Jeff Turner
 * @author Matthew Hawthorne
 * @author <a href="mailto:jeremias@apache.org">Jeremias Maerki</a>
 * @version $Id: CopyUtilsTest.java 1021986 2010-10-13 03:37:23Z ggregory $
 * @see CopyUtils
 */
@SuppressWarnings("deprecation") // these are test cases for the deprecated CopyUtils
public class CopyUtilsTest extends FileBasedTestCase {

    /*
     * Flush/close behaviour is checked indirectly via the YellOnClose* wrapper
     * streams from the test-tools package, which throw when they are flushed
     * or closed at the wrong time.
     */

    private static final int FILE_SIZE = 1024 * 4 + 1;

    private byte[] inData = generateTestData(FILE_SIZE);

    public CopyUtilsTest(String testName) {
        super(testName);
    }

    // ----------------------------------------------------------------
    // Setup
    // ----------------------------------------------------------------

    @Override
    public void setUp() throws Exception {
        // no per-test fixtures required
    }

    @Override
    public void tearDown() throws Exception {
        // nothing to clean up
    }

    // ----------------------------------------------------------------
    // Tests
    // ----------------------------------------------------------------

    public void testCtor() {
        new CopyUtils();
        // Nothing to assert, the constructor is public and does not blow up.
    }

    public void testCopy_byteArrayToOutputStream() throws Exception {
        ByteArrayOutputStream destination = new ByteArrayOutputStream();
        OutputStream guarded = new YellOnFlushAndCloseOutputStream(destination, false, true);

        CopyUtils.copy(inData, guarded);

        assertEquals("Sizes differ", inData.length, destination.size());
        assertTrue("Content differs", Arrays.equals(inData, destination.toByteArray()));
    }

    public void testCopy_byteArrayToWriter() throws Exception {
        ByteArrayOutputStream destination = new ByteArrayOutputStream();
        OutputStream guarded = new YellOnFlushAndCloseOutputStream(destination, false, true);
        Writer asciiWriter = new java.io.OutputStreamWriter(guarded, "US-ASCII");

        CopyUtils.copy(inData, asciiWriter);
        asciiWriter.flush();

        assertEquals("Sizes differ", inData.length, destination.size());
        assertTrue("Content differs", Arrays.equals(inData, destination.toByteArray()));
    }

    public void testCopy_byteArrayToWriterWithEncoding() throws Exception {
        String text = "data";
        String encoding = "UTF-8";
        StringWriter collector = new StringWriter();

        CopyUtils.copy(text.getBytes(encoding), collector, encoding);

        assertEquals(text, collector.toString());
    }

    public void testCopy_inputStreamToOutputStream() throws Exception {
        InputStream source = new YellOnCloseInputStream(new ByteArrayInputStream(inData));
        ByteArrayOutputStream destination = new ByteArrayOutputStream();
        OutputStream guarded = new YellOnFlushAndCloseOutputStream(destination, false, true);

        int copied = CopyUtils.copy(source, guarded);

        assertEquals("Not all bytes were read", 0, source.available());
        assertEquals("Sizes differ", inData.length, destination.size());
        assertTrue("Content differs", Arrays.equals(inData, destination.toByteArray()));
        assertEquals(inData.length, copied);
    }

    public void testCopy_inputStreamToWriter() throws Exception {
        InputStream source = new YellOnCloseInputStream(new ByteArrayInputStream(inData));
        ByteArrayOutputStream destination = new ByteArrayOutputStream();
        OutputStream guarded = new YellOnFlushAndCloseOutputStream(destination, false, true);
        Writer asciiWriter = new java.io.OutputStreamWriter(guarded, "US-ASCII");

        CopyUtils.copy(source, asciiWriter);
        asciiWriter.flush();

        assertEquals("Not all bytes were read", 0, source.available());
        assertEquals("Sizes differ", inData.length, destination.size());
        assertTrue("Content differs", Arrays.equals(inData, destination.toByteArray()));
    }

    public void testCopy_inputStreamToWriterWithEncoding() throws Exception {
        String text = "data";
        String encoding = "UTF-8";
        StringWriter collector = new StringWriter();

        CopyUtils.copy(new ByteArrayInputStream(text.getBytes(encoding)), collector, encoding);

        assertEquals(text, collector.toString());
    }

    public void testCopy_readerToOutputStream() throws Exception {
        InputStream source = new YellOnCloseInputStream(new ByteArrayInputStream(inData));
        Reader asciiReader = new java.io.InputStreamReader(source, "US-ASCII");
        ByteArrayOutputStream destination = new ByteArrayOutputStream();
        OutputStream guarded = new YellOnFlushAndCloseOutputStream(destination, false, true);

        CopyUtils.copy(asciiReader, guarded);
        // This CopyUtils method flushes internally (it wraps the stream in an
        // OutputStreamWriter and flushes it), so no explicit flush is needed here.

        assertEquals("Sizes differ", inData.length, destination.size());
        assertTrue("Content differs", Arrays.equals(inData, destination.toByteArray()));
    }

    public void testCopy_readerToWriter() throws Exception {
        InputStream source = new YellOnCloseInputStream(new ByteArrayInputStream(inData));
        Reader asciiReader = new java.io.InputStreamReader(source, "US-ASCII");
        ByteArrayOutputStream destination = new ByteArrayOutputStream();
        OutputStream guarded = new YellOnFlushAndCloseOutputStream(destination, false, true);
        Writer asciiWriter = new java.io.OutputStreamWriter(guarded, "US-ASCII");

        int copied = CopyUtils.copy(asciiReader, asciiWriter);
        asciiWriter.flush();

        assertEquals(
            "The number of characters returned by copy is wrong",
            inData.length,
            copied);
        assertEquals("Sizes differ", inData.length, destination.size());
        assertTrue("Content differs", Arrays.equals(inData, destination.toByteArray()));
    }

    public void testCopy_stringToOutputStream() throws Exception {
        String sourceText = new String(inData, "US-ASCII");
        ByteArrayOutputStream destination = new ByteArrayOutputStream();
        OutputStream guarded = new YellOnFlushAndCloseOutputStream(destination, false, true);

        CopyUtils.copy(sourceText, guarded);
        // This CopyUtils method flushes internally (it wraps the stream in an
        // OutputStreamWriter and flushes it), so no explicit flush is needed here.

        assertEquals("Sizes differ", inData.length, destination.size());
        assertTrue("Content differs", Arrays.equals(inData, destination.toByteArray()));
    }

    public void testCopy_stringToWriter() throws Exception {
        String sourceText = new String(inData, "US-ASCII");
        ByteArrayOutputStream destination = new ByteArrayOutputStream();
        OutputStream guarded = new YellOnFlushAndCloseOutputStream(destination, false, true);
        Writer asciiWriter = new java.io.OutputStreamWriter(guarded, "US-ASCII");

        CopyUtils.copy(sourceText, asciiWriter);
        asciiWriter.flush();

        assertEquals("Sizes differ", inData.length, destination.size());
        assertTrue("Content differs", Arrays.equals(inData, destination.toByteArray()));
    }

} // CopyUtilsTest
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.servlet.spec;
import io.undertow.io.BufferWritableOutputStream;
import io.undertow.servlet.UndertowServletMessages;
import io.undertow.servlet.api.ThreadSetupAction;
import io.undertow.servlet.core.CompositeThreadSetupAction;
import io.undertow.servlet.handlers.ServletRequestContext;
import io.undertow.util.Headers;
import org.xnio.Buffers;
import org.xnio.ChannelListener;
import org.xnio.IoUtils;
import org.xnio.Pool;
import org.xnio.Pooled;
import org.xnio.channels.Channels;
import org.xnio.channels.StreamSinkChannel;
import javax.servlet.DispatcherType;
import javax.servlet.ServletOutputStream;
import javax.servlet.ServletRequest;
import javax.servlet.WriteListener;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import static org.xnio.Bits.allAreClear;
import static org.xnio.Bits.anyAreClear;
import static org.xnio.Bits.anyAreSet;
/**
* This stream essentially has two modes. When it is being used in standard blocking mode then
* it will buffer in the pooled buffer. If the stream is closed before the buffer is full it will
* set a content-length header if one has not been explicitly set.
* <p>
* If a content-length header was present when the stream was created then it will automatically
* close and flush itself once the appropriate amount of data has been written.
* <p>
* Once the listener has been set it goes into async mode, and writes become non blocking. Most methods
* have two different code paths, based on if the listener has been set or not
* <p>
* Once the write listener has been set operations must only be invoked on this stream from the write
* listener callback. Attempting to invoke from a different thread will result in an IllegalStateException.
* <p>
 * Async listener tasks are queued in the {@link AsyncContextImpl}. At most one listener can be active at
* one time, which simplifies the thread safety requirements.
*
* @author Stuart Douglas
*/
public class ServletOutputStreamImpl extends ServletOutputStream implements BufferWritableOutputStream {
// The request this stream belongs to; used to reach the exchange and response.
private final ServletRequestContext servletRequestContext;
// Pooled backing storage for 'buffer' when allocated from the exchange's pool.
private Pooled<ByteBuffer> pooledBuffer;
// Current output buffer; lazily created, released once the stream is closed.
private ByteBuffer buffer;
// Explicit buffer size, or null to use the exchange's buffer pool default.
private Integer bufferSize;
// Response channel; lazily obtained from the exchange on first real write.
private StreamSinkChannel channel;
// Total number of bytes written so far (compared against content-length).
private long written;
// Bit set of the FLAG_* constants below.
private int state;
private AsyncContextImpl asyncContext;
// Non-null once the user has switched this stream into async mode.
private WriteListener listener;
private WriteChannelListener internalListener;

/**
 * buffers that are queued up to be written via async writes. This will include
 * {@link #buffer} as the first element, and maybe a user supplied buffer that
 * did not fit
 */
private ByteBuffer[] buffersToWrite;
// File pending an async transferFrom(); the transfer resumes from the listener.
private FileChannel pendingFile;

// Stream has been closed; further writes throw.
private static final int FLAG_CLOSED = 1;
// At least one write has reached the channel.
private static final int FLAG_WRITE_STARTED = 1 << 1;
// Async mode: the stream can currently accept data without blocking.
private static final int FLAG_READY = 1 << 2;
// The underlying channel's writes have been shut down.
private static final int FLAG_DELEGATE_SHUTDOWN = 1 << 3;
// Currently executing inside the write listener callback.
private static final int FLAG_IN_CALLBACK = 1 << 4;

//TODO: should this be configurable?
// Upper bound on extra pool buffers used for a single large blocking write.
private static final int MAX_BUFFERS_TO_ALLOCATE = 6;

private CompositeThreadSetupAction threadSetupAction;
/**
 * Creates a stream whose buffer will come from the exchange's buffer pool.
 * No write timeout is configured.
 *
 * @param servletRequestContext the request context this stream belongs to
 */
public ServletOutputStreamImpl(final ServletRequestContext servletRequestContext) {
    this.servletRequestContext = servletRequestContext;
    this.threadSetupAction = servletRequestContext.getDeployment().getThreadSetupAction();
}
/**
 * Creates a stream with an explicitly sized heap buffer.
 * No write timeout is configured.
 *
 * @param servletRequestContext the request context this stream belongs to
 * @param bufferSize            size of the internal buffer in bytes
 */
public ServletOutputStreamImpl(final ServletRequestContext servletRequestContext, int bufferSize) {
    this.servletRequestContext = servletRequestContext;
    this.bufferSize = bufferSize;
}
/**
 * {@inheritDoc}
 * Writes a single byte by delegating to the ranged array write.
 */
public void write(final int b) throws IOException {
    final byte[] single = {(byte) b};
    write(single, 0, 1);
}
/**
 * {@inheritDoc}
 * Writes the entire array by delegating to the ranged write.
 */
public void write(final byte[] b) throws IOException {
    final int length = b.length;
    write(b, 0, length);
}
/**
 * {@inheritDoc}
 * <p>
 * Blocking mode (no write listener): data is buffered; writes that do not fit
 * are pushed to the channel with blocking writes, using up to
 * {@link #MAX_BUFFERS_TO_ALLOCATE} extra pool buffers for large payloads.
 * Async mode: data is buffered; if the channel cannot take it all, the
 * remainder is copied into {@link #buffersToWrite}, {@link #FLAG_READY} is
 * cleared and writes are resumed.
 */
public void write(final byte[] b, final int off, final int len) throws IOException {
    if (anyAreSet(state, FLAG_CLOSED) || servletRequestContext.getOriginalResponse().isTreatAsCommitted()) {
        throw UndertowServletMessages.MESSAGES.streamIsClosed();
    }
    // Zero-length (or negative) writes are a no-op.
    if (len < 1) {
        return;
    }
    if (listener == null) {
        // ---- Blocking path ----
        ByteBuffer buffer = buffer();
        if (buffer.remaining() < len) {
            //so what we have will not fit.
            //We allocate multiple buffers up to MAX_BUFFERS_TO_ALLOCATE
            //and put it in them
            //if it still does not fit we loop, re-using these buffers
            StreamSinkChannel channel = this.channel;
            if (channel == null) {
                this.channel = channel = servletRequestContext.getExchange().getResponseChannel();
            }
            final Pool<ByteBuffer> bufferPool = servletRequestContext.getExchange().getConnection().getBufferPool();
            ByteBuffer[] buffers = new ByteBuffer[MAX_BUFFERS_TO_ALLOCATE + 1];
            Pooled[] pooledBuffers = new Pooled[MAX_BUFFERS_TO_ALLOCATE];
            try {
                // Slot 0 is the stream's own buffer; fill it first.
                buffers[0] = buffer;
                int bytesWritten = 0;
                int rem = buffer.remaining();
                buffer.put(b, bytesWritten + off, rem);
                buffer.flip();
                bytesWritten += rem;
                int bufferCount = 1;
                // Fill up to MAX_BUFFERS_TO_ALLOCATE pool buffers with the rest.
                for (int i = 0; i < MAX_BUFFERS_TO_ALLOCATE; ++i) {
                    Pooled<ByteBuffer> pooled = bufferPool.allocate();
                    pooledBuffers[bufferCount - 1] = pooled;
                    buffers[bufferCount++] = pooled.getResource();
                    ByteBuffer cb = pooled.getResource();
                    int toWrite = len - bytesWritten;
                    if (toWrite > cb.remaining()) {
                        rem = cb.remaining();
                        cb.put(b, bytesWritten + off, rem);
                        cb.flip();
                        bytesWritten += rem;
                    } else {
                        cb.put(b, bytesWritten + off, toWrite);
                        bytesWritten = len;
                        cb.flip();
                        break;
                    }
                }
                Channels.writeBlocking(channel, buffers, 0, bufferCount);
                // Still data left: keep re-filling the same buffers and
                // blocking-writing until everything is out.
                while (bytesWritten < len) {
                    //ok, it did not fit, loop and loop and loop until it is done
                    bufferCount = 0;
                    for (int i = 0; i < MAX_BUFFERS_TO_ALLOCATE + 1; ++i) {
                        ByteBuffer cb = buffers[i];
                        cb.clear();
                        bufferCount++;
                        int toWrite = len - bytesWritten;
                        if (toWrite > cb.remaining()) {
                            rem = cb.remaining();
                            cb.put(b, bytesWritten + off, rem);
                            cb.flip();
                            bytesWritten += rem;
                        } else {
                            cb.put(b, bytesWritten + off, toWrite);
                            bytesWritten = len;
                            cb.flip();
                            break;
                        }
                    }
                    Channels.writeBlocking(channel, buffers, 0, bufferCount);
                }
                buffer.clear();
            } finally {
                // Return every pool buffer we allocated.
                for (int i = 0; i < pooledBuffers.length; ++i) {
                    Pooled p = pooledBuffers[i];
                    if (p == null) {
                        break;
                    }
                    p.free();
                }
            }
        } else {
            // Fits in the stream buffer; only flush when it becomes full.
            buffer.put(b, off, len);
            if (buffer.remaining() == 0) {
                writeBufferBlocking(false);
            }
        }
        updateWritten(len);
    } else {
        // ---- Async path: only legal when the stream reported itself ready ----
        if (anyAreClear(state, FLAG_READY)) {
            throw UndertowServletMessages.MESSAGES.streamNotReady();
        }
        //even though we are in async mode we are still buffering
        try {
            ByteBuffer buffer = buffer();
            if (buffer.remaining() > len) {
                buffer.put(b, off, len);
            } else {
                // Gathering write of our buffer plus a view over the user's data.
                buffer.flip();
                final ByteBuffer userBuffer = ByteBuffer.wrap(b, off, len);
                final ByteBuffer[] bufs = new ByteBuffer[]{buffer, userBuffer};
                long toWrite = Buffers.remaining(bufs);
                long res;
                long written = 0;
                createChannel();
                state |= FLAG_WRITE_STARTED;
                do {
                    res = channel.write(bufs);
                    written += res;
                    if (res == 0) {
                        //write it out with a listener
                        //but we need to copy any extra data
                        final ByteBuffer copy = ByteBuffer.allocate(userBuffer.remaining());
                        copy.put(userBuffer);
                        copy.flip();
                        this.buffersToWrite = new ByteBuffer[]{buffer, copy};
                        state &= ~FLAG_READY;
                        channel.resumeWrites();
                        return;
                    }
                } while (written < toWrite);
                buffer.clear();
            }
        } finally {
            updateWrittenAsync(len);
        }
    }
}
/**
 * Gathering write of the given buffers.
 * <p>
 * Blocking mode: if this is the first write and the data exactly matches the
 * declared content-length, it is written straight through; otherwise data is
 * buffered and, if too large, written together with any buffered bytes.
 * Async mode: mirrors the byte[] variant — data that the channel cannot take
 * is copied, queued in {@link #buffersToWrite}, and writes are resumed.
 */
@Override
public void write(ByteBuffer[] buffers) throws IOException {
    if (anyAreSet(state, FLAG_CLOSED) || servletRequestContext.getOriginalResponse().isTreatAsCommitted()) {
        throw UndertowServletMessages.MESSAGES.streamIsClosed();
    }
    // Total payload across all buffers; zero-length writes are a no-op.
    int len = 0;
    for (ByteBuffer buf : buffers) {
        len += buf.remaining();
    }
    if (len < 1) {
        return;
    }
    if (listener == null) {
        //if we have received the exact amount of content write it out in one go
        //this is a common case when writing directly from a buffer cache.
        if (this.written == 0 && len == servletRequestContext.getOriginalResponse().getContentLength()) {
            if (channel == null) {
                channel = servletRequestContext.getExchange().getResponseChannel();
            }
            Channels.writeBlocking(channel, buffers, 0, buffers.length);
            state |= FLAG_WRITE_STARTED;
        } else {
            ByteBuffer buffer = buffer();
            if (len < buffer.remaining()) {
                // Fits entirely in our buffer; just copy it in.
                Buffers.copy(buffer, buffers, 0, buffers.length);
            } else {
                if (channel == null) {
                    channel = servletRequestContext.getExchange().getResponseChannel();
                }
                if (buffer.position() == 0) {
                    Channels.writeBlocking(channel, buffers, 0, buffers.length);
                } else {
                    // Prepend our buffered data to the caller's buffers.
                    final ByteBuffer[] newBuffers = new ByteBuffer[buffers.length + 1];
                    buffer.flip();
                    newBuffers[0] = buffer;
                    System.arraycopy(buffers, 0, newBuffers, 1, buffers.length);
                    Channels.writeBlocking(channel, newBuffers, 0, newBuffers.length);
                    buffer.clear();
                }
                state |= FLAG_WRITE_STARTED;
            }
        }
        updateWritten(len);
    } else {
        if (anyAreClear(state, FLAG_READY)) {
            throw UndertowServletMessages.MESSAGES.streamNotReady();
        }
        //even though we are in async mode we are still buffering
        try {
            ByteBuffer buffer = buffer();
            if (buffer.remaining() > len) {
                Buffers.copy(buffer, buffers, 0, buffers.length);
            } else {
                // Gathering write of our buffer plus the caller's buffers.
                final ByteBuffer[] bufs = new ByteBuffer[buffers.length + 1];
                buffer.flip();
                bufs[0] = buffer;
                System.arraycopy(buffers, 0, bufs, 1, buffers.length);
                long toWrite = Buffers.remaining(bufs);
                long res;
                long written = 0;
                createChannel();
                state |= FLAG_WRITE_STARTED;
                do {
                    res = channel.write(bufs);
                    written += res;
                    if (res == 0) {
                        //write it out with a listener
                        //but we need to copy any extra data
                        //TODO: should really allocate from the pool here
                        final ByteBuffer copy = ByteBuffer.allocate((int) Buffers.remaining(buffers));
                        Buffers.copy(copy, buffers, 0, buffers.length);
                        copy.flip();
                        this.buffersToWrite = new ByteBuffer[]{buffer, copy};
                        state &= ~FLAG_READY;
                        channel.resumeWrites();
                        return;
                    }
                } while (written < toWrite);
                buffer.clear();
            }
        } finally {
            updateWrittenAsync(len);
        }
    }
}
/** Writes a single buffer by delegating to the gathering-write variant. */
@Override
public void write(ByteBuffer byteBuffer) throws IOException {
    final ByteBuffer[] single = {byteBuffer};
    write(single);
}
/**
 * Records {@code len} more bytes as written and, once the response's declared
 * content-length (if any) has been reached, closes the stream (blocking mode).
 */
void updateWritten(final long len) throws IOException {
    written += len;
    final long declared = servletRequestContext.getOriginalResponse().getContentLength();
    final boolean complete = declared != -1 && written >= declared;
    if (complete) {
        close();
    }
}
/**
 * Async-mode counterpart of {@link #updateWritten(long)}: once the declared
 * content-length has been written the stream is marked closed and, if no
 * flush is already in flight, the remaining buffered data is flushed, the
 * channel's writes are shut down and the pooled buffer is released.
 */
void updateWrittenAsync(final long len) throws IOException {
    this.written += len;
    long contentLength = servletRequestContext.getOriginalResponse().getContentLength();
    if (contentLength != -1 && this.written >= contentLength) {
        state |= FLAG_CLOSED;
        //if buffersToWrite is set we are already flushing
        //so we don't have to do anything
        if (buffersToWrite == null && pendingFile == null) {
            if (flushBufferAsync(true)) {
                // Everything was written synchronously; finish the response now.
                channel.shutdownWrites();
                state |= FLAG_DELEGATE_SHUTDOWN;
                channel.flush();
                if(pooledBuffer != null) {
                    pooledBuffer.free();
                    buffer = null;
                    pooledBuffer = null;
                }
            }
        }
    }
}
/**
 * Attempts a non-blocking flush of pending data ({@link #buffersToWrite} if
 * set, otherwise the stream buffer).
 *
 * @param writeFinal if true the data is written via writeFinal (terminal write)
 * @return true if everything was written; false if the channel backed up, in
 *         which case the data is queued in {@link #buffersToWrite},
 *         {@link #FLAG_READY} is cleared and writes are resumed
 */
private boolean flushBufferAsync(final boolean writeFinal) throws IOException {
    ByteBuffer[] bufs = buffersToWrite;
    if (bufs == null) {
        // Nothing queued; flush the stream buffer if it holds any data.
        ByteBuffer buffer = this.buffer;
        if (buffer == null || buffer.position() == 0) {
            return true;
        }
        buffer.flip();
        bufs = new ByteBuffer[]{buffer};
    }
    long toWrite = Buffers.remaining(bufs);
    if (toWrite == 0) {
        //we clear the buffer, so it can be written to again
        buffer.clear();
        return true;
    }
    state |= FLAG_WRITE_STARTED;
    createChannel();
    long res;
    long written = 0;
    do {
        if (writeFinal) {
            res = channel.writeFinal(bufs);
        } else {
            res = channel.write(bufs);
        }
        written += res;
        if (res == 0) {
            //write it out with a listener
            state = state & ~FLAG_READY;
            buffersToWrite = bufs;
            channel.resumeWrites();
            return false;
        }
    } while (written < toWrite);
    buffer.clear();
    return true;
}
/**
 * Returns the underlying buffer, creating it on first use.
 * <p>
 * Callers that fill the buffer directly must afterwards call
 * {@link #updateWritten(long)} with the number of bytes added.
 * <p>
 * This is essentially a hack for the print writer, which fills the buffer
 * itself for efficiency; do not use it elsewhere.
 *
 * @return the underlying buffer, or {@code null} if the stream is closed
 */
ByteBuffer underlyingBuffer() {
    return anyAreSet(state, FLAG_CLOSED) ? null : buffer();
}
/**
 * {@inheritDoc}
 * <p>
 * Flushes from within an include, or on a committed mock response, are
 * ignored per the servlet spec. When the deployment opts into ignoring
 * flushes, a flush on a fully-read, non-chunked response is only recorded,
 * not performed.
 */
public void flush() throws IOException {
    // The servlet spec requires flushes inside an include to be ignored.
    if (servletRequestContext.getOriginalRequest().getDispatcherType() == DispatcherType.INCLUDE) {
        return;
    }
    if (servletRequestContext.getOriginalResponse().isTreatAsCommitted()) {
        return;
    }
    if (servletRequestContext.getDeployment().getDeploymentInfo().isIgnoreFlush()) {
        if (servletRequestContext.getExchange().isRequestComplete()
                && servletRequestContext.getOriginalResponse().getHeader(Headers.TRANSFER_ENCODING_STRING) == null) {
            // Record the flush without performing it: real flushes mostly just
            // hurt performance. Only safe once the request is fully read, so
            // HTTP tunneling scenarios still work.
            servletRequestContext.getOriginalResponse().setIgnoredFlushPerformed(true);
            return;
        }
    }
    flushInternal();
}
/**
 * Performs the actual flush. Blocking mode: buffered data is written out and
 * the channel is flushed, blocking as needed. Async mode: the buffer is
 * written non-blocking as far as possible; anything unwritten is compacted
 * back into the buffer rather than changing the ready state.
 */
public void flushInternal() throws IOException {
    if (listener == null) {
        if (anyAreSet(state, FLAG_CLOSED)) {
            //just return
            return;
        }
        if (buffer != null && buffer.position() != 0) {
            writeBufferBlocking(false);
        }
        if (channel == null) {
            channel = servletRequestContext.getExchange().getResponseChannel();
        }
        Channels.flushBlocking(channel);
    } else {
        // Not ready: a flush now could block, which async mode forbids.
        if (anyAreClear(state, FLAG_READY)) {
            return;
        }
        createChannel();
        if (buffer == null || buffer.position() == 0) {
            //nothing to flush, we just flush the underlying stream
            //it does not matter if this succeeds or not
            channel.flush();
            return;
        }
        //we have some data in the buffer, we can just write it out
        //if the write fails we just compact, rather than changing the ready state
        state |= FLAG_WRITE_STARTED;
        buffer.flip();
        long res;
        do {
            res = channel.write(buffer);
        } while (buffer.hasRemaining() && res != 0);
        if (!buffer.hasRemaining()) {
            // Fully drained: flush the channel too.
            channel.flush();
        }
        buffer.compact();
    }
}
/**
 * Transfers the contents of the given file channel to the response.
 * <p>
 * Blocking mode: any buffered data is flushed first, then the file is sent
 * with a blocking transfer. Async mode: transfers as much as the channel
 * accepts; if the channel backs up, the file is stored in
 * {@link #pendingFile} with its position updated and the transfer resumes
 * from the write listener.
 *
 * @param source the file to send; its position is advanced past what was written
 * @throws IOException if the stream is closed or the transfer fails
 */
@Override
public void transferFrom(FileChannel source) throws IOException {
    if (anyAreSet(state, FLAG_CLOSED) || servletRequestContext.getOriginalResponse().isTreatAsCommitted()) {
        throw UndertowServletMessages.MESSAGES.streamIsClosed();
    }
    if (listener == null) {
        if (buffer != null && buffer.position() != 0) {
            writeBufferBlocking(false);
        }
        if (channel == null) {
            channel = servletRequestContext.getExchange().getResponseChannel();
        }
        long position = source.position();
        long count = source.size() - position;
        Channels.transferBlocking(channel, source, position, count);
        updateWritten(count);
    } else {
        state |= FLAG_WRITE_STARTED;
        createChannel();
        long pos = 0;
        try {
            long size = source.size();
            pos = source.position();
            while (size - pos > 0) {
                // Bug fix: transfer from the caller's 'source' channel.
                // Previously this read from 'pendingFile', which is still
                // null on the first pass (it is only assigned below after
                // the channel backs up) and caused a NullPointerException.
                long ret = channel.transferFrom(source, pos, size - pos);
                if (ret <= 0) {
                    // Channel is full: remember where we got to and finish
                    // the transfer from the write listener.
                    state &= ~FLAG_READY;
                    pendingFile = source;
                    source.position(pos);
                    channel.resumeWrites();
                    return;
                }
                pos += ret;
            }
        } finally {
            updateWrittenAsync(pos - source.position());
        }
    }
}
/**
 * Writes the contents of the internal buffer to the response channel,
 * blocking until every byte has been accepted. The buffer is cleared and
 * FLAG_WRITE_STARTED is raised on completion.
 *
 * @param writeFinal if true the data is written with writeFinal(), marking
 *                   it as the last write on the channel
 */
private void writeBufferBlocking(final boolean writeFinal) throws IOException {
    if (channel == null) {
        channel = servletRequestContext.getExchange().getResponseChannel();
    }
    buffer.flip();
    for (;;) {
        if (!buffer.hasRemaining()) {
            break;
        }
        if (writeFinal) {
            channel.writeFinal(buffer);
        } else {
            channel.write(buffer);
        }
        // a partial write means the channel is full; wait until writable
        if (buffer.hasRemaining()) {
            channel.awaitWritable();
        }
    }
    buffer.clear();
    state |= FLAG_WRITE_STARTED;
}
/**
 * {@inheritDoc}
 *
 * Closes the stream. This is a no-op for include dispatches and for
 * responses treated as committed. In blocking mode (no write listener) any
 * remaining buffered data is flushed out synchronously, an exact
 * Content-Length header is set when that is still possible, and writes are
 * shut down on the channel. In async mode the close is delegated to
 * {@link #closeAsync()}.
 */
public void close() throws IOException {
    if (servletRequestContext.getOriginalRequest().getDispatcherType() == DispatcherType.INCLUDE ||
            servletRequestContext.getOriginalResponse().isTreatAsCommitted()) {
        return;
    }
    if (listener == null) {
        if (anyAreSet(state, FLAG_CLOSED)) return;
        state |= FLAG_CLOSED;
        state &= ~FLAG_READY;
        // nothing written yet and no explicit headers: we can still set an
        // exact Content-Length from the buffered byte count
        if (allAreClear(state, FLAG_WRITE_STARTED) && channel == null && servletRequestContext.getOriginalResponse().getHeader(Headers.CONTENT_LENGTH_STRING) == null) {
            if (servletRequestContext.getOriginalResponse().getHeader(Headers.TRANSFER_ENCODING_STRING) == null) {
                if (buffer == null) {
                    servletRequestContext.getExchange().getResponseHeaders().put(Headers.CONTENT_LENGTH, "0");
                } else {
                    servletRequestContext.getExchange().getResponseHeaders().put(Headers.CONTENT_LENGTH, Integer.toString(buffer.position()));
                }
            }
        }
        try {
            if (buffer != null) {
                writeBufferBlocking(true);
            }
            if (channel == null) {
                channel = servletRequestContext.getExchange().getResponseChannel();
            }
            state |= FLAG_DELEGATE_SHUTDOWN;
            StreamSinkChannel channel = this.channel; // local snapshot; shadows the field
            if (channel != null) { //mock requests
                channel.shutdownWrites();
                Channels.flushBlocking(channel);
            }
        } catch (IOException e) {
            IoUtils.safeClose(this.channel);
            throw e;
        } finally {
            // always release the pooled buffer
            if (pooledBuffer != null) {
                pooledBuffer.free();
                buffer = null;
            } else {
                buffer = null;
            }
        }
    } else {
        closeAsync();
    }
}
/**
 * Closes the channel, and flushes any data out using async IO
 * <p>
 * This is used in two situations, if an output stream is not closed when a
 * request is done, and when performing a close on a stream that is in async
 * mode
 *
 * @throws IOException on failure; the pooled buffer (if any) is released
 *                     before the exception propagates
 */
public void closeAsync() throws IOException {
    if (anyAreSet(state, FLAG_CLOSED) || servletRequestContext.getOriginalResponse().isTreatAsCommitted()) {
        return;
    }
    try {
        state |= FLAG_CLOSED;
        state &= ~FLAG_READY;
        // nothing written yet: we can still set an exact Content-Length
        if (allAreClear(state, FLAG_WRITE_STARTED) && channel == null) {
            if (servletRequestContext.getOriginalResponse().getHeader(Headers.TRANSFER_ENCODING_STRING) == null) {
                if (buffer == null) {
                    servletRequestContext.getOriginalResponse().setHeader(Headers.CONTENT_LENGTH, "0");
                } else {
                    servletRequestContext.getOriginalResponse().setHeader(Headers.CONTENT_LENGTH, Integer.toString(buffer.position()));
                }
            }
        }
        createChannel();
        if (buffer != null) {
            if (!flushBufferAsync(true)) {
                // write did not complete; the write listener finishes the close
                return;
            }
            if (pooledBuffer != null) {
                pooledBuffer.free();
                buffer = null;
            } else {
                buffer = null;
            }
        }
        channel.shutdownWrites();
        state |= FLAG_DELEGATE_SHUTDOWN;
        if (!channel.flush()) {
            channel.resumeWrites();
        }
    } catch (IOException e) {
        if(pooledBuffer != null) {
            pooledBuffer.free();
            pooledBuffer = null;
            buffer = null;
        }
        throw e;
    }
}
/**
 * Lazily obtains the response channel from the exchange, attaching the
 * internal write listener (when one has been registered) so that async
 * write notifications are delivered.
 */
private void createChannel() {
    if (channel != null) {
        return;
    }
    channel = servletRequestContext.getExchange().getResponseChannel();
    if (internalListener != null) {
        channel.getWriteSetter().set(internalListener);
    }
}
/**
 * Returns the internal write buffer, allocating it on first use. When an
 * explicit size was requested via setBufferSize() a direct buffer of that
 * size is allocated; otherwise a buffer is borrowed from the connection's
 * pool.
 */
private ByteBuffer buffer() {
    ByteBuffer existing = this.buffer;
    if (existing != null) {
        return existing;
    }
    if (bufferSize == null) {
        // no explicit size requested - borrow from the connection's pool
        this.pooledBuffer = servletRequestContext.getExchange().getConnection().getBufferPool().allocate();
        this.buffer = pooledBuffer.getResource();
    } else {
        // honour the size set through setBufferSize()
        this.buffer = ByteBuffer.allocateDirect(bufferSize);
    }
    return this.buffer;
}
/**
 * Discards all buffered (uncommitted) output and resets the written byte
 * count. Throws if any data has already been written to the channel.
 */
public void resetBuffer() {
    if (!allAreClear(state, FLAG_WRITE_STARTED)) {
        // data already committed - a reset is no longer possible
        throw UndertowServletMessages.MESSAGES.responseAlreadyCommited();
    }
    if (pooledBuffer != null) {
        pooledBuffer.free();
        pooledBuffer = null;
    }
    buffer = null;
    this.written = 0;
}
/**
 * Sets the size of the internal write buffer. Must be called before any
 * content is written and before the response is treated as committed.
 */
public void setBufferSize(final int size) {
    if (buffer != null) {
        throw UndertowServletMessages.MESSAGES.contentHasBeenWritten();
    }
    if (servletRequestContext.getOriginalResponse().isTreatAsCommitted()) {
        throw UndertowServletMessages.MESSAGES.contentHasBeenWritten();
    }
    this.bufferSize = size;
}
/**
 * @return true once the stream has been closed
 */
public boolean isClosed() {
    return anyAreSet(state, FLAG_CLOSED);
}
/**
 * {@inheritDoc}
 *
 * Only valid in async mode; throws if no WriteListener has been set.
 */
@Override
public boolean isReady() {
    if (listener == null) {
        //TODO: is this the correct behaviour?
        throw UndertowServletMessages.MESSAGES.streamNotInAsyncMode();
    }
    return anyAreSet(state, FLAG_READY);
}
/**
 * Switches the stream into async (non-blocking) mode by registering a
 * {@link WriteListener}. May only be called once, and only after the
 * request has been put into async mode.
 */
@Override
public void setWriteListener(final WriteListener writeListener) {
    if (writeListener == null) {
        throw UndertowServletMessages.MESSAGES.listenerCannotBeNull();
    }
    if (listener != null) {
        throw UndertowServletMessages.MESSAGES.listenerAlreadySet();
    }
    final ServletRequest servletRequest = servletRequestContext.getServletRequest();
    if (!servletRequest.isAsyncStarted()) {
        throw UndertowServletMessages.MESSAGES.asyncNotStarted();
    }
    asyncContext = (AsyncContextImpl) servletRequest.getAsyncContext();
    listener = writeListener;
    //we register the write listener on the underlying connection
    //so we don't have to force the creation of the response channel
    //under normal circumstances this will break write listener delegation
    this.internalListener = new WriteChannelListener();
    if(this.channel != null) {
        this.channel.getWriteSetter().set(internalListener);
    }
    //we resume from an async task, after the request has been dispatched
    asyncContext.addAsyncTask(new Runnable() {
        @Override
        public void run() {
            if(channel == null) {
                // no channel yet: deliver the first notification from the IO thread
                servletRequestContext.getExchange().getIoThread().execute(new Runnable() {
                    @Override
                    public void run() {
                        internalListener.handleEvent(null);
                    }
                });
            } else {
                channel.resumeWrites();
            }
        }
    });
}
/**
 * @return the request context this output stream belongs to
 */
ServletRequestContext getServletRequestContext() {
    return servletRequestContext;
}
/**
 * Channel listener that drives the async write cycle: it finishes a pending
 * shutdown, drains any leftover buffered data or parked file transfer,
 * completes a deferred close, and otherwise invokes the user's
 * WriteListener when the channel becomes writable.
 */
private class WriteChannelListener implements ChannelListener<StreamSinkChannel> {
    @Override
    public void handleEvent(final StreamSinkChannel aChannel) {
        //flush the channel if it is closed
        if (anyAreSet(state, FLAG_DELEGATE_SHUTDOWN)) {
            try {
                //either it will work, and the channel is closed
                //or it won't, and we continue with writes resumed
                channel.flush();
                return;
            } catch (IOException e) {
                handleError(e);
                return;
            }
        }
        //if there is data still to write
        if (buffersToWrite != null) {
            long toWrite = Buffers.remaining(buffersToWrite);
            long written = 0;
            long res;
            if(toWrite > 0) { //should always be true, but just to be defensive
                do {
                    try {
                        res = channel.write(buffersToWrite);
                        written += res;
                        if (res == 0) {
                            // channel full again; wait for the next write event
                            return;
                        }
                    } catch (IOException e) {
                        handleError(e);
                        return;
                    }
                } while (written < toWrite);
            }
            buffersToWrite = null;
            buffer.clear();
        }
        // resume a transferFrom() that could not complete without blocking
        if (pendingFile != null) {
            try {
                long size = pendingFile.size();
                long pos = pendingFile.position();
                while (size - pos > 0) {
                    long ret = channel.transferFrom(pendingFile, pos, size - pos);
                    if (ret <= 0) {
                        // remember how far we got and wait for writability
                        pendingFile.position(pos);
                        return;
                    }
                    pos += ret;
                }
                pendingFile = null;
            } catch (IOException e) {
                handleError(e);
                return;
            }
        }
        // a close was requested while data was still pending; finish it now
        if (anyAreSet(state, FLAG_CLOSED)) {
            try {
                if (pooledBuffer != null) {
                    pooledBuffer.free();
                    buffer = null;
                } else {
                    buffer = null;
                }
                channel.shutdownWrites();
                state |= FLAG_DELEGATE_SHUTDOWN;
                channel.flush();
            } catch (IOException e) {
                handleError(e);
                return;
            }
        } else {
            if (asyncContext.isDispatched()) {
                //this is no longer an async request
                //we just return for now
                //TODO: what do we do here? Revert back to blocking mode?
                channel.suspendWrites();
                return;
            }
            state |= FLAG_READY;
            try {
                state |= FLAG_IN_CALLBACK;
                // run the user callback with the proper thread context set up
                ThreadSetupAction.Handle handle = threadSetupAction.setup(servletRequestContext.getExchange());
                try {
                    listener.onWritePossible();
                } finally {
                    handle.tearDown();
                }
                if (isReady()) {
                    //if the stream is still ready then we do not resume writes
                    //this is per spec, we only call the listener once for each time
                    //isReady returns true
                    if(channel != null) {
                        channel.suspendWrites();
                    }
                } else {
                    if(channel != null) {
                        channel.resumeWrites();
                    }
                }
            } catch (Throwable e) {
                IoUtils.safeClose(channel);
            } finally {
                state &= ~FLAG_IN_CALLBACK;
            }
        }
    }
    // Notifies the user's listener of the failure, then tears down the
    // channel and the connection.
    private void handleError(final IOException e) {
        try {
            ThreadSetupAction.Handle handle = threadSetupAction.setup(servletRequestContext.getExchange());
            try {
                listener.onError(e);
            } finally {
                handle.tearDown();
            }
        } finally {
            IoUtils.safeClose(channel, servletRequestContext.getExchange().getConnection());
        }
    }
}
}
| |
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.rongcloud.demo_imlib_basic_android.logger;
/**
 * Helper class for a list (or tree) of LoggerNodes.
 *
 * <p>When this is set as the head of the list, an instance of it can function
 * as a drop-in replacement for {@link android.util.Log}. Most of the methods
 * in this class serve only to map a method call in Log to its equivalent in
 * LogNode.</p>
 */
public class Log {
    // Mirror the native android.util.Log priority values so call sites can be
    // migrated without changes and the two APIs interoperate.
    public static final int NONE = -1;
    public static final int VERBOSE = android.util.Log.VERBOSE;
    public static final int DEBUG = android.util.Log.DEBUG;
    public static final int INFO = android.util.Log.INFO;
    public static final int WARN = android.util.Log.WARN;
    public static final int ERROR = android.util.Log.ERROR;
    public static final int ASSERT = android.util.Log.ASSERT;

    // Head of the LogNode chain; when null, all log calls are silently dropped.
    private static LogNode mLogNode;

    /**
     * Returns the first LogNode in the chain, or null if none is installed.
     */
    public static LogNode getLogNode() {
        return mLogNode;
    }

    /**
     * Installs the LogNode that log data will be sent to.
     */
    public static void setLogNode(LogNode node) {
        mLogNode = node;
    }

    /**
     * Forwards the log data to the installed LogNode, if any. Further
     * LogNodes can be chained behind it as desired.
     *
     * @param priority log level of the data being logged (verbose, error, etc.)
     * @param tag      tag for the log data; can be used to organize log statements
     * @param msg      the actual message to be logged
     * @param tr       an exception to extract and print useful information from,
     *                 or null
     */
    public static void println(int priority, String tag, String msg, Throwable tr) {
        LogNode head = mLogNode;
        if (head != null) {
            head.println(priority, tag, msg, tr);
        }
    }

    /**
     * Forwards the log data to the installed LogNode, if any.
     *
     * @param priority log level of the data being logged (verbose, error, etc.)
     * @param tag      tag for the log data; can be used to organize log statements
     * @param msg      the actual message to be logged
     */
    public static void println(int priority, String tag, String msg) {
        println(priority, tag, msg, null);
    }

    /** Prints a message at VERBOSE priority, with an optional throwable. */
    public static void v(String tag, String msg, Throwable tr) {
        println(VERBOSE, tag, msg, tr);
    }

    /** Prints a message at VERBOSE priority. */
    public static void v(String tag, String msg) {
        println(VERBOSE, tag, msg, null);
    }

    /** Prints a message at DEBUG priority, with an optional throwable. */
    public static void d(String tag, String msg, Throwable tr) {
        println(DEBUG, tag, msg, tr);
    }

    /** Prints a message at DEBUG priority. */
    public static void d(String tag, String msg) {
        println(DEBUG, tag, msg, null);
    }

    /** Prints a message at INFO priority, with an optional throwable. */
    public static void i(String tag, String msg, Throwable tr) {
        println(INFO, tag, msg, tr);
    }

    /** Prints a message at INFO priority. */
    public static void i(String tag, String msg) {
        println(INFO, tag, msg, null);
    }

    /** Prints a message at WARN priority, with an optional throwable. */
    public static void w(String tag, String msg, Throwable tr) {
        println(WARN, tag, msg, tr);
    }

    /** Prints a message at WARN priority. */
    public static void w(String tag, String msg) {
        println(WARN, tag, msg, null);
    }

    /** Prints a throwable at WARN priority with no message. */
    public static void w(String tag, Throwable tr) {
        println(WARN, tag, null, tr);
    }

    /** Prints a message at ERROR priority, with an optional throwable. */
    public static void e(String tag, String msg, Throwable tr) {
        println(ERROR, tag, msg, tr);
    }

    /** Prints a message at ERROR priority. */
    public static void e(String tag, String msg) {
        println(ERROR, tag, msg, null);
    }

    /** Prints a message at ASSERT priority, with an optional throwable. */
    public static void wtf(String tag, String msg, Throwable tr) {
        println(ASSERT, tag, msg, tr);
    }

    /** Prints a message at ASSERT priority. */
    public static void wtf(String tag, String msg) {
        println(ASSERT, tag, msg, null);
    }

    /** Prints a throwable at ASSERT priority with no message. */
    public static void wtf(String tag, Throwable tr) {
        println(ASSERT, tag, null, tr);
    }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.cassandra.db.context;
import java.nio.ByteBuffer;
import org.junit.Test;
import org.apache.cassandra.Util;
import org.apache.cassandra.db.ClockAndCount;
import org.apache.cassandra.db.context.CounterContext.Relationship;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.CounterId;
import static org.apache.cassandra.db.context.CounterContext.ContextState;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
public class CounterContextTest
{
private static final CounterContext cc = new CounterContext();
// Sizes (in bytes) of the pieces of a serialized counter context, as relied
// on by the offset arithmetic in the tests below: a short header-length
// prefix, one short per global/local shard in the header, then one
// fixed-size step (id + clock + count) per shard.
private static final int headerSizeLength = 2;  // short: number of header elements
private static final int headerEltLength = 2;   // short: one header element per global/local shard
private static final int idLength = 16;         // counter id (16 bytes)
private static final int clockLength = 8;       // long: shard clock
private static final int countLength = 8;       // long: shard count
private static final int stepLength = idLength + clockLength + countLength;
/**
 * allocate() should reserve one header element per global/local shard (but
 * none for remote shards) plus one fixed-size step per shard.
 */
@Test
public void testAllocate()
{
    ContextState globalOnly = ContextState.allocate(3, 0, 0);
    assertEquals(headerSizeLength + 3 * headerEltLength + 3 * stepLength, globalOnly.context.remaining());

    ContextState localOnly = ContextState.allocate(0, 3, 0);
    assertEquals(headerSizeLength + 3 * headerEltLength + 3 * stepLength, localOnly.context.remaining());

    // remote shards contribute no header elements
    ContextState remoteOnly = ContextState.allocate(0, 0, 3);
    assertEquals(headerSizeLength + 3 * stepLength, remoteOnly.context.remaining());

    ContextState oneOfEach = ContextState.allocate(1, 1, 1);
    assertEquals(headerSizeLength + 2 * headerEltLength + 3 * stepLength, oneOfEach.context.remaining());
}
/**
 * Exercises diff() over remote-only contexts, covering all four possible
 * relationships: EQUAL, GREATER_THAN, LESS_THAN and DISJOINT.
 */
@Test
public void testDiff()
{
    ContextState left;
    ContextState right;
    // equality: equal nodes, all counts same
    left = ContextState.allocate(0, 0, 3);
    left.writeRemote(CounterId.fromInt(3), 3L, 0L);
    left.writeRemote(CounterId.fromInt(6), 2L, 0L);
    left.writeRemote(CounterId.fromInt(9), 1L, 0L);
    right = ContextState.wrap(ByteBufferUtil.clone(left.context));
    assertEquals(Relationship.EQUAL, cc.diff(left.context, right.context));
    // greater than: left has superset of nodes (counts equal)
    left = ContextState.allocate(0, 0, 4);
    left.writeRemote(CounterId.fromInt(3), 3L, 0L);
    left.writeRemote(CounterId.fromInt(6), 2L, 0L);
    left.writeRemote(CounterId.fromInt(9), 1L, 0L);
    left.writeRemote(CounterId.fromInt(12), 0L, 0L);
    right = ContextState.allocate(0, 0, 3);
    right.writeRemote(CounterId.fromInt(3), 3L, 0L);
    right.writeRemote(CounterId.fromInt(6), 2L, 0L);
    right.writeRemote(CounterId.fromInt(9), 1L, 0L);
    assertEquals(Relationship.GREATER_THAN, cc.diff(left.context, right.context));
    // less than: left has subset of nodes (counts equal)
    left = ContextState.allocate(0, 0, 3);
    left.writeRemote(CounterId.fromInt(3), 3L, 0L);
    left.writeRemote(CounterId.fromInt(6), 2L, 0L);
    left.writeRemote(CounterId.fromInt(9), 1L, 0L);
    right = ContextState.allocate(0, 0, 4);
    right.writeRemote(CounterId.fromInt(3), 3L, 0L);
    right.writeRemote(CounterId.fromInt(6), 2L, 0L);
    right.writeRemote(CounterId.fromInt(9), 1L, 0L);
    right.writeRemote(CounterId.fromInt(12), 0L, 0L);
    assertEquals(Relationship.LESS_THAN, cc.diff(left.context, right.context));
    // greater than: equal nodes, but left has higher counts
    left = ContextState.allocate(0, 0, 3);
    left.writeRemote(CounterId.fromInt(3), 3L, 0L);
    left.writeRemote(CounterId.fromInt(6), 2L, 0L);
    left.writeRemote(CounterId.fromInt(9), 3L, 0L);
    right = ContextState.allocate(0, 0, 3);
    right.writeRemote(CounterId.fromInt(3), 3L, 0L);
    right.writeRemote(CounterId.fromInt(6), 2L, 0L);
    right.writeRemote(CounterId.fromInt(9), 1L, 0L);
    assertEquals(Relationship.GREATER_THAN, cc.diff(left.context, right.context));
    // less than: equal nodes, but right has higher counts
    left = ContextState.allocate(0, 0, 3);
    left.writeRemote(CounterId.fromInt(3), 3L, 0L);
    left.writeRemote(CounterId.fromInt(6), 2L, 0L);
    left.writeRemote(CounterId.fromInt(9), 3L, 0L);
    right = ContextState.allocate(0, 0, 3);
    right.writeRemote(CounterId.fromInt(3), 3L, 0L);
    right.writeRemote(CounterId.fromInt(6), 9L, 0L);
    right.writeRemote(CounterId.fromInt(9), 3L, 0L);
    assertEquals(Relationship.LESS_THAN, cc.diff(left.context, right.context));
    // disjoint: right and left have disjoint node sets
    left = ContextState.allocate(0, 0, 3);
    left.writeRemote(CounterId.fromInt(3), 1L, 0L);
    left.writeRemote(CounterId.fromInt(4), 1L, 0L);
    left.writeRemote(CounterId.fromInt(9), 1L, 0L);
    right = ContextState.allocate(0, 0, 3);
    right.writeRemote(CounterId.fromInt(3), 1L, 0L);
    right.writeRemote(CounterId.fromInt(6), 1L, 0L);
    right.writeRemote(CounterId.fromInt(9), 1L, 0L);
    assertEquals(Relationship.DISJOINT, cc.diff(left.context, right.context));
    left = ContextState.allocate(0, 0, 3);
    left.writeRemote(CounterId.fromInt(3), 1L, 0L);
    left.writeRemote(CounterId.fromInt(4), 1L, 0L);
    left.writeRemote(CounterId.fromInt(9), 1L, 0L);
    right = ContextState.allocate(0, 0, 3);
    right.writeRemote(CounterId.fromInt(2), 1L, 0L);
    right.writeRemote(CounterId.fromInt(6), 1L, 0L);
    right.writeRemote(CounterId.fromInt(12), 1L, 0L);
    assertEquals(Relationship.DISJOINT, cc.diff(left.context, right.context));
    // disjoint: equal nodes, but right and left have higher counts in differing nodes
    left = ContextState.allocate(0, 0, 3);
    left.writeRemote(CounterId.fromInt(3), 1L, 0L);
    left.writeRemote(CounterId.fromInt(6), 3L, 0L);
    left.writeRemote(CounterId.fromInt(9), 1L, 0L);
    right = ContextState.allocate(0, 0, 3);
    right.writeRemote(CounterId.fromInt(3), 1L, 0L);
    right.writeRemote(CounterId.fromInt(6), 1L, 0L);
    right.writeRemote(CounterId.fromInt(9), 5L, 0L);
    assertEquals(Relationship.DISJOINT, cc.diff(left.context, right.context));
    left = ContextState.allocate(0, 0, 3);
    left.writeRemote(CounterId.fromInt(3), 2L, 0L);
    left.writeRemote(CounterId.fromInt(6), 3L, 0L);
    left.writeRemote(CounterId.fromInt(9), 1L, 0L);
    right = ContextState.allocate(0, 0, 3);
    right.writeRemote(CounterId.fromInt(3), 1L, 0L);
    right.writeRemote(CounterId.fromInt(6), 9L, 0L);
    right.writeRemote(CounterId.fromInt(9), 5L, 0L);
    assertEquals(Relationship.DISJOINT, cc.diff(left.context, right.context));
    // disjoint: left has more nodes, but lower counts
    left = ContextState.allocate(0, 0, 4);
    left.writeRemote(CounterId.fromInt(3), 2L, 0L);
    left.writeRemote(CounterId.fromInt(6), 3L, 0L);
    left.writeRemote(CounterId.fromInt(9), 1L, 0L);
    left.writeRemote(CounterId.fromInt(12), 1L, 0L);
    right = ContextState.allocate(0, 0, 3);
    right.writeRemote(CounterId.fromInt(3), 4L, 0L);
    right.writeRemote(CounterId.fromInt(6), 9L, 0L);
    right.writeRemote(CounterId.fromInt(9), 5L, 0L);
    assertEquals(Relationship.DISJOINT, cc.diff(left.context, right.context));
    // disjoint: left has less nodes, but higher counts
    left = ContextState.allocate(0, 0, 3);
    left.writeRemote(CounterId.fromInt(3), 5L, 0L);
    left.writeRemote(CounterId.fromInt(6), 3L, 0L);
    left.writeRemote(CounterId.fromInt(9), 2L, 0L);
    right = ContextState.allocate(0, 0, 4);
    right.writeRemote(CounterId.fromInt(3), 4L, 0L);
    right.writeRemote(CounterId.fromInt(6), 3L, 0L);
    right.writeRemote(CounterId.fromInt(9), 2L, 0L);
    right.writeRemote(CounterId.fromInt(12), 1L, 0L);
    assertEquals(Relationship.DISJOINT, cc.diff(left.context, right.context));
    // disjoint: mixed nodes and counts
    left = ContextState.allocate(0, 0, 3);
    left.writeRemote(CounterId.fromInt(3), 5L, 0L);
    left.writeRemote(CounterId.fromInt(6), 2L, 0L);
    left.writeRemote(CounterId.fromInt(9), 2L, 0L);
    right = ContextState.allocate(0, 0, 4);
    right.writeRemote(CounterId.fromInt(3), 4L, 0L);
    right.writeRemote(CounterId.fromInt(6), 3L, 0L);
    right.writeRemote(CounterId.fromInt(9), 2L, 0L);
    right.writeRemote(CounterId.fromInt(12), 1L, 0L);
    assertEquals(Relationship.DISJOINT, cc.diff(left.context, right.context));
    left = ContextState.allocate(0, 0, 4);
    left.writeRemote(CounterId.fromInt(3), 5L, 0L);
    left.writeRemote(CounterId.fromInt(6), 2L, 0L);
    left.writeRemote(CounterId.fromInt(7), 2L, 0L);
    left.writeRemote(CounterId.fromInt(9), 2L, 0L);
    right = ContextState.allocate(0, 0, 3);
    right.writeRemote(CounterId.fromInt(3), 4L, 0L);
    right.writeRemote(CounterId.fromInt(6), 3L, 0L);
    right.writeRemote(CounterId.fromInt(9), 2L, 0L);
    assertEquals(Relationship.DISJOINT, cc.diff(left.context, right.context));
}
/**
 * Exercises merge() across mixed, all-global, and global-vs-local/remote
 * contexts. Per the assertions below: local counts are aggregated, remote
 * shards are reconciled (the shard with the higher clock wins), and global
 * shards dominate local/remote shards.
 */
@Test
public void testMerge()
{
    // note: local counts aggregated; remote counts are reconciled (i.e. take max)
    ContextState left = ContextState.allocate(0, 1, 3);
    left.writeRemote(CounterId.fromInt(1), 1L, 1L);
    left.writeRemote(CounterId.fromInt(2), 2L, 2L);
    left.writeRemote(CounterId.fromInt(4), 6L, 3L);
    left.writeLocal(CounterId.getLocalId(), 7L, 3L);
    ContextState right = ContextState.allocate(0, 1, 2);
    right.writeRemote(CounterId.fromInt(4), 4L, 4L);
    right.writeRemote(CounterId.fromInt(5), 5L, 5L);
    right.writeLocal(CounterId.getLocalId(), 2L, 9L);
    ByteBuffer merged = cc.merge(left.context, right.context);
    int hd = 4; // headerSizeLength + headerEltLength: one local shard in the header
    assertEquals(hd + 5 * stepLength, merged.remaining());
    // local node id's counts are aggregated
    assertTrue(Util.equalsCounterId(CounterId.getLocalId(), merged, hd + 4 * stepLength));
    assertEquals(9L, merged.getLong(merged.position() + hd + 4 * stepLength + idLength));
    assertEquals(12L, merged.getLong(merged.position() + hd + 4*stepLength + idLength + clockLength));
    // remote node id counts are reconciled (i.e. take max)
    assertTrue(Util.equalsCounterId(CounterId.fromInt(4), merged, hd + 2 * stepLength));
    assertEquals(6L, merged.getLong(merged.position() + hd + 2 * stepLength + idLength));
    assertEquals( 3L, merged.getLong(merged.position() + hd + 2*stepLength + idLength + clockLength));
    assertTrue(Util.equalsCounterId(CounterId.fromInt(5), merged, hd + 3 * stepLength));
    assertEquals(5L, merged.getLong(merged.position() + hd + 3 * stepLength + idLength));
    assertEquals( 5L, merged.getLong(merged.position() + hd + 3*stepLength + idLength + clockLength));
    assertTrue(Util.equalsCounterId(CounterId.fromInt(2), merged, hd + stepLength));
    assertEquals(2L, merged.getLong(merged.position() + hd + stepLength + idLength));
    assertEquals( 2L, merged.getLong(merged.position() + hd + stepLength + idLength + clockLength));
    assertTrue(Util.equalsCounterId(CounterId.fromInt(1), merged, hd));
    assertEquals( 1L, merged.getLong(merged.position() + hd + idLength));
    assertEquals( 1L, merged.getLong(merged.position() + hd + idLength + clockLength));
    //
    // Test merging two exclusively global contexts
    //
    left = ContextState.allocate(3, 0, 0);
    left.writeGlobal(CounterId.fromInt(1), 1L, 1L);
    left.writeGlobal(CounterId.fromInt(2), 2L, 2L);
    left.writeGlobal(CounterId.fromInt(3), 3L, 3L);
    right = ContextState.allocate(3, 0, 0);
    right.writeGlobal(CounterId.fromInt(3), 6L, 6L);
    right.writeGlobal(CounterId.fromInt(4), 4L, 4L);
    right.writeGlobal(CounterId.fromInt(5), 5L, 5L);
    merged = cc.merge(left.context, right.context);
    assertEquals(headerSizeLength + 5 * headerEltLength + 5 * stepLength, merged.remaining());
    assertEquals(18L, cc.total(merged));
    assertEquals(5, merged.getShort(merged.position()));
    int headerLength = headerSizeLength + 5 * headerEltLength;
    assertTrue(Util.equalsCounterId(CounterId.fromInt(1), merged, headerLength));
    assertEquals(1L, merged.getLong(merged.position() + headerLength + idLength));
    assertEquals(1L, merged.getLong(merged.position() + headerLength + idLength + clockLength));
    assertTrue(Util.equalsCounterId(CounterId.fromInt(2), merged, headerLength + stepLength));
    assertEquals(2L, merged.getLong(merged.position() + headerLength + stepLength + idLength));
    assertEquals(2L, merged.getLong(merged.position() + headerLength + stepLength + idLength + clockLength));
    // pick the global shard with the largest clock
    assertTrue(Util.equalsCounterId(CounterId.fromInt(3), merged, headerLength + 2 * stepLength));
    assertEquals(6L, merged.getLong(merged.position() + headerLength + 2 * stepLength + idLength));
    assertEquals(6L, merged.getLong(merged.position() + headerLength + 2 * stepLength + idLength + clockLength));
    assertTrue(Util.equalsCounterId(CounterId.fromInt(4), merged, headerLength + 3 * stepLength));
    assertEquals(4L, merged.getLong(merged.position() + headerLength + 3 * stepLength + idLength));
    assertEquals(4L, merged.getLong(merged.position() + headerLength + 3 * stepLength + idLength + clockLength));
    assertTrue(Util.equalsCounterId(CounterId.fromInt(5), merged, headerLength + 4 * stepLength));
    assertEquals(5L, merged.getLong(merged.position() + headerLength + 4 * stepLength + idLength));
    assertEquals(5L, merged.getLong(merged.position() + headerLength + 4 * stepLength + idLength + clockLength));
    //
    // Test merging two global contexts w/ 'invalid shards'
    //
    left = ContextState.allocate(1, 0, 0);
    left.writeGlobal(CounterId.fromInt(1), 10L, 20L);
    right = ContextState.allocate(1, 0, 0);
    right.writeGlobal(CounterId.fromInt(1), 10L, 30L);
    merged = cc.merge(left.context, right.context);
    headerLength = headerSizeLength + headerEltLength;
    assertEquals(headerLength + stepLength, merged.remaining());
    assertEquals(30L, cc.total(merged));
    assertEquals(1, merged.getShort(merged.position()));
    assertTrue(Util.equalsCounterId(CounterId.fromInt(1), merged, headerLength));
    assertEquals(10L, merged.getLong(merged.position() + headerLength + idLength));
    // with equal clock, we should pick the largest value
    assertEquals(30L, merged.getLong(merged.position() + headerLength + idLength + clockLength));
    //
    // Test merging global w/ mixed contexts
    //
    left = ContextState.allocate(2, 0, 0);
    left.writeGlobal(CounterId.fromInt(1), 1L, 1L);
    left.writeGlobal(CounterId.fromInt(2), 1L, 1L);
    right = ContextState.allocate(0, 1, 1);
    right.writeLocal(CounterId.fromInt(1), 100L, 100L);
    right.writeRemote(CounterId.fromInt(2), 100L, 100L);
    // global shards should dominate local/remote, even with lower clock and value
    merged = cc.merge(left.context, right.context);
    headerLength = headerSizeLength + 2 * headerEltLength;
    assertEquals(headerLength + 2 * stepLength, merged.remaining());
    assertEquals(2L, cc.total(merged));
    assertEquals(2, merged.getShort(merged.position()));
    assertTrue(Util.equalsCounterId(CounterId.fromInt(1), merged, headerLength));
    assertEquals(1L, merged.getLong(merged.position() + headerLength + idLength));
    assertEquals(1L, merged.getLong(merged.position() + headerLength + idLength + clockLength));
    assertTrue(Util.equalsCounterId(CounterId.fromInt(2), merged, headerLength + stepLength));
    assertEquals(1L, merged.getLong(merged.position() + headerLength + stepLength + idLength));
    assertEquals(1L, merged.getLong(merged.position() + headerLength + stepLength + idLength + clockLength));
}
/**
 * total() should sum the count field of every shard, irrespective of
 * whether the shard is global, local, or remote.
 */
@Test
public void testTotal()
{
    // mixed context: four remote shards plus one local shard
    ContextState mixedState = ContextState.allocate(0, 1, 4);
    mixedState.writeRemote(CounterId.fromInt(1), 1L, 1L);
    mixedState.writeRemote(CounterId.fromInt(2), 2L, 2L);
    mixedState.writeRemote(CounterId.fromInt(4), 4L, 4L);
    mixedState.writeRemote(CounterId.fromInt(5), 5L, 5L);
    mixedState.writeLocal(CounterId.getLocalId(), 12L, 12L);
    // 1 + 2 + 4 + 5 + 12
    assertEquals(24L, cc.total(mixedState.context));

    // purely global context
    ContextState globalState = ContextState.allocate(3, 0, 0);
    globalState.writeGlobal(CounterId.fromInt(1), 1L, 1L);
    globalState.writeGlobal(CounterId.fromInt(2), 2L, 2L);
    globalState.writeGlobal(CounterId.fromInt(3), 3L, 3L);
    // 1 + 2 + 3
    assertEquals(6L, cc.total(globalState.context));
}
@Test
public void testClearLocal()
{
    ContextState state;
    ByteBuffer marked;
    ByteBuffer cleared;

    // mark/clear for remote-only contexts is a no-op
    state = ContextState.allocate(0, 0, 1);
    state.writeRemote(CounterId.fromInt(1), 1L, 1L);
    assertFalse(cc.shouldClearLocal(state.context));
    marked = cc.markLocalToBeCleared(state.context);
    // header count of 0 means "nothing marked for clearing"
    assertEquals(0, marked.getShort(marked.position()));
    assertSame(state.context, marked); // should return the original context
    cleared = cc.clearAllLocal(marked);
    assertSame(cleared, marked); // shouldn't alter anything either

    // a single local shard: marking flips the header count negative,
    // clearing converts the shard and resets the count to 0
    state = ContextState.allocate(0, 1, 0);
    state.writeLocal(CounterId.fromInt(1), 1L, 1L);
    assertFalse(cc.shouldClearLocal(state.context));
    marked = cc.markLocalToBeCleared(state.context);
    assertTrue(cc.shouldClearLocal(marked));
    assertEquals(-1, marked.getShort(marked.position()));
    assertNotSame(state.context, marked); // shouldn't alter in place, as it used to do
    cleared = cc.clearAllLocal(marked);
    assertFalse(cc.shouldClearLocal(cleared));
    assertEquals(0, cleared.getShort(cleared.position()));

    // 2 global + 1 local shard: only the local shard is marked; the
    // global shards keep their Short.MIN_VALUE-offset header entries
    state = ContextState.allocate(2, 1, 0);
    state.writeLocal(CounterId.fromInt(1), 1L, 1L);
    state.writeGlobal(CounterId.fromInt(2), 2L, 2L);
    state.writeGlobal(CounterId.fromInt(3), 3L, 3L);
    assertFalse(cc.shouldClearLocal(state.context));
    marked = cc.markLocalToBeCleared(state.context);
    assertTrue(cc.shouldClearLocal(marked));
    // -3 = negated header element count, signalling pending clears
    assertEquals(-3, marked.getShort(marked.position()));
    assertEquals(0, marked.getShort(marked.position() + headerSizeLength));
    assertEquals(Short.MIN_VALUE + 1, marked.getShort(marked.position() + headerSizeLength + headerEltLength));
    assertEquals(Short.MIN_VALUE + 2, marked.getShort(marked.position() + headerSizeLength + 2 * headerEltLength));
    int headerLength = headerSizeLength + 3 * headerEltLength;
    // shard bodies (id, clock, count triples) are untouched by marking
    assertTrue(Util.equalsCounterId(CounterId.fromInt(1), marked, headerLength));
    assertEquals(1L, marked.getLong(marked.position() + headerLength + idLength));
    assertEquals(1L, marked.getLong(marked.position() + headerLength + idLength + clockLength));
    assertTrue(Util.equalsCounterId(CounterId.fromInt(2), marked, headerLength + stepLength));
    assertEquals(2L, marked.getLong(marked.position() + headerLength + stepLength + idLength));
    assertEquals(2L, marked.getLong(marked.position() + headerLength + stepLength + idLength + clockLength));
    assertTrue(Util.equalsCounterId(CounterId.fromInt(3), marked, headerLength + 2 * stepLength));
    assertEquals(3L, marked.getLong(marked.position() + headerLength + 2 * stepLength + idLength));
    assertEquals(3L, marked.getLong(marked.position() + headerLength + 2 * stepLength + idLength + clockLength));
    cleared = cc.clearAllLocal(marked);
    assertFalse(cc.shouldClearLocal(cleared));
    assertEquals(2, cleared.getShort(cleared.position())); // 2 global shards
    // NOTE(review): the two asserts below read from 'cleared' but compute
    // offsets from marked.position(), and the first omits headerSizeLength —
    // this only works if both buffers start at position 0 and
    // headerSizeLength == headerEltLength; verify the intended offsets.
    assertEquals(Short.MIN_VALUE + 1, cleared.getShort(marked.position() + headerEltLength));
    assertEquals(Short.MIN_VALUE + 2, cleared.getShort(marked.position() + headerSizeLength + headerEltLength));
    headerLength = headerSizeLength + 2 * headerEltLength;
    // shard values survive clearing; only the header shrinks
    assertTrue(Util.equalsCounterId(CounterId.fromInt(1), cleared, headerLength));
    assertEquals(1L, cleared.getLong(cleared.position() + headerLength + idLength));
    assertEquals(1L, cleared.getLong(cleared.position() + headerLength + idLength + clockLength));
    assertTrue(Util.equalsCounterId(CounterId.fromInt(2), cleared, headerLength + stepLength));
    assertEquals(2L, cleared.getLong(cleared.position() + headerLength + stepLength + idLength));
    assertEquals(2L, cleared.getLong(cleared.position() + headerLength + stepLength + idLength + clockLength));
    assertTrue(Util.equalsCounterId(CounterId.fromInt(3), cleared, headerLength + 2 * stepLength));
    assertEquals(3L, cleared.getLong(cleared.position() + headerLength + 2 * stepLength + idLength));
    assertEquals(3L, cleared.getLong(cleared.position() + headerLength + 2 * stepLength + idLength + clockLength));

    // a single global shard - no-op
    state = ContextState.allocate(1, 0, 0);
    state.writeGlobal(CounterId.fromInt(1), 1L, 1L);
    assertFalse(cc.shouldClearLocal(state.context));
    marked = cc.markLocalToBeCleared(state.context);
    assertEquals(1, marked.getShort(marked.position()));
    assertSame(state.context, marked);
    cleared = cc.clearAllLocal(marked);
    assertSame(cleared, marked);
}
@Test
public void testFindPositionOf()
{
    // Interleave global/remote/local writes; ids 1..9 end up as nine
    // consecutive shards in the context body.
    ContextState state = ContextState.allocate(3, 3, 3);
    state.writeGlobal(CounterId.fromInt(1), 1L, 1L);
    state.writeRemote(CounterId.fromInt(2), 2L, 2L);
    state.writeLocal(CounterId.fromInt(3), 3L, 3L);
    state.writeGlobal(CounterId.fromInt(4), 4L, 4L);
    state.writeRemote(CounterId.fromInt(5), 5L, 5L);
    state.writeLocal(CounterId.fromInt(6), 6L, 6L);
    state.writeGlobal(CounterId.fromInt(7), 7L, 7L);
    state.writeRemote(CounterId.fromInt(8), 8L, 8L);
    state.writeLocal(CounterId.fromInt(9), 9L, 9L);

    // Header: size short plus one element per non-remote marker (6 here).
    int headerLength = headerSizeLength + 6 * headerEltLength;

    // Each present id is located one stepLength further into the body.
    for (int id = 1; id <= 9; id++)
    {
        assertEquals(headerLength + (id - 1) * stepLength,
                     cc.findPositionOf(state.context, CounterId.fromInt(id)));
    }

    // Ids below, just above, and well past the stored range are not found.
    for (int missing : new int[]{ 0, 10, 15, 20 })
    {
        assertEquals(-1, cc.findPositionOf(state.context, CounterId.fromInt(missing)));
    }
}
@Test
public void testGetGlockAndCountOf() // method name typo ("Glock") kept for compatibility
{
    // Same interleaved layout as testFindPositionOf: ids 1..9 each written
    // with clock == count == id.
    ContextState state = ContextState.allocate(3, 3, 3);
    state.writeGlobal(CounterId.fromInt(1), 1L, 1L);
    state.writeRemote(CounterId.fromInt(2), 2L, 2L);
    state.writeLocal(CounterId.fromInt(3), 3L, 3L);
    state.writeGlobal(CounterId.fromInt(4), 4L, 4L);
    state.writeRemote(CounterId.fromInt(5), 5L, 5L);
    state.writeLocal(CounterId.fromInt(6), 6L, 6L);
    state.writeGlobal(CounterId.fromInt(7), 7L, 7L);
    state.writeRemote(CounterId.fromInt(8), 8L, 8L);
    state.writeLocal(CounterId.fromInt(9), 9L, 9L);

    // Every stored id reports its own clock/count pair.
    for (int id = 1; id <= 9; id++)
    {
        assertEquals(ClockAndCount.create((long) id, (long) id),
                     cc.getClockAndCountOf(state.context, CounterId.fromInt(id)));
    }

    // Missing ids report the (0, 0) sentinel rather than failing.
    for (int missing : new int[]{ 0, 10, 15, 20 })
    {
        assertEquals(ClockAndCount.create(0L, 0L),
                     cc.getClockAndCountOf(state.context, CounterId.fromInt(missing)));
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.myfaces.shared.renderkit.html;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.faces.application.ProjectStage;
import javax.faces.component.UIComponent;
import javax.faces.component.UIPanel;
import javax.faces.component.behavior.ClientBehavior;
import javax.faces.component.behavior.ClientBehaviorHolder;
import javax.faces.component.html.HtmlPanelGrid;
import javax.faces.context.FacesContext;
import javax.faces.context.ResponseWriter;
import org.apache.myfaces.shared.renderkit.JSFAttr;
import org.apache.myfaces.shared.renderkit.RendererUtils;
import org.apache.myfaces.shared.renderkit.html.util.ResourceUtils;
import org.apache.myfaces.shared.util.ArrayUtils;
import org.apache.myfaces.shared.util.StringUtils;
/**
 * Base renderer for the standard panel-grid component: renders the children
 * of a {@link UIPanel} as an HTML table with a fixed number of columns,
 * optional header/footer facets (THEAD/TFOOT), and optional TBODY grouping
 * driven by the "bodyrows" attribute.
 */
public class HtmlGridRendererBase
        extends HtmlRenderer
{
    private static final Logger log = Logger.getLogger(HtmlGridRendererBase.class.getName());

    // Default when no "bodyrows" attribute is given: one TBODY starting at row 0.
    private static final Integer[] ZERO_INT_ARRAY = new Integer[]{0};

    public boolean getRendersChildren()
    {
        // This renderer emits the children itself inside encodeEnd().
        return true;
    }

    @Override
    public void decode(FacesContext context, UIComponent component)
    {
        // Check for npe
        super.decode(context, component);
        HtmlRendererUtils.decodeClientBehaviors(context, component);
    }

    public void encodeBegin(FacesContext facesContext, UIComponent component)
            throws IOException
    {
        // all work done in encodeEnd()
    }

    public void encodeChildren(FacesContext context, UIComponent component)
            throws IOException
    {
        // all work done in encodeEnd()
    }

    /**
     * Renders the complete table: the TABLE element with its attributes and
     * behaviors, caption, header/footer facets, and the children laid out in
     * {@code columns} columns.
     */
    public void encodeEnd(FacesContext facesContext, UIComponent component)
            throws IOException
    {
        RendererUtils.checkParamValidity(facesContext, component, UIPanel.class);

        // Determine the column count, either from the typed component or from
        // the generic attribute map.
        int columns;
        if (component instanceof HtmlPanelGrid)
        {
            columns = ((HtmlPanelGrid)component).getColumns();
        }
        else
        {
            Integer i = (Integer)component.getAttributes().get(
                    org.apache.myfaces.shared.renderkit.JSFAttr.COLUMNS_ATTR);
            columns = i != null ? i.intValue() : 0;
        }

        // A non-positive column count is invalid; log and fall back to 1 so
        // rendering can still proceed.
        if (columns <= 0)
        {
            if (log.isLoggable(Level.SEVERE))
            {
                log.severe("Wrong columns attribute for PanelGrid " +
                        component.getClientId(facesContext) + ": " + columns);
            }
            columns = 1;
        }

        ResponseWriter writer = facesContext.getResponseWriter();
        Map<String, List<ClientBehavior>> behaviors = null;
        if (component instanceof ClientBehaviorHolder)
        {
            behaviors = ((ClientBehaviorHolder) component).getClientBehaviors();
        }
        if (behaviors != null && !behaviors.isEmpty())
        {
            ResourceUtils.renderDefaultJsfJsInlineIfNecessary(facesContext, writer);
        }

        writer.startElement(HTML.TABLE_ELEM, component);

        if (component instanceof ClientBehaviorHolder)
        {
            // With client behaviors attached, both id and name must be
            // written so behavior scripts can locate the element.
            if (!behaviors.isEmpty())
            {
                HtmlRendererUtils.writeIdAndName(writer, component, facesContext);
            }
            else
            {
                HtmlRendererUtils.writeIdIfNecessary(writer, component, facesContext);
            }
            long commonPropertiesMarked = 0L;
            if (isCommonPropertiesOptimizationEnabled(facesContext))
            {
                commonPropertiesMarked = CommonPropertyUtils.getCommonPropertiesMarked(component);
            }
            // Event handlers: the fast common-properties path only applies
            // when there are no behaviors to merge in.
            if (behaviors.isEmpty() && isCommonPropertiesOptimizationEnabled(facesContext))
            {
                CommonPropertyUtils.renderEventProperties(writer,
                        commonPropertiesMarked, component);
            }
            else
            {
                if (isCommonEventsOptimizationEnabled(facesContext))
                {
                    CommonEventUtils.renderBehaviorizedEventHandlers(facesContext, writer,
                            commonPropertiesMarked,
                            CommonEventUtils.getCommonEventsMarked(component), component, behaviors);
                }
                else
                {
                    HtmlRendererUtils.renderBehaviorizedEventHandlers(facesContext, writer, component, behaviors);
                }
            }
            // Remaining pass-through attributes (events were handled above).
            if (isCommonPropertiesOptimizationEnabled(facesContext))
            {
                HtmlRendererUtils.renderHTMLAttributes(writer, component, HTML.TABLE_ATTRIBUTES);
                CommonPropertyUtils.renderCommonPassthroughPropertiesWithoutEvents(writer,
                        commonPropertiesMarked, component);
            }
            else
            {
                HtmlRendererUtils.renderHTMLAttributes(writer, component,
                        HTML.TABLE_PASSTHROUGH_ATTRIBUTES_WITHOUT_EVENTS);
            }
        }
        else
        {
            // No behavior support: plain id plus full pass-through set.
            HtmlRendererUtils.writeIdIfNecessary(writer, component, facesContext);
            if (isCommonPropertiesOptimizationEnabled(facesContext))
            {
                HtmlRendererUtils.renderHTMLAttributes(writer, component, HTML.TABLE_ATTRIBUTES);
                CommonPropertyUtils.renderCommonPassthroughProperties(writer,
                        CommonPropertyUtils.getCommonPropertiesMarked(component), component);
            }
            else
            {
                HtmlRendererUtils.renderHTMLAttributes(writer, component, HTML.TABLE_PASSTHROUGH_ATTRIBUTES);
            }
        }
        writer.flush();

        HtmlRendererUtils.renderTableCaption(facesContext, writer, component);

        // theader and tfooter are rendered before the tbody
        renderHeaderOrFooter(facesContext, writer, component, columns, true); //Header facet
        renderHeaderOrFooter(facesContext, writer, component, columns, false); //Footer facet

        renderChildren(facesContext, writer, component, columns);

        writer.endElement(HTML.TABLE_ELEM);
    }

    /**
     * Renders the "header" or "footer" facet (if present) as a THEAD/TFOOT
     * row with a single cell spanning all {@code columns}.
     *
     * @param header {@code true} renders the header facet, {@code false} the footer
     */
    protected void renderHeaderOrFooter(FacesContext context,
            ResponseWriter writer,
            UIComponent component,
            int columns,
            boolean header)
            throws IOException
    {
        UIComponent facet = component.getFacet(header ? "header" : "footer");
        if (facet == null)
        {
            return;
        }

        writer.startElement(
                header ? org.apache.myfaces.shared.renderkit.html.HTML.THEAD_ELEM : HTML.TFOOT_ELEM, null);
        // component);
        writer.startElement(HTML.TR_ELEM, null); // component);
        // Header cells use TH (with scope), footer cells use TD.
        writer.startElement(header ? HTML.TH_ELEM : HTML.TD_ELEM, null); // component);
        String styleClass = (component instanceof HtmlPanelGrid)
                ? (header ?
                        ((HtmlPanelGrid)component).getHeaderClass() :
                        ((HtmlPanelGrid)component).getFooterClass())
                : (header ?
                        (String)component.getAttributes().get(JSFAttr.HEADER_CLASS_ATTR) :
                        (String)component.getAttributes().get(
                                org.apache.myfaces.shared.renderkit.JSFAttr.FOOTER_CLASS_ATTR));
        if (styleClass != null)
        {
            writer.writeAttribute(HTML.CLASS_ATTR, styleClass,
                    header ? JSFAttr.HEADER_CLASS_ATTR :
                            org.apache.myfaces.shared.renderkit.JSFAttr.FOOTER_CLASS_ATTR);
        }
        if (header)
        {
            writer.writeAttribute(HTML.SCOPE_ATTR, HTML.SCOPE_COLGROUP_VALUE, null);
        }
        // The single facet cell spans the whole grid width.
        writer.writeAttribute(HTML.COLSPAN_ATTR, Integer.toString(columns), null);
        facet.encodeAll(context);
        writer.endElement(header ? HTML.TH_ELEM : HTML.TD_ELEM);
        writer.endElement(HTML.TR_ELEM);
        writer.endElement(header ? HTML.THEAD_ELEM : HTML.TFOOT_ELEM);
    }

    /**
     * Hook for subclasses to add attributes to each table cell (TD) tag.
     *
     * @return the (possibly updated) column index
     */
    protected int childAttributes(FacesContext context,
            ResponseWriter writer,
            UIComponent component,
            int columnIndex)
            throws IOException
    {
        // subclasses can override this method to add attributes to the table cell <td> tag
        return columnIndex;
    }

    /**
     * Renders the component's rendered children as table rows/cells, wrapping
     * to a new row every {@code columns} cells, applying columnClasses /
     * rowClasses cyclically and opening a new TBODY at each row index listed
     * in the "bodyrows" attribute. An incomplete final row is padded with
     * empty cells (with a warning outside Production stage).
     */
    protected void renderChildren(FacesContext context,
            ResponseWriter writer,
            UIComponent component,
            int columns)
            throws IOException
    {
        String columnClasses;
        String rowClasses;
        if (component instanceof HtmlPanelGrid)
        {
            columnClasses = ((HtmlPanelGrid)component).getColumnClasses();
            rowClasses = ((HtmlPanelGrid)component).getRowClasses();
        }
        else
        {
            columnClasses = (String)component.getAttributes().get(
                    org.apache.myfaces.shared.renderkit.JSFAttr.COLUMN_CLASSES_ATTR);
            rowClasses = (String)component.getAttributes().get(JSFAttr.ROW_CLASSES_ATTR);
        }

        // Comma-separated class lists are applied cyclically to columns/rows.
        String[] columnClassesArray = (columnClasses == null)
                ? ArrayUtils.EMPTY_STRING_ARRAY
                : StringUtils.trim(StringUtils.splitShortString(columnClasses, ','));
        int columnClassesCount = columnClassesArray.length;

        String[] rowClassesArray = (rowClasses == null)
                ? org.apache.myfaces.shared.util.ArrayUtils.EMPTY_STRING_ARRAY
                : StringUtils.trim(StringUtils.splitShortString(rowClasses, ','));
        int rowClassesCount = rowClassesArray.length;

        int childCount = getChildCount(component);
        if (childCount > 0)
        {
            // get the row indizes for which a new TBODY element should be created
            Integer[] bodyrows = null;
            String bodyrowsAttr = (String) component.getAttributes().get(JSFAttr.BODYROWS_ATTR);
            if(bodyrowsAttr != null && !"".equals(bodyrowsAttr))
            {
                String[] bodyrowsString = StringUtils.trim(StringUtils.splitShortString(bodyrowsAttr, ','));
                // parsing with no exception handling, because of JSF-spec:
                // "If present, this must be a comma separated list of integers."
                bodyrows = new Integer[bodyrowsString.length];
                for(int i = 0; i < bodyrowsString.length; i++)
                {
                    // Integer.valueOf instead of the deprecated new Integer(...)
                    // constructor; also allows the JVM's small-value cache.
                    bodyrows[i] = Integer.valueOf(bodyrowsString[i]);
                }
            }
            else
            {
                bodyrows = ZERO_INT_ARRAY;
            }

            int bodyrowsCount = 0;
            int rowIndex = -1;
            int columnIndex = 0;
            int rowClassIndex = 0;
            boolean rowStarted = false;
            for (int i = 0, size = component.getChildCount(); i < size; i++)
            {
                UIComponent child = component.getChildren().get(i);
                if (child.isRendered())
                {
                    if (columnIndex == 0)
                    {
                        rowIndex++;
                        if (rowStarted)
                        {
                            //do we have to close the last row?
                            writer.endElement(HTML.TR_ELEM);
                        }

                        // is the current row listed in the bodyrows attribute
                        if(ArrayUtils.contains(bodyrows, rowIndex))
                        {
                            // close any preopened TBODY element first
                            if(bodyrowsCount != 0)
                            {
                                writer.endElement(HTML.TBODY_ELEM);
                            }
                            writer.startElement(HTML.TBODY_ELEM, null); // component);
                            bodyrowsCount++;
                        }

                        //start of new/next row
                        writer.startElement(HTML.TR_ELEM, null); // component);
                        if (rowClassIndex < rowClassesCount)
                        {
                            writer.writeAttribute(HTML.CLASS_ATTR, rowClassesArray[rowClassIndex], null);
                        }
                        rowStarted = true;
                        rowClassIndex++;
                        if (rowClassIndex == rowClassesCount)
                        {
                            rowClassIndex = 0;
                        }
                    }

                    writer.startElement(HTML.TD_ELEM, null); // component);
                    if (columnIndex < columnClassesCount)
                    {
                        writer.writeAttribute(HTML.CLASS_ATTR, columnClassesArray[columnIndex], null);
                    }
                    columnIndex = childAttributes(context, writer, child, columnIndex);
                    child.encodeAll(context);
                    writer.endElement(HTML.TD_ELEM);

                    columnIndex++;
                    if (columnIndex >= columns)
                    {
                        columnIndex = 0;
                    }
                }
            }

            if (rowStarted)
            {
                if (columnIndex > 0)
                {
                    // Demote the warning to FINE in Production so live systems
                    // are not spammed for a layout-only issue.
                    Level level = context.isProjectStage(ProjectStage.Production) ? Level.FINE : Level.WARNING;
                    if (log.isLoggable(level))
                    {
                        log.log(level, "PanelGrid " + RendererUtils.getPathToComponent(component)
                                + " has not enough children. Child count should be a "
                                + "multiple of the columns attribute.");
                    }
                    //Render empty columns, so that table is correct
                    for ( ; columnIndex < columns; columnIndex++)
                    {
                        writer.startElement(HTML.TD_ELEM, null); // component);
                        if (columnIndex < columnClassesCount)
                        {
                            writer.writeAttribute(HTML.CLASS_ATTR, columnClassesArray[columnIndex], null);
                        }
                        writer.endElement(HTML.TD_ELEM);
                    }
                }
                writer.endElement(HTML.TR_ELEM);

                // close any preopened TBODY element first
                if(bodyrowsCount != 0)
                {
                    writer.endElement(HTML.TBODY_ELEM);
                }
            }
        }
    }
}
| |
package io.cisa.taxiiserver.repository;
import static org.assertj.core.api.Assertions.assertThat;
import java.time.Instant;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpSession;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.audit.AuditEvent;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpSession;
import org.springframework.security.web.authentication.WebAuthenticationDetails;
import org.springframework.test.context.junit4.SpringRunner;
import io.cisa.taxiiserver.JTaxiiServerApp;
import io.cisa.taxiiserver.config.Constants;
import io.cisa.taxiiserver.config.audit.AuditEventConverter;
import io.cisa.taxiiserver.domain.PersistentAuditEvent;
/**
 * Integration tests for {@link CustomAuditEventRepository}.
 *
 * @see CustomAuditEventRepository
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = JTaxiiServerApp.class)
public class CustomAuditEventRepositoryIntTest {

    // Spring-managed backing repository; wiped before every test in setup().
    @Autowired
    private PersistenceAuditEventRepository persistenceAuditEventRepository;

    // Converter between Spring Boot AuditEvents and persistent entities.
    @Autowired
    private AuditEventConverter auditEventConverter;

    // Instance under test; constructed manually from the two beans above.
    private CustomAuditEventRepository customAuditEventRepository;

    // Fixtures: "test-user" one hour ago (with data), the same user ~3.8h ago,
    // and a different user one hour ago.
    private PersistentAuditEvent testUserEvent;
    private PersistentAuditEvent testOtherUserEvent;
    private PersistentAuditEvent testOldUserEvent;

    @Before
    public void setup() {
        customAuditEventRepository = new CustomAuditEventRepository(persistenceAuditEventRepository, auditEventConverter);
        // Start from an empty store so each test controls exactly what exists.
        persistenceAuditEventRepository.deleteAll();
        Instant oneHourAgo = Instant.now().minusSeconds(3600);

        testUserEvent = new PersistentAuditEvent();
        testUserEvent.setPrincipal("test-user");
        testUserEvent.setAuditEventType("test-type");
        testUserEvent.setAuditEventDate(oneHourAgo);
        Map<String, String> data = new HashMap<>();
        data.put("test-key", "test-value");
        testUserEvent.setData(data);

        // Older event for the same user, used to exercise the "after" cutoff.
        testOldUserEvent = new PersistentAuditEvent();
        testOldUserEvent.setPrincipal("test-user");
        testOldUserEvent.setAuditEventType("test-type");
        testOldUserEvent.setAuditEventDate(oneHourAgo.minusSeconds(10000));

        testOtherUserEvent = new PersistentAuditEvent();
        testOtherUserEvent.setPrincipal("other-test-user");
        testOtherUserEvent.setAuditEventType("test-type");
        testOtherUserEvent.setAuditEventDate(oneHourAgo);
    }

    // find(after) returns only events strictly newer than the cutoff; the
    // old event (cutoff - 10000s) must be excluded.
    @Test
    public void testFindAfter() {
        persistenceAuditEventRepository.save(testUserEvent);
        persistenceAuditEventRepository.save(testOldUserEvent);

        List<AuditEvent> events =
            customAuditEventRepository.find(Date.from(testUserEvent.getAuditEventDate().minusSeconds(3600)));
        assertThat(events).hasSize(1);
        AuditEvent event = events.get(0);
        assertThat(event.getPrincipal()).isEqualTo(testUserEvent.getPrincipal());
        assertThat(event.getType()).isEqualTo(testUserEvent.getAuditEventType());
        assertThat(event.getData()).containsKey("test-key");
        assertThat(event.getData().get("test-key").toString()).isEqualTo("test-value");
        assertThat(event.getTimestamp()).isEqualTo(Date.from(testUserEvent.getAuditEventDate()));
    }

    // find(principal, after) filters by both the user name and the cutoff.
    @Test
    public void testFindByPrincipal() {
        persistenceAuditEventRepository.save(testUserEvent);
        persistenceAuditEventRepository.save(testOldUserEvent);
        persistenceAuditEventRepository.save(testOtherUserEvent);

        List<AuditEvent> events = customAuditEventRepository
            .find("test-user", Date.from(testUserEvent.getAuditEventDate().minusSeconds(3600)));
        assertThat(events).hasSize(1);
        AuditEvent event = events.get(0);
        assertThat(event.getPrincipal()).isEqualTo(testUserEvent.getPrincipal());
        assertThat(event.getType()).isEqualTo(testUserEvent.getAuditEventType());
        assertThat(event.getData()).containsKey("test-key");
        assertThat(event.getData().get("test-key").toString()).isEqualTo("test-value");
        assertThat(event.getTimestamp()).isEqualTo(Date.from(testUserEvent.getAuditEventDate()));
    }

    // A null "after" cutoff means "no date filter": only the principal filters.
    @Test
    public void testFindByPrincipalNotNullAndAfterIsNull() {
        persistenceAuditEventRepository.save(testUserEvent);
        persistenceAuditEventRepository.save(testOtherUserEvent);

        List<AuditEvent> events = customAuditEventRepository.find("test-user", null);
        assertThat(events).hasSize(1);
        assertThat(events.get(0).getPrincipal()).isEqualTo("test-user");
    }

    // Both filters null: every stored event is returned.
    @Test
    public void testFindByPrincipalIsNullAndAfterIsNull() {
        persistenceAuditEventRepository.save(testUserEvent);
        persistenceAuditEventRepository.save(testOtherUserEvent);

        List<AuditEvent> events = customAuditEventRepository.find(null, null);
        assertThat(events).hasSize(2);
        assertThat(events).extracting("principal")
            .containsExactlyInAnyOrder("test-user", "other-test-user");
    }

    // find(principal, after, type) must match on all three criteria at once;
    // same-user/other-type and other-user/same-type events are excluded.
    @Test
    public void findByPrincipalAndType() {
        persistenceAuditEventRepository.save(testUserEvent);
        persistenceAuditEventRepository.save(testOldUserEvent);

        testOtherUserEvent.setAuditEventType(testUserEvent.getAuditEventType());
        persistenceAuditEventRepository.save(testOtherUserEvent);

        PersistentAuditEvent testUserOtherTypeEvent = new PersistentAuditEvent();
        testUserOtherTypeEvent.setPrincipal(testUserEvent.getPrincipal());
        testUserOtherTypeEvent.setAuditEventType("test-other-type");
        testUserOtherTypeEvent.setAuditEventDate(testUserEvent.getAuditEventDate());
        persistenceAuditEventRepository.save(testUserOtherTypeEvent);

        List<AuditEvent> events = customAuditEventRepository.find("test-user",
            Date.from(testUserEvent.getAuditEventDate().minusSeconds(3600)), "test-type");
        assertThat(events).hasSize(1);
        AuditEvent event = events.get(0);
        assertThat(event.getPrincipal()).isEqualTo(testUserEvent.getPrincipal());
        assertThat(event.getType()).isEqualTo(testUserEvent.getAuditEventType());
        assertThat(event.getData()).containsKey("test-key");
        assertThat(event.getData().get("test-key").toString()).isEqualTo("test-value");
        assertThat(event.getTimestamp()).isEqualTo(Date.from(testUserEvent.getAuditEventDate()));
    }

    // add() persists principal, type, data entries and timestamp.
    @Test
    public void addAuditEvent() {
        Map<String, Object> data = new HashMap<>();
        data.put("test-key", "test-value");
        AuditEvent event = new AuditEvent("test-user", "test-type", data);
        customAuditEventRepository.add(event);
        List<PersistentAuditEvent> persistentAuditEvents = persistenceAuditEventRepository.findAll();
        assertThat(persistentAuditEvents).hasSize(1);
        PersistentAuditEvent persistentAuditEvent = persistentAuditEvents.get(0);
        assertThat(persistentAuditEvent.getPrincipal()).isEqualTo(event.getPrincipal());
        assertThat(persistentAuditEvent.getAuditEventType()).isEqualTo(event.getType());
        assertThat(persistentAuditEvent.getData()).containsKey("test-key");
        assertThat(persistentAuditEvent.getData().get("test-key")).isEqualTo("test-value");
        assertThat(persistentAuditEvent.getAuditEventDate()).isEqualTo(event.getTimestamp().toInstant());
    }

    // WebAuthenticationDetails values are flattened into remoteAddress /
    // sessionId data entries rather than stored as a serialized object.
    @Test
    public void testAddEventWithWebAuthenticationDetails() {
        HttpSession session = new MockHttpSession(null, "test-session-id");
        MockHttpServletRequest request = new MockHttpServletRequest();
        request.setSession(session);
        request.setRemoteAddr("1.2.3.4");
        WebAuthenticationDetails details = new WebAuthenticationDetails(request);
        Map<String, Object> data = new HashMap<>();
        data.put("test-key", details);
        AuditEvent event = new AuditEvent("test-user", "test-type", data);
        customAuditEventRepository.add(event);
        List<PersistentAuditEvent> persistentAuditEvents = persistenceAuditEventRepository.findAll();
        assertThat(persistentAuditEvents).hasSize(1);
        PersistentAuditEvent persistentAuditEvent = persistentAuditEvents.get(0);
        assertThat(persistentAuditEvent.getData().get("remoteAddress")).isEqualTo("1.2.3.4");
        assertThat(persistentAuditEvent.getData().get("sessionId")).isEqualTo("test-session-id");
    }

    // Null data values are persisted as the literal string "null".
    @Test
    public void testAddEventWithNullData() {
        Map<String, Object> data = new HashMap<>();
        data.put("test-key", null);
        AuditEvent event = new AuditEvent("test-user", "test-type", data);
        customAuditEventRepository.add(event);
        List<PersistentAuditEvent> persistentAuditEvents = persistenceAuditEventRepository.findAll();
        assertThat(persistentAuditEvents).hasSize(1);
        PersistentAuditEvent persistentAuditEvent = persistentAuditEvents.get(0);
        assertThat(persistentAuditEvent.getData().get("test-key")).isEqualTo("null");
    }

    // Events from the anonymous user are not persisted at all.
    @Test
    public void addAuditEventWithAnonymousUser() {
        Map<String, Object> data = new HashMap<>();
        data.put("test-key", "test-value");
        AuditEvent event = new AuditEvent(Constants.ANONYMOUS_USER, "test-type", data);
        customAuditEventRepository.add(event);
        List<PersistentAuditEvent> persistentAuditEvents = persistenceAuditEventRepository.findAll();
        assertThat(persistentAuditEvents).hasSize(0);
    }

    // AUTHORIZATION_FAILURE events are filtered out and not persisted.
    @Test
    public void addAuditEventWithAuthorizationFailureType() {
        Map<String, Object> data = new HashMap<>();
        data.put("test-key", "test-value");
        AuditEvent event = new AuditEvent("test-user", "AUTHORIZATION_FAILURE", data);
        customAuditEventRepository.add(event);
        List<PersistentAuditEvent> persistentAuditEvents = persistenceAuditEventRepository.findAll();
        assertThat(persistentAuditEvents).hasSize(0);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.websocket;
import java.io.IOException;
import java.net.URI;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.security.Principal;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import javax.websocket.CloseReason;
import javax.websocket.CloseReason.CloseCode;
import javax.websocket.CloseReason.CloseCodes;
import javax.websocket.DeploymentException;
import javax.websocket.Endpoint;
import javax.websocket.EndpointConfig;
import javax.websocket.Extension;
import javax.websocket.MessageHandler;
import javax.websocket.MessageHandler.Partial;
import javax.websocket.MessageHandler.Whole;
import javax.websocket.PongMessage;
import javax.websocket.RemoteEndpoint;
import javax.websocket.SendResult;
import javax.websocket.Session;
import javax.websocket.WebSocketContainer;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.ExceptionUtils;
import org.apache.tomcat.util.res.StringManager;
public class WsSession implements Session {
// An ellipsis is a single character that looks like three periods in a row
// and is used to indicate a continuation.
private static final byte[] ELLIPSIS_BYTES =
"\u2026".getBytes(StandardCharsets.UTF_8);
// An ellipsis is three bytes in UTF-8
private static final int ELLIPSIS_BYTES_LEN = ELLIPSIS_BYTES.length;
private static final StringManager sm =
StringManager.getManager(Constants.PACKAGE_NAME);
private static AtomicLong ids = new AtomicLong(0);
private final Log log = LogFactory.getLog(WsSession.class);
private final Endpoint localEndpoint;
private final WsRemoteEndpointImplBase wsRemoteEndpoint;
private final RemoteEndpoint.Async remoteEndpointAsync;
private final RemoteEndpoint.Basic remoteEndpointBasic;
private final ClassLoader applicationClassLoader;
private final WsWebSocketContainer webSocketContainer;
private final URI requestUri;
private final Map<String,List<String>> requestParameterMap;
private final String queryString;
private final Principal userPrincipal;
private final EndpointConfig endpointConfig;
private final List<Extension> negotiatedExtensions;
private final String subProtocol;
private final Map<String,String> pathParameters;
private final boolean secure;
private final String httpSessionId;
private final String id;
// Expected to handle message types of <String> only
private MessageHandler textMessageHandler = null;
// Expected to handle message types of <ByteBuffer> only
private MessageHandler binaryMessageHandler = null;
private MessageHandler.Whole<PongMessage> pongMessageHandler = null;
private volatile State state = State.OPEN;
private final Object stateLock = new Object();
private final Map<String,Object> userProperties = new ConcurrentHashMap<String, Object>();
private volatile int maxBinaryMessageBufferSize =
Constants.DEFAULT_BUFFER_SIZE;
private volatile int maxTextMessageBufferSize =
Constants.DEFAULT_BUFFER_SIZE;
private volatile long maxIdleTimeout = 0;
private volatile long lastActive = System.currentTimeMillis();
private Map<FutureToSendHandler,FutureToSendHandler> futures =
new ConcurrentHashMap<FutureToSendHandler,FutureToSendHandler>();
/**
 * Creates a new WebSocket session for communication between the two
 * provided end points. The result of {@link Thread#getContextClassLoader()}
 * at the time this constructor is called will be used when calling
 * {@link Endpoint#onClose(Session, CloseReason)}.
 *
 * @param localEndpoint        the local endpoint served by this session
 * @param wsRemoteEndpoint     the implementation of the remote side; this
 *                             session registers itself on it via setSession
 * @param wsWebSocketContainer the container that created this session; its
 *                             defaults seed the send timeout, buffer sizes
 *                             and idle timeout
 * @param requestUri           the URI used to initiate the connection
 * @param requestParameterMap  handshake request parameters; replaced by an
 *                             empty map when {@code null}
 * @param queryString          the query string from the handshake request
 * @param userPrincipal        the authenticated user, if any
 * @param httpSessionId        the associated HTTP session ID, if any
 * @param negotiatedExtensions the extensions agreed during the handshake
 * @param subProtocol          the negotiated sub-protocol; stored as an
 *                             empty string when {@code null}
 * @param pathParameters       path parameters from URI template matching
 * @param secure               whether the underlying connection is secure
 * @param endpointConfig       supplies the encoders and the initial user
 *                             properties for this session
 * @throws DeploymentException if the session cannot be set up — presumably
 *                             raised by setEncoders(endpointConfig); the
 *                             rest of this body does not throw it — TODO
 *                             confirm
 */
public WsSession(Endpoint localEndpoint,
        WsRemoteEndpointImplBase wsRemoteEndpoint,
        WsWebSocketContainer wsWebSocketContainer,
        URI requestUri, Map<String,List<String>> requestParameterMap,
        String queryString, Principal userPrincipal, String httpSessionId,
        List<Extension> negotiatedExtensions, String subProtocol, Map<String,String> pathParameters,
        boolean secure, EndpointConfig endpointConfig) throws DeploymentException {
    this.localEndpoint = localEndpoint;
    this.wsRemoteEndpoint = wsRemoteEndpoint;
    // Link the remote endpoint back to this session before wrapping it.
    this.wsRemoteEndpoint.setSession(this);
    this.remoteEndpointAsync = new WsRemoteEndpointAsync(wsRemoteEndpoint);
    this.remoteEndpointBasic = new WsRemoteEndpointBasic(wsRemoteEndpoint);
    this.webSocketContainer = wsWebSocketContainer;
    // Captured now so later callbacks can run with the application's loader
    // (see the class-level note about onClose).
    applicationClassLoader = Thread.currentThread().getContextClassLoader();
    // Session-level settings start from the container-wide defaults.
    wsRemoteEndpoint.setSendTimeout(
            wsWebSocketContainer.getDefaultAsyncSendTimeout());
    this.maxBinaryMessageBufferSize =
            webSocketContainer.getDefaultMaxBinaryMessageBufferSize();
    this.maxTextMessageBufferSize =
            webSocketContainer.getDefaultMaxTextMessageBufferSize();
    this.maxIdleTimeout =
            webSocketContainer.getDefaultMaxSessionIdleTimeout();
    this.requestUri = requestUri;
    // Normalise null collections/strings so getters never return null.
    if (requestParameterMap == null) {
        this.requestParameterMap = Collections.emptyMap();
    } else {
        this.requestParameterMap = requestParameterMap;
    }
    this.queryString = queryString;
    this.userPrincipal = userPrincipal;
    this.httpSessionId = httpSessionId;
    this.negotiatedExtensions = negotiatedExtensions;
    if (subProtocol == null) {
        this.subProtocol = "";
    } else {
        this.subProtocol = subProtocol;
    }
    this.pathParameters = pathParameters;
    this.secure = secure;
    this.wsRemoteEndpoint.setEncoders(endpointConfig);
    this.endpointConfig = endpointConfig;
    this.userProperties.putAll(endpointConfig.getUserProperties());
    // Hex form of a JVM-wide counter: unique per container instance.
    this.id = Long.toHexString(ids.getAndIncrement());
}
@Override
public WebSocketContainer getContainer() {
    checkState();
    return webSocketContainer;
}

// All three addMessageHandler variants delegate to doAddMessageHandler().

@Override
public void addMessageHandler(MessageHandler listener) {
    // Infer the handled message type from the handler's generic signature.
    Class<?> target = Util.getMessageType(listener);
    doAddMessageHandler(target, listener);
}

@Override
public <T> void addMessageHandler(Class<T> clazz, Partial<T> handler)
        throws IllegalStateException {
    doAddMessageHandler(clazz, handler);
}

@Override
public <T> void addMessageHandler(Class<T> clazz, Whole<T> handler)
        throws IllegalStateException {
    doAddMessageHandler(clazz, handler);
}
/**
 * Registers a message handler for the given message type. A session may
 * hold at most one text, one binary and one pong handler; registering a
 * second handler of any kind throws {@link IllegalStateException}.
 */
@SuppressWarnings("unchecked")
private void doAddMessageHandler(Class<?> target, MessageHandler listener) {
    checkState();
    // Message handlers that require decoders may map to text messages,
    // binary messages, both or neither.
    // The frame processing code expects binary message handlers to
    // accept ByteBuffer
    // Use the POJO message handler wrappers as they are designed to wrap
    // arbitrary objects with MessageHandlers and can wrap MessageHandlers
    // just as easily.
    for (MessageHandlerResult result :
            Util.getMessageHandlers(target, listener, endpointConfig, this)) {
        switch (result.getType()) {
            case TEXT:
                if (textMessageHandler != null) {
                    throw new IllegalStateException(
                            sm.getString("wsSession.duplicateHandlerText"));
                }
                textMessageHandler = result.getHandler();
                break;
            case BINARY:
                if (binaryMessageHandler != null) {
                    throw new IllegalStateException(
                            sm.getString("wsSession.duplicateHandlerBinary"));
                }
                binaryMessageHandler = result.getHandler();
                break;
            case PONG:
                if (pongMessageHandler != null) {
                    throw new IllegalStateException(
                            sm.getString("wsSession.duplicateHandlerPong"));
                }
                // Pong handlers must receive whole messages.
                MessageHandler candidate = result.getHandler();
                if (!(candidate instanceof MessageHandler.Whole<?>)) {
                    throw new IllegalStateException(
                            sm.getString("wsSession.invalidHandlerTypePong"));
                }
                pongMessageHandler = (MessageHandler.Whole<PongMessage>) candidate;
                break;
            default:
                throw new IllegalArgumentException(sm.getString(
                        "wsSession.unknownHandlerType", listener,
                        result.getType()));
        }
    }
}
/**
 * Returns the currently registered handlers (text, binary, pong) in a new
 * mutable set; kinds with no handler installed are omitted.
 */
@Override
public Set<MessageHandler> getMessageHandlers() {
    checkState();
    Set<MessageHandler> handlers = new HashSet<MessageHandler>();
    MessageHandler[] candidates = new MessageHandler[] {
            textMessageHandler, binaryMessageHandler, pongMessageHandler };
    for (MessageHandler candidate : candidates) {
        if (candidate != null) {
            handlers.add(candidate);
        }
    }
    return handlers;
}
/**
 * Removes a previously registered message handler. Both the handler as
 * registered and, if it is a {@link WrappedMessageHandler}, the handler it
 * wraps are matched against the text, binary and pong slots.
 *
 * @throws IllegalStateException if the handler was not registered
 */
@Override
public void removeMessageHandler(MessageHandler listener) {
    checkState();
    if (listener == null) {
        return;
    }
    // If the listener is a wrapper, also match on the wrapped handler.
    MessageHandler wrapped = null;
    if (listener instanceof WrappedMessageHandler) {
        wrapped = ((WrappedMessageHandler) listener).getWrappedHandler();
    }
    if (wrapped == null) {
        wrapped = listener;
    }
    boolean removed = false;
    if (wrapped.equals(textMessageHandler) ||
            listener.equals(textMessageHandler)) {
        textMessageHandler = null;
        removed = true;
    }
    // BUG FIX: the binary and pong branches previously compared
    // listener twice and never consulted wrapped, so a handler registered
    // through a wrapper could never be removed for these message types.
    if (wrapped.equals(binaryMessageHandler) ||
            listener.equals(binaryMessageHandler)) {
        binaryMessageHandler = null;
        removed = true;
    }
    if (wrapped.equals(pongMessageHandler) ||
            listener.equals(pongMessageHandler)) {
        pongMessageHandler = null;
        removed = true;
    }
    if (!removed) {
        // ISE for now. Could swallow this silently / log this if the ISE
        // becomes a problem
        throw new IllegalStateException(
                sm.getString("wsSession.removeHandlerFailed", listener));
    }
}
@Override
public String getProtocolVersion() {
    checkState();
    // Version value sent in the handshake header.
    return Constants.WS_VERSION_HEADER_VALUE;
}

@Override
public String getNegotiatedSubprotocol() {
    checkState();
    // Never null; normalised to "" in the constructor.
    return subProtocol;
}

@Override
public List<Extension> getNegotiatedExtensions() {
    checkState();
    return negotiatedExtensions;
}

@Override
public boolean isSecure() {
    checkState();
    return secure;
}

@Override
public boolean isOpen() {
    // Deliberately no checkState(): must be callable on a closed session.
    return state == State.OPEN;
}

@Override
public long getMaxIdleTimeout() {
    checkState();
    return maxIdleTimeout;
}

@Override
public void setMaxIdleTimeout(long timeout) {
    checkState();
    this.maxIdleTimeout = timeout;
}

@Override
public void setMaxBinaryMessageBufferSize(int max) {
    checkState();
    this.maxBinaryMessageBufferSize = max;
}

@Override
public int getMaxBinaryMessageBufferSize() {
    checkState();
    return maxBinaryMessageBufferSize;
}

@Override
public void setMaxTextMessageBufferSize(int max) {
    checkState();
    this.maxTextMessageBufferSize = max;
}

@Override
public int getMaxTextMessageBufferSize() {
    checkState();
    return maxTextMessageBufferSize;
}

@Override
public Set<Session> getOpenSessions() {
    checkState();
    // All open sessions in this container for the same endpoint class.
    return webSocketContainer.getOpenSessions(localEndpoint.getClass());
}

@Override
public RemoteEndpoint.Async getAsyncRemote() {
    checkState();
    return remoteEndpointAsync;
}

@Override
public RemoteEndpoint.Basic getBasicRemote() {
    checkState();
    return remoteEndpointBasic;
}
@Override
public void close() throws IOException {
    // Normal closure with an empty reason phrase.
    close(new CloseReason(CloseCodes.NORMAL_CLOSURE, ""));
}

@Override
public void close(CloseReason closeReason) throws IOException {
    // The same reason is sent to the peer and reported locally.
    doClose(closeReason, closeReason);
}
/**
 * WebSocket 1.0. Section 2.1.5.
 * Need internal close method as spec requires that the local endpoint
 * receives a 1006 on timeout.
 *
 * @param closeReasonMessage reason sent to the remote peer
 * @param closeReasonLocal   reason reported to the local endpoint's
 *                           onClose; may differ from the wire reason
 *                           (e.g. 1006 on timeout)
 */
private void doClose(CloseReason closeReasonMessage,
        CloseReason closeReasonLocal) {
    // Double-checked locking. OK because state is volatile
    if (state != State.OPEN) {
        return;
    }
    synchronized (stateLock) {
        if (state != State.OPEN) {
            return;
        }
        state = State.CLOSING;
        sendCloseMessage(closeReasonMessage);
        fireEndpointOnClose(closeReasonLocal);
        state = State.CLOSED;
    }
    // Fail any in-flight sends. Done outside the lock so completion
    // handlers that call back into the session cannot deadlock.
    IOException ioe = new IOException(sm.getString("wsSession.messageFailed"));
    SendResult sr = new SendResult(ioe);
    for (FutureToSendHandler f2sh : futures.keySet()) {
        f2sh.onResult(sr);
    }
}
/**
 * Called when a close message is received. Should only ever happen once.
 * Also called after a protocol error when the ProtocolHandler needs to
 * force the closing of the connection.
 *
 * @param closeReason the reason carried by the received close message
 */
public void onClose(CloseReason closeReason) {
    synchronized (stateLock) {
        if (state == State.OPEN) {
            // Echo a close message back before notifying the endpoint.
            sendCloseMessage(closeReason);
            fireEndpointOnClose(closeReason);
            state = State.CLOSED;
        }
        // Close the socket
        wsRemoteEndpoint.close();
    }
}
/**
 * Invokes the local endpoint's onClose callback with the application's
 * context class loader in place, restoring the caller's loader afterwards.
 * Throwables from the callback are routed to the endpoint's onError.
 */
private void fireEndpointOnClose(CloseReason closeReason) {
    Thread current = Thread.currentThread();
    ClassLoader previous = current.getContextClassLoader();
    current.setContextClassLoader(applicationClassLoader);
    try {
        localEndpoint.onClose(this, closeReason);
    } catch (Throwable t) {
        // Rethrows fatal errors; anything else is reported to onError.
        ExceptionUtils.handleThrowable(t);
        localEndpoint.onError(this, t);
    } finally {
        current.setContextClassLoader(previous);
    }
}
/**
 * Writes a close control frame to the remote peer and, in all cases,
 * unregisters this session from the container. A failure to write closes
 * the socket and, except for abnormal closures, is reported via onError.
 */
private void sendCloseMessage(CloseReason closeReason) {
    // 125 is maximum size for the payload of a control message
    ByteBuffer msg = ByteBuffer.allocate(125);
    CloseCode closeCode = closeReason.getCloseCode();
    // CLOSED_ABNORMALLY should not be put on the wire
    if (closeCode == CloseCodes.CLOSED_ABNORMALLY) {
        // PROTOCOL_ERROR is probably better than GOING_AWAY here
        msg.putShort((short) CloseCodes.PROTOCOL_ERROR.getCode());
    } else {
        msg.putShort((short) closeCode.getCode());
    }
    String reason = closeReason.getReasonPhrase();
    if (reason != null && reason.length() > 0) {
        // Reason phrase is limited to 123 bytes; truncated if necessary.
        appendCloseReasonWithTruncation(msg, reason);
    }
    msg.flip();
    try {
        wsRemoteEndpoint.startMessageBlock(
                Constants.OPCODE_CLOSE, msg, true);
    } catch (IOException ioe) {
        // Failed to send close message. Close the socket and let the caller
        // deal with the Exception
        if (log.isDebugEnabled()) {
            log.debug(sm.getString("wsSession.sendCloseFail"), ioe);
        }
        wsRemoteEndpoint.close();
        // Failure to send a close message is not unexpected in the case of
        // an abnormal closure (usually triggered by a failure to read/write
        // from/to the client. In this case do not trigger the endpoint's
        // error handling
        if (closeCode != CloseCodes.CLOSED_ABNORMALLY) {
            localEndpoint.onError(this, ioe);
        }
    } finally {
        // Always drop this session from the container's open-session set.
        webSocketContainer.unregisterSession(localEndpoint, this);
    }
}
/**
 * Appends the close reason phrase to the close frame payload, truncating
 * it (with a trailing ellipsis) so the total phrase fits in the 123 bytes
 * available after the 2-byte close code.
 * <p>
 * Use protected so unit tests can access this method directly.
 */
protected static void appendCloseReasonWithTruncation(ByteBuffer msg,
        String reason) {
    // Once the close code has been added there are a maximum of 123 bytes
    // left for the reason phrase. If it is truncated then care needs to be
    // taken to ensure the bytes are not truncated in the middle of a
    // multi-byte UTF-8 character.
    byte[] reasonBytes = reason.getBytes(StandardCharsets.UTF_8);
    if (reasonBytes.length <= 123) {
        // No need to truncate
        msg.put(reasonBytes);
    } else {
        // Need to truncate
        int remaining = 123 - ELLIPSIS_BYTES_LEN;
        int pos = 0;
        while (pos < reason.length()) {
            // BUG FIX: advance one code point at a time rather than one
            // char at a time. The previous char-by-char substring split
            // surrogate pairs, corrupting supplementary (non-BMP)
            // characters at the truncation point.
            int codePoint = reason.codePointAt(pos);
            byte[] encoded = new String(Character.toChars(codePoint))
                    .getBytes(StandardCharsets.UTF_8);
            if (encoded.length > remaining) {
                break;
            }
            msg.put(encoded);
            remaining -= encoded.length;
            pos += Character.charCount(codePoint);
        }
        msg.put(ELLIPSIS_BYTES);
    }
}
/**
 * Make the session aware of a {@link FutureToSendHandler} that will need to
 * be forcibly closed if the session closes before the
 * {@link FutureToSendHandler} completes.
 */
protected void registerFuture(FutureToSendHandler f2sh) {
    futures.put(f2sh, f2sh);
}

/**
 * Remove a {@link FutureToSendHandler} from the set of tracked instances.
 */
protected void unregisterFuture(FutureToSendHandler f2sh) {
    futures.remove(f2sh);
}

@Override
public URI getRequestURI() {
    checkState();
    return requestUri;
}

@Override
public Map<String,List<String>> getRequestParameterMap() {
    checkState();
    return requestParameterMap;
}

@Override
public String getQueryString() {
    checkState();
    return queryString;
}

@Override
public Principal getUserPrincipal() {
    checkState();
    return userPrincipal;
}

@Override
public Map<String,String> getPathParameters() {
    checkState();
    return pathParameters;
}

@Override
public String getId() {
    // No checkState(): the id stays readable after the session closes.
    return id;
}

@Override
public Map<String,Object> getUserProperties() {
    checkState();
    // Live, mutable, thread-safe map seeded from the endpoint config.
    return userProperties;
}

// Accessors below are for internal (container) use.

public Endpoint getLocal() {
    return localEndpoint;
}

public String getHttpSessionId() {
    return httpSessionId;
}

protected MessageHandler getTextMessageHandler() {
    return textMessageHandler;
}

protected MessageHandler getBinaryMessageHandler() {
    return binaryMessageHandler;
}

protected MessageHandler.Whole<PongMessage> getPongMessageHandler() {
    return pongMessageHandler;
}

// Records activity for idle-timeout tracking.
protected void updateLastActive() {
    lastActive = System.currentTimeMillis();
}
/**
 * Closes the session if it has been idle longer than the configured
 * maximum. The peer receives GOING_AWAY while the local endpoint sees
 * CLOSED_ABNORMALLY (1006), as the specification requires for timeouts.
 */
protected void checkExpiration() {
    long timeout = maxIdleTimeout;
    if (timeout < 1) {
        // Idle timeout disabled.
        return;
    }
    long idleTime = System.currentTimeMillis() - lastActive;
    if (idleTime > timeout) {
        String msg = sm.getString("wsSession.timeout");
        doClose(new CloseReason(CloseCodes.GOING_AWAY, msg),
                new CloseReason(CloseCodes.CLOSED_ABNORMALLY, msg));
    }
}
// Guard used by most Session API methods: they may not be called once the
// session has fully closed.
private void checkState() {
    if (state == State.CLOSED) {
        throw new IllegalStateException(sm.getString("wsSession.closed", id));
    }
}
private static enum State {
OPEN,
CLOSING,
CLOSED
}
}
| |
/**
* Copyright (C) 2014 Telenor Digital AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comoyo.commons.logging.context;
import java.io.UnsupportedEncodingException;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import java.util.logging.ErrorManager;
import java.util.logging.Filter;
import java.util.logging.Formatter;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
/**
 * Wrapper class that will enrich log messages passing through wrapped
 * {@link Handler}s with context information available through the
 * {@link LoggingContext} interface.
 * <p>
 * The context is appended to the (localized) message as a JSON-like
 * {@code | context: {"key": "value", ...}} suffix; all other
 * {@link Handler} operations are delegated unchanged.
 */
public class ContextAddingHandler
    extends Handler
{
    /** Underlying handler that performs the actual logging. */
    private final Handler wrapped;

    private ContextAddingHandler(final Handler wrapped)
    {
        this.wrapped = wrapped;
    }

    /**
     * Enrich messages passing through a single handler with context information.
     *
     * @param handler the underlying logging handler
     * @return a version of the given handler that also logs context information
     */
    public static Handler wrapHandler(final Handler handler)
    {
        return new ContextAddingHandler(handler);
    }

    /**
     * Enrich messages passing through all {@link Handler}s of a given
     * logger with context information. This modifies the set of
     * handlers for the given logger. Handlers added after this
     * function has been called are not affected. Calling this
     * function multiple times is supported, and will not affect
     * already modified handlers in this logger's handler set.
     *
     * @param logger the logger to modify
     */
    public static void wrapAllHandlers(final Logger logger)
    {
        final Handler[] handlers = logger.getHandlers();
        for (final Handler handler : handlers) {
            if (!(handler instanceof ContextAddingHandler)) {
                logger.removeHandler(handler);
                logger.addHandler(wrapHandler(handler));
            }
        }
    }

    /** Escapes backslashes, newlines and quotes for the JSON-like suffix. */
    private static String escapeString(final String string)
    {
        // Backslash must be escaped first so later escapes are not doubled.
        return string.replace("\\", "\\\\").replace("\n", "\\n").replace("\"", "\\\"");
    }

    /**
     * Returns a copy of the record with the current logging context appended
     * to its message, or the record itself when no context is available.
     */
    private static LogRecord addContextToRecord(final LogRecord original)
    {
        // For records carrying a Throwable, use the last entered context —
        // the exception may be logged after that context was exited.
        final Map<String, String> context
            = original.getThrown() == null
            ? LoggingContext.getContext()
            : LoggingContext.getLastEnteredContext();
        if (context == null) {
            return original;
        }
        // Localize up front: the derived record does not carry the
        // resource bundle forward, so the bundle lookup happens here.
        final ResourceBundle bundle = original.getResourceBundle();
        final String message = original.getMessage();
        String localized;
        if (message == null) {
            localized = "";
        }
        else if (bundle != null) {
            try {
                localized = bundle.getString(message);
            }
            catch (MissingResourceException e) {
                // No translation available; fall back to the raw message.
                localized = message;
            }
        }
        else {
            localized = message;
        }
        final StringBuilder sb = new StringBuilder(localized);
        sb.append(" | context: {");
        boolean first = true;
        for (Map.Entry<String, String> entry : context.entrySet()) {
            if (!first) {
                sb.append(", ");
            }
            sb.append("\"")
                .append(escapeString(entry.getKey()))
                .append("\": \"")
                .append(escapeString(entry.getValue()))
                .append("\"");
            first = false;
        }
        sb.append("}");
        // Copy the original record's metadata. The LogRecord constructor
        // already sets the level, so the previous redundant setLevel()
        // call has been removed.
        final LogRecord record = new LogRecord(original.getLevel(), sb.toString());
        record.setLoggerName(original.getLoggerName());
        record.setMillis(original.getMillis());
        record.setParameters(original.getParameters());
        record.setSequenceNumber(original.getSequenceNumber());
        record.setSourceClassName(original.getSourceClassName());
        record.setSourceMethodName(original.getSourceMethodName());
        record.setThreadID(original.getThreadID());
        record.setThrown(original.getThrown());
        return record;
    }

    /** Enriches the record with context information before delegating. */
    @Override
    public void publish(final LogRecord record)
    {
        wrapped.publish(addContextToRecord(record));
    }

    @Override
    public void close()
    {
        wrapped.close();
    }

    @Override
    public void flush()
    {
        wrapped.flush();
    }

    // The remaining Handler properties delegate directly to the wrapped
    // handler so configuration stays attached to the real handler.

    @Override
    public String getEncoding()
    {
        return wrapped.getEncoding();
    }

    @Override
    public ErrorManager getErrorManager()
    {
        return wrapped.getErrorManager();
    }

    @Override
    public Filter getFilter()
    {
        return wrapped.getFilter();
    }

    @Override
    public Formatter getFormatter()
    {
        return wrapped.getFormatter();
    }

    @Override
    public Level getLevel()
    {
        return wrapped.getLevel();
    }

    @Override
    public boolean isLoggable(final LogRecord record)
    {
        return wrapped.isLoggable(record);
    }

    @Override
    public void setEncoding(final String encoding)
        throws UnsupportedEncodingException
    {
        wrapped.setEncoding(encoding);
    }

    @Override
    public void setErrorManager(final ErrorManager em)
    {
        wrapped.setErrorManager(em);
    }

    @Override
    public void setFilter(final Filter newFilter)
    {
        wrapped.setFilter(newFilter);
    }

    @Override
    public void setFormatter(final Formatter newFormatter)
    {
        wrapped.setFormatter(newFormatter);
    }

    @Override
    public void setLevel(final Level newLevel)
    {
        wrapped.setLevel(newLevel);
    }
}
| |
/*
* Copyright 2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.springframework.richclient.beans;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.springframework.beans.BeansException;
import org.springframework.beans.InvalidPropertyException;
import org.springframework.beans.NotReadablePropertyException;
import org.springframework.beans.NotWritablePropertyException;
import org.springframework.beans.NullValueInNestedPathException;
import org.springframework.beans.TypeMismatchException;
import org.springframework.core.JdkVersion;
import org.springframework.core.MethodParameter;
import org.springframework.richclient.util.ReflectionUtils;
/**
 * This class implements actual access to properties for
 * <tt>AbstractNestedMemberPropertyAccessor</tt>. Properties are read and
 * written reflectively through fields or accessor methods, including
 * indexed access into arrays, lists, maps and other collections.
 *
 * @author Arne Limburg
 */
public class DefaultMemberPropertyAccessor extends AbstractNestedMemberPropertyAccessor {

    // The object whose properties are accessed; may be null until setTarget().
    private Object target;
    // When true, setTarget() never switches the target class to match a new target.
    private boolean fixedTargetClass;

    public DefaultMemberPropertyAccessor(Class targetClass) {
        this(targetClass, null, false, false);
    }

    public DefaultMemberPropertyAccessor(Object target) {
        this(target, false, true);
    }

    public DefaultMemberPropertyAccessor(Object target, boolean fieldAccessEnabled, boolean strictNullHandlingEnabled) {
        super(target.getClass(), fieldAccessEnabled, strictNullHandlingEnabled);
        setTarget(target);
    }

    public DefaultMemberPropertyAccessor(Class targetClass, Object target, boolean fieldAccessEnabled, boolean strictNullHandlingEnabled) {
        super(targetClass, fieldAccessEnabled, strictNullHandlingEnabled);
        fixedTargetClass = true;
        setTarget(target);
    }

    protected DefaultMemberPropertyAccessor(AbstractNestedMemberPropertyAccessor parent, String baseProperty) {
        super(parent, baseProperty);
    }

    public Object getTarget() {
        if (target != null) {
            return target;
        }
        else {
            // Child accessors resolve their target through the parent chain.
            return super.getTarget();
        }
    }

    /**
     * Sets the target object. Only allowed on root accessors; unless the
     * target class was fixed at construction time, the accessor adapts to
     * the new target's class.
     */
    public void setTarget(Object target) {
        if (getParentPropertyAccessor() != null) {
            // Typo fixed in message ("explicite" -> "explicit").
            throw new IllegalStateException("explicit setting of target is not allowed for child property accessors");
        }
        this.target = target;
        if (!fixedTargetClass && target != null && target.getClass() != getTargetClass()) {
            setTargetClass(target.getClass());
            // Cached child accessors were built against the old class.
            clearChildPropertyAccessorCache();
        }
    }

    /**
     * Reads an indexed property such as {@code list[0]} or {@code map[key]},
     * resolving each index in turn against the root property's value.
     */
    public Object getIndexedPropertyValue(String propertyName) throws BeansException {
        if (getPropertyType(propertyName) == null) {
            throw new NotReadablePropertyException(getTargetClass(), propertyName,
                "property type could not be determined");
        }
        String rootPropertyName = getRootPropertyName(propertyName);
        Member readAccessor = getReadPropertyAccessor(rootPropertyName);
        if (readAccessor == null) {
            throw new NotReadablePropertyException(getTargetClass(), propertyName,
                "Neither non-static field nor get-method exists for indexed property");
        }
        Object rootProperty = getPropertyValue(rootPropertyName);
        if (rootProperty == null) {
            if (isStrictNullHandlingEnabled()) {
                throw new NullValueInNestedPathException(getTargetClass(), propertyName);
            }
            else if (isWritableProperty(rootPropertyName)) {
                // Lenient mode: a writable-but-null root reads as null.
                return null;
            }
            else {
                throw new NotReadablePropertyException(getTargetClass(), propertyName);
            }
        }
        Object[] indices;
        try {
            indices = getIndices(propertyName);
        }
        catch (Exception e) {
            // could not convert indices
            throw createNotReadablePropertyException(propertyName, e);
        }
        return getPropertyValue(rootProperty, indices);
    }

    /**
     * Reads a simple (non-indexed) property via its field or getter.
     */
    public Object getSimplePropertyValue(String propertyName) throws BeansException {
        Member readAccessor = getReadPropertyAccessor(propertyName);
        if (readAccessor == null) {
            throw new NotReadablePropertyException(getTargetClass(), propertyName,
                "Neither non-static field nor get-method does exist");
        }
        Object target = getTarget();
        if (target == null) {
            return null;
        }
        try {
            ReflectionUtils.makeAccessible(readAccessor);
            if (readAccessor instanceof Field) {
                return ((Field) readAccessor).get(target);
            }
            else {// readAccessor instanceof Method
                // Cast avoids the ambiguous varargs interpretation of a
                // bare null; (Object[]) null means "no arguments".
                return ((Method) readAccessor).invoke(target, (Object[]) null);
            }
        }
        catch (IllegalAccessException e) {
            throw new InvalidPropertyException(getTargetClass(), propertyName, "Property is not accessible", e);
        }
        catch (InvocationTargetException e) {
            // Rethrows the underlying cause where possible.
            ReflectionUtils.handleInvocationTargetException(e);
            throw new IllegalStateException(
                "An unexpected state occurred during getSimplePropertyValue(String). This may be a bug.");
        }
    }

    private Object getPropertyValue(Object assemblage, Object[] indices) {
        return getPropertyValue(assemblage, indices, 0);
    }

    /**
     * Recursively resolves one index per call against the current
     * array/list/map/collection value.
     */
    private Object getPropertyValue(Object assemblage, Object[] indices, int parameterIndex) {
        if (assemblage == null) {
            if (isStrictNullHandlingEnabled()) {
                throw new NullValueInNestedPathException(getTargetClass(), "");
            }
            else {
                return null;
            }
        }
        Object value = null;
        if (assemblage.getClass().isArray()) {
            value = getArrayValue(assemblage, (Integer) indices[parameterIndex]);
        }
        else if (assemblage instanceof List) {
            value = getListValue((List) assemblage, (Integer) indices[parameterIndex]);
        }
        else if (assemblage instanceof Map) {
            value = getMapValue((Map) assemblage, indices[parameterIndex]);
        }
        else if (assemblage instanceof Collection) {
            value = getCollectionValue((Collection) assemblage, (Integer) indices[parameterIndex]);
        }
        else {
            throw new IllegalStateException(
                "getPropertyValue(Object, Object[], int) called with neither array nor collection nor map");
        }
        if (parameterIndex == indices.length - 1) {
            return value;
        }
        if (value == null) {
            if (isStrictNullHandlingEnabled()) {
                throw new InvalidPropertyException(getTargetClass(), "", "");
            }
            else {
                return null;
            }
        }
        return getPropertyValue(value, indices, parameterIndex + 1);
    }

    // Out-of-range indices yield null in lenient mode, an exception in strict mode.
    private Object getArrayValue(Object array, Integer index) {
        if (Array.getLength(array) > index.intValue()) {
            return Array.get(array, index.intValue());
        }
        else if (isStrictNullHandlingEnabled()) {
            throw new InvalidPropertyException(getTargetClass(), "", "");
        }
        else {
            return null;
        }
    }

    private Object getListValue(List list, Integer index) {
        if (list.size() > index.intValue()) {
            return list.get(index.intValue());
        }
        else if (isStrictNullHandlingEnabled()) {
            throw new InvalidPropertyException(getTargetClass(), "", "");
        }
        else {
            return null;
        }
    }

    private Object getMapValue(Map map, Object key) {
        if (map.containsKey(key)) {
            return map.get(key);
        }
        else {
            if (!JdkVersion.isAtLeastJava15()) {
                // we don't know the type of the keys, so we fall back to
                // comparing toString()
                for (Iterator i = map.entrySet().iterator(); i.hasNext();) {
                    Map.Entry entry = (Map.Entry) i.next();
                    if (entry.getKey() == key
                        || (entry.getKey() != null && key != null && entry.getKey().toString().equals(
                            key.toString()))) {
                        return entry.getValue();
                    }
                }
            }
            return null;
        }
    }

    private Object getCollectionValue(Collection collection, Integer index) {
        if (collection.size() > index.intValue()) {
            // Walk the iterator to the requested position.
            Iterator iterator = collection.iterator();
            for (int i = 0; i < index.intValue(); i++) {
                iterator.next();
            }
            return iterator.next();
        }
        else if (isStrictNullHandlingEnabled()) {
            throw new InvalidPropertyException(getTargetClass(), "", "");
        }
        else {
            return null;
        }
    }

    /**
     * Writes an indexed property. If updating the assemblage produced a new
     * instance (e.g. a grown array), the parent property is rewritten too.
     */
    public void setIndexedPropertyValue(String propertyName, Object value) throws BeansException {
        String parentPropertyName = getParentPropertyName(propertyName);
        Object parentValue;
        try {
            parentValue = getPropertyValue(parentPropertyName);
        }
        catch (NotReadablePropertyException e) {
            throw new NotWritablePropertyException(getTargetClass(), propertyName, "parent property is not readable", e);
        }
        if (parentValue == null) {
            if (isWritableProperty(parentPropertyName)) {
                throw new NullValueInNestedPathException(getTargetClass(), propertyName);
            }
            else {
                throw new NotWritablePropertyException(getTargetClass(), propertyName);
            }
        }
        Object[] indices;
        try {
            indices = getIndices(propertyName);
        }
        catch (Exception e) {
            throw new NotWritablePropertyException(getTargetClass(), propertyName, "wrong index type", e);
        }
        Object index = indices[indices.length - 1];
        Object newParentValue = setAssemblageValue(getPropertyType(parentPropertyName), parentValue, index, value);
        if (newParentValue != parentValue) {
            setPropertyValue(parentPropertyName, newParentValue);
        }
    }

    /**
     * Writes a simple (non-indexed) property via its field or setter.
     */
    public void setSimplePropertyValue(String propertyName, Object value) throws BeansException {
        Member writeAccessor = getWritePropertyAccessor(propertyName);
        if (writeAccessor == null) {
            throw new NotWritablePropertyException(getTargetClass(), propertyName,
                "Neither non-static, non-final field nor set-method does exist");
        }
        Object target = getTarget();
        if (target == null) {
            throw new NullValueInNestedPathException(getTargetClass(), propertyName);
        }
        try {
            ReflectionUtils.makeAccessible(writeAccessor);
            if (writeAccessor instanceof Field) {
                ((Field) writeAccessor).set(target, value);
            }
            else {// writeAccessor instanceof Method
                ((Method) writeAccessor).invoke(target, new Object[] { value });
            }
        }
        catch (IllegalAccessException e) {
            throw new InvalidPropertyException(getTargetClass(), propertyName, "Property is not accessible", e);
        }
        catch (InvocationTargetException e) {
            ReflectionUtils.handleInvocationTargetException(e);
            throw new IllegalStateException(
                "An unexpected state occurred during setPropertyValue(String, Object). This may be a bug.");
        }
    }

    protected AbstractNestedMemberPropertyAccessor createChildPropertyAccessor(String propertyName) {
        return new DefaultMemberPropertyAccessor(this, propertyName);
    }

    /**
     * Unimplemented: always returns null without conversion.
     * NOTE(review): callers relying on type conversion will silently get
     * null here — confirm whether this stub is ever reached.
     */
    public Object convertIfNecessary(Object value, Class requiredType, MethodParameter methodParam)
        throws TypeMismatchException {
        // TODO Auto-generated method stub
        return null;
    }
}
| |
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.webkit;
import android.view.KeyEvent;
import android.webkit.JavascriptInterface;
import android.webkit.WebView;
import androidx.annotation.Nullable;
import androidx.concurrent.futures.ResolvableFuture;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.LargeTest;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
@LargeTest
@RunWith(AndroidJUnit4.class)
public class WebViewRenderProcessClientTest {
WebViewOnUiThread mWebViewOnUiThread;
@Before
public void setUp() {
    // Creates the WebView helper on the UI thread; released in tearDown().
    mWebViewOnUiThread = new androidx.webkit.WebViewOnUiThread();
}
@After
public void tearDown() {
    // Null-safe: setUp may have failed before the WebView was created.
    if (mWebViewOnUiThread != null) {
        mWebViewOnUiThread.cleanUp();
    }
}
/**
 * Helper exposed to the page as a JavaScript interface. Calling block()
 * from JS parks the renderer's JS thread on a latch until releaseBlock()
 * has been invoked the required number of times.
 */
private static class JSBlocker {
    // A CountDownLatch is used here, instead of a Future, because that makes it
    // easier to support requiring variable numbers of releaseBlock() calls
    // to unblock.
    private CountDownLatch mLatch;
    // Completed as soon as the renderer has entered block().
    private ResolvableFuture<Void> mBecameBlocked;
    JSBlocker(int requiredReleaseCount) {
        mLatch = new CountDownLatch(requiredReleaseCount);
        mBecameBlocked = ResolvableFuture.create();
    }
    // Default: a single releaseBlock() call unblocks the renderer.
    JSBlocker() {
        this(1);
    }
    // Counts the latch down once; the renderer resumes when it hits zero.
    public void releaseBlock() {
        mLatch.countDown();
    }
    @JavascriptInterface
    public void block() throws Exception {
        // This blocks indefinitely (until signalled) on a background thread.
        // The actual test timeout is not determined by this wait, but by other
        // code waiting for the onRenderProcessUnresponsive() call.
        mBecameBlocked.set(null);
        mLatch.await();
    }
    // Blocks the calling (test) thread until the renderer is inside block().
    public void waitForBlocked() {
        WebkitUtils.waitForFuture(mBecameBlocked);
    }
}
/**
 * Builds a {@link WebViewRenderProcessClient} whose callbacks delegate to
 * the given runnables.
 * <p>
 * BUG FIX: the parameter names were previously swapped relative to their
 * use — the first argument was named {@code onResponsive} but has always
 * been invoked from onRenderProcessUnresponsive (and vice versa). The
 * parameters are renamed to match the actual behavior; callers pass
 * arguments positionally and are unaffected.
 *
 * @param onUnresponsive run when the render process becomes unresponsive;
 *                       may be null
 * @param onResponsive   run when the render process becomes responsive
 *                       again; may be null
 */
private WebViewRenderProcessClient makeWebViewRenderProcessClient(
        @Nullable Runnable onUnresponsive,
        @Nullable Runnable onResponsive) {
    return new WebViewRenderProcessClient() {
        @Override
        public void onRenderProcessUnresponsive(WebView view, WebViewRenderProcess renderer) {
            if (onUnresponsive != null) {
                onUnresponsive.run();
            }
        }
        @Override
        public void onRenderProcessResponsive(WebView view, WebViewRenderProcess renderer) {
            if (onResponsive != null) {
                onResponsive.run();
            }
        }
    };
}
// Convenience overload: a client whose callbacks do nothing.
private WebViewRenderProcessClient makeWebViewRenderProcessClient() {
    return makeWebViewRenderProcessClient(null, null);
}
/**
 * Drives the renderer into an unresponsive state: runs the blocking JS
 * interface call, waits until the renderer is parked, then sends a key
 * event that will go unacknowledged.
 */
private void blockRenderProcess(final JSBlocker blocker) {
    WebkitUtils.onMainThreadSync(() -> {
        WebView webView = mWebViewOnUiThread.getWebViewOnCurrentThread();
        // Parks the renderer's JS thread via the injected interface.
        webView.evaluateJavascript("blocker.block();", null);
    });
    // Wait on the test instrumentation thread not the main thread. Blocking the main thread
    // may block other async calls such as initializing the GPU service channel that happens on
    // the UI thread and has to finish before the renderer can execute any javascript,
    // see https://crbug.com/1269552.
    blocker.waitForBlocked();
    WebkitUtils.onMainThreadSync(() -> {
        WebView webView = mWebViewOnUiThread.getWebViewOnCurrentThread();
        // Sending an input event that does not get acknowledged will cause
        // the unresponsive renderer event to fire.
        webView.dispatchKeyEvent(
                new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_ENTER));
    });
}
// Enables JavaScript and exposes the blocker to the page as window.blocker.
private void addJsBlockerInterface(final JSBlocker blocker) {
    WebkitUtils.onMainThreadSync(() -> {
        WebView webView = mWebViewOnUiThread.getWebViewOnCurrentThread();
        webView.getSettings().setJavaScriptEnabled(true);
        webView.addJavascriptInterface(blocker, "blocker");
    });
}
private void testWebViewRenderProcessClientOnExecutor(Executor executor) throws Throwable {
WebkitUtils.checkFeature(WebViewFeature.WEB_VIEW_RENDERER_CLIENT_BASIC_USAGE);
final JSBlocker blocker = new JSBlocker();
final ResolvableFuture<Void> rendererUnblocked = ResolvableFuture.create();
WebViewRenderProcessClient client = makeWebViewRenderProcessClient(
blocker::releaseBlock, () -> rendererUnblocked.set(null));
if (executor == null) {
mWebViewOnUiThread.setWebViewRenderProcessClient(client);
} else {
mWebViewOnUiThread.setWebViewRenderProcessClient(executor, client);
}
addJsBlockerInterface(blocker);
mWebViewOnUiThread.loadUrlAndWaitForCompletion("about:blank");
blockRenderProcess(blocker);
WebkitUtils.waitForFuture(rendererUnblocked);
}
    /** The render-process client must work when installed without an explicit executor. */
    @Test
    public void testWebViewRenderProcessClientWithoutExecutor() throws Throwable {
        testWebViewRenderProcessClientOnExecutor(null);
    }
    /**
     * The render-process client callbacks must be dispatched through the supplied
     * executor; exactly two dispatches are expected (one unresponsive, one responsive).
     */
    @Test
    public void testWebViewRenderProcessClientWithExecutor() throws Throwable {
        final AtomicInteger executorCount = new AtomicInteger();
        testWebViewRenderProcessClientOnExecutor(r -> {
            executorCount.incrementAndGet();
            r.run();
        });
        Assert.assertEquals(2, executorCount.get());
    }
    /**
     * Verifies that the renderer client can be cleared: after setting it back to
     * null the getter returns null and no callbacks are delivered even when the
     * renderer becomes unresponsive.
     */
    @Test
    public void testSetNullWebViewRenderProcessClient() throws Throwable {
        WebkitUtils.checkFeature(WebViewFeature.WEB_VIEW_RENDERER_CLIENT_BASIC_USAGE);
        final AtomicBoolean clientCalled = new AtomicBoolean();
        Assert.assertNull("Initially the renderer client should be null",
                mWebViewOnUiThread.getWebViewRenderProcessClient());
        // Install a client that records any callback, then immediately remove it.
        WebViewRenderProcessClient client = makeWebViewRenderProcessClient(
                () -> clientCalled.set(true),
                () -> clientCalled.set(true)
        );
        mWebViewOnUiThread.setWebViewRenderProcessClient(client);
        mWebViewOnUiThread.setWebViewRenderProcessClient(null);
        Assert.assertNull("After setting renderer client to null, getting it should return null",
                mWebViewOnUiThread.getWebViewRenderProcessClient());
        final JSBlocker blocker = new JSBlocker();
        final ResolvableFuture<Void> rendererUnblocked = ResolvableFuture.create();
        addJsBlockerInterface(blocker);
        mWebViewOnUiThread.loadUrlAndWaitForCompletion("about:blank");
        blockRenderProcess(blocker);
        // When no WebViewRenderProcessClient is set, we can't directly observe the triggering of
        // the unresponsive renderer message. Instead, wait for 6s, which should be long enough for
        // the message to have been triggered, and then unblock.
        WebkitUtils.onMainThreadDelayed(6000, () -> {
            blocker.releaseBlock();
            rendererUnblocked.set(null);
        });
        WebkitUtils.waitForFuture(rendererUnblocked);
        Assert.assertFalse(clientCalled.get());
    }
@Test
public void testSetWebViewRenderProcessClient() throws Throwable {
WebkitUtils.checkFeature(WebViewFeature.WEB_VIEW_RENDERER_CLIENT_BASIC_USAGE);
WebViewRenderProcessClient client = makeWebViewRenderProcessClient();
mWebViewOnUiThread.setWebViewRenderProcessClient(client);
Assert.assertSame(
"After the renderer client is set, getting it should return the same object",
client, mWebViewOnUiThread.getWebViewRenderProcessClient());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.rest;
import static org.apache.geode.test.awaitility.GeodeAwaitility.await;
import static org.apache.geode.test.junit.assertions.ClusterManagementRealizationResultAssert.assertManagementResult;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.net.URL;
import java.nio.file.Path;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.apache.geode.cache.execute.Execution;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.internal.classloader.ClassPathLoader;
import org.apache.geode.management.api.ClusterManagementService;
import org.apache.geode.management.cluster.client.ClusterManagementServiceBuilder;
import org.apache.geode.management.configuration.Deployment;
import org.apache.geode.test.compiler.ClassBuilder;
import org.apache.geode.test.dunit.rules.ClusterStartupRule;
import org.apache.geode.test.dunit.rules.MemberVM;
import org.apache.geode.test.junit.rules.MemberStarterRule;
import org.apache.geode.test.junit.rules.serializable.SerializableTemporaryFolder;
/**
 * Verifies that jars deployed through the cluster management REST service can be
 * redeployed with newer versions of their functions, that the deployed jar is
 * replicated to every locator's cluster configuration, and that a hot redeploy
 * does not fail any in-flight function executions.
 */
public class DeploymentManagementRedployDUnitTest {
  private static final String VERSION1 = "Version1";
  private static final String VERSION2 = "Version2";

  private static final String JAR_NAME_A = "DeployCommandRedeployDUnitTestA.jar";
  private static final String FUNCTION_A = "DeployCommandRedeployDUnitFunctionA";
  private File jarAVersion1;
  private File jarAVersion2;

  private static final String JAR_NAME_B = "DeployCommandRedeployDUnitTestB.jar";
  private static final String FUNCTION_B = "DeployCommandRedeployDUnitFunctionB";
  private static final String PACKAGE_B = "jddunit.function";
  private static final String FULLY_QUALIFIED_FUNCTION_B = PACKAGE_B + "." + FUNCTION_B;
  private File jarBVersion1;
  private File jarBVersion2;

  private MemberVM locator;
  private MemberVM server;

  @Rule
  public SerializableTemporaryFolder temporaryFolder = new SerializableTemporaryFolder();

  @Rule
  public ClusterStartupRule lsRule = new ClusterStartupRule();

  private ClusterManagementService client;
  private Deployment deployment;

  @Before
  public void setup() throws Exception {
    // Build both versions of each jar before any member starts, so the tests only
    // exercise deployment, not compilation.
    jarAVersion1 = createJarWithFunctionA(VERSION1);
    jarAVersion2 = createJarWithFunctionA(VERSION2);
    jarBVersion1 = createJarWithFunctionB(VERSION1);
    jarBVersion2 = createJarWithFunctionB(VERSION2);

    locator = lsRule.startLocatorVM(0, MemberStarterRule::withHttpService);
    server = lsRule.startServerVM(1, locator.getPort());

    client = new ClusterManagementServiceBuilder()
        .setPort(locator.getHttpPort())
        .build();

    deployment = new Deployment();
  }

  @Test
  public void redeployJarsWithNewVersionsOfFunctions() {
    deployment.setFile(jarAVersion1);
    assertManagementResult(client.create(deployment)).isSuccessful();
    server.invoke(() -> assertThatCanLoad(JAR_NAME_A, FUNCTION_A));
    server.invoke(() -> assertThatFunctionHasVersion(FUNCTION_A, VERSION1));

    deployment.setFile(jarBVersion1);
    assertManagementResult(client.create(deployment)).isSuccessful();
    server.invoke(() -> assertThatCanLoad(JAR_NAME_A, FUNCTION_A));
    server.invoke(() -> assertThatCanLoad(JAR_NAME_B, FULLY_QUALIFIED_FUNCTION_B));
    server.invoke(() -> assertThatFunctionHasVersion(FUNCTION_A, VERSION1));
    server.invoke(() -> assertThatFunctionHasVersion(FUNCTION_B, VERSION1));

    // Redeploying B to version 2 must not disturb A.
    deployment.setFile(jarBVersion2);
    assertManagementResult(client.create(deployment)).isSuccessful();
    server.invoke(() -> assertThatCanLoad(JAR_NAME_A, FUNCTION_A));
    server.invoke(() -> assertThatCanLoad(JAR_NAME_B, FULLY_QUALIFIED_FUNCTION_B));
    server.invoke(() -> assertThatFunctionHasVersion(FUNCTION_A, VERSION1));
    server.invoke(() -> assertThatFunctionHasVersion(FUNCTION_B, VERSION2));

    // And redeploying A must not disturb B.
    deployment.setFile(jarAVersion2);
    assertManagementResult(client.create(deployment)).isSuccessful();
    server.invoke(() -> assertThatCanLoad(JAR_NAME_A, FUNCTION_A));
    server.invoke(() -> assertThatCanLoad(JAR_NAME_B, FULLY_QUALIFIED_FUNCTION_B));
    server.invoke(() -> assertThatFunctionHasVersion(FUNCTION_A, VERSION2));
    server.invoke(() -> assertThatFunctionHasVersion(FUNCTION_B, VERSION2));
  }

  @Test
  public void redeployJarsWithNewVersionsOfFunctionsAndMultipleLocators() throws IOException {
    Properties props = new Properties();
    props.setProperty("locators", "localhost[" + locator.getPort() + "]");
    MemberVM locator2 = lsRule.startLocatorVM(2, props);

    deployment.setFile(jarAVersion1);
    assertManagementResult(client.create(deployment)).isSuccessful();
    server.invoke(() -> assertThatCanLoad(JAR_NAME_A, FUNCTION_A));
    server.invoke(() -> assertThatFunctionHasVersion(FUNCTION_A, VERSION1));

    deployment.setFile(jarAVersion2);
    assertManagementResult(client.create(deployment)).isSuccessful();
    server.invoke(() -> assertThatCanLoad(JAR_NAME_A, FUNCTION_A));
    server.invoke(() -> assertThatFunctionHasVersion(FUNCTION_A, VERSION2));

    server.stop(false);

    Path locator1Jar =
        locator.getWorkingDir().toPath().resolve("cluster_config/cluster/" + JAR_NAME_A);
    Path locator2Jar =
        locator2.getWorkingDir().toPath().resolve("cluster_config/cluster/" + JAR_NAME_A);
    // Cluster-config replication is asynchronous; wait until both locators hold the
    // version-2 jar before restarting the server against the cluster config.
    await().pollDelay(1, TimeUnit.SECONDS)
        .until(() -> FileUtils.contentEquals(locator1Jar.toFile(), jarAVersion2) &&
            FileUtils.contentEquals(locator2Jar.toFile(), jarAVersion2));

    lsRule.startServerVM(1, locator.getPort());
    server.invoke(() -> assertThatCanLoad(JAR_NAME_A, FUNCTION_A));
    server.invoke(() -> assertThatFunctionHasVersion(FUNCTION_A, VERSION2));
  }

  // Static so the same instance is visible across successive server.invoke() calls
  // executed in the server VM.
  private static LoopingFunctionExecutor executor;

  @Test
  public void hotDeployShouldNotResultInAnyFailedFunctionExecutions() {
    deployment.setFile(jarAVersion1);
    assertManagementResult(client.create(deployment)).isSuccessful();
    server.invoke(() -> assertThatCanLoad(JAR_NAME_A, FUNCTION_A));
    server.invoke(() -> assertThatFunctionHasVersion(FUNCTION_A, VERSION1));

    server.invoke(() -> {
      executor = new LoopingFunctionExecutor();
      executor.startExecuting(FUNCTION_A);
      executor.waitForExecutions(100);
    });

    deployment.setFile(jarAVersion2);
    assertManagementResult(client.create(deployment)).isSuccessful();
    server.invoke(() -> assertThatCanLoad(JAR_NAME_A, FUNCTION_A));
    server.invoke(() -> assertThatFunctionHasVersion(FUNCTION_A, VERSION2));

    server.invoke(() -> {
      // Prove executions keep succeeding after the redeploy before stopping.
      executor.waitForExecutions(100);
      executor.stopExecutionAndThrowAnyException();
    });
  }

  // Note that jar A is a Declarable Function, while jar B is only a Function.
  // Also, the function for jar A resides in the default package, whereas jar B specifies a package.
  // This ensures that this test has identical coverage to some tests that it replaced.
  private File createJarWithFunctionA(String version) throws Exception {
    URL classTemplateUrl = DeploymentManagementRedployDUnitTest.class
        .getResource("DeployCommandRedeployDUnitTest_FunctionATemplate");
    assertThat(classTemplateUrl).isNotNull();

    String classContents = FileUtils.readFileToString(new File(classTemplateUrl.toURI()), "UTF-8");
    classContents = classContents.replaceAll("FUNCTION_A", FUNCTION_A);
    classContents = classContents.replaceAll("VERSION", version);

    File jar = new File(temporaryFolder.newFolder(JAR_NAME_A + version), JAR_NAME_A);
    ClassBuilder functionClassBuilder = new ClassBuilder();
    functionClassBuilder.writeJarFromContent(FUNCTION_A, classContents, jar);
    return jar;
  }

  private File createJarWithFunctionB(String version) throws Exception {
    URL classTemplateUrl = DeploymentManagementRedployDUnitTest.class
        .getResource("DeployCommandRedeployDUnitTest_FunctionBTemplate");
    assertThat(classTemplateUrl).isNotNull();

    String classContents = FileUtils.readFileToString(new File(classTemplateUrl.toURI()), "UTF-8");
    classContents = classContents.replaceAll("PACKAGE_B", PACKAGE_B);
    classContents = classContents.replaceAll("FUNCTION_B", FUNCTION_B);
    classContents = classContents.replaceAll("VERSION", version);

    File jar = new File(temporaryFolder.newFolder(JAR_NAME_B + version), JAR_NAME_B);
    ClassBuilder functionClassBuilder = new ClassBuilder();
    functionClassBuilder.writeJarFromContent("jddunit/function/" + FUNCTION_B, classContents, jar);
    return jar;
  }

  /** Executes the function on this member and asserts it reports the expected version. */
  private static void assertThatFunctionHasVersion(String functionId, String version) {
    @SuppressWarnings("deprecation")
    GemFireCacheImpl gemFireCache = GemFireCacheImpl.getInstance();
    DistributedSystem distributedSystem = gemFireCache.getDistributedSystem();
    @SuppressWarnings("unchecked")
    Execution<Void, String, List<String>> execution =
        FunctionService.onMember(distributedSystem.getDistributedMember());
    List<String> result = execution.execute(functionId).getResult();
    assertThat(result.get(0)).isEqualTo(version);
  }

  /** Asserts the jar is registered as deployed and its class is loadable. */
  private static void assertThatCanLoad(String jarName, String className)
      throws ClassNotFoundException {
    assertThat(ClassPathLoader.getLatest().getJarDeploymentService()
        .getDeployed(FilenameUtils.getBaseName(jarName)).isSuccessful()).isTrue();
    assertThat(ClassPathLoader.getLatest().forName(className)).isNotNull();
  }

  /**
   * Runs a function in a tight loop on a single background thread so the test can
   * detect any execution failure that happens while a jar is hot-redeployed.
   */
  private static class LoopingFunctionExecutor implements Serializable {
    private final AtomicInteger countOfExecutions = new AtomicInteger();
    private final AtomicReference<Exception> exception = new AtomicReference<>();
    private final ExecutorService executorService = Executors.newSingleThreadExecutor();

    public void startExecuting(String functionId) {
      // BUG FIX: the original declared a local ExecutorService here that shadowed
      // the field, so stopExecutionAndThrowAnyException() shut down an idle
      // executor and this execution loop was never actually interrupted.
      executorService.submit(() -> {
        @SuppressWarnings("deprecation")
        GemFireCacheImpl gemFireCache = GemFireCacheImpl.getInstance();
        DistributedSystem distributedSystem = gemFireCache.getDistributedSystem();
        while (!Thread.currentThread().isInterrupted()) {
          try {
            countOfExecutions.incrementAndGet();
            FunctionService.onMember(distributedSystem.getDistributedMember()).execute(functionId)
                .getResult();
          } catch (Exception e) {
            // Remember the failure; the test rethrows it on shutdown.
            exception.set(e);
          }
        }
      });
    }

    /** Blocks until {@code numberOfExecutions} additional executions have started. */
    public void waitForExecutions(int numberOfExecutions) {
      int initialCount = countOfExecutions.get();
      int countToWaitFor = initialCount + numberOfExecutions;
      Callable<Boolean> doneWaiting = () -> countOfExecutions.get() >= countToWaitFor;
      await().until(doneWaiting);
    }

    /** Interrupts the loop and rethrows any exception captured by an execution. */
    public void stopExecutionAndThrowAnyException() throws Exception {
      executorService.shutdownNow();
      Exception e = exception.get();
      if (e != null) {
        throw e;
      }
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.checkpoint;
import org.apache.flink.api.common.JobID;
import org.apache.flink.core.fs.Path;
import org.apache.flink.core.testutils.CommonTestUtils;
import org.apache.flink.runtime.jobgraph.JobStatus;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.runtime.state.SharedStateRegistry;
import org.apache.flink.runtime.state.filesystem.FileStateHandle;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.mockito.Mockito;
import java.io.File;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
/**
 * Tests for {@code CompletedCheckpoint}: discard behavior on shutdown and on
 * subsume, stats discard callbacks, and Java serializability of the stats object.
 */
public class CompletedCheckpointTest {

	@Rule
	public final TemporaryFolder tmpFolder = new TemporaryFolder();

	/**
	 * Tests that persistent checkpoints discard their header file.
	 */
	@Test
	public void testDiscard() throws Exception {
		File file = tmpFolder.newFile();
		assertEquals(true, file.exists());

		OperatorState state = mock(OperatorState.class);
		Map<OperatorID, OperatorState> taskStates = new HashMap<>();
		taskStates.put(new OperatorID(), state);

		// Verify discard call is forwarded to state
		CompletedCheckpoint checkpoint = new CompletedCheckpoint(
				new JobID(), 0, 0, 1,
				taskStates,
				Collections.<MasterState>emptyList(),
				CheckpointProperties.forStandardCheckpoint(),
				new FileStateHandle(new Path(file.toURI()), file.length()),
				file.getAbsolutePath());

		checkpoint.discardOnShutdown(JobStatus.FAILED);

		// The externalized header file must be gone after the discard.
		assertEquals(false, file.exists());
	}

	/**
	 * Tests that the garbage collection properties are respected when subsuming checkpoints.
	 */
	@Test
	public void testCleanUpOnSubsume() throws Exception {
		OperatorState state = mock(OperatorState.class);
		Map<OperatorID, OperatorState> operatorStates = new HashMap<>();
		operatorStates.put(new OperatorID(), state);

		boolean discardSubsumed = true;
		CheckpointProperties props = new CheckpointProperties(false, false, discardSubsumed, true, true, true, true);

		CompletedCheckpoint checkpoint = new CompletedCheckpoint(
				new JobID(), 0, 0, 1,
				operatorStates,
				Collections.<MasterState>emptyList(),
				props,
				null,
				null);

		SharedStateRegistry sharedStateRegistry = new SharedStateRegistry();
		checkpoint.registerSharedStatesAfterRestored(sharedStateRegistry);
		// Registration must be forwarded to each operator state exactly once.
		verify(state, times(1)).registerSharedStates(sharedStateRegistry);

		// Subsume: with discardSubsumed=true the state must be discarded.
		checkpoint.discardOnSubsume();

		verify(state, times(1)).discardState();
	}

	/**
	 * Tests that the garbage collection properties are respected when shutting down.
	 */
	@Test
	public void testCleanUpOnShutdown() throws Exception {
		File file = tmpFolder.newFile();
		String externalPath = file.getAbsolutePath();

		JobStatus[] terminalStates = new JobStatus[] {
				JobStatus.FINISHED, JobStatus.CANCELED, JobStatus.FAILED, JobStatus.SUSPENDED
		};

		OperatorState state = mock(OperatorState.class);
		Map<OperatorID, OperatorState> operatorStates = new HashMap<>();
		operatorStates.put(new OperatorID(), state);

		// Exercise every terminal job status with both "keep" and "discard" properties.
		for (JobStatus status : terminalStates) {
			Mockito.reset(state);

			// Keep
			CheckpointProperties props = new CheckpointProperties(false, true, false, false, false, false, false);
			CompletedCheckpoint checkpoint = new CompletedCheckpoint(
					new JobID(), 0, 0, 1,
					new HashMap<>(operatorStates),
					Collections.<MasterState>emptyList(),
					props,
					new FileStateHandle(new Path(file.toURI()), file.length()),
					externalPath);

			SharedStateRegistry sharedStateRegistry = new SharedStateRegistry();
			checkpoint.registerSharedStatesAfterRestored(sharedStateRegistry);

			checkpoint.discardOnShutdown(status);
			// With "keep" properties neither the state nor the file may be removed.
			verify(state, times(0)).discardState();
			assertEquals(true, file.exists());

			// Discard
			props = new CheckpointProperties(false, false, true, true, true, true, true);
			checkpoint = new CompletedCheckpoint(
					new JobID(), 0, 0, 1,
					new HashMap<>(operatorStates),
					Collections.<MasterState>emptyList(),
					props,
					null,
					null);

			checkpoint.discardOnShutdown(status);
			verify(state, times(1)).discardState();
		}
	}

	/**
	 * Tests that the stats callbacks happen if the callback is registered.
	 */
	@Test
	public void testCompletedCheckpointStatsCallbacks() throws Exception {
		OperatorState state = mock(OperatorState.class);
		Map<OperatorID, OperatorState> operatorStates = new HashMap<>();
		operatorStates.put(new OperatorID(), state);

		CompletedCheckpoint completed = new CompletedCheckpoint(
				new JobID(),
				0,
				0,
				1,
				new HashMap<>(operatorStates),
				Collections.<MasterState>emptyList(),
				CheckpointProperties.forStandardCheckpoint(),
				null,
				null);

		CompletedCheckpointStats.DiscardCallback callback = mock(CompletedCheckpointStats.DiscardCallback.class);
		completed.setDiscardCallback(callback);

		// Discarding the checkpoint must notify the registered stats callback once.
		completed.discardOnShutdown(JobStatus.FINISHED);
		verify(callback, times(1)).notifyDiscardedCheckpoint();
	}

	// Verifies the stats object survives a Java-serialization round trip intact.
	@Test
	public void testIsJavaSerializable() throws Exception {
		TaskStateStats task1 = new TaskStateStats(new JobVertexID(), 3);
		TaskStateStats task2 = new TaskStateStats(new JobVertexID(), 4);

		HashMap<JobVertexID, TaskStateStats> taskStats = new HashMap<>();
		taskStats.put(task1.getJobVertexId(), task1);
		taskStats.put(task2.getJobVertexId(), task2);

		CompletedCheckpointStats completed = new CompletedCheckpointStats(
				123123123L,
				10123L,
				CheckpointProperties.forStandardCheckpoint(),
				1337,
				taskStats,
				1337,
				123129837912L,
				123819239812L,
				new SubtaskStateStats(123, 213123, 123123, 0, 0, 0, 0),
				null);

		CompletedCheckpointStats copy = CommonTestUtils.createCopySerializable(completed);

		assertEquals(completed.getCheckpointId(), copy.getCheckpointId());
		assertEquals(completed.getTriggerTimestamp(), copy.getTriggerTimestamp());
		assertEquals(completed.getProperties(), copy.getProperties());
		assertEquals(completed.getNumberOfSubtasks(), copy.getNumberOfSubtasks());
		assertEquals(completed.getNumberOfAcknowledgedSubtasks(), copy.getNumberOfAcknowledgedSubtasks());
		assertEquals(completed.getEndToEndDuration(), copy.getEndToEndDuration());
		assertEquals(completed.getStateSize(), copy.getStateSize());
		assertEquals(completed.getLatestAcknowledgedSubtaskStats().getSubtaskIndex(), copy.getLatestAcknowledgedSubtaskStats().getSubtaskIndex());
		assertEquals(completed.getStatus(), copy.getStatus());
	}
}
| |
/**
* Copyright 2015 David Karnok and Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package hu.akarnokd.rxjava2.internal.subscriptions;
import static org.junit.Assert.*;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.*;
import org.junit.Test;
import org.reactivestreams.Subscription;
import hu.akarnokd.rxjava2.disposables.Disposable;
/**
 * Unit tests for {@code AsyncSubscription}: deferred subscription hand-off,
 * resource management, and idempotent cancellation.
 */
public class AsyncSubscriptionTest {
    @Test
    public void testNoResource() {
        final AsyncSubscription sub = new AsyncSubscription();
        final Subscription upstream = mock(Subscription.class);

        assertTrue(sub.setSubscription(upstream));
        sub.request(1);
        sub.cancel();

        verify(upstream).request(1);
        verify(upstream).cancel();
    }

    @Test
    public void testRequestBeforeSet() {
        final AsyncSubscription sub = new AsyncSubscription();
        final Subscription upstream = mock(Subscription.class);

        // Request before the subscription arrives; the demand must be replayed.
        sub.request(1);
        assertTrue(sub.setSubscription(upstream));
        sub.cancel();

        verify(upstream).request(1);
        verify(upstream).cancel();
    }

    @Test
    public void testCancelBeforeSet() {
        final AsyncSubscription sub = new AsyncSubscription();
        final Subscription upstream = mock(Subscription.class);

        sub.request(1);
        sub.cancel();
        // A subscription arriving after cancellation is rejected and cancelled,
        // and the earlier request must never reach it.
        assertFalse(sub.setSubscription(upstream));

        verify(upstream, never()).request(1);
        verify(upstream).cancel();
    }

    @Test
    public void testSingleSet() {
        final AsyncSubscription sub = new AsyncSubscription();
        final Subscription first = mock(Subscription.class);
        assertTrue(sub.setSubscription(first));

        final Subscription second = mock(Subscription.class);
        assertTrue(sub.setSubscription(second));

        // The first subscription stays installed; the newcomer is cancelled.
        assertSame(sub.actual, first);
        verify(second).cancel();
    }

    @Test
    public void testInitialResource() {
        final Disposable resource = mock(Disposable.class);
        final AsyncSubscription sub = new AsyncSubscription(resource);

        sub.cancel();

        verify(resource).dispose();
    }

    @Test
    public void testSetResource() {
        final AsyncSubscription sub = new AsyncSubscription();
        final Disposable resource = mock(Disposable.class);

        assertTrue(sub.setResource(resource));
        sub.cancel();

        verify(resource).dispose();
    }

    @Test
    public void testReplaceResource() {
        final AsyncSubscription sub = new AsyncSubscription();
        final Disposable resource = mock(Disposable.class);

        assertTrue(sub.replaceResource(resource));
        sub.cancel();

        verify(resource).dispose();
    }

    @Test
    public void testSetResource2() {
        final AsyncSubscription sub = new AsyncSubscription();
        final Disposable first = mock(Disposable.class);
        assertTrue(sub.setResource(first));

        final Disposable second = mock(Disposable.class);
        assertTrue(sub.setResource(second));

        sub.cancel();

        // set-style replacement disposes the replaced resource as well as the
        // current one on cancel.
        verify(first).dispose();
        verify(second).dispose();
    }

    @Test
    public void testReplaceResource2() {
        final AsyncSubscription sub = new AsyncSubscription();
        final Disposable first = mock(Disposable.class);
        assertTrue(sub.replaceResource(first));

        final Disposable second = mock(Disposable.class);
        assertTrue(sub.replaceResource(second));

        sub.cancel();

        // replace-style replacement hands off without disposing the old resource.
        verify(first, never()).dispose();
        verify(second).dispose();
    }

    @Test
    public void testSetResourceAfterCancel() {
        final AsyncSubscription sub = new AsyncSubscription();
        sub.cancel();

        final Disposable resource = mock(Disposable.class);
        assertFalse(sub.setResource(resource));

        verify(resource).dispose();
    }

    @Test
    public void testReplaceResourceAfterCancel() {
        final AsyncSubscription sub = new AsyncSubscription();
        sub.cancel();

        final Disposable resource = mock(Disposable.class);
        assertFalse(sub.replaceResource(resource));

        verify(resource).dispose();
    }

    @Test
    public void testCancelOnce() {
        final Disposable resource = mock(Disposable.class);
        final AsyncSubscription sub = new AsyncSubscription(resource);
        final Subscription upstream = mock(Subscription.class);
        assertTrue(sub.setSubscription(upstream));

        // Repeated cancels must be no-ops after the first.
        sub.cancel();
        sub.cancel();
        sub.cancel();

        verify(upstream, never()).request(anyLong());
        verify(upstream).cancel();
        verify(resource).dispose();
    }
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.dataservices.core.odata.expression.operation;
import org.apache.olingo.commons.api.edm.EdmType;
import org.apache.olingo.commons.api.http.HttpStatusCode;
import org.apache.olingo.server.api.ODataApplicationException;
import org.apache.olingo.server.api.uri.queryoption.expression.BinaryOperatorKind;
import org.wso2.carbon.dataservices.core.odata.expression.ODataConstants;
import org.wso2.carbon.dataservices.core.odata.expression.operand.TypedOperand;
import org.wso2.carbon.dataservices.core.odata.expression.operand.VisitorOperand;
import org.wso2.carbon.dataservices.core.odata.expression.primitive.EdmNull;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Timestamp;
import java.util.Calendar;
import java.util.Locale;
public class BinaryOperator {
private TypedOperand right;
private TypedOperand left;
    /**
     * Wraps both operands and coerces them to a common EDM type so the comparison
     * and arithmetic helpers can assume compatible value types.
     *
     * @throws ODataApplicationException if an operand cannot be converted
     */
    public BinaryOperator(final VisitorOperand leftOperand, final VisitorOperand rightOperand)
            throws ODataApplicationException {
        left = leftOperand.asTypedOperand();
        right = rightOperand.asTypedOperand();
        // Cast each side towards the other; the second cast uses the already-widened
        // left operand so both sides end up on the wider of the two types.
        left = left.castToCommonType(right);
        right = right.castToCommonType(left);
    }
public VisitorOperand andOperator() throws ODataApplicationException {
Boolean result = null;
if (left.is(ODataConstants.primitiveBoolean) && right.is(ODataConstants.primitiveBoolean)) {
if (Boolean.TRUE.equals(left.getValue()) && Boolean.TRUE.equals(right.getValue())) {
result = true;
} else if (Boolean.FALSE.equals(left.getValue()) || Boolean.FALSE.equals(right.getValue())) {
result = false;
}
return new TypedOperand(result, ODataConstants.primitiveBoolean);
} else {
throw new ODataApplicationException("Add operator needs two binary operands",
HttpStatusCode.BAD_REQUEST.getStatusCode(), Locale.ROOT);
}
}
public VisitorOperand orOperator() throws ODataApplicationException {
Boolean result = null;
if (left.is(ODataConstants.primitiveBoolean) && right.is(ODataConstants.primitiveBoolean)) {
if (Boolean.TRUE.equals(left.getValue()) || Boolean.TRUE.equals(right.getValue())) {
result = true;
} else if (Boolean.FALSE.equals(left.getValue()) && Boolean.FALSE.equals(right.getValue())) {
result = false;
}
return new TypedOperand(result, ODataConstants.primitiveBoolean);
} else {
throw new ODataApplicationException("Or operator needs two binary operands",
HttpStatusCode.BAD_REQUEST.getStatusCode(), Locale.ROOT);
}
}
public VisitorOperand equalsOperator() {
final boolean result = isBinaryComparisonNecessary() && binaryComparison(ODataConstants.EQUALS);
return new TypedOperand(result, ODataConstants.primitiveBoolean);
}
public VisitorOperand notEqualsOperator() {
final VisitorOperand equalsOperator = equalsOperator();
return new TypedOperand(!(Boolean) equalsOperator.getValue(), ODataConstants.primitiveBoolean);
}
private boolean isBinaryComparisonNecessary() {
// binaryComparison() need to be called, if both operand are either null or not null, ^ (bitwise XOR)
return !(left.isNull() ^ right.isNull());
}
public VisitorOperand greaterEqualsOperator() {
final boolean result =
isBinaryComparisonNecessary() && binaryComparison(ODataConstants.GREATER_THAN, ODataConstants.EQUALS);
return new TypedOperand(result, ODataConstants.primitiveBoolean);
}
public VisitorOperand greaterThanOperator() {
final boolean result = isBinaryComparisonNecessary() && binaryComparison(ODataConstants.GREATER_THAN);
return new TypedOperand(result, ODataConstants.primitiveBoolean);
}
public VisitorOperand lessEqualsOperator() {
final boolean result =
isBinaryComparisonNecessary() && binaryComparison(ODataConstants.LESS_THAN, ODataConstants.EQUALS);
return new TypedOperand(result, ODataConstants.primitiveBoolean);
}
public VisitorOperand lessThanOperator() {
final boolean result = isBinaryComparisonNecessary() && binaryComparison(ODataConstants.LESS_THAN);
return new TypedOperand(result, ODataConstants.primitiveBoolean);
}
@SuppressWarnings({ "rawtypes", "unchecked" })
private boolean binaryComparison(final int... expect) {
int result;
if (left.isNull() && right.isNull()) {
result = 0; // null is equals to null
} else {
// left and right are not null!
if (left.isIntegerType()) {
result = left.getTypedValue(BigInteger.class).compareTo(right.getTypedValue(BigInteger.class));
} else if (left.isDecimalType()) {
result = left.getTypedValue(BigDecimal.class).compareTo(right.getTypedValue(BigDecimal.class));
} else if (left.getValue().getClass() == right.getValue().getClass() &&
left.getValue() instanceof Comparable) {
result = ((Comparable) left.getValue()).compareTo(right.getValue());
} else {
result = left.getValue().equals(right.getValue()) ? 0 : 1;
}
}
for (int expectedValue : expect) {
if (expectedValue == result) {
return true;
}
}
return false;
}
public VisitorOperand arithmeticOperator(final BinaryOperatorKind operator) throws ODataApplicationException {
if (left.isNull() || right.isNull()) {
return new TypedOperand(new Object(), EdmNull.getInstance());
} else {
if (left.isIntegerType()) {
final BigInteger result = integerArithmeticOperation(operator);
return new TypedOperand(result, determineResultType(result, left));
} else if (left.isDecimalType()) {
final BigDecimal result = decimalArithmeticOperation(operator);
return new TypedOperand(result, determineResultType(result, left));
} else if (left.is(ODataConstants.primitiveDate, ODataConstants.primitiveDuration,
ODataConstants.primitiveDateTimeOffset)) {
return dateArithmeticOperation(operator);
} else {
throw new ODataApplicationException("Invalid type", HttpStatusCode.BAD_REQUEST.getStatusCode(),
Locale.ROOT);
}
}
}
    /**
     * Picks the narrowest EDM numeric type able to hold {@code arithmeticResult}.
     * Left and right operand have the same type, so it is enough to check the type
     * of the left operand.
     */
    private EdmType determineResultType(final Number arithmeticResult, final TypedOperand leftOperand) {
        if (leftOperand.isDecimalType()) {
            final BigDecimal value = (BigDecimal) arithmeticResult;
            // Within Edm.Single range keep Single, otherwise promote to Double.
            if (value.compareTo(ODataConstants.EDM_SINGLE_MIN) >= 0 &&
                    value.compareTo(ODataConstants.EDM_SINGLE_MAX) <= 0) {
                return ODataConstants.primitiveSingle;
            } else {
                return ODataConstants.primitiveDouble;
            }
        } else {
            final BigInteger value = (BigInteger) arithmeticResult;
            // Probe the integer ranges from narrowest (SByte) to widest (Int64).
            if (value.compareTo(ODataConstants.EDN_SBYTE_MAX) <= 0 &&
                    value.compareTo(ODataConstants.EDM_SBYTE_MIN) >= 0) {
                return ODataConstants.primitiveSByte;
            }
            if (value.compareTo(ODataConstants.EDM_BYTE_MAX) <= 0 &&
                    value.compareTo(ODataConstants.EDM_BYTE_MIN) >= 0) {
                return ODataConstants.primitiveByte;
            }
            if (value.compareTo(ODataConstants.EDM_INT16_MAX) <= 0 &&
                    value.compareTo(ODataConstants.EDM_INT16_MIN) >= 0) {
                return ODataConstants.primitiveInt16;
            }
            if (value.compareTo(ODataConstants.EDM_INT32_MAX) <= 0 &&
                    value.compareTo(ODataConstants.EDM_INT32_MIN) >= 0) {
                return ODataConstants.primitiveInt32;
            }
            if (value.compareTo(ODataConstants.EDM_INT64_MAX) <= 0 &&
                    value.compareTo(ODataConstants.EDM_INT64_MIN) >= 0) {
                return ODataConstants.primitiveInt64;
            }
            // Choose double instead single because precision is higher (52 bits instead of 23)
            return ODataConstants.primitiveDouble;
        }
    }
/**
 * Applies ADD/SUB to temporal operands. Supported combinations:
 * Date-Date, Date+/-Duration, Duration+/-Duration,
 * DateTimeOffset+/-Duration and DateTimeOffset-DateTimeOffset.
 * Durations are represented as BigDecimal seconds.
 *
 * @throws ODataApplicationException for any unsupported operand/operator mix
 */
private VisitorOperand dateArithmeticOperation(final BinaryOperatorKind operator) throws ODataApplicationException {
    VisitorOperand result = null;
    if (left.is(ODataConstants.primitiveDate)) {
        if (right.is(ODataConstants.primitiveDate) && operator == BinaryOperatorKind.SUB) {
            // Date - Date => Duration (seconds).
            // BUG FIX: the original subtracted the LEFT operand's time from
            // itself, so this branch always produced a zero duration; the
            // RIGHT operand must be subtracted instead.
            long millis = left.getTypedValue(Calendar.class).getTimeInMillis() -
                right.getTypedValue(Calendar.class).getTimeInMillis();
            result = new TypedOperand(new BigDecimal(millis).divide(ODataConstants.FACTOR_SECOND),
                ODataConstants.primitiveDuration);
        } else if (right.is(ODataConstants.primitiveDuration) && operator == BinaryOperatorKind.ADD) {
            // Date + Duration => DateTimeOffset
            long millis = left.getTypedValue(Calendar.class).getTimeInMillis() +
                (right.getTypedValue(BigDecimal.class).longValue() * ODataConstants.FACTOR_SECOND_INT);
            result = new TypedOperand(new Timestamp(millis), ODataConstants.primitiveDateTimeOffset);
        } else if (right.is(ODataConstants.primitiveDuration) && operator == BinaryOperatorKind.SUB) {
            // Date - Duration => DateTimeOffset
            long millis = left.getTypedValue(Calendar.class).getTimeInMillis() -
                (right.getTypedValue(BigDecimal.class).longValue() * ODataConstants.FACTOR_SECOND_INT);
            result = new TypedOperand(new Timestamp(millis), ODataConstants.primitiveDateTimeOffset);
        }
    } else if (left.is(ODataConstants.primitiveDuration)) {
        if (right.is(ODataConstants.primitiveDuration) && operator == BinaryOperatorKind.ADD) {
            // Duration + Duration => Duration
            long seconds = left.getTypedValue(BigDecimal.class).longValue() +
                right.getTypedValue(BigDecimal.class).longValue();
            result = new TypedOperand(new BigDecimal(seconds), ODataConstants.primitiveDuration);
        } else if (right.is(ODataConstants.primitiveDuration) && operator == BinaryOperatorKind.SUB) {
            // Duration - Duration => Duration
            long seconds = left.getTypedValue(BigDecimal.class).longValue() -
                right.getTypedValue(BigDecimal.class).longValue();
            result = new TypedOperand(new BigDecimal(seconds), ODataConstants.primitiveDuration);
        }
    } else if (left.is(ODataConstants.primitiveDateTimeOffset)) {
        if (right.is(ODataConstants.primitiveDuration) && operator == BinaryOperatorKind.ADD) {
            // DateTimeOffset + Duration => DateTimeOffset
            long millis = left.getTypedValue(Timestamp.class).getTime() +
                (right.getTypedValue(BigDecimal.class).longValue() * ODataConstants.FACTOR_SECOND_INT);
            result = new TypedOperand(new Timestamp(millis), ODataConstants.primitiveDateTimeOffset);
        } else if (right.is(ODataConstants.primitiveDuration) && operator == BinaryOperatorKind.SUB) {
            // DateTimeOffset - Duration => DateTimeOffset
            long millis = left.getTypedValue(Timestamp.class).getTime() -
                (right.getTypedValue(BigDecimal.class).longValue() * ODataConstants.FACTOR_SECOND_INT);
            result = new TypedOperand(new Timestamp(millis), ODataConstants.primitiveDateTimeOffset);
        } else if (right.is(ODataConstants.primitiveDateTimeOffset) && operator == BinaryOperatorKind.SUB) {
            // DateTimeOffset - DateTimeOffset => Duration (seconds)
            long millis =
                left.getTypedValue(Timestamp.class).getTime() - right.getTypedValue(Timestamp.class).getTime();
            result = new TypedOperand(new BigDecimal(millis).divide(ODataConstants.FACTOR_SECOND),
                ODataConstants.primitiveDuration);
        }
    }
    if (result == null) {
        throw new ODataApplicationException("Invalid operation / operand",
            HttpStatusCode.BAD_REQUEST.getStatusCode(), Locale.ROOT);
    }
    return result;
}
/**
 * Performs ADD/DIV/MUL/SUB on the operands interpreted as BigDecimal.
 *
 * NOTE(review): BigDecimal.divide without a MathContext throws
 * ArithmeticException for non-terminating expansions (e.g. 1/3) — confirm
 * whether callers rely on that behavior before changing it.
 *
 * @throws ODataApplicationException for any other operator
 */
private BigDecimal decimalArithmeticOperation(final BinaryOperatorKind operator) throws ODataApplicationException {
    final BigDecimal lhs = this.left.getTypedValue(BigDecimal.class);
    final BigDecimal rhs = this.right.getTypedValue(BigDecimal.class);
    if (operator == BinaryOperatorKind.ADD) {
        return lhs.add(rhs);
    }
    if (operator == BinaryOperatorKind.DIV) {
        return lhs.divide(rhs);
    }
    if (operator == BinaryOperatorKind.MUL) {
        return lhs.multiply(rhs);
    }
    if (operator == BinaryOperatorKind.SUB) {
        return lhs.subtract(rhs);
    }
    throw new ODataApplicationException("Operator not valid", HttpStatusCode.BAD_REQUEST.getStatusCode(),
        Locale.ROOT);
}
/**
 * Performs ADD/DIV/MUL/SUB/MOD on the operands interpreted as BigInteger.
 *
 * @throws ODataApplicationException for any other operator
 */
private BigInteger integerArithmeticOperation(final BinaryOperatorKind operator) throws ODataApplicationException {
    final BigInteger left = this.left.getTypedValue(BigInteger.class);
    final BigInteger right = this.right.getTypedValue(BigInteger.class);
    switch (operator) {
    case ADD:
        return left.add(right);
    case DIV:
        return left.divide(right);
    case MUL:
        return left.multiply(right);
    case SUB:
        return left.subtract(right);
    case MOD:
        // BUG FIX: BigInteger.mod() always returns a non-negative value and
        // throws ArithmeticException for a non-positive modulus. The OData
        // "mod" operator is the division remainder whose sign follows the
        // left operand, which is what remainder() implements.
        return left.remainder(right);
    default:
        throw new ODataApplicationException("Operator not valid", HttpStatusCode.BAD_REQUEST.getStatusCode(),
            Locale.ROOT);
    }
}
}
| |
/*
* Copyright (C) 2009 University of Washington
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.odk.collect.android.tasks;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.javarosa.core.model.FormDef;
import org.javarosa.core.services.transport.payload.ByteArrayPayload;
import org.javarosa.form.api.FormEntryController;
import org.odk.collect.android.application.Collect;
import org.odk.collect.android.listeners.FormSavedListener;
import org.odk.collect.android.logic.FormController;
import org.odk.collect.android.provider.FormsProviderAPI.FormsColumns;
import org.odk.collect.android.provider.InstanceProviderAPI;
import org.odk.collect.android.provider.InstanceProviderAPI.InstanceColumns;
import org.odk.collect.android.utilities.EncryptionUtils;
import org.odk.collect.android.utilities.EncryptionUtils.EncryptedFormInformation;
import android.content.ContentValues;
import android.database.Cursor;
import android.net.Uri;
import android.os.AsyncTask;
import android.util.Log;
/**
* Background task for loading a form.
*
* @author Carl Hartung (carlhartung@gmail.com)
* @author Yaw Anokwa (yanokwa@gmail.com)
*/
public class SaveToDiskTask extends AsyncTask<Void, String, Integer> {
private final static String t = "SaveToDiskTask";
private FormSavedListener mSavedListener;
private Boolean mSave;
private Boolean mMarkCompleted;
private Uri mUri;
private String mInstanceName;
public static final int SAVED = 500;
public static final int SAVE_ERROR = 501;
public static final int VALIDATE_ERROR = 502;
public static final int VALIDATED = 503;
public static final int SAVED_AND_EXIT = 504;
public SaveToDiskTask(Uri uri, Boolean saveAndExit, Boolean markCompleted, String updatedName) {
mUri = uri;
mSave = saveAndExit;
mMarkCompleted = markCompleted;
mInstanceName = updatedName;
}
/**
* Initialize {@link FormEntryController} with {@link FormDef} from binary or from XML. If given
* an instance, it will be used to fill the {@link FormDef}.
*/
@Override
protected Integer doInBackground(Void... nothing) {
FormController formController = Collect.getInstance().getFormController();
// validation failed, pass specific failure
int validateStatus = formController.validateAnswers(mMarkCompleted);
if (validateStatus != FormEntryController.ANSWER_OK) {
return validateStatus;
}
if (mMarkCompleted) {
formController.postProcessInstance();
}
Collect.getInstance().getActivityLogger().logInstanceAction(this, "save", Boolean.toString(mMarkCompleted));
// if there is a meta/instanceName field, be sure we are using the latest value
// just in case the validate somehow triggered an update.
String updatedSaveName = formController.getSubmissionMetadata().instanceName;
if ( updatedSaveName != null ) {
mInstanceName = updatedSaveName;
}
boolean saveOutcome = exportData(mMarkCompleted);
// attempt to remove any scratch file
File shadowInstance = savepointFile(formController.getInstancePath());
if ( shadowInstance.exists() ) {
shadowInstance.delete();
}
if (saveOutcome) {
return mSave ? SAVED_AND_EXIT : SAVED;
}
return SAVE_ERROR;
}
private void updateInstanceDatabase(boolean incomplete, boolean canEditAfterCompleted) {
FormController formController = Collect.getInstance().getFormController();
// Update the instance database...
ContentValues values = new ContentValues();
if (mInstanceName != null) {
values.put(InstanceColumns.DISPLAY_NAME, mInstanceName);
}
if (incomplete || !mMarkCompleted) {
values.put(InstanceColumns.STATUS, InstanceProviderAPI.STATUS_INCOMPLETE);
} else {
values.put(InstanceColumns.STATUS, InstanceProviderAPI.STATUS_COMPLETE);
}
// update this whether or not the status is complete...
values.put(InstanceColumns.CAN_EDIT_WHEN_COMPLETE, Boolean.toString(canEditAfterCompleted));
// If FormEntryActivity was started with an Instance, just update that instance
if (Collect.getInstance().getContentResolver().getType(mUri) == InstanceColumns.CONTENT_ITEM_TYPE) {
int updated = Collect.getInstance().getContentResolver().update(mUri, values, null, null);
if (updated > 1) {
Log.w(t, "Updated more than one entry, that's not good: " + mUri.toString());
} else if (updated == 1) {
Log.i(t, "Instance successfully updated");
} else {
Log.e(t, "Instance doesn't exist but we have its Uri!! " + mUri.toString());
}
} else if (Collect.getInstance().getContentResolver().getType(mUri) == FormsColumns.CONTENT_ITEM_TYPE) {
// If FormEntryActivity was started with a form, then it's likely the first time we're
// saving.
// However, it could be a not-first time saving if the user has been using the manual
// 'save data' option from the menu. So try to update first, then make a new one if that
// fails.
String instancePath = formController.getInstancePath().getAbsolutePath();
String where = InstanceColumns.INSTANCE_FILE_PATH + "=?";
String[] whereArgs = {
instancePath
};
int updated =
Collect.getInstance().getContentResolver()
.update(InstanceColumns.CONTENT_URI, values, where, whereArgs);
if (updated > 1) {
Log.w(t, "Updated more than one entry, that's not good: " + instancePath);
} else if (updated == 1) {
Log.i(t, "Instance found and successfully updated: " + instancePath);
// already existed and updated just fine
} else {
Log.i(t, "No instance found, creating");
// Entry didn't exist, so create it.
Cursor c = null;
try {
// retrieve the form definition...
c = Collect.getInstance().getContentResolver().query(mUri, null, null, null, null);
c.moveToFirst();
String jrformid = c.getString(c.getColumnIndex(FormsColumns.JR_FORM_ID));
String jrversion = c.getString(c.getColumnIndex(FormsColumns.JR_VERSION));
String formname = c.getString(c.getColumnIndex(FormsColumns.DISPLAY_NAME));
String submissionUri = null;
if ( !c.isNull(c.getColumnIndex(FormsColumns.SUBMISSION_URI)) ) {
submissionUri = c.getString(c.getColumnIndex(FormsColumns.SUBMISSION_URI));
}
// add missing fields into values
values.put(InstanceColumns.INSTANCE_FILE_PATH, instancePath);
values.put(InstanceColumns.SUBMISSION_URI, submissionUri);
if (mInstanceName != null) {
values.put(InstanceColumns.DISPLAY_NAME, mInstanceName);
} else {
values.put(InstanceColumns.DISPLAY_NAME, formname);
}
values.put(InstanceColumns.JR_FORM_ID, jrformid);
values.put(InstanceColumns.JR_VERSION, jrversion);
} finally {
if ( c != null ) {
c.close();
}
}
mUri = Collect.getInstance().getContentResolver()
.insert(InstanceColumns.CONTENT_URI, values);
}
}
}
/**
* Return the name of the savepoint file for a given instance.
*
* @param instancePath
* @return
*/
public static File savepointFile(File instancePath) {
File tempDir = new File(Collect.CACHE_PATH);
File temp = new File(tempDir, instancePath.getName() + ".save");
return temp;
}
/**
* Blocking write of the instance data to a temp file. Used to safeguard data
* during intent launches for, e.g., taking photos.
*
* @param tempPath
* @return
*/
public static String blockingExportTempData() {
FormController formController = Collect.getInstance().getFormController();
long start = System.currentTimeMillis();
File temp = savepointFile(formController.getInstancePath());
ByteArrayPayload payload;
try {
payload = formController.getFilledInFormXml();
// write out xml
if ( exportXmlFile(payload, temp.getAbsolutePath()) ) {
return temp.getAbsolutePath();
}
return null;
} catch (IOException e) {
Log.e(t, "Error creating serialized payload");
e.printStackTrace();
return null;
} finally {
long end = System.currentTimeMillis();
Log.i(t, "Savepoint ms: " + Long.toString(end - start));
}
}
/**
* Write's the data to the sdcard, and updates the instances content provider.
* In theory we don't have to write to disk, and this is where you'd add
* other methods.
* @param markCompleted
* @return
*/
private boolean exportData(boolean markCompleted) {
FormController formController = Collect.getInstance().getFormController();
ByteArrayPayload payload;
try {
payload = formController.getFilledInFormXml();
// write out xml
String instancePath = formController.getInstancePath().getAbsolutePath();
exportXmlFile(payload, instancePath);
} catch (IOException e) {
Log.e(t, "Error creating serialized payload");
e.printStackTrace();
return false;
}
// update the mUri. We have exported the reloadable instance, so update status...
// Since we saved a reloadable instance, it is flagged as re-openable so that if any error
// occurs during the packaging of the data for the server fails (e.g., encryption),
// we can still reopen the filled-out form and re-save it at a later time.
updateInstanceDatabase(true, true);
if ( markCompleted ) {
// now see if the packaging of the data for the server would make it
// non-reopenable (e.g., encryption or send an SMS or other fraction of the form).
boolean canEditAfterCompleted = formController.isSubmissionEntireForm();
boolean isEncrypted = false;
// build a submission.xml to hold the data being submitted
// and (if appropriate) encrypt the files on the side
// pay attention to the ref attribute of the submission profile...
try {
payload = formController.getSubmissionXml();
} catch (IOException e) {
Log.e(t, "Error creating serialized payload");
e.printStackTrace();
return false;
}
File instanceXml = formController.getInstancePath();
File submissionXml = new File(instanceXml.getParentFile(), "submission.xml");
// write out submission.xml -- the data to actually submit to aggregate
exportXmlFile(payload, submissionXml.getAbsolutePath());
// see if the form is encrypted and we can encrypt it...
EncryptedFormInformation formInfo = EncryptionUtils.getEncryptedFormInformation(mUri,
formController.getSubmissionMetadata());
if ( formInfo != null ) {
// if we are encrypting, the form cannot be reopened afterward
canEditAfterCompleted = false;
// and encrypt the submission (this is a one-way operation)...
if ( !EncryptionUtils.generateEncryptedSubmission(instanceXml, submissionXml, formInfo) ) {
return false;
}
isEncrypted = true;
}
// At this point, we have:
// 1. the saved original instanceXml,
// 2. all the plaintext attachments
// 2. the submission.xml that is the completed xml (whether encrypting or not)
// 3. all the encrypted attachments if encrypting (isEncrypted = true).
//
// NEXT:
// 1. Update the instance database (with status complete).
// 2. Overwrite the instanceXml with the submission.xml
// and remove the plaintext attachments if encrypting
updateInstanceDatabase(false, canEditAfterCompleted);
if ( !canEditAfterCompleted ) {
// AT THIS POINT, there is no going back. We are committed
// to returning "success" (true) whether or not we can
// rename "submission.xml" to instanceXml and whether or
// not we can delete the plaintext media files.
//
// Handle the fall-out for a failed "submission.xml" rename
// in the InstanceUploader task. Leftover plaintext media
// files are handled during form deletion.
// delete the restore Xml file.
if ( !instanceXml.delete() ) {
Log.e(t, "Error deleting " + instanceXml.getAbsolutePath()
+ " prior to renaming submission.xml");
return true;
}
// rename the submission.xml to be the instanceXml
if ( !submissionXml.renameTo(instanceXml) ) {
Log.e(t, "Error renaming submission.xml to " + instanceXml.getAbsolutePath());
return true;
}
} else {
// try to delete the submissionXml file, since it is
// identical to the existing instanceXml file
// (we don't need to delete and rename anything).
if ( !submissionXml.delete() ) {
Log.w(t, "Error deleting " + submissionXml.getAbsolutePath()
+ " (instance is re-openable)");
}
}
// if encrypted, delete all plaintext files
// (anything not named instanceXml or anything not ending in .enc)
if ( isEncrypted ) {
if ( !EncryptionUtils.deletePlaintextFiles(instanceXml) ) {
Log.e(t, "Error deleting plaintext files for " + instanceXml.getAbsolutePath());
}
}
}
return true;
}
/**
* This method actually writes the xml to disk.
* @param payload
* @param path
* @return
*/
private static boolean exportXmlFile(ByteArrayPayload payload, String path) {
// create data stream
InputStream is = payload.getPayloadStream();
FileOutputStream fout = null;
byte[] buffer = new byte[2048];
try {
fout = new FileOutputStream(path);
BufferedOutputStream out = new BufferedOutputStream(fout);
// read from data stream
int len = is.read(buffer);
while ( len != -1 ) {
out.write(buffer, 0, len);
len = is.read(buffer);
}
out.flush();
fout.getChannel().force(false);
out.close();
fout = null;
return true;
} catch (IOException e) {
Log.e(t, "Error reading from payload data stream or writing to storage " + e.toString());
e.printStackTrace();
return false;
} finally {
if ( fout != null ) {
try {
fout.close();
} catch (IOException e) {
// ignored
}
}
}
}
@Override
protected void onPostExecute(Integer result) {
synchronized (this) {
if (mSavedListener != null)
mSavedListener.savingComplete(result);
}
}
public void setFormSavedListener(FormSavedListener fsl) {
synchronized (this) {
mSavedListener = fsl;
}
}
}
| |
/*
* Druid - a distributed column store.
* Copyright 2012 - 2015 Metamarkets Group Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.druid.segment;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.metamx.common.Pair;
import io.druid.granularity.QueryGranularity;
import io.druid.query.Druids;
import io.druid.query.QueryRunner;
import io.druid.query.Result;
import io.druid.query.TestQueryRunners;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
import io.druid.query.aggregation.DoubleMinAggregatorFactory;
import io.druid.query.aggregation.PostAggregator;
import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import io.druid.query.aggregation.post.ArithmeticPostAggregator;
import io.druid.query.aggregation.post.ConstantPostAggregator;
import io.druid.query.aggregation.post.FieldAccessPostAggregator;
import io.druid.query.filter.DimFilter;
import io.druid.query.search.SearchResultValue;
import io.druid.query.search.search.SearchHit;
import io.druid.query.search.search.SearchQuery;
import io.druid.query.spec.MultipleIntervalSegmentSpec;
import io.druid.query.spec.QuerySegmentSpec;
import io.druid.query.timeboundary.TimeBoundaryQuery;
import io.druid.query.timeboundary.TimeBoundaryResultValue;
import io.druid.query.timeseries.TimeseriesQuery;
import io.druid.query.timeseries.TimeseriesResultValue;
import io.druid.query.topn.TopNQuery;
import io.druid.query.topn.TopNQueryBuilder;
import io.druid.query.topn.TopNResultValue;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*/
@Ignore
public class AppendTest
{
// Metric aggregators applied when building the appended indexes; the
// second set omits the hyperUnique metric.
private static final AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{
new DoubleSumAggregatorFactory("index", "index"),
new CountAggregatorFactory("count"),
new HyperUniquesAggregatorFactory("quality_uniques", "quality")
};
private static final AggregatorFactory[] METRIC_AGGS_NO_UNIQ = new AggregatorFactory[]{
new DoubleSumAggregatorFactory("index", "index"),
new CountAggregatorFactory("count")
};
// Shared datasource/dimension/metric names used by the query builders below.
final String dataSource = "testing";
final QueryGranularity allGran = QueryGranularity.ALL;
final String dimensionValue = "dimension";
final String valueValue = "value";
final String marketDimension = "market";
final String qualityDimension = "quality";
final String placementDimension = "placement";
final String placementishDimension = "placementish";
final String indexMetric = "index";
// Aggregators and post-aggregators shared by the timeseries/topN queries.
final CountAggregatorFactory rowsCount = new CountAggregatorFactory("rows");
final DoubleSumAggregatorFactory indexDoubleSum = new DoubleSumAggregatorFactory("index", "index");
final HyperUniquesAggregatorFactory uniques = new HyperUniquesAggregatorFactory("uniques", "quality_uniques");
final ConstantPostAggregator constant = new ConstantPostAggregator("const", 1L);
final FieldAccessPostAggregator rowsPostAgg = new FieldAccessPostAggregator("rows", "rows");
final FieldAccessPostAggregator indexPostAgg = new FieldAccessPostAggregator("index", "index");
// addRowsIndexConstant = const + rows + index
final ArithmeticPostAggregator addRowsIndexConstant =
new ArithmeticPostAggregator(
"addRowsIndexConstant", "+", Lists.newArrayList(constant, rowsPostAgg, indexPostAgg)
);
final List<AggregatorFactory> commonAggregators = Arrays.asList(rowsCount, indexDoubleSum, uniques);
// Interval wide enough to cover all test data ("full scan").
final QuerySegmentSpec fullOnInterval = new MultipleIntervalSegmentSpec(
Arrays.asList(new Interval("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z"))
);
// Appended segments built in setUp(); see the overlap diagrams there.
private Segment segment;
private Segment segment2;
private Segment segment3;
@Before
public void setUp() throws Exception
{
  // Segments 1 and 2 overlap with a shared tail:
  //   |------|
  //       |--------|
  final QueryableIndex tailOverlap = SchemalessIndex.getAppendedIncrementalIndex(
      Arrays.asList(
          new Pair<String, AggregatorFactory[]>("append.json.1", METRIC_AGGS_NO_UNIQ),
          new Pair<String, AggregatorFactory[]>("append.json.2", METRIC_AGGS)
      ),
      Arrays.asList(
          new Interval("2011-01-12T00:00:00.000Z/2011-01-16T00:00:00.000Z"),
          new Interval("2011-01-14T22:00:00.000Z/2011-01-16T00:00:00.000Z")
      )
  );
  segment = new QueryableIndexSegment(null, tailOverlap);

  // Segments 3 and 4 overlap with one fully contained in the other:
  //   |------------|
  //       |-----|
  final QueryableIndex nestedOverlap = SchemalessIndex.getAppendedIncrementalIndex(
      Arrays.asList(
          new Pair<String, AggregatorFactory[]>("append.json.3", METRIC_AGGS_NO_UNIQ),
          new Pair<String, AggregatorFactory[]>("append.json.4", METRIC_AGGS)
      ),
      Arrays.asList(
          new Interval("2011-01-12T00:00:00.000Z/2011-01-16T00:00:00.000Z"),
          new Interval("2011-01-13T00:00:00.000Z/2011-01-14T00:00:00.000Z")
      )
  );
  segment2 = new QueryableIndexSegment(null, nestedOverlap);

  // Segments 5, 6 and 7 exercise gaps created by rows being discounted:
  //   |-------------|
  //     |---|
  //           |---|
  final QueryableIndex gappedOverlap = SchemalessIndex.getAppendedIncrementalIndex(
      Arrays.asList(
          new Pair<String, AggregatorFactory[]>("append.json.5", METRIC_AGGS),
          new Pair<String, AggregatorFactory[]>("append.json.6", METRIC_AGGS),
          new Pair<String, AggregatorFactory[]>("append.json.7", METRIC_AGGS)
      ),
      Arrays.asList(
          new Interval("2011-01-12T00:00:00.000Z/2011-01-22T00:00:00.000Z"),
          new Interval("2011-01-13T00:00:00.000Z/2011-01-16T00:00:00.000Z"),
          new Interval("2011-01-18T00:00:00.000Z/2011-01-21T00:00:00.000Z")
      )
  );
  segment3 = new QueryableIndexSegment(null, gappedOverlap);
}
@Test
public void testTimeBoundary()
{
  // Min/max timestamps of the tail-overlapping appended segment.
  final TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder()
      .dataSource(dataSource)
      .build();
  final QueryRunner boundaryRunner = TestQueryRunners.makeTimeBoundaryQueryRunner(segment);
  final HashMap<String, Object> responseContext = new HashMap<String, Object>();
  final List<Result<TimeBoundaryResultValue>> expected = Arrays.asList(
      new Result<TimeBoundaryResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TimeBoundaryResultValue(
              ImmutableMap.of(
                  TimeBoundaryQuery.MIN_TIME, new DateTime("2011-01-12T00:00:00.000Z"),
                  TimeBoundaryQuery.MAX_TIME, new DateTime("2011-01-15T02:00:00.000Z")
              )
          )
      )
  );
  TestHelper.assertExpectedResults(expected, boundaryRunner.run(query, responseContext));
}
@Test
public void testTimeBoundary2()
{
  // Min/max timestamps of the nested-overlap appended segment.
  final TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder()
      .dataSource(dataSource)
      .build();
  final QueryRunner boundaryRunner = TestQueryRunners.makeTimeBoundaryQueryRunner(segment2);
  final HashMap<String, Object> responseContext = new HashMap<String, Object>();
  final List<Result<TimeBoundaryResultValue>> expected = Arrays.asList(
      new Result<TimeBoundaryResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TimeBoundaryResultValue(
              ImmutableMap.of(
                  TimeBoundaryQuery.MIN_TIME, new DateTime("2011-01-12T00:00:00.000Z"),
                  TimeBoundaryQuery.MAX_TIME, new DateTime("2011-01-15T00:00:00.000Z")
              )
          )
      )
  );
  TestHelper.assertExpectedResults(expected, boundaryRunner.run(query, responseContext));
}
@Test
public void testTimeSeries()
{
  // Full-interval timeseries over the tail-overlapping appended segment.
  final TimeseriesQuery query = makeTimeseriesQuery();
  final QueryRunner timeseriesRunner = TestQueryRunners.makeTimeSeriesQueryRunner(segment);
  final HashMap<String, Object> responseContext = new HashMap<String, Object>();
  final List<Result<TimeseriesResultValue>> expected = Arrays.asList(
      new Result<TimeseriesResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>builder()
                  .put("rows", 8L)
                  .put("index", 700.0D)
                  .put("addRowsIndexConstant", 709.0D)
                  .put("uniques", 1.0002442201269182D)
                  .put("maxIndex", 100.0D)
                  .put("minIndex", 0.0D)
                  .build()
          )
      )
  );
  TestHelper.assertExpectedResults(expected, timeseriesRunner.run(query, responseContext));
}
@Test
public void testTimeSeries2()
{
  // Full-interval timeseries over the nested-overlap appended segment.
  final TimeseriesQuery query = makeTimeseriesQuery();
  final QueryRunner timeseriesRunner = TestQueryRunners.makeTimeSeriesQueryRunner(segment2);
  final HashMap<String, Object> responseContext = new HashMap<String, Object>();
  final List<Result<TimeseriesResultValue>> expected = Arrays.asList(
      new Result<TimeseriesResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>builder()
                  .put("rows", 7L)
                  .put("index", 500.0D)
                  .put("addRowsIndexConstant", 508.0D)
                  .put("uniques", 0.0D)
                  .put("maxIndex", 100.0D)
                  .put("minIndex", 0.0D)
                  .build()
          )
      )
  );
  TestHelper.assertExpectedResults(expected, timeseriesRunner.run(query, responseContext));
}
@Test
public void testFilteredTimeSeries()
{
  // Filtered timeseries over the tail-overlapping appended segment.
  final TimeseriesQuery query = makeFilteredTimeseriesQuery();
  final QueryRunner timeseriesRunner = TestQueryRunners.makeTimeSeriesQueryRunner(segment);
  final HashMap<String, Object> responseContext = new HashMap<String, Object>();
  final List<Result<TimeseriesResultValue>> expected = Arrays.asList(
      new Result<TimeseriesResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>builder()
                  .put("rows", 5L)
                  .put("index", 500.0D)
                  .put("addRowsIndexConstant", 506.0D)
                  .put("uniques", 1.0002442201269182D)
                  .put("maxIndex", 100.0D)
                  .put("minIndex", 100.0D)
                  .build()
          )
      )
  );
  TestHelper.assertExpectedResults(expected, timeseriesRunner.run(query, responseContext));
}
@Test
public void testFilteredTimeSeries2()
{
  // Filtered timeseries over the nested-overlap appended segment.
  final TimeseriesQuery query = makeFilteredTimeseriesQuery();
  final QueryRunner timeseriesRunner = TestQueryRunners.makeTimeSeriesQueryRunner(segment2);
  final HashMap<String, Object> responseContext = new HashMap<String, Object>();
  final List<Result<TimeseriesResultValue>> expected = Arrays.asList(
      new Result<TimeseriesResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>builder()
                  .put("rows", 4L)
                  .put("index", 400.0D)
                  .put("addRowsIndexConstant", 405.0D)
                  .put("uniques", 0.0D)
                  .put("maxIndex", 100.0D)
                  .put("minIndex", 100.0D)
                  .build()
          )
      )
  );
  TestHelper.assertExpectedResults(expected, timeseriesRunner.run(query, responseContext));
}
@Test
public void testTopNSeries()
{
  // Expected middle row: rows whose market dimension is null.
  final Map<String, Object> nullMarketRow = new HashMap<String, Object>();
  nullMarketRow.put("market", null);
  nullMarketRow.put("rows", 3L);
  nullMarketRow.put("index", 200.0D);
  nullMarketRow.put("addRowsIndexConstant", 204.0D);
  nullMarketRow.put("uniques", 0.0D);
  nullMarketRow.put("maxIndex", 100.0);
  nullMarketRow.put("minIndex", 0.0);

  final List<Result<TopNResultValue>> expected = Arrays.asList(
      new Result<TopNResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TopNResultValue(
              Arrays.<Map<String, Object>>asList(
                  ImmutableMap.<String, Object>builder()
                      .put("market", "spot")
                      .put("rows", 3L)
                      .put("index", 300.0D)
                      .put("addRowsIndexConstant", 304.0D)
                      .put("uniques", 0.0D)
                      .put("maxIndex", 100.0)
                      .put("minIndex", 100.0)
                      .build(),
                  nullMarketRow,
                  ImmutableMap.<String, Object>builder()
                      .put("market", "total_market")
                      .put("rows", 2L)
                      .put("index", 200.0D)
                      .put("addRowsIndexConstant", 203.0D)
                      .put("uniques", 1.0002442201269182D)
                      .put("maxIndex", 100.0D)
                      .put("minIndex", 100.0D)
                      .build()
              )
          )
      )
  );
  final TopNQuery query = makeTopNQuery();
  final QueryRunner topNRunner = TestQueryRunners.makeTopNQueryRunner(segment);
  final HashMap<String, Object> responseContext = new HashMap<String, Object>();
  TestHelper.assertExpectedResults(expected, topNRunner.run(query, responseContext));
}
@Test
public void testTopNSeries2()
{
  // Expected middle row: rows whose market dimension is null.
  final Map<String, Object> nullMarketRow = new HashMap<String, Object>();
  nullMarketRow.put("market", null);
  nullMarketRow.put("rows", 3L);
  nullMarketRow.put("index", 100.0D);
  nullMarketRow.put("addRowsIndexConstant", 104.0D);
  nullMarketRow.put("uniques", 0.0D);
  nullMarketRow.put("maxIndex", 100.0);
  nullMarketRow.put("minIndex", 0.0);

  final List<Result<TopNResultValue>> expected = Arrays.asList(
      new Result<TopNResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TopNResultValue(
              Arrays.<Map<String, Object>>asList(
                  ImmutableMap.<String, Object>builder()
                      .put("market", "total_market")
                      .put("rows", 3L)
                      .put("index", 300.0D)
                      .put("addRowsIndexConstant", 304.0D)
                      .put("uniques", 0.0D)
                      .put("maxIndex", 100.0D)
                      .put("minIndex", 100.0D)
                      .build(),
                  nullMarketRow,
                  ImmutableMap.<String, Object>builder()
                      .put("market", "spot")
                      .put("rows", 1L)
                      .put("index", 100.0D)
                      .put("addRowsIndexConstant", 102.0D)
                      .put("uniques", 0.0D)
                      .put("maxIndex", 100.0)
                      .put("minIndex", 100.0)
                      .build()
              )
          )
      )
  );
  final TopNQuery query = makeTopNQuery();
  final QueryRunner topNRunner = TestQueryRunners.makeTopNQueryRunner(segment2);
  final HashMap<String, Object> responseContext = new HashMap<String, Object>();
  TestHelper.assertExpectedResults(expected, topNRunner.run(query, responseContext));
}
@Test
public void testFilteredTopNSeries()
{
  // Filtered topN over the tail-overlapping appended segment: one spot row survives.
  final TopNQuery query = makeFilteredTopNQuery();
  final QueryRunner topNRunner = TestQueryRunners.makeTopNQueryRunner(segment);
  final HashMap<String, Object> responseContext = new HashMap<String, Object>();
  final List<Result<TopNResultValue>> expected = Arrays.asList(
      new Result<TopNResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TopNResultValue(
              Arrays.<Map<String, Object>>asList(
                  ImmutableMap.<String, Object>builder()
                      .put("market", "spot")
                      .put("rows", 1L)
                      .put("index", 100.0D)
                      .put("addRowsIndexConstant", 102.0D)
                      .put("uniques", 0.0D)
                      .put("maxIndex", 100.0)
                      .put("minIndex", 100.0)
                      .build()
              )
          )
      )
  );
  TestHelper.assertExpectedResults(expected, topNRunner.run(query, responseContext));
}
  @Test
  public void testFilteredTopNSeries2()
  {
    // The same AND filter matches nothing in segment2; expect one result
    // bucket containing an empty TopN page.
    List<Result<TopNResultValue>> expectedResults = Arrays.asList(
        new Result<TopNResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TopNResultValue(
                Lists.<Map<String, Object>>newArrayList()
            )
        )
    );
    TopNQuery query = makeFilteredTopNQuery();
    QueryRunner runner = TestQueryRunners.makeTopNQueryRunner(segment2);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }
  @Test
  public void testSearch()
  {
    // Substring search for "a" hits values in several dimensions of segment.
    List<Result<SearchResultValue>> expectedResults = Arrays.asList(
        new Result<SearchResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new SearchResultValue(
                Arrays.<SearchHit>asList(
                    new SearchHit(placementishDimension, "a"),
                    new SearchHit(qualityDimension, "automotive"),
                    new SearchHit(placementDimension, "mezzanine"),
                    new SearchHit(marketDimension, "total_market")
                )
            )
        )
    );
    SearchQuery query = makeSearchQuery();
    QueryRunner runner = TestQueryRunners.makeSearchQueryRunner(segment);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }
  @Test
  public void testSearchWithOverlap()
  {
    // Same search against segment2; the "automotive" hit from segment is
    // absent here — only three hits are expected.
    List<Result<SearchResultValue>> expectedResults = Arrays.asList(
        new Result<SearchResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new SearchResultValue(
                Arrays.<SearchHit>asList(
                    new SearchHit(placementishDimension, "a"),
                    new SearchHit(placementDimension, "mezzanine"),
                    new SearchHit(marketDimension, "total_market")
                )
            )
        )
    );
    SearchQuery query = makeSearchQuery();
    QueryRunner runner = TestQueryRunners.makeSearchQueryRunner(segment2);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }
  @Test
  public void testFilteredSearch()
  {
    // NOT(market=spot) filtered search on segment: the placementish "a" hit
    // is excluded, leaving two hits.
    List<Result<SearchResultValue>> expectedResults = Arrays.asList(
        new Result<SearchResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new SearchResultValue(
                Arrays.<SearchHit>asList(
                    new SearchHit(placementDimension, "mezzanine"),
                    new SearchHit(marketDimension, "total_market")
                )
            )
        )
    );
    SearchQuery query = makeFilteredSearchQuery();
    QueryRunner runner = TestQueryRunners.makeSearchQueryRunner(segment);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }
  @Test
  public void testFilteredSearch2()
  {
    // Same filtered search against segment2; here the placementish "a" hit
    // survives the NOT(market=spot) filter.
    List<Result<SearchResultValue>> expectedResults = Arrays.asList(
        new Result<SearchResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new SearchResultValue(
                Arrays.<SearchHit>asList(
                    new SearchHit(placementishDimension, "a"),
                    new SearchHit(placementDimension, "mezzanine"),
                    new SearchHit(marketDimension, "total_market")
                )
            )
        )
    );
    SearchQuery query = makeFilteredSearchQuery();
    QueryRunner runner = TestQueryRunners.makeSearchQueryRunner(segment2);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }
  @Test
  public void testRowFiltering()
  {
    // Timeseries filtered to market="breakstuff" over segment3; the expected
    // totals (5 rows, index 500) come from that segment's fixture data.
    List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
        new Result<TimeseriesResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>builder()
                    .put("rows", 5L)
                    .put("index", 500.0D)
                    .put("addRowsIndexConstant", 506.0D)
                    .put("uniques", 0.0D)
                    .put("maxIndex", 100.0D)
                    .put("minIndex", 100.0D)
                    .build()
            )
        )
    );
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource(dataSource)
        .granularity(allGran)
        .intervals(fullOnInterval)
        .filters(marketDimension, "breakstuff")
        .aggregators(
            Lists.<AggregatorFactory>newArrayList(
                Iterables.concat(
                    commonAggregators,
                    Lists.newArrayList(
                        new DoubleMaxAggregatorFactory("maxIndex", "index"),
                        new DoubleMinAggregatorFactory("minIndex", "index")
                    )
                )
            )
        )
        .postAggregators(Arrays.<PostAggregator>asList(addRowsIndexConstant))
        .build();
    QueryRunner runner = TestQueryRunners.makeTimeSeriesQueryRunner(segment3);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }
private TimeseriesQuery makeTimeseriesQuery()
{
return Druids.newTimeseriesQueryBuilder()
.dataSource(dataSource)
.granularity(allGran)
.intervals(fullOnInterval)
.aggregators(
Lists.<AggregatorFactory>newArrayList(
Iterables.concat(
commonAggregators,
Lists.newArrayList(
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index")
)
)
)
)
.postAggregators(Arrays.<PostAggregator>asList(addRowsIndexConstant))
.build();
}
private TimeseriesQuery makeFilteredTimeseriesQuery()
{
return Druids.newTimeseriesQueryBuilder()
.dataSource(dataSource)
.granularity(allGran)
.intervals(fullOnInterval)
.filters(
Druids.newOrDimFilterBuilder()
.fields(
Arrays.<DimFilter>asList(
Druids.newSelectorDimFilterBuilder()
.dimension(marketDimension)
.value("spot")
.build(),
Druids.newSelectorDimFilterBuilder()
.dimension(marketDimension)
.value("total_market")
.build()
)
).build()
)
.aggregators(
Lists.<AggregatorFactory>newArrayList(
Iterables.concat(
commonAggregators,
Lists.newArrayList(
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index")
)
)
)
)
.postAggregators(Arrays.<PostAggregator>asList(addRowsIndexConstant))
.build();
}
private TopNQuery makeTopNQuery()
{
return new TopNQueryBuilder()
.dataSource(dataSource)
.granularity(allGran)
.dimension(marketDimension)
.metric(indexMetric)
.threshold(3)
.intervals(fullOnInterval)
.aggregators(
Lists.<AggregatorFactory>newArrayList(
Iterables.concat(
commonAggregators,
Lists.newArrayList(
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index")
)
)
)
)
.postAggregators(Arrays.<PostAggregator>asList(addRowsIndexConstant))
.build();
}
private TopNQuery makeFilteredTopNQuery()
{
return new TopNQueryBuilder()
.dataSource(dataSource)
.granularity(allGran)
.dimension(marketDimension)
.metric(indexMetric)
.threshold(3)
.filters(
Druids.newAndDimFilterBuilder()
.fields(
Arrays.<DimFilter>asList(
Druids.newSelectorDimFilterBuilder()
.dimension(marketDimension)
.value("spot")
.build(),
Druids.newSelectorDimFilterBuilder()
.dimension(placementDimension)
.value("preferred")
.build()
)
).build()
)
.intervals(fullOnInterval)
.aggregators(
Lists.<AggregatorFactory>newArrayList(
Iterables.concat(
commonAggregators,
Lists.newArrayList(
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index")
)
)
)
)
.postAggregators(Arrays.<PostAggregator>asList(addRowsIndexConstant))
.build();
}
  /** Full-interval, all-granularity search for the substring "a". */
  private SearchQuery makeSearchQuery()
  {
    return Druids.newSearchQueryBuilder()
        .dataSource(dataSource)
        .granularity(allGran)
        .intervals(fullOnInterval)
        .query("a")
        .build();
  }
private SearchQuery makeFilteredSearchQuery()
{
return Druids.newSearchQueryBuilder()
.dataSource(dataSource)
.filters(
Druids.newNotDimFilterBuilder()
.field(
Druids.newSelectorDimFilterBuilder()
.dimension(marketDimension)
.value("spot")
.build()
).build()
)
.granularity(allGran)
.intervals(fullOnInterval)
.query("a")
.build();
}
}
| |
// Copyright (C) 2012 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.change;
import static com.google.gerrit.extensions.client.ListChangesOption.ALL_COMMITS;
import static com.google.gerrit.extensions.client.ListChangesOption.ALL_FILES;
import static com.google.gerrit.extensions.client.ListChangesOption.ALL_REVISIONS;
import static com.google.gerrit.extensions.client.ListChangesOption.CHECK;
import static com.google.gerrit.extensions.client.ListChangesOption.CURRENT_ACTIONS;
import static com.google.gerrit.extensions.client.ListChangesOption.CURRENT_COMMIT;
import static com.google.gerrit.extensions.client.ListChangesOption.CURRENT_FILES;
import static com.google.gerrit.extensions.client.ListChangesOption.CURRENT_REVISION;
import static com.google.gerrit.extensions.client.ListChangesOption.DETAILED_ACCOUNTS;
import static com.google.gerrit.extensions.client.ListChangesOption.DETAILED_LABELS;
import static com.google.gerrit.extensions.client.ListChangesOption.DOWNLOAD_COMMANDS;
import static com.google.gerrit.extensions.client.ListChangesOption.DRAFT_COMMENTS;
import static com.google.gerrit.extensions.client.ListChangesOption.LABELS;
import static com.google.gerrit.extensions.client.ListChangesOption.MESSAGES;
import static com.google.gerrit.extensions.client.ListChangesOption.REVIEWED;
import static com.google.gerrit.extensions.client.ListChangesOption.WEB_LINKS;
import com.google.auto.value.AutoValue;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.MoreObjects;
import com.google.common.base.Optional;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.SetMultimap;
import com.google.common.collect.Sets;
import com.google.common.collect.Table;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.data.LabelType;
import com.google.gerrit.common.data.LabelTypes;
import com.google.gerrit.common.data.LabelValue;
import com.google.gerrit.common.data.Permission;
import com.google.gerrit.common.data.PermissionRange;
import com.google.gerrit.common.data.SubmitRecord;
import com.google.gerrit.extensions.api.changes.FixInput;
import com.google.gerrit.extensions.client.ListChangesOption;
import com.google.gerrit.extensions.common.AccountInfo;
import com.google.gerrit.extensions.common.ActionInfo;
import com.google.gerrit.extensions.common.ApprovalInfo;
import com.google.gerrit.extensions.common.ChangeInfo;
import com.google.gerrit.extensions.common.ChangeMessageInfo;
import com.google.gerrit.extensions.common.CommitInfo;
import com.google.gerrit.extensions.common.FetchInfo;
import com.google.gerrit.extensions.common.GitPerson;
import com.google.gerrit.extensions.common.LabelInfo;
import com.google.gerrit.extensions.common.ProblemInfo;
import com.google.gerrit.extensions.common.RevisionInfo;
import com.google.gerrit.extensions.common.WebLinkInfo;
import com.google.gerrit.extensions.config.DownloadCommand;
import com.google.gerrit.extensions.config.DownloadScheme;
import com.google.gerrit.extensions.registration.DynamicMap;
import com.google.gerrit.extensions.restapi.RestView;
import com.google.gerrit.extensions.restapi.Url;
import com.google.gerrit.extensions.webui.PrivateInternals_UiActionDescription;
import com.google.gerrit.extensions.webui.UiAction;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.ChangeMessage;
import com.google.gerrit.reviewdb.client.Patch;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gerrit.reviewdb.client.PatchSetInfo;
import com.google.gerrit.reviewdb.client.PatchSetInfo.ParentInfo;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.UserIdentity;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.AnonymousUser;
import com.google.gerrit.server.ChangeMessagesUtil;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.PatchLineCommentsUtil;
import com.google.gerrit.server.WebLinks;
import com.google.gerrit.server.account.AccountLoader;
import com.google.gerrit.server.extensions.webui.UiActions;
import com.google.gerrit.server.git.LabelNormalizer;
import com.google.gerrit.server.notedb.ChangeNotes;
import com.google.gerrit.server.patch.PatchListNotAvailableException;
import com.google.gerrit.server.patch.PatchSetInfoFactory;
import com.google.gerrit.server.patch.PatchSetInfoNotAvailableException;
import com.google.gerrit.server.project.ChangeControl;
import com.google.gerrit.server.project.SubmitRuleEvaluator;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.gerrit.server.query.change.ChangeData.ChangedLines;
import com.google.gerrit.server.query.change.QueryResult;
import com.google.gwtorm.server.OrmException;
import com.google.inject.Inject;
import com.google.inject.Provider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Timestamp;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
public class ChangeJson {
  private static final Logger log = LoggerFactory.getLogger(ChangeJson.class);
  // Shared immutable empty list used as a default for change messages.
  private static final List<ChangeMessage> NO_MESSAGES =
      ImmutableList.of();
  // Injected collaborators; all assigned exactly once in the constructor.
  private final Provider<ReviewDb> db;
  private final LabelNormalizer labelNormalizer;
  private final Provider<CurrentUser> userProvider;
  private final AnonymousUser anonymous;
  private final IdentifiedUser.GenericFactory userFactory;
  private final ChangeData.Factory changeDataFactory;
  private final PatchSetInfoFactory patchSetInfoFactory;
  private final FileInfoJson fileInfoJson;
  private final AccountLoader.Factory accountLoaderFactory;
  private final DynamicMap<DownloadScheme> downloadSchemes;
  private final DynamicMap<DownloadCommand> downloadCommands;
  private final DynamicMap<RestView<ChangeResource>> changeViews;
  private final Revisions revisions;
  private final WebLinks webLinks;
  // Output options requested for this formatting pass; starts empty and is
  // populated via addOption()/addOptions().
  private final EnumSet<ListChangesOption> options;
  private final ChangeMessagesUtil cmUtil;
  private final PatchLineCommentsUtil plcUtil;
  private final Provider<ConsistencyChecker> checkerProvider;
  // Mutable per-request state, (re)assigned by the format*() entry points.
  private AccountLoader accountLoader;
  private FixInput fix;
  /**
   * Guice-injected constructor; stores collaborators and initializes the
   * option set empty so callers opt in to expensive output explicitly.
   */
  @Inject
  ChangeJson(
      Provider<ReviewDb> db,
      LabelNormalizer ln,
      Provider<CurrentUser> user,
      AnonymousUser au,
      IdentifiedUser.GenericFactory uf,
      ChangeData.Factory cdf,
      PatchSetInfoFactory psi,
      FileInfoJson fileInfoJson,
      AccountLoader.Factory ailf,
      DynamicMap<DownloadScheme> downloadSchemes,
      DynamicMap<DownloadCommand> downloadCommands,
      DynamicMap<RestView<ChangeResource>> changeViews,
      Revisions revisions,
      WebLinks webLinks,
      ChangeMessagesUtil cmUtil,
      PatchLineCommentsUtil plcUtil,
      Provider<ConsistencyChecker> checkerProvider) {
    this.db = db;
    this.labelNormalizer = ln;
    this.userProvider = user;
    this.anonymous = au;
    this.userFactory = uf;
    this.changeDataFactory = cdf;
    this.patchSetInfoFactory = psi;
    this.fileInfoJson = fileInfoJson;
    this.accountLoaderFactory = ailf;
    this.downloadSchemes = downloadSchemes;
    this.downloadCommands = downloadCommands;
    this.changeViews = changeViews;
    this.revisions = revisions;
    this.webLinks = webLinks;
    this.cmUtil = cmUtil;
    this.plcUtil = plcUtil;
    this.checkerProvider = checkerProvider;
    options = EnumSet.noneOf(ListChangesOption.class);
  }
public ChangeJson addOption(ListChangesOption o) {
options.add(o);
return this;
}
public ChangeJson addOptions(Collection<ListChangesOption> o) {
options.addAll(o);
return this;
}
  /**
   * Sets fix-mode input forwarded to the ConsistencyChecker when the CHECK
   * option is used; returns {@code this} for chaining.
   */
  public ChangeJson fix(FixInput fix) {
    this.fix = fix;
    return this;
  }
  /** Formats a change resource by materializing its ChangeData from the review DB. */
  public ChangeInfo format(ChangeResource rsrc) throws OrmException {
    return format(changeDataFactory.create(db.get(), rsrc.getControl()));
  }
  /** Formats an already-loaded Change by wrapping it in a fresh ChangeData. */
  public ChangeInfo format(Change change) throws OrmException {
    return format(changeDataFactory.create(db.get(), change));
  }
  /**
   * Formats a change by id, loading it from the database. If the read fails
   * and the CHECK option is set, degrades to a consistency-check-only
   * response instead of propagating the error.
   */
  public ChangeInfo format(Change.Id id) throws OrmException {
    Change c;
    try {
      c = db.get().changes().get(id);
    } catch (OrmException e) {
      if (!has(CHECK)) {
        throw e;
      }
      // CHECK requested: report problems rather than failing the read.
      return checkOnly(changeDataFactory.create(db.get(), id));
    }
    return format(changeDataFactory.create(db.get(), c));
  }
  /** Formats a ChangeData without limiting output to a particular patch set. */
  public ChangeInfo format(ChangeData cd) throws OrmException {
    return format(cd, Optional.<PatchSet.Id> absent());
  }
  /**
   * Core single-change formatter: creates the request-scoped AccountLoader,
   * optionally preloads reviewed-flags, then resolves all account
   * placeholders before returning. With CHECK set, any failure degrades to
   * checkOnly() instead of propagating.
   */
  private ChangeInfo format(ChangeData cd, Optional<PatchSet.Id> limitToPsId)
      throws OrmException {
    try {
      accountLoader = accountLoaderFactory.create(has(DETAILED_ACCOUNTS));
      Set<Change.Id> reviewed = Sets.newHashSet();
      if (has(REVIEWED)) {
        reviewed = loadReviewed(Collections.singleton(cd));
      }
      ChangeInfo res = toChangeInfo(cd, reviewed, limitToPsId);
      // Resolve every AccountInfo stub registered during formatting.
      accountLoader.fill();
      return res;
    } catch (OrmException | RuntimeException e) {
      if (!has(CHECK)) {
        throw e;
      }
      return checkOnly(cd);
    }
  }
public ChangeInfo format(RevisionResource rsrc) throws OrmException {
ChangeData cd = changeDataFactory.create(db.get(), rsrc.getControl());
return format(cd, Optional.of(rsrc.getPatchSet().getId()));
}
  /**
   * Formats the results of one or more change queries, batch-loading shared
   * data (changes, patch sets, approvals, reviewed-flags) across all of them
   * first. Returns one inner list per QueryResult, in order; the last entry
   * of a truncated result is marked with {@code _moreChanges}.
   */
  public List<List<ChangeInfo>> formatQueryResults(List<QueryResult> in)
      throws OrmException {
    accountLoader = accountLoaderFactory.create(has(DETAILED_ACCOUNTS));
    Iterable<ChangeData> all = FluentIterable.from(in)
        .transformAndConcat(new Function<QueryResult, List<ChangeData>>() {
          @Override
          public List<ChangeData> apply(QueryResult in) {
            return in.changes();
          }
        });
    ChangeData.ensureChangeLoaded(all);
    if (has(ALL_REVISIONS)) {
      ChangeData.ensureAllPatchSetsLoaded(all);
    } else {
      ChangeData.ensureCurrentPatchSetLoaded(all);
    }
    Set<Change.Id> reviewed = Sets.newHashSet();
    if (has(REVIEWED)) {
      reviewed = loadReviewed(all);
    }
    ChangeData.ensureCurrentApprovalsLoaded(all);
    List<List<ChangeInfo>> res = Lists.newArrayListWithCapacity(in.size());
    // Dedup map: a change appearing in several query results is formatted once.
    Map<Change.Id, ChangeInfo> out = Maps.newHashMap();
    for (QueryResult r : in) {
      List<ChangeInfo> infos = toChangeInfo(out, r.changes(), reviewed);
      if (r.moreChanges()) {
        infos.get(infos.size() - 1)._moreChanges = true;
      }
      res.add(infos);
    }
    accountLoader.fill();
    return res;
  }
  /** True if the given output option was requested via addOption(s). */
  private boolean has(ListChangesOption option) {
    return options.contains(option);
  }
  /**
   * Formats a list of changes, reusing entries from {@code out} for changes
   * already formatted in this request. Corrupt changes are skipped with a
   * warning unless CHECK is set, in which case a check-only info is emitted.
   */
  private List<ChangeInfo> toChangeInfo(Map<Change.Id, ChangeInfo> out,
      List<ChangeData> changes, Set<Change.Id> reviewed) {
    List<ChangeInfo> info = Lists.newArrayListWithCapacity(changes.size());
    for (ChangeData cd : changes) {
      ChangeInfo i = out.get(cd.getId());
      if (i == null) {
        try {
          i = toChangeInfo(cd, reviewed, Optional.<PatchSet.Id> absent());
        } catch (OrmException | RuntimeException e) {
          if (has(CHECK)) {
            i = checkOnly(cd);
          } else {
            log.warn(
                "Omitting corrupt change " + cd.getId() + " from results", e);
            continue;
          }
        }
        out.put(cd.getId(), i);
      }
      info.add(i);
    }
    return info;
  }
private ChangeInfo checkOnly(ChangeData cd) {
ConsistencyChecker.Result result = checkerProvider.get().check(cd, fix);
ChangeInfo info;
Change c = result.change();
if (c != null) {
info = new ChangeInfo();
info.project = c.getProject().get();
info.branch = c.getDest().getShortName();
info.topic = c.getTopic();
info.changeId = c.getKey().get();
info.subject = c.getSubject();
info.status = c.getStatus().asChangeStatus();
info.owner = new AccountInfo(c.getOwner().get());
info.created = c.getCreatedOn();
info.updated = c.getLastUpdatedOn();
info._number = c.getId().get();
info.problems = result.problems();
finish(info);
} else {
info = new ChangeInfo();
info._number = result.id().get();
info.problems = result.problems();
}
return info;
}
  /**
   * Fills a ChangeInfo from one ChangeData, honoring the requested output
   * options (labels, messages, revisions, actions, ...). When CHECK is set,
   * problems are computed first and the ChangeData is reloaded if any were
   * fixed in place.
   */
  private ChangeInfo toChangeInfo(ChangeData cd, Set<Change.Id> reviewed,
      Optional<PatchSet.Id> limitToPsId) throws OrmException {
    ChangeInfo out = new ChangeInfo();
    if (has(CHECK)) {
      out.problems = checkerProvider.get().check(cd.change(), fix).problems();
      // If any problems were fixed, the ChangeData needs to be reloaded.
      for (ProblemInfo p : out.problems) {
        if (p.status == ProblemInfo.Status.FIXED) {
          cd = changeDataFactory.create(cd.db(), cd.getId());
          break;
        }
      }
    }
    Change in = cd.change();
    ChangeControl ctl = cd.changeControl().forUser(userProvider.get());
    out.project = in.getProject().get();
    out.branch = in.getDest().getShortName();
    out.topic = in.getTopic();
    out.hashtags = ctl.getNotes().load().getHashtags();
    out.changeId = in.getKey().get();
    // TODO(dborowitz): This gets the submit type, so we could include that in
    // the response and avoid making a request to /submit_type from the UI.
    out.mergeable = in.getStatus() == Change.Status.MERGED
        ? null : cd.isMergeable();
    ChangedLines changedLines = cd.changedLines();
    if (changedLines != null) {
      out.insertions = changedLines.insertions;
      out.deletions = changedLines.deletions;
    }
    out.subject = in.getSubject();
    out.status = in.getStatus().asChangeStatus();
    out.owner = accountLoader.get(in.getOwner());
    out.created = in.getCreatedOn();
    out.updated = in.getLastUpdatedOn();
    out._number = in.getId().get();
    // Boolean fields use true-or-null so false values are omitted from JSON.
    out.starred = userProvider.get().getStarredChanges().contains(in.getId())
        ? true
        : null;
    out.reviewed = in.getStatus().isOpen()
        && has(REVIEWED)
        && reviewed.contains(cd.getId()) ? true : null;
    out.labels = labelsFor(ctl, cd, has(LABELS), has(DETAILED_LABELS));
    if (out.labels != null && has(DETAILED_LABELS)) {
      // If limited to specific patch sets but not the current patch set, don't
      // list permitted labels, since users can't vote on those patch sets.
      if (!limitToPsId.isPresent()
          || limitToPsId.get().equals(in.currentPatchSetId())) {
        out.permittedLabels = permittedLabels(ctl, cd);
      }
      out.removableReviewers = removableReviewers(ctl, out.labels.values());
    }
    Map<PatchSet.Id, PatchSet> src = loadPatchSets(cd, limitToPsId);
    if (has(MESSAGES)) {
      out.messages = messages(ctl, cd, src);
    }
    finish(out);
    if (has(ALL_REVISIONS)
        || has(CURRENT_REVISION)
        || limitToPsId.isPresent()) {
      out.revisions = revisions(ctl, cd, src);
      if (out.revisions != null) {
        for (Map.Entry<String, RevisionInfo> entry : out.revisions.entrySet()) {
          if (entry.getValue().isCurrent) {
            out.currentRevision = entry.getKey();
            break;
          }
        }
      }
    }
    if (has(CURRENT_ACTIONS) && userProvider.get().isIdentifiedUser()) {
      out.actions = Maps.newTreeMap();
      for (UiAction.Description d : UiActions.from(
          changeViews,
          new ChangeResource(ctl),
          userProvider)) {
        out.actions.put(d.getId(), new ActionInfo(d));
      }
      // Synthesize a "followup" action for identified users on open changes.
      if (userProvider.get().isIdentifiedUser()
          && in.getStatus().isOpen()) {
        UiAction.Description descr = new UiAction.Description();
        PrivateInternals_UiActionDescription.setId(descr, "followup");
        PrivateInternals_UiActionDescription.setMethod(descr, "POST");
        descr.setTitle("Create follow-up change");
        out.actions.put(descr.getId(), new ActionInfo(descr));
      }
    }
    return out;
  }
  /**
   * Returns the submit records for the current patch set, computing them via
   * SubmitRuleEvaluator and caching the result on the ChangeData. Empty if
   * the change has no current patch set.
   */
  private List<SubmitRecord> submitRecords(ChangeData cd) throws OrmException {
    if (cd.getSubmitRecords() != null) {
      return cd.getSubmitRecords();
    }
    PatchSet ps = cd.currentPatchSet();
    if (ps == null) {
      return ImmutableList.of();
    }
    cd.setSubmitRecords(new SubmitRuleEvaluator(cd).setPatchSet(ps)
        .setFastEvalLabels(true)
        .setAllowDraft(true)
        .canSubmit());
    return cd.getSubmitRecords();
  }
private Map<String, LabelInfo> labelsFor(ChangeControl ctl,
ChangeData cd, boolean standard, boolean detailed) throws OrmException {
if (!standard && !detailed) {
return null;
}
if (ctl == null) {
return null;
}
LabelTypes labelTypes = ctl.getLabelTypes();
Map<String, LabelWithStatus> withStatus = cd.change().getStatus().isOpen()
? labelsForOpenChange(ctl, cd, labelTypes, standard, detailed)
: labelsForClosedChange(cd, labelTypes, standard, detailed);
return ImmutableMap.copyOf(
Maps.transformValues(withStatus, LabelWithStatus.TO_LABEL_INFO));
}
  /**
   * Builds label state for an open change: seeds labels from submit records,
   * optionally attaches per-user approvals (detailed), and applies the
   * current patch set's votes to the standard summary fields.
   */
  private Map<String, LabelWithStatus> labelsForOpenChange(ChangeControl ctl,
      ChangeData cd, LabelTypes labelTypes, boolean standard, boolean detailed)
      throws OrmException {
    Map<String, LabelWithStatus> labels = initLabels(cd, labelTypes, standard);
    if (detailed) {
      setAllApprovals(ctl, cd, labels);
    }
    for (Map.Entry<String, LabelWithStatus> e : labels.entrySet()) {
      LabelType type = labelTypes.byLabel(e.getKey());
      if (type == null) {
        // Label from a submit record with no configured type; nothing to add.
        continue;
      }
      if (standard) {
        for (PatchSetApproval psa : cd.currentApprovals()) {
          if (type.matches(psa)) {
            short val = psa.getValue();
            Account.Id accountId = psa.getAccountId();
            setLabelScores(type, e.getValue(), val, accountId);
          }
        }
      }
      if (detailed) {
        setLabelValues(type, e.getValue());
      }
    }
    return labels;
  }
  /**
   * Seeds the label map from submit records, keeping for each label the
   * entry with the highest-ranked status. With standard output, OK/REJECT
   * statuses also record the applying account.
   */
  private Map<String, LabelWithStatus> initLabels(ChangeData cd,
      LabelTypes labelTypes, boolean standard) throws OrmException {
    // Don't use Maps.newTreeMap(Comparator) due to OpenJDK bug 100167.
    Map<String, LabelWithStatus> labels = new TreeMap<>(labelTypes.nameComparator());
    for (SubmitRecord rec : submitRecords(cd)) {
      if (rec.labels == null) {
        continue;
      }
      for (SubmitRecord.Label r : rec.labels) {
        LabelWithStatus p = labels.get(r.label);
        // Replace an existing entry only if this record's status ranks higher.
        if (p == null || p.status().compareTo(r.status) < 0) {
          LabelInfo n = new LabelInfo();
          if (standard) {
            switch (r.status) {
              case OK:
                n.approved = accountLoader.get(r.appliedBy);
                break;
              case REJECT:
                n.rejected = accountLoader.get(r.appliedBy);
                n.blocking = true;
                break;
              default:
                break;
            }
          }
          n.optional = r.status == SubmitRecord.Label.Status.MAY ? true : null;
          labels.put(r.label, LabelWithStatus.create(n, r.status));
        }
      }
    }
    return labels;
  }
  /**
   * Records a single vote on a label summary: min value -> rejected, max ->
   * approved, other negative -> disliked, other positive -> recommended
   * (unless a dislike was already seen). No-op once approved/rejected is
   * already set, or if the label type lacks a configured min/max.
   */
  private void setLabelScores(LabelType type,
      LabelWithStatus l, short score, Account.Id accountId) {
    if (l.label().approved != null || l.label().rejected != null) {
      return;
    }
    if (type.getMin() == null || type.getMax() == null) {
      // Can't set score for unknown or misconfigured type.
      return;
    }
    if (score != 0) {
      if (score == type.getMin().getValue()) {
        l.label().rejected = accountLoader.get(accountId);
      } else if (score == type.getMax().getValue()) {
        l.label().approved = accountLoader.get(accountId);
      } else if (score < 0) {
        l.label().disliked = accountLoader.get(accountId);
        l.label().value = score;
      } else if (score > 0 && l.label().disliked == null) {
        l.label().recommended = accountLoader.get(accountId);
        l.label().value = score;
      }
    }
  }
  /**
   * Attaches a per-user ApprovalInfo to every label for every relevant user
   * (explicit reviewers plus anyone who ever voted on the change).
   */
  private void setAllApprovals(ChangeControl baseCtrl, ChangeData cd,
      Map<String, LabelWithStatus> labels) throws OrmException {
    // Include a user in the output for this label if either:
    // - They are an explicit reviewer.
    // - They ever voted on this change.
    Set<Account.Id> allUsers = Sets.newHashSet();
    allUsers.addAll(cd.reviewers().values());
    for (PatchSetApproval psa : cd.approvals().values()) {
      allUsers.add(psa.getAccountId());
    }
    // (account, label) -> current patch set approval, for O(1) lookup below.
    Table<Account.Id, String, PatchSetApproval> current = HashBasedTable.create(
        allUsers.size(), baseCtrl.getLabelTypes().getLabelTypes().size());
    for (PatchSetApproval psa : cd.currentApprovals()) {
      current.put(psa.getAccountId(), psa.getLabel(), psa);
    }
    for (Account.Id accountId : allUsers) {
      IdentifiedUser user = userFactory.create(accountId);
      ChangeControl ctl = baseCtrl.forUser(user);
      for (Map.Entry<String, LabelWithStatus> e : labels.entrySet()) {
        LabelType lt = ctl.getLabelTypes().byLabel(e.getKey());
        if (lt == null) {
          // Ignore submit record for undefined label; likely the submit rule
          // author didn't intend for the label to show up in the table.
          continue;
        }
        Integer value;
        Timestamp date = null;
        PatchSetApproval psa = current.get(accountId, lt.getName());
        if (psa != null) {
          value = Integer.valueOf(psa.getValue());
          date = psa.getGranted();
        } else {
          // Either the user cannot vote on this label, or they were added as a
          // reviewer but have not responded yet. Explicitly check whether the
          // user can vote on this label.
          value = labelNormalizer.canVote(ctl, lt, accountId) ? 0 : null;
        }
        addApproval(e.getValue().label(), approvalInfo(accountId, value, date));
      }
    }
  }
  /**
   * Builds label state for a closed change by approximating what the submit
   * rule evaluator would have produced, from the stored current approvals.
   */
  private Map<String, LabelWithStatus> labelsForClosedChange(ChangeData cd,
      LabelTypes labelTypes, boolean standard, boolean detailed)
      throws OrmException {
    Set<Account.Id> allUsers = Sets.newHashSet();
    for (PatchSetApproval psa : cd.approvals().values()) {
      allUsers.add(psa.getAccountId());
    }
    // We can only approximately reconstruct what the submit rule evaluator
    // would have done. These should really come from a stored submit record.
    Set<String> labelNames = Sets.newHashSet();
    Multimap<Account.Id, PatchSetApproval> current = HashMultimap.create();
    for (PatchSetApproval a : cd.currentApprovals()) {
      LabelType type = labelTypes.byLabel(a.getLabelId());
      if (type != null) {
        labelNames.add(type.getName());
        // Not worth the effort to distinguish between votable/non-votable for 0
        // values on closed changes, since they can't vote anyway.
        current.put(a.getAccountId(), a);
      }
    }
    // Don't use Maps.newTreeMap(Comparator) due to OpenJDK bug 100167.
    Map<String, LabelWithStatus> labels =
        new TreeMap<>(labelTypes.nameComparator());
    for (String name : labelNames) {
      LabelType type = labelTypes.byLabel(name);
      LabelWithStatus l = LabelWithStatus.create(new LabelInfo(), null);
      if (detailed) {
        setLabelValues(type, l);
      }
      labels.put(type.getName(), l);
    }
    for (Account.Id accountId : allUsers) {
      Map<String, ApprovalInfo> byLabel =
          Maps.newHashMapWithExpectedSize(labels.size());
      if (detailed) {
        // Pre-seed every label with a zero approval; real votes overwrite below.
        for (Map.Entry<String, LabelWithStatus> entry : labels.entrySet()) {
          ApprovalInfo ai = approvalInfo(accountId, 0, null);
          byLabel.put(entry.getKey(), ai);
          addApproval(entry.getValue().label(), ai);
        }
      }
      for (PatchSetApproval psa : current.get(accountId)) {
        LabelType type = labelTypes.byLabel(psa.getLabelId());
        if (type == null) {
          continue;
        }
        short val = psa.getValue();
        ApprovalInfo info = byLabel.get(type.getName());
        if (info != null) {
          info.value = Integer.valueOf(val);
          info.date = psa.getGranted();
        }
        if (!standard) {
          continue;
        }
        setLabelScores(type, labels.get(type.getName()), val, accountId);
      }
    }
    return labels;
  }
  /**
   * Creates an ApprovalInfo for the given account/value/date and registers
   * it with the AccountLoader so account details are filled in later.
   */
  private ApprovalInfo approvalInfo(Account.Id id, Integer value, Timestamp date) {
    ApprovalInfo ai = new ApprovalInfo(id.get());
    ai.value = value;
    ai.date = date;
    accountLoader.put(ai);
    return ai;
  }
private static boolean isOnlyZero(Collection<String> values) {
return values.isEmpty() || (values.size() == 1 && values.contains(" 0"));
}
  /**
   * Populates a label's value->description map from its configured values;
   * clears it again when the only entry is the zero vote (nothing useful to
   * show).
   */
  private void setLabelValues(LabelType type, LabelWithStatus l) {
    l.label().defaultValue = type.getDefaultValue();
    l.label().values = Maps.newLinkedHashMap();
    for (LabelValue v : type.getValues()) {
      l.label().values.put(v.formatValue(), v.getText());
    }
    if (isOnlyZero(l.label().values.keySet())) {
      l.label().values = null;
    }
  }
  /**
   * Computes, per label, the values the current user may vote: label values
   * from submit records intersected with the user's permission range.
   * Labels whose only permitted value is zero are dropped entirely.
   */
  private Map<String, Collection<String>> permittedLabels(ChangeControl ctl, ChangeData cd)
      throws OrmException {
    if (ctl == null) {
      return null;
    }
    LabelTypes labelTypes = ctl.getLabelTypes();
    SetMultimap<String, String> permitted = LinkedHashMultimap.create();
    for (SubmitRecord rec : submitRecords(cd)) {
      if (rec.labels == null) {
        continue;
      }
      for (SubmitRecord.Label r : rec.labels) {
        LabelType type = labelTypes.byLabel(r.label);
        if (type == null) {
          continue;
        }
        PermissionRange range = ctl.getRange(Permission.forLabel(r.label));
        for (LabelValue v : type.getValues()) {
          if (range.contains(v.getValue())) {
            permitted.put(r.label, v.formatValue());
          }
        }
      }
    }
    // Remove labels the user can only vote 0 on; collect first to avoid
    // mutating the multimap while iterating its view.
    List<String> toClear =
        Lists.newArrayListWithCapacity(permitted.keySet().size());
    for (Map.Entry<String, Collection<String>> e
        : permitted.asMap().entrySet()) {
      if (isOnlyZero(e.getValue())) {
        toClear.add(e.getKey());
      }
    }
    for (String label : toClear) {
      permitted.removeAll(label);
    }
    return permitted.asMap();
  }
/**
 * Loads the change messages visible to the calling user, oldest first.
 * Messages attached to a patch set are included only when that patch set is
 * visible; messages without a patch set are always included.
 */
private Collection<ChangeMessageInfo> messages(ChangeControl ctl, ChangeData cd,
    Map<PatchSet.Id, PatchSet> map)
    throws OrmException {
  List<ChangeMessage> messages = cmUtil.byChange(db.get(), cd.notes());
  if (messages.isEmpty()) {
    return Collections.emptyList();
  }
  // Present messages in chronological order (oldest first).
  Collections.sort(messages, new Comparator<ChangeMessage>() {
    @Override
    public int compare(ChangeMessage left, ChangeMessage right) {
      return left.getWrittenOn().compareTo(right.getWrittenOn());
    }
  });
  List<ChangeMessageInfo> result =
      Lists.newArrayListWithCapacity(messages.size());
  for (ChangeMessage message : messages) {
    PatchSet.Id psId = message.getPatchSetId();
    boolean visible = psId == null || ctl.isPatchVisible(map.get(psId), db.get());
    if (!visible) {
      continue;
    }
    ChangeMessageInfo cmi = new ChangeMessageInfo();
    cmi.id = message.getKey().get();
    cmi.author = accountLoader.get(message.getAuthor());
    cmi.date = message.getWrittenOn();
    cmi.message = message.getMessage();
    cmi._revisionNumber = psId != null ? psId.get() : null;
    result.add(cmi);
  }
  return result;
}
/**
 * Determines which reviewers the current user may remove from the change.
 * A reviewer is removable only if every one of their votes is removable;
 * a single non-removable vote pins the reviewer on the change.
 */
private Collection<AccountInfo> removableReviewers(ChangeControl ctl,
    Collection<LabelInfo> labels) {
  Set<Account.Id> fixed = Sets.newHashSetWithExpectedSize(labels.size());
  Set<Account.Id> removable = Sets.newHashSetWithExpectedSize(labels.size());
  for (LabelInfo label : labels) {
    if (label.all == null) {
      continue;
    }
    for (ApprovalInfo approval : label.all) {
      Account.Id reviewer = new Account.Id(approval._accountId);
      // A missing value counts as a vote of 0 for the removal check.
      if (ctl.canRemoveReviewer(reviewer, MoreObjects.firstNonNull(approval.value, 0))) {
        removable.add(reviewer);
      } else {
        fixed.add(reviewer);
      }
    }
  }
  // Any reviewer with at least one fixed vote is not removable at all.
  removable.removeAll(fixed);
  List<AccountInfo> result = Lists.newArrayListWithCapacity(removable.size());
  for (Account.Id id : removable) {
    result.add(accountLoader.get(id));
  }
  return result;
}
/**
 * Determines which of the given changes the calling user has reviewed.
 * Anonymous callers have reviewed nothing. Messages are loaded in batches
 * of 50 changes to bound the per-query work; only open changes with a
 * current patch set have their messages inspected.
 */
private Set<Change.Id> loadReviewed(Iterable<ChangeData> all)
    throws OrmException {
  Set<Change.Id> reviewed = Sets.newHashSet();
  if (!userProvider.get().isIdentifiedUser()) {
    return reviewed;
  }
  Account.Id self = ((IdentifiedUser) userProvider.get()).getAccountId();
  for (List<ChangeData> batch : Iterables.partition(all, 50)) {
    // Message lists parallel to the batch: index i belongs to batch.get(i).
    List<List<ChangeMessage>> messagesPerChange =
        Lists.newArrayListWithCapacity(batch.size());
    for (ChangeData cd : batch) {
      PatchSet.Id ps = cd.change().currentPatchSetId();
      if (ps != null && cd.change().getStatus().isOpen()) {
        messagesPerChange.add(cmUtil.byPatchSet(db.get(), cd.notes(), ps));
      } else {
        messagesPerChange.add(NO_MESSAGES);
      }
    }
    for (int i = 0; i < messagesPerChange.size(); i++) {
      if (isChangeReviewed(self, batch.get(i), messagesPerChange.get(i))) {
        reviewed.add(batch.get(i).getId());
      }
    }
  }
  return reviewed;
}
/**
 * A change counts as reviewed by {@code self} if, scanning messages from
 * most recent to oldest, a message by {@code self} is found before any
 * message by the change owner. An owner message seen first means the owner
 * responded after the user's last activity, so the change needs attention.
 */
private boolean isChangeReviewed(Account.Id self, ChangeData cd,
    List<ChangeMessage> msgs) throws OrmException {
  // Copy-and-reverse: newest message first.
  List<ChangeMessage> ordered = ChangeNotes.MESSAGE_BY_TIME.sortedCopy(msgs);
  Collections.reverse(ordered);
  Account.Id owner = cd.change().getOwner();
  for (ChangeMessage message : ordered) {
    Account.Id author = message.getAuthor();
    if (self.equals(author)) {
      return true;
    }
    if (owner.equals(author)) {
      return false;
    }
  }
  return false;
}
/**
 * Builds the revision map (commit SHA-1 → RevisionInfo) for the change.
 * Includes every patch set when ALL_REVISIONS is requested, otherwise only
 * the current one; in both cases only patch sets visible to the caller.
 */
private Map<String, RevisionInfo> revisions(ChangeControl ctl, ChangeData cd,
    Map<PatchSet.Id, PatchSet> map) throws OrmException {
  Map<String, RevisionInfo> out = Maps.newLinkedHashMap();
  for (PatchSet ps : map.values()) {
    boolean wanted = has(ALL_REVISIONS)
        || ps.getId().equals(cd.change().currentPatchSetId());
    if (wanted && ctl.isPatchVisible(ps, db.get())) {
      out.put(ps.getRevision().get(), toRevisionInfo(ctl, cd, ps));
    }
  }
  return out;
}
/**
 * Loads the patch sets needed for the requested options, keyed by id.
 * All patch sets are loaded when ALL_REVISIONS or MESSAGES is requested;
 * otherwise only the explicitly requested patch set (if given) or the
 * current one.
 *
 * @throws OrmException if the requested or current patch set is missing
 */
private Map<PatchSet.Id, PatchSet> loadPatchSets(ChangeData cd,
    Optional<PatchSet.Id> limitToPsId) throws OrmException {
  Collection<PatchSet> src;
  if (has(ALL_REVISIONS) || has(MESSAGES)) {
    src = cd.patches();
  } else if (limitToPsId.isPresent()) {
    PatchSet ps = cd.patch(limitToPsId.get());
    if (ps == null) {
      throw new OrmException("missing patch set " + limitToPsId.get());
    }
    src = Collections.singletonList(ps);
  } else {
    PatchSet ps = cd.currentPatchSet();
    if (ps == null) {
      throw new OrmException(
          "missing current patch set for change " + cd.getId());
    }
    src = Collections.singletonList(ps);
  }
  Map<PatchSet.Id, PatchSet> byId = Maps.newHashMapWithExpectedSize(src.size());
  for (PatchSet patchSet : src) {
    byId.put(patchSet.getId(), patchSet);
  }
  return byId;
}
/**
 * Converts one patch set into a {@link RevisionInfo}, filling in only the
 * sections enabled by the requested options (commit, files, actions, draft
 * comments). Failures to load commit/file details are logged and leave the
 * corresponding field unset rather than failing the whole request.
 */
private RevisionInfo toRevisionInfo(ChangeControl ctl, ChangeData cd,
    PatchSet in) throws OrmException {
  RevisionInfo out = new RevisionInfo();
  out.isCurrent = in.getId().equals(cd.change().currentPatchSetId());
  out._number = in.getId().get();
  out.ref = in.getRefName();
  // null (not false) when not a draft, so the field is omitted from JSON.
  out.draft = in.isDraft() ? true : null;
  out.fetch = makeFetchMap(ctl, in);
  if (has(ALL_COMMITS) || (out.isCurrent && has(CURRENT_COMMIT))) {
    try {
      out.commit = toCommit(in, cd.change().getProject(), has(WEB_LINKS));
    } catch (PatchSetInfoNotAvailableException e) {
      // Best effort: leave out.commit unset and continue.
      log.warn("Cannot load PatchSetInfo " + in.getId(), e);
    }
  }
  if (has(ALL_FILES) || (out.isCurrent && has(CURRENT_FILES))) {
    try {
      out.files = fileInfoJson.toFileInfoMap(cd.change(), in);
      // The commit message pseudo-file is not reported as a file.
      out.files.remove(Patch.COMMIT_MSG);
    } catch (PatchListNotAvailableException e) {
      // Best effort: leave out.files unset and continue.
      log.warn("Cannot load PatchList " + in.getId(), e);
    }
  }
  // Actions apply only to the current patch set or to drafts, and only for
  // signed-in users.
  if ((out.isCurrent || (out.draft != null && out.draft))
      && has(CURRENT_ACTIONS)
      && userProvider.get().isIdentifiedUser()) {
    out.actions = Maps.newTreeMap();
    for (UiAction.Description d : UiActions.from(
        revisions,
        new RevisionResource(new ChangeResource(ctl), in),
        userProvider)) {
      out.actions.put(d.getId(), new ActionInfo(d));
    }
  }
  if (has(DRAFT_COMMENTS)
      && userProvider.get().isIdentifiedUser()) {
    IdentifiedUser user = (IdentifiedUser)userProvider.get();
    // true or null (never false) so the flag is omitted when absent.
    out.hasDraftComments =
        plcUtil.draftByPatchSetAuthor(db.get(), in.getId(),
            user.getAccountId(), ctl.getNotes()).iterator().hasNext()
          ? true
          : null;
  }
  return out;
}
/**
 * Builds a {@link CommitInfo} for the given patch set, including author,
 * committer, message and parent commits. When {@code addLinks} is set,
 * configured web links are attached to the commit and each parent
 * (null rather than an empty list when no links exist).
 *
 * @throws PatchSetInfoNotAvailableException if commit metadata cannot be loaded
 */
CommitInfo toCommit(PatchSet in, Project.NameKey project, boolean addLinks)
    throws PatchSetInfoNotAvailableException {
  PatchSetInfo info = patchSetInfoFactory.get(db.get(), in.getId());
  CommitInfo commit = new CommitInfo();
  commit.author = toGitPerson(info.getAuthor());
  commit.committer = toGitPerson(info.getCommitter());
  commit.subject = info.getSubject();
  commit.message = info.getMessage();
  commit.parents = Lists.newArrayListWithCapacity(info.getParents().size());
  if (addLinks) {
    FluentIterable<WebLinkInfo> links =
        webLinks.getPatchSetLinks(project, in.getRevision().get());
    commit.webLinks = links.isEmpty() ? null : links.toList();
  }
  for (ParentInfo parent : info.getParents()) {
    CommitInfo parentCommit = new CommitInfo();
    parentCommit.commit = parent.id.get();
    parentCommit.subject = parent.shortMessage;
    if (addLinks) {
      FluentIterable<WebLinkInfo> parentLinks =
          webLinks.getPatchSetLinks(project, parent.id.get());
      parentCommit.webLinks = parentLinks.isEmpty() ? null : parentLinks.toList();
    }
    commit.parents.add(parentCommit);
  }
  return commit;
}
/**
 * Builds the fetch section (scheme name → FetchInfo) for one patch set.
 * A scheme is skipped when it is disabled, requires authentication the
 * caller lacks, or does not support authentication while the patch set is
 * not visible to anonymous users. Download commands are attached only when
 * the DOWNLOAD_COMMANDS option is requested.
 */
private Map<String, FetchInfo> makeFetchMap(ChangeControl ctl, PatchSet in)
    throws OrmException {
  Map<String, FetchInfo> fetch = Maps.newLinkedHashMap();
  for (DynamicMap.Entry<DownloadScheme> entry : downloadSchemes) {
    DownloadScheme scheme = entry.getProvider().get();
    if (!scheme.isEnabled()
        || (scheme.isAuthRequired() && !userProvider.get().isIdentifiedUser())) {
      continue;
    }
    // Anonymous-only schemes require the patch set to be anonymously visible.
    if (!scheme.isAuthSupported()
        && !ctl.forUser(anonymous).isPatchVisible(in, db.get())) {
      continue;
    }
    String projectName = ctl.getProject().getNameKey().get();
    String refName = in.getRefName();
    FetchInfo fetchInfo = new FetchInfo(scheme.getUrl(projectName), refName);
    fetch.put(entry.getExportName(), fetchInfo);
    if (has(DOWNLOAD_COMMANDS)) {
      populateFetchMap(scheme, downloadCommands, projectName, refName,
          fetchInfo);
    }
  }
  return fetch;
}
/**
 * Attaches every applicable download command (checkout, cherry-pick, ...)
 * to the given {@link FetchInfo}. Commands that do not apply to the scheme
 * return null and are skipped.
 */
public static void populateFetchMap(DownloadScheme scheme,
    DynamicMap<DownloadCommand> commands, String projectName,
    String refName, FetchInfo fetchInfo) {
  for (DynamicMap.Entry<DownloadCommand> entry : commands) {
    DownloadCommand command = entry.getProvider().get();
    String commandLine = command.getCommand(scheme, projectName, refName);
    if (commandLine != null) {
      addCommand(fetchInfo, entry.getExportName(), commandLine);
    }
  }
}
/** Adds one named download command, lazily allocating the (sorted) command map. */
private static void addCommand(FetchInfo fetchInfo, String commandName,
    String c) {
  if (fetchInfo.commands == null) {
    // TreeMap keeps command names in a stable, sorted order for output.
    fetchInfo.commands = Maps.newTreeMap();
  }
  fetchInfo.commands.put(commandName, c);
}
/** Converts a {@link UserIdentity} into the REST API's {@link GitPerson} form. */
private static GitPerson toGitPerson(UserIdentity committer) {
  GitPerson person = new GitPerson();
  person.name = committer.getName();
  person.email = committer.getEmail();
  person.date = committer.getDate();
  person.tz = committer.getTimeZone();
  return person;
}
/**
 * Sets the triplet id of the change: {@code <project>~<branch>~<Change-Id>},
 * with each segment URL-encoded.
 */
static void finish(ChangeInfo info) {
  info.id = Url.encode(info.project)
      + '~' + Url.encode(info.branch)
      + '~' + Url.encode(info.changeId);
}
/** Appends an approval to the label, lazily allocating the backing list. */
private static void addApproval(LabelInfo label, ApprovalInfo approval) {
  List<ApprovalInfo> all = label.all;
  if (all == null) {
    all = Lists.newArrayList();
    label.all = all;
  }
  all.add(approval);
}
/**
 * Value type pairing a {@link LabelInfo} with its (nullable) submit-record
 * status. Implemented via AutoValue; instances are created through
 * {@link #create(LabelInfo, SubmitRecord.Label.Status)}.
 */
@AutoValue
abstract static class LabelWithStatus {
  // Projects a LabelWithStatus down to its LabelInfo (used with Guava
  // collection transforms; this code base predates lambdas).
  private static final Function<LabelWithStatus, LabelInfo> TO_LABEL_INFO =
      new Function<LabelWithStatus, LabelInfo>() {
        @Override
        public LabelInfo apply(LabelWithStatus in) {
          return in.label();
        }
      };

  private static LabelWithStatus create(LabelInfo label,
      SubmitRecord.Label.Status status) {
    return new AutoValue_ChangeJson_LabelWithStatus(label, status);
  }

  /** The label details exposed over the REST API. */
  abstract LabelInfo label();

  /** Submit-record status for this label; null when no record applies. */
  @Nullable abstract SubmitRecord.Label.Status status();
}
}
| |
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.exceptions;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
/**
* Represents an exception that is a composite of one or more other exceptions. A {@code CompositeException}
* does not modify the structure of any exception it wraps, but at print-time it iterates through the list of
* Throwables contained in the composite in order to print them all.
*
* Its invariant is to contain an immutable, ordered (by insertion order), unique list of non-composite
* exceptions. You can retrieve individual exceptions in this list with {@link #getExceptions()}.
*
* The {@link #printStackTrace()} implementation handles the StackTrace in a customized way instead of using
* {@code getCause()} so that it can avoid circular references.
*
* If you invoke {@link #getCause()}, it will lazily create the causal chain but will stop if it finds any
* Throwable in the chain that it has already seen.
*/
public final class CompositeException extends RuntimeException {

    private static final long serialVersionUID = 3026362227162912146L;

    // Immutable, insertion-ordered, de-duplicated list of the wrapped
    // (non-composite) exceptions.
    private final List<Throwable> exceptions;
    // Pre-computed message of the form "N exceptions occurred. " (note the
    // trailing space; callers may rely on the exact text).
    private final String message;

    /**
     * Constructs a composite from the given errors. Nested
     * {@code CompositeException}s are flattened, nulls are replaced with
     * {@code NullPointerException}s, and duplicates are removed while
     * preserving insertion order.
     *
     * NOTE(review): {@code messagePrefix} is accepted but never used — the
     * resulting message is always "N exceptions occurred. ". Confirm whether
     * this parameter is intentionally ignored (deprecated upstream) before
     * relying on it.
     */
    public CompositeException(String messagePrefix, Collection<? extends Throwable> errors) {
        // LinkedHashSet: de-duplicates while keeping insertion order.
        Set<Throwable> deDupedExceptions = new LinkedHashSet<Throwable>();
        List<Throwable> _exceptions = new ArrayList<Throwable>();
        if (errors != null) {
            for (Throwable ex : errors) {
                if (ex instanceof CompositeException) {
                    // Flatten nested composites so the invariant "contains
                    // only non-composite exceptions" holds.
                    deDupedExceptions.addAll(((CompositeException) ex).getExceptions());
                } else
                if (ex != null) {
                    deDupedExceptions.add(ex);
                } else {
                    // A null entry is recorded as an NPE rather than dropped.
                    deDupedExceptions.add(new NullPointerException());
                }
            }
        } else {
            // A null collection itself is also recorded as an NPE.
            deDupedExceptions.add(new NullPointerException());
        }
        _exceptions.addAll(deDupedExceptions);
        this.exceptions = Collections.unmodifiableList(_exceptions);
        this.message = exceptions.size() + " exceptions occurred. ";
    }

    public CompositeException(Collection<? extends Throwable> errors) {
        this(null, errors);
    }

    /**
     * Retrieves the list of exceptions that make up the {@code CompositeException}
     *
     * @return the exceptions that make up the {@code CompositeException}, as a {@link List} of {@link Throwable}s
     */
    public List<Throwable> getExceptions() {
        return exceptions;
    }

    @Override
    public String getMessage() {
        return message;
    }

    // Lazily-built causal chain; guarded by "synchronized" on this instance.
    private Throwable cause = null;

    /**
     * Lazily builds and returns a synthetic causal chain linking all wrapped
     * exceptions in order, cropping whenever a Throwable already seen would
     * re-enter the chain (prevents cycles in printStackTrace of consumers
     * that follow getCause()).
     */
    @Override
    public synchronized Throwable getCause() {
        if (cause == null) {
            // we lazily generate this causal chain if this is called
            CompositeExceptionCausalChain _cause = new CompositeExceptionCausalChain();
            Set<Throwable> seenCauses = new HashSet<Throwable>();

            Throwable chain = _cause;
            for (Throwable e : exceptions) {
                if (seenCauses.contains(e)) {
                    // already seen this outer Throwable so skip
                    continue;
                }
                seenCauses.add(e);

                List<Throwable> listOfCauses = getListOfCauses(e);
                // check if any of them have been seen before
                for(Throwable child : listOfCauses) {
                    if (seenCauses.contains(child)) {
                        // this child already appears in the chain: replace 'e'
                        // with a marker so the chain is cropped instead of
                        // looping. NOTE(review): 'continue' means later
                        // children are still scanned and may replace the
                        // marker again — presumably intentional cropping
                        // behavior; confirm against upstream RxJava.
                        e = new RuntimeException("Duplicate found in causal chain so cropping to prevent loop ...");
                        continue;
                    }
                    seenCauses.add(child);
                }

                // we now have 'e' as the last in the chain
                try {
                    chain.initCause(e);
                } catch (Throwable t) {
                    // ignore
                    // the javadocs say that some Throwables (depending on how they're made) will never
                    // let me call initCause without blowing up even if it returns null
                }
                // NOTE(review): if initCause above failed, getCause() may
                // return null here and the next iteration would NPE on
                // chain.initCause — TODO confirm whether that case can occur.
                chain = chain.getCause();
            }
            cause = _cause;
        }
        return cause;
    }

    /**
     * All of the following {@code printStackTrace} functionality is derived from JDK {@link Throwable}
     * {@code printStackTrace}. In particular, the {@code PrintStreamOrWriter} abstraction is copied wholesale.
     *
     * Changes from the official JDK implementation:<ul>
     * <li>no infinite loop detection</li>
     * <li>smaller critical section holding {@link PrintStream} lock</li>
     * <li>explicit knowledge about the exceptions {@link List} that this loops through</li>
     * </ul>
     */
    @Override
    public void printStackTrace() {
        printStackTrace(System.err);
    }

    @Override
    public void printStackTrace(PrintStream s) {
        printStackTrace(new WrappedPrintStream(s));
    }

    @Override
    public void printStackTrace(PrintWriter s) {
        printStackTrace(new WrappedPrintWriter(s));
    }

    /**
     * Special handling for printing out a {@code CompositeException}.
     * Loops through all inner exceptions and prints them out.
     *
     * @param s
     *            stream to print to
     */
    private void printStackTrace(PrintStreamOrWriter s) {
        // Build the entire output off-lock, then print it in one call so the
        // stream lock is held only briefly.
        StringBuilder bldr = new StringBuilder();
        bldr.append(this).append("\n");
        for (StackTraceElement myStackElement : getStackTrace()) {
            bldr.append("\tat ").append(myStackElement).append("\n");
        }
        int i = 1;
        for (Throwable ex : exceptions) {
            bldr.append("  ComposedException ").append(i).append(" :").append("\n");
            appendStackTrace(bldr, ex, "\t");
            i++;
        }
        synchronized (s.lock()) {
            s.println(bldr.toString());
        }
    }

    // Recursively appends one exception's stack trace plus its causal chain.
    private void appendStackTrace(StringBuilder bldr, Throwable ex, String prefix) {
        bldr.append(prefix).append(ex).append("\n");
        for (StackTraceElement stackElement : ex.getStackTrace()) {
            bldr.append("\t\tat ").append(stackElement).append("\n");
        }
        if (ex.getCause() != null) {
            bldr.append("\tCaused by: ");
            appendStackTrace(bldr, ex.getCause(), "");
        }
    }

    private abstract static class PrintStreamOrWriter {
        /** Returns the object to be locked when using this StreamOrWriter */
        abstract Object lock();

        /** Prints the specified string as a line on this StreamOrWriter */
        abstract void println(Object o);
    }

    /**
     * Same abstraction and implementation as in JDK to allow PrintStream and PrintWriter to share implementation
     */
    private static class WrappedPrintStream extends PrintStreamOrWriter {
        private final PrintStream printStream;

        WrappedPrintStream(PrintStream printStream) {
            this.printStream = printStream;
        }

        @Override
        Object lock() {
            return printStream;
        }

        @Override
        void println(Object o) {
            printStream.println(o);
        }
    }

    private static class WrappedPrintWriter extends PrintStreamOrWriter {
        private final PrintWriter printWriter;

        WrappedPrintWriter(PrintWriter printWriter) {
            this.printWriter = printWriter;
        }

        @Override
        Object lock() {
            return printWriter;
        }

        @Override
        void println(Object o) {
            printWriter.println(o);
        }
    }

    // Synthetic head of the lazily-built causal chain; see getCause().
    /* package-private */final static class CompositeExceptionCausalChain extends RuntimeException {
        private static final long serialVersionUID = 3875212506787802066L;
        // NOTE(review): static but non-final and mutable — presumably only
        // read; confirm nothing reassigns it.
        /* package-private */static String MESSAGE = "Chain of Causes for CompositeException In Order Received =>";

        @Override
        public String getMessage() {
            return MESSAGE;
        }
    }

    // Returns ex's full cause chain (excluding ex itself), outermost first;
    // empty list when ex has no cause. Does not guard against cyclic chains
    // itself — callers (getCause) handle duplicates.
    private final List<Throwable> getListOfCauses(Throwable ex) {
        List<Throwable> list = new ArrayList<Throwable>();
        Throwable root = ex.getCause();
        if (root == null) {
            return list;
        } else {
            while(true) {
                list.add(root);
                if (root.getCause() == null) {
                    return list;
                } else {
                    root = root.getCause();
                }
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.client.cli;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.table.client.gateway.ResultDescriptor;
import org.apache.flink.table.client.gateway.SqlExecutionException;
import org.apache.flink.table.client.gateway.TypedResult;
import org.apache.flink.table.utils.PrintUtils;
import org.apache.flink.types.Row;
import org.jline.keymap.KeyMap;
import org.jline.utils.AttributedString;
import org.jline.utils.AttributedStringBuilder;
import org.jline.utils.AttributedStyle;
import org.jline.utils.InfoCmp.Capability;
import java.time.LocalTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import static org.apache.flink.table.client.cli.CliUtils.TIME_FORMATTER;
import static org.apache.flink.table.client.cli.CliUtils.formatTwoLineHelpOptions;
import static org.apache.flink.table.client.cli.CliUtils.normalizeColumn;
import static org.apache.flink.table.client.cli.CliUtils.repeatChar;
import static org.jline.keymap.KeyMap.ctrl;
import static org.jline.keymap.KeyMap.esc;
import static org.jline.keymap.KeyMap.key;
/**
* CLI view for retrieving and displaying a table.
*/
public class CliTableResultView extends CliResultView<CliTableResultView.ResultTableOperation> {

    // Total number of result pages as reported by the last snapshot.
    private int pageCount;
    // Currently displayed page; LAST_PAGE (0) is a sentinel meaning
    // "follow the last page as results grow".
    private int page;
    // Time of the last successful snapshot; null until the first refresh.
    private LocalTime lastRetrieval;
    // Page number the previous results belong to, used to detect page changes.
    private int previousResultsPage;

    // Index into CliResultView.REFRESH_INTERVALS (not a duration itself).
    private static final int DEFAULT_REFRESH_INTERVAL = 3; // every 1s
    // Lowest allowed index into REFRESH_INTERVALS.
    private static final int MIN_REFRESH_INTERVAL = 1; // every 100ms
    // Sentinel page number: always show the most recent page.
    private static final int LAST_PAGE = 0;

    public CliTableResultView(CliClient client, ResultDescriptor resultDescriptor) {
        super(client, resultDescriptor);

        refreshInterval = DEFAULT_REFRESH_INTERVAL;
        pageCount = 1;
        page = LAST_PAGE;
        previousResults = Collections.emptyList();
        previousResultsPage = 1;
        results = Collections.emptyList();
    }

    // --------------------------------------------------------------------------------------------

    @Override
    protected String[] getRow(String[] resultRow) {
        // Table mode shows rows as-is (no change-kind column to strip).
        return resultRow;
    }

    @Override
    protected int computeColumnWidth(int idx) {
        // All columns share the same fixed maximum width.
        return MAX_COLUMN_WIDTH;
    }

    @Override
    protected void refresh() {
        // take snapshot
        TypedResult<Integer> result;
        try {
            result = client.getExecutor().snapshotResult(client.getSessionId(), resultDescriptor.getResultId(), getVisibleMainHeight());
        } catch (SqlExecutionException e) {
            // Executor failure ends the view with the error.
            close(e);
            return;
        }

        // stop retrieval if job is done
        if (result.getType() == TypedResult.ResultType.EOS) {
            stopRetrieval(false);
        }
        // update page
        else if (result.getType() == TypedResult.ResultType.PAYLOAD) {
            int newPageCount = result.getPayload();
            pageCount = newPageCount;
            // If the page we were on no longer exists, fall back to following
            // the last page.
            if (page > newPageCount) {
                page = LAST_PAGE;
            }
            updatePage();
        }

        lastRetrieval = LocalTime.now();

        // reset view
        resetAllParts();
    }

    @Override
    protected KeyMap<ResultTableOperation> getKeys() {
        final KeyMap<ResultTableOperation> keys = new KeyMap<>();
        keys.setAmbiguousTimeout(200); // make ESC quicker
        keys.bind(ResultTableOperation.QUIT, "q", "Q", esc(), ctrl('c'));
        keys.bind(ResultTableOperation.REFRESH, "r", "R", key(client.getTerminal(), Capability.key_f5));
        keys.bind(ResultTableOperation.UP, "w", "W", key(client.getTerminal(), Capability.key_up));
        keys.bind(ResultTableOperation.DOWN, "s", "S", key(client.getTerminal(), Capability.key_down));
        keys.bind(ResultTableOperation.LEFT, "a", "A", key(client.getTerminal(), Capability.key_left));
        keys.bind(ResultTableOperation.RIGHT, "d", "D", key(client.getTerminal(), Capability.key_right));
        keys.bind(ResultTableOperation.OPEN, "o", "O", "\r");
        keys.bind(ResultTableOperation.GOTO, "g", "G");
        keys.bind(ResultTableOperation.NEXT, "n", "N");
        keys.bind(ResultTableOperation.PREV, "p", "P");
        keys.bind(ResultTableOperation.LAST, "l", "L", key(client.getTerminal(), Capability.key_end));
        keys.bind(ResultTableOperation.INC_REFRESH, "+");
        keys.bind(ResultTableOperation.DEC_REFRESH, "-");
        return keys;
    }

    @Override
    protected void evaluate(ResultTableOperation operation, String binding) {
        // Dispatch a key binding to the corresponding view action.
        switch (operation) {
            case QUIT:
                close();
                break;
            case REFRESH:
                refresh();
                break;
            case UP:
                selectRowUp();
                break;
            case DOWN:
                selectRowDown();
                break;
            case OPEN:
                openRow();
                break;
            case GOTO:
                gotoPage();
                break;
            case NEXT:
                gotoNextPage();
                break;
            case PREV:
                gotoPreviousPage();
                break;
            case LAST:
                gotoLastPage();
                break;
            case LEFT:
                scrollLeft();
                break;
            case RIGHT:
                scrollRight();
                break;
            case INC_REFRESH:
                increaseRefreshInterval();
                break;
            case DEC_REFRESH:
                decreaseRefreshInterval(MIN_REFRESH_INTERVAL);
                break;
        }
    }

    @Override
    protected String getTitle() {
        return CliStrings.RESULT_TITLE + " (" + CliStrings.RESULT_TABLE + ")";
    }

    @Override
    protected List<AttributedString> computeHeaderLines() {
        // Status line layout: [refresh info] ... [page X of Y] ... [last refresh].
        final AttributedStringBuilder statusLine = new AttributedStringBuilder();
        statusLine.style(AttributedStyle.INVERSE);

        // left
        final String left;
        if (isRetrieving()) {
            left = CliStrings.DEFAULT_MARGIN + CliStrings.RESULT_REFRESH_INTERVAL + ' ' + REFRESH_INTERVALS.get(refreshInterval).f0;
        } else {
            left = CliStrings.DEFAULT_MARGIN + CliStrings.RESULT_STOPPED;
        }
        // middle
        final StringBuilder middleBuilder = new StringBuilder();
        middleBuilder.append(CliStrings.RESULT_PAGE);
        middleBuilder.append(' ');
        if (page == LAST_PAGE) {
            middleBuilder.append(CliStrings.RESULT_LAST_PAGE);
        } else {
            middleBuilder.append(page);
        }
        middleBuilder.append(CliStrings.RESULT_PAGE_OF);
        middleBuilder.append(pageCount);
        final String middle = middleBuilder.toString();
        // right
        final String right;
        if (lastRetrieval == null) {
            right = CliStrings.RESULT_LAST_REFRESH + ' ' + CliStrings.RESULT_REFRESH_UNKNOWN + CliStrings.DEFAULT_MARGIN;
        } else {
            right = CliStrings.RESULT_LAST_REFRESH + ' ' + lastRetrieval.format(TIME_FORMATTER) + CliStrings.DEFAULT_MARGIN;
        }

        // all together
        // Center the middle segment; pad left and right with spaces.
        final int totalLeftSpace = getWidth() - middle.length();
        final int leftSpace = totalLeftSpace / 2 - left.length();
        statusLine.append(left);
        repeatChar(statusLine, ' ', leftSpace);
        statusLine.append(middle);
        final int rightSpacing = getWidth() - statusLine.length() - right.length();
        repeatChar(statusLine, ' ', rightSpacing);
        statusLine.append(right);

        return Arrays.asList(statusLine.toAttributedString(), AttributedString.EMPTY);
    }

    @Override
    protected List<AttributedString> computeMainHeaderLines() {
        // Underlined column names, one fixed-width cell per schema field.
        final AttributedStringBuilder schemaHeader = new AttributedStringBuilder();

        Arrays.stream(resultDescriptor.getResultSchema().getFieldNames()).forEach(s -> {
            schemaHeader.append(' ');
            schemaHeader.style(AttributedStyle.DEFAULT.underline());
            normalizeColumn(schemaHeader, s, MAX_COLUMN_WIDTH);
            schemaHeader.style(AttributedStyle.DEFAULT);
        });

        return Collections.singletonList(schemaHeader.toAttributedString());
    }

    @Override
    protected List<AttributedString> computeFooterLines() {
        return formatTwoLineHelpOptions(getWidth(), getHelpOptions());
    }

    // --------------------------------------------------------------------------------------------

    private void updatePage() {
        // retrieve page
        // Resolve the LAST_PAGE sentinel to the concrete last page number.
        final int retrievalPage = page == LAST_PAGE ? pageCount : page;
        final List<Row> rows;
        try {
            rows = client.getExecutor().retrieveResultPage(resultDescriptor.getResultId(), retrievalPage);
        } catch (SqlExecutionException e) {
            close(e);
            return;
        }

        // convert page
        final List<String[]> stringRows = rows
            .stream()
            .map(PrintUtils::rowToString)
            .collect(Collectors.toList());

        // update results
        if (previousResultsPage == retrievalPage) {
            // only use the previous results if the current page number has not changed
            // this allows for updated results when the key space remains constant
            previousResults = results;
        } else {
            previousResults = null;
            previousResultsPage = retrievalPage;
        }
        results = stringRows;

        // check if selected row is still valid
        if (selectedRow != NO_ROW_SELECTED) {
            if (selectedRow >= results.size()) {
                selectedRow = NO_ROW_SELECTED;
            }
        }

        // reset view
        resetAllParts();
    }

    // Key/description pairs shown in the footer help area.
    private List<Tuple2<String, String>> getHelpOptions() {
        final List<Tuple2<String, String>> options = new ArrayList<>();
        options.add(Tuple2.of("Q", CliStrings.RESULT_QUIT));
        options.add(Tuple2.of("R", CliStrings.RESULT_REFRESH));

        options.add(Tuple2.of("+", CliStrings.RESULT_INC_REFRESH));
        options.add(Tuple2.of("-", CliStrings.RESULT_DEC_REFRESH));

        options.add(Tuple2.of("G", CliStrings.RESULT_GOTO));
        options.add(Tuple2.of("L", CliStrings.RESULT_LAST));

        options.add(Tuple2.of("N", CliStrings.RESULT_NEXT));
        options.add(Tuple2.of("P", CliStrings.RESULT_PREV));

        options.add(Tuple2.of("O", CliStrings.RESULT_OPEN));

        return options;
    }

    // Prompts for a page number (validated against [1, pageCount]) and jumps
    // to it when the user confirms.
    private void gotoPage() {
        final CliInputView view = new CliInputView(
            client,
            CliStrings.INPUT_ENTER_PAGE + " [1 to " + pageCount + "]",
            (s) -> {
                // validate input
                final int newPage;
                try {
                    newPage = Integer.parseInt(s);
                } catch (NumberFormatException e) {
                    return false;
                }
                return newPage > 0 && newPage <= pageCount;
            });
        view.open(); // enter view
        // A null result means the input was aborted.
        if (view.getResult() != null) {
            page = Integer.parseInt(view.getResult());
            updatePage();
        }
    }

    private void gotoNextPage() {
        final int curPageIndex = page == LAST_PAGE ? pageCount : page;
        if (curPageIndex < pageCount) {
            page = curPageIndex + 1;
        }
        updatePage();
    }

    private void gotoPreviousPage() {
        final int curPageIndex = page == LAST_PAGE ? pageCount : page;
        if (curPageIndex > 1) {
            page = curPageIndex - 1;
        }
        updatePage();
    }

    private void gotoLastPage() {
        page = LAST_PAGE;
        updatePage();
    }

    // --------------------------------------------------------------------------------------------

    /**
     * Available operations for this view.
     */
    public enum ResultTableOperation {
        QUIT, // leave view
        REFRESH, // refresh current table page
        UP, // row selection up
        DOWN, // row selection down
        OPEN, // shows a full row
        GOTO, // enter table page number
        NEXT, // next table page
        PREV, // previous table page
        LAST, // last table page
        LEFT, // scroll left if row is large
        RIGHT, // scroll right if row is large
        INC_REFRESH, // increase refresh rate
        DEC_REFRESH, // decrease refresh rate
    }
}
| |
/*
* Copyright 2019 Immutables Authors and Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.immutables.criteria.geode;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import org.immutables.criteria.backend.PathNaming;
import org.immutables.criteria.expression.AbstractExpressionVisitor;
import org.immutables.criteria.expression.Call;
import org.immutables.criteria.expression.ComparableOperators;
import org.immutables.criteria.expression.Constant;
import org.immutables.criteria.expression.Expression;
import org.immutables.criteria.expression.Expressions;
import org.immutables.criteria.expression.IterableOperators;
import org.immutables.criteria.expression.Operator;
import org.immutables.criteria.expression.Operators;
import org.immutables.criteria.expression.OptionalOperators;
import org.immutables.criteria.expression.Path;
import org.immutables.criteria.expression.StringOperators;
import org.immutables.criteria.expression.Visitors;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
/**
* Generates <a href="https://geode.apache.org/docs/guide/16/developing/querying_basics/query_basics.html">Geode OQL</a>
* based on existing expression.
*/
class GeodeQueryVisitor extends AbstractExpressionVisitor<Oql> {
private final PathNaming pathNaming;
/**
* Bind variables. Remains empty if variables are not used
*/
private final List<Object> variables;
private final boolean useBindVariables;
/**
 * Creates a visitor that translates criteria expressions into Geode OQL.
 *
 * @param useBindVariables whether the query should be generated with bind variables or not
 * @param pathNaming strategy used to render attribute paths in OQL; must not be null
 */
GeodeQueryVisitor(boolean useBindVariables, PathNaming pathNaming) {
  // Any expression type this visitor does not explicitly handle is an error.
  super(e -> { throw new UnsupportedOperationException(); });
  // Fix: the null-check message previously said "pathFn", which does not
  // match the actual parameter name and would mislead in the NPE text.
  this.pathNaming = Objects.requireNonNull(pathNaming, "pathNaming");
  this.variables = new ArrayList<>();
  this.useBindVariables = useBindVariables;
}
/**
 * Dispatches a call expression: negated set operators are rewritten as
 * {@code NOT (...)}, logical connectives are joined recursively, and the
 * rest is routed by arity to the binary/unary renderers.
 */
@Override
public Oql visit(Call call) {
  final Operator op = call.operator();
  final List<Expression> args = call.arguments();

  // Geode has no "x not in [...]" / "not empty" syntax. Rewrite
  // "foo not in [1, 2, 3]" as "not (foo in [1, 2, 3])".
  if (op == Operators.NOT_IN || op == IterableOperators.NOT_EMPTY) {
    return visit(Expressions.not(Expressions.call(inverseOp(op), args)));
  }

  if (op == Operators.AND || op == Operators.OR) {
    Preconditions.checkArgument(!args.isEmpty(), "Size should be >=1 for %s but was %s", op, args.size());
    // Render each operand parenthesized and join with the connective:
    // (a) AND (b) AND (c)
    StringBuilder joined = new StringBuilder();
    for (Expression arg : args) {
      if (joined.length() > 0) {
        joined.append(") ").append(op.name()).append(" (");
      }
      joined.append(arg.accept(this).oql());
    }
    return new Oql(variables, "(" + joined + ")");
  }

  if (op.arity() == Operator.Arity.BINARY) {
    return binaryOperator(call);
  }
  if (op.arity() == Operator.Arity.UNARY) {
    return unaryOperator(call);
  }

  throw new UnsupportedOperationException("Don't know how to handle " + call);
}
/** Maps a negated set operator to its positive counterpart (used for the NOT rewrite). */
private static Operator inverseOp(Operator op) {
  if (op == Operators.NOT_IN) {
    return Operators.IN;
  }
  if (op == IterableOperators.NOT_EMPTY) {
    return IterableOperators.IS_EMPTY;
  }
  throw new IllegalArgumentException("Don't know inverse operator of " + op);
}
/**
 * Renders an operator taking a single argument, e.g. {@code NOT},
 * {@code IS_PRESENT}, {@code IS_EMPTY} or the string case conversions.
 */
private Oql unaryOperator(Call call) {
  final Operator op = call.operator();
  final List<Expression> args = call.arguments();
  Preconditions.checkArgument(args.size() == 1,
      "Size should be == 1 for unary operator %s but was %s", op, args.size());

  final String path = args.get(0).accept(this).oql();

  if (op instanceof OptionalOperators) {
    // Presence checks map onto Geode's is_defined / is_undefined functions,
    // combined with an explicit null comparison.
    final String expr;
    if (op == OptionalOperators.IS_PRESENT) {
      expr = String.format("is_defined(%s) AND %s != null", path, path);
    } else {
      expr = String.format("is_undefined(%s) OR %s = null", path, path);
    }
    return oql(expr);
  }
  if (op == Operators.NOT) {
    return oql(String.format("NOT (%s)", path));
  }
  if (op == IterableOperators.IS_EMPTY || op == StringOperators.TO_LOWER_CASE
      || op == StringOperators.TO_UPPER_CASE) {
    // These render as a no-argument method invocation on the path.
    return oql(String.format("%s.%s()", path, toMethodName(op)));
  }

  throw new UnsupportedOperationException("Unknown unary operator " + call);
}
/**
 * Renders operators taking two arguments: comparisons ({@code =}, {@code <},
 * ...), set membership ({@code IN}) and string/collection method calls.
 */
private Oql binaryOperator(Call call) {
  final Operator op = call.operator();
  final List<Expression> args = call.arguments();
  Preconditions.checkArgument(args.size() == 2, "Size should be 2 for %s but was %s on call %s", op, args.size(), call);

  Expression left = args.get(0); // left node
  Expression right = args.get(1); // right node

  // Operators rendered as a method invocation on the left operand:
  // path.method(arg)
  if (op == IterableOperators.CONTAINS || op == StringOperators.MATCHES
      || op == StringOperators.CONTAINS || op == StringOperators.STARTS_WITH
      || op == StringOperators.ENDS_WITH) {
    return oql(String.format("%s.%s(%s)", left.accept(this).oql(), toMethodName(op), right.accept(this).oql()));
  }

  // Operators comparing against a method result: path.method = value
  if (op == StringOperators.HAS_LENGTH || op == IterableOperators.HAS_SIZE) {
    return oql(String.format("%s.%s = %s", left.accept(this).oql(), toMethodName(op), right.accept(this).oql()));
  }

  final String operator;
  if (op == Operators.EQUAL) {
    operator = "=";
  } else if (op == Operators.NOT_EQUAL) {
    operator = "!=";
  } else if (op == Operators.IN || op == Operators.NOT_IN) {
    if (right instanceof Constant) {
      // Optimization for constant IN-lists: de-duplicate the values via an
      // insertion-ordered immutable set.
      right = Expressions.constant(
          ImmutableSet.copyOf(Visitors.toConstant(right).values()));
    }
    operator = op == Operators.IN ? "IN" : "NOT IN";
  } else if (op == ComparableOperators.GREATER_THAN) {
    operator = ">";
  } else if (op == ComparableOperators.GREATER_THAN_OR_EQUAL) {
    operator = ">=";
  } else if (op == ComparableOperators.LESS_THAN) {
    operator = "<";
  } else if (op == ComparableOperators.LESS_THAN_OR_EQUAL) {
    operator = "<=";
  } else {
    throw new IllegalArgumentException("Unknown binary operator " + call);
  }

  return oql(String.format("%s %s %s", left.accept(this).oql(), operator, right.accept(this).oql()));
}
/**
 * Renders a property path. When constants are inlined (no bind variables),
 * enum-typed paths get an explicit {@code .name} accessor so they compare
 * against string literals: {@code enum.name = 'VALUE'}.
 */
@Override
public Oql visit(Path path) {
    final Type type = path.returnType();
    final boolean isEnum = (type instanceof Class<?>) && ((Class<?>) type).isEnum();
    String rendered = pathNaming.name(path);
    if (isEnum && !useBindVariables) {
        rendered = rendered + ".name";
    }
    return oql(rendered);
}
/**
 * Renders a constant value: either registers it as a positional bind
 * variable ({@code $1}, {@code $2}, ...) or inlines it as an OQL literal.
 */
@Override
public Oql visit(Constant constant) {
    if (useBindVariables) {
        variables.add(constant.value());
        return oql("$" + variables.size());
    }
    return oql(valueToString(constant.value()));
}
/**
 * Wraps an OQL fragment in a new {@link Oql} that shares this visitor's
 * bind-variable list.
 */
private Oql oql(String fragment) {
    return new Oql(variables, fragment);
}
/**
 * Maps an operator to the OQL method name it is rendered as,
 * e.g. {@code CONTAINS -> contains}, {@code HAS_SIZE -> size}.
 */
private static String toMethodName(Operator op) {
    if (op == IterableOperators.IS_EMPTY) {
        return "isEmpty";
    }
    if (op == StringOperators.TO_LOWER_CASE) {
        return "toLowerCase";
    }
    if (op == StringOperators.TO_UPPER_CASE) {
        return "toUpperCase";
    }
    if (op == StringOperators.HAS_LENGTH) {
        return "length";
    }
    if (op == IterableOperators.HAS_SIZE) {
        return "size";
    }
    if (op == StringOperators.CONTAINS || op == IterableOperators.CONTAINS) {
        return "contains";
    }
    if (op == StringOperators.STARTS_WITH) {
        return "startsWith";
    }
    if (op == StringOperators.ENDS_WITH) {
        return "endsWith";
    }
    if (op == StringOperators.MATCHES) {
        return "matches";
    }
    throw new UnsupportedOperationException("Don't know how to handle Operator " + op);
}
/** Renders a Java value as an inline OQL literal. */
private static String valueToString(Object constantValue) {
    return OqlLiterals.fromObject(constantValue);
}
}
| |
/*
* Copyright (c) 2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.developerstudio.eclipse.greg.base.ui.dialog;
import org.apache.axis2.AxisFault;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.viewers.ColumnWeightData;
import org.eclipse.jface.viewers.TableLayout;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.TableEditor;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import org.wso2.developerstudio.eclipse.greg.base.Activator;
import org.wso2.developerstudio.eclipse.greg.base.model.RegistryNode;
import org.wso2.developerstudio.eclipse.greg.base.model.RegistryResourceNode;
import org.wso2.developerstudio.eclipse.greg.base.util.BaseConstants;
import org.wso2.developerstudio.eclipse.greg.resource.authorization.ResourceAdmin;
import org.wso2.developerstudio.eclipse.greg.resource.authorization.ResourceAdmin.Role;
import org.wso2.developerstudio.eclipse.logging.core.IDeveloperStudioLog;
import org.wso2.developerstudio.eclipse.logging.core.Logger;
import org.wso2.developerstudio.eclipse.usermgt.remote.UserManager;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Dialog showing the role-based permissions (read / write / delete /
 * authorize) of a single registry resource. Every role known to the server
 * (except "admin", whose permissions are fixed) becomes a table row with a
 * checkbox per permission; toggling a checkbox immediately pushes the change
 * to the server through {@link ResourceAdmin}.
 */
public class UserPermissionDialog extends Dialog {
    private static IDeveloperStudioLog log = Logger.getLog(Activator.PLUGIN_ID);
    /** Column titles; index 0 is the role name, 1..4 are permission columns. */
    private String[] titles = { "Role", "Read", "Write", "Delete", "Authorize" };
    private Table table;
    private RegistryResourceNode regResourceNode;
    private Button check = null;

    /**
     * @param parentShell shell the dialog is attached to
     * @param regNode     registry resource whose permissions are edited
     */
    public UserPermissionDialog(Shell parentShell, RegistryResourceNode regNode) {
        super(parentShell);
        this.regResourceNode = regNode;
    }

    public void create() {
        super.create();
    }

    /**
     * Builds the dialog contents: sets the shell title from the resource URL
     * and path, creates the permission table and loads its rows from the
     * server. Loading is best-effort: failures are logged and leave the
     * table empty.
     */
    protected Control createDialogArea(final Composite parent) {
        parent.getShell().setText("Permissions for " +
                regResourceNode.getConnectionInfo().getUrl().toString() +
                regResourceNode.getRegistryResourcePath());
        GridLayout gridLayout = new GridLayout(1, true);
        gridLayout.marginWidth = 5;
        parent.setLayout(gridLayout);
        Group group = new Group(parent, SWT.FILL);
        group.setLayoutData(new GridData(GridData.FILL_BOTH));
        gridLayout = new GridLayout(1, true);
        group.setLayout(gridLayout);
        createTable(group);
        try {
            loadData();
        } catch (Exception e) {
            log.error(e);
        }
        return super.createDialogArea(parent);
    }

    /**
     * Creates the (initially empty) permission table inside the given
     * composite.
     */
    public void createTable(Composite composite) {
        table = new Table(composite, SWT.BORDER | SWT.H_SCROLL | SWT.V_SCROLL);
        table.setHeaderVisible(false);
        table.setLinesVisible(false);
        table.setLayoutData(new GridData(GridData.FILL_BOTH));
        createTableColumns();
    }

    /**
     * Creates one column per entry of {@link #titles}: a wide, resizable
     * role column followed by fixed-width, centered permission columns.
     */
    public void createTableColumns() {
        TableLayout tableLayout = new TableLayout();
        table.setLayout(tableLayout);
        tableLayout.addColumnData(new ColumnWeightData(10, 150, true));
        TableColumn column = new TableColumn(table, SWT.NONE | SWT.DM_FILL_NONE);
        column.setText(titles[0]);
        column.setAlignment(SWT.LEFT);
        for (int i = 1; i < titles.length; i++) {
            tableLayout.addColumnData(new ColumnWeightData(10, 80, false));
            column = new TableColumn(table, SWT.NONE);
            column.setText(titles[i]);
            column.setAlignment(SWT.CENTER);
        }
        // NOTE(review): the original attached an empty SelectionListener to the
        // last column; it was a no-op and has been removed.
    }

    /** Indexes the server-side permission assignments by role name. */
    private Map<String, Role> getRolePermissionMap(List<Role> roles) {
        Map<String, Role> roleMap = new HashMap<String, Role>();
        for (Role role : roles) {
            roleMap.put(role.getRole(), role);
        }
        return roleMap;
    }

    /**
     * Fetches every role and the per-resource permission assignments from the
     * server and renders one table row per role. The "admin" role is skipped
     * because its permissions cannot be changed.
     *
     * @throws RemoteException on a communication failure
     * @throws Exception       on any other server error
     */
    public void loadData() throws RemoteException, Exception {
        ResourceAdmin resourceAdmin = regResourceNode.getConnectionInfo().getResourceAdmin();
        String resourcePath = regResourceNode.getRegistryResourcePath();
        ArrayList<Role> resourceInfo = resourceAdmin.getPermissionPerResource(resourcePath);
        List<String> allRoles = new ArrayList<String>(Arrays.asList(regResourceNode.getConnectionInfo()
                .getUserManagerContent().getUserManager().getRoles("*")));
        table.removeAll();
        Map<String, Role> rolePermissionMap = getRolePermissionMap(resourceInfo);
        for (String roleName : allRoles) {
            if (roleName.equals("admin")) {
                continue;
            }
            RoleData roleData = getRoleData(roleName, resourcePath);
            Role role = rolePermissionMap.get(roleName);
            if (role != null) {
                // mark each permission the server reports as assigned
                for (String assigned : role.getAssignedPermission()) {
                    if (BaseConstants.READ.equals(assigned)) {
                        roleData.setReadPerm(true);
                    }
                    if (BaseConstants.WRITE.equals(assigned)) {
                        roleData.setWritePerm(true);
                    }
                    if (BaseConstants.DELETE.equals(assigned)) {
                        roleData.setDeletePerm(true);
                    }
                    if (BaseConstants.AUTHORIZE.equals(assigned)) {
                        roleData.setAuthPerm(true);
                    }
                }
            }
            TableItem tableItem = new TableItem(table, SWT.NULL);
            tableItem.setText(roleData.role);
            createTable(tableItem, roleData);
        }
    }

    /** Adds the four permission checkboxes (columns 1..4) to one table row. */
    private void createTable(TableItem tableItem, RoleData roleData) {
        tableItem.setData(roleData);
        createCheckBoxes(1, tableItem, 0, roleData);
        createCheckBoxes(2, tableItem, 1, roleData);
        createCheckBoxes(3, tableItem, 2, roleData);
        createCheckBoxes(4, tableItem, 3, roleData);
    }

    /**
     * @param roleName     role the data object represents
     * @param resourcePath registry path of the resource
     * @return fresh per-row state object with all permissions cleared
     */
    private RoleData getRoleData(String roleName, String resourcePath) {
        return new RoleData(roleName, resourcePath);
    }

    /**
     * Maps a permission-type index (0..3) to the permission constant used as
     * the checkbox label; unknown indices yield an empty label.
     */
    private static String permissionForType(int type) {
        switch (type) {
        case 0:
            return BaseConstants.READ;
        case 1:
            return BaseConstants.WRITE;
        case 2:
            return BaseConstants.DELETE;
        case 3:
            return BaseConstants.AUTHORIZE;
        default:
            return "";
        }
    }

    /**
     * Places one permission checkbox into the given table cell. Rows for the
     * admin role are rendered checked and read-only; other rows reflect the
     * current {@link RoleData} flags and push changes to the server via
     * {@link RolePermissionSelectionListener}.
     *
     * @param tableIndex column index of the cell (1..4)
     * @param tabItem    row the checkbox belongs to
     * @param type       permission type index (0=read .. 3=authorize)
     * @param roleData   state object backing the row
     */
    private void createCheckBoxes(int tableIndex, TableItem tabItem, int type, RoleData roleData) {
        TableEditor editor = new TableEditor(table);
        String permission = permissionForType(type);
        boolean value;
        if (roleData.role.equals(BaseConstants.ADMIN)) {
            // admin always holds every permission and cannot be edited
            check = new Button(table, SWT.CHECK | SWT.READ_ONLY);
            value = true;
        } else {
            check = new Button(table, SWT.CHECK);
            switch (type) {
            case 0:
                value = roleData.readPerm;
                break;
            case 1:
                value = roleData.writePerm;
                break;
            case 2:
                value = roleData.deletePerm;
                break;
            case 3:
                value = roleData.authPerm;
                break;
            default:
                value = false;
                break;
            }
        }
        check.setBackground(table.getBackground());
        editor.grabHorizontal = true;
        editor.setEditor(check, tabItem, tableIndex);
        check.setText(permission);
        check.setSelection(value);
        editor.layout();
        check.addSelectionListener(new RolePermissionSelectionListener(roleData));
    }

    /**
     * Reacts to a permission checkbox being toggled: updates the backing
     * {@link RoleData} and immediately persists all granted permissions of
     * the role to the server.
     */
    public class RolePermissionSelectionListener implements SelectionListener {
        RoleData roleData;
        int type;

        /**
         * @param roleData row state this listener mutates
         */
        public RolePermissionSelectionListener(RoleData roleData) {
            this.roleData = roleData;
            // BUGFIX: the original declared a shadowing local "int type" here,
            // which left the field at its default value; assign the field.
            if (roleData.readPerm) {
                type = 0;
            } else if (roleData.writePerm) {
                type = 1;
            } else if (roleData.deletePerm) {
                type = 2;
            } else if (roleData.authPerm) {
                type = 3;
            } else {
                type = -1;
            }
        }

        /**
         * Triggered when a checkbox is selected or de-selected.
         */
        public void widgetSelected(SelectionEvent event) {
            boolean selected = ((Button) event.widget).getSelection();
            String permission = ((Button) event.widget).getText();
            // BUGFIX: compare the label with equals() instead of reference ==
            if (BaseConstants.READ.equals(permission)) {
                roleData.setReadPerm(selected);
                type = 0;
            } else if (BaseConstants.WRITE.equals(permission)) {
                roleData.setWritePerm(selected);
                type = 1;
            } else if (BaseConstants.DELETE.equals(permission)) {
                roleData.setDeletePerm(selected);
                type = 2;
            } else if (BaseConstants.AUTHORIZE.equals(permission)) {
                roleData.setAuthPerm(selected);
                type = 3;
            }
            try {
                grantPermissionToRole(roleData);
            } catch (Exception e) {
                // persisting is best-effort from the UI thread; just log
                log.error(e);
            }
        }

        public void widgetDefaultSelected(SelectionEvent event) {
        }
    }

    /**
     * Persists every permission currently granted to the role for this
     * dialog's resource.
     *
     * @throws Exception on a server or communication failure
     */
    private void grantPermissionToRole(RoleData roleData) throws Exception {
        ResourceAdmin resourceAdmin = regResourceNode.getConnectionInfo().getResourceAdmin();
        if (roleData.isReadPerm()) {
            setPermission(resourceAdmin, roleData.role, BaseConstants.READ);
        }
        if (roleData.isWritePerm()) {
            setPermission(resourceAdmin, roleData.role, BaseConstants.WRITE);
        }
        if (roleData.isDeletePerm()) {
            setPermission(resourceAdmin, roleData.role, BaseConstants.DELETE);
        }
        if (roleData.isAuthPerm()) {
            setPermission(resourceAdmin, roleData.role, BaseConstants.AUTHORIZE);
        }
    }

    /** Grants a single permission ("1" = allow) for the dialog's resource. */
    private void setPermission(ResourceAdmin resourceAdmin, String role, String permission) throws Exception {
        resourceAdmin.setPermissionPerResource(regResourceNode.getRegistryResourcePath(),
                permission, role, "1");
    }

    protected void okPressed() {
        super.okPressed();
    }

    /**
     * @return all role names known to the server's user manager
     */
    public String[] getRolesPerUser() {
        UserManager um = regResourceNode.getConnectionInfo().getUserManagerContent().getUserManager();
        return um.getRoles("*");
    }

    /**
     * @return all role names except "admin"
     */
    public ArrayList<String> getRolesPerResource() {
        ArrayList<String> validRoles = new ArrayList<String>();
        for (String role : getRolesPerUser()) {
            if (!role.equals("admin")) {
                validRoles.add(role);
            }
        }
        return validRoles;
    }

    /**
     * Value object holding one role's permission flags for a resource path.
     */
    public static class RoleData {
        public String role;
        // retained for compatibility with possible external users; unused here
        String[] allRoles;
        String[] allPermList;
        String[] selectedPermissionList;
        RegistryNode regData;
        String resourcePath;
        private boolean readPerm = false;
        private boolean writePerm = false;
        private boolean deletePerm = false;
        private boolean authPerm = false;

        public RoleData(String role, String resourcePath) {
            this.role = role;
            this.resourcePath = resourcePath;
        }

        public String getResourcePath() {
            return resourcePath;
        }

        public void setResourcePath(String resourcePath) {
            this.resourcePath = resourcePath;
        }

        public boolean isReadPerm() {
            return readPerm;
        }

        public void setReadPerm(boolean readPerm) {
            this.readPerm = readPerm;
        }

        public boolean isWritePerm() {
            return writePerm;
        }

        public void setWritePerm(boolean writePerm) {
            this.writePerm = writePerm;
        }

        public boolean isDeletePerm() {
            return deletePerm;
        }

        public void setDeletePerm(boolean deletePerm) {
            this.deletePerm = deletePerm;
        }

        public boolean isAuthPerm() {
            return authPerm;
        }

        public void setAuthPerm(boolean authPerm) {
            this.authPerm = authPerm;
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.