gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package com.readlearncode.dukesbookshop.restclient;
import com.readlearncode.dukesbookshop.domain.*;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.enterprise.context.ApplicationScoped;
import javax.json.JsonArray;
import javax.json.JsonObject;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Link;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
 * JAX-RS client for the book REST API: fetches, caches, and deletes {@link Book} resources.
 *
 * <p>Source code github.com/readlearncode
 *
 * @author Alex Theedom www.readlearncode.com
 * @version 1.0
 */
@ApplicationScoped
public class BookServiceImpl implements BookService {

    private static final String API_URL = "http://localhost:8081/rest-server";
    private static final String BOOKS_ENDPOINT = API_URL + "/api/books";

    // Snapshot of the last successful getBooks() response. deleteBook() resolves the
    // "delete" hyperlink from this cache, so getBooks() must run at least once first.
    private List<Book> cachedBooks = new ArrayList<>();

    private Client client;

    @PostConstruct
    public void initialise() {
        client = ClientBuilder.newClient();
    }

    /**
     * Fetches all books from the REST endpoint and refreshes the local cache.
     *
     * @return an unmodifiable snapshot of the books returned by the server
     */
    @Override
    public List<Book> getBooks() {
        WebTarget target = client.target(BOOKS_ENDPOINT);
        Response rep = target.request(MediaType.APPLICATION_JSON).get();

        // Hypermedia links delivered in the HTTP headers, if any.
        Set<Link> links = rep.getLinks();
        System.out.println("links: " + links);

        JsonArray response = rep.readEntity(JsonArray.class);
        System.out.println("response: " + response);

        // Build into a fresh local list and swap it in atomically at the end, instead of
        // clearing/mutating the shared cache while other callers may be reading it
        // (replaces the previous "cachedBooks = new ArrayList<>(); // Fix this hack").
        List<Book> books = new ArrayList<>(response.size());
        for (int i = 0; i < response.size(); i++) {
            JsonObject bookJson = response.getJsonObject(i);
            List<Author> authors = extractAuthors(bookJson.getJsonArray("authors"));
            List<LinkResource> hyperlinks = extractLinks(bookJson.getJsonArray("links"));
            Book book = new BookBuilder()
                    .setId(bookJson.getString("id"))
                    .setTitle(bookJson.getString("title"))
                    .setDescription(bookJson.getString("description"))
                    .setImageFileName(bookJson.getString("imageFileName"))
                    .setAuthors(authors)
                    .setPublished(bookJson.getString("published"))
                    .setLink(bookJson.getString("link"))
                    .setHyperlinks(hyperlinks)
                    .createBook();
            books.add(book);
        }
        cachedBooks = books;
        return Collections.unmodifiableList(books);
    }

    /**
     * Fetches a single book by its id.
     *
     * @param id the book identifier appended to the books endpoint
     * @return the book as returned by the server
     * @throws IllegalStateException if the request is interrupted, times out, or fails
     */
    @Override
    public Book getBook(String id) {
        WebTarget target = client.target(BOOKS_ENDPOINT + "/" + id);
        Future<Response> bookCall = target.request(MediaType.APPLICATION_JSON).async().get();

        // Future.get(timeout) already blocks, so the old busy-wait
        // "while (!bookCall.isDone());" spin loop was pure wasted CPU and is removed.
        Response response;
        try {
            response = bookCall.get(60_000, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            // Restore the interrupt flag for callers further up the stack.
            Thread.currentThread().interrupt();
            throw new IllegalStateException("Interrupted while fetching book " + id, e);
        } catch (ExecutionException | TimeoutException e) {
            // Previously these were printStackTrace()'d and execution fell through to a
            // guaranteed NullPointerException on a null response; fail explicitly instead.
            throw new IllegalStateException("Failed to fetch book " + id, e);
        }

        Set<Link> links = response.getLinks();
        System.out.println("links: " + links);

        JsonObject jsonResponse = response.readEntity(JsonObject.class);
        System.out.println("jsonResponse: " + jsonResponse);

        List<Author> authors = extractAuthors(jsonResponse.getJsonArray("authors"));
        System.out.println("authors: " + authors);
        List<LinkResource> hyperlinks = extractLinks(jsonResponse.getJsonArray("links"));

        // NOTE(review): unlike getBooks(), this variant also maps "price" and prefixes the
        // image file name with API_URL — presumably intentional for the detail view; confirm.
        Book book = new BookBuilder()
                .setId(jsonResponse.getString("id"))
                .setTitle(jsonResponse.getString("title"))
                .setDescription(jsonResponse.getString("description"))
                .setPrice((float) jsonResponse.getInt("price"))
                .setImageFileName(API_URL + jsonResponse.getString("imageFileName"))
                .setAuthors(authors)
                .setPublished(jsonResponse.getString("published"))
                .setLink(jsonResponse.getString("link"))
                .setHyperlinks(hyperlinks)
                .createBook();
        System.out.println("book: " + book);
        return book;
    }

    /**
     * Deletes a book by following the "delete" hypermedia link cached by {@link #getBooks()}.
     *
     * @param isbn the id of the book to delete
     * @throws IllegalArgumentException if the book is not in the cache
     * @throws IllegalStateException    if the cached book exposes no "delete" link
     */
    @Override
    public void deleteBook(String isbn) {
        System.out.println("deleteBook method: " + cachedBooks);
        System.out.println("deleteBook isbn: " + isbn);

        // Replaces the two unchecked Optional.get() calls, which threw a bare
        // NoSuchElementException with no indication of which lookup failed.
        String uri = cachedBooks.stream()
                .filter(book -> book.getId().equals(isbn))
                .map(Hypermedia::getLinks)
                .findFirst()
                .orElseThrow(() -> new IllegalArgumentException("No cached book with id " + isbn))
                .stream()
                .filter(linkResource -> linkResource.getRel().equals("delete"))
                .findFirst()
                .orElseThrow(() -> new IllegalStateException("No delete link for book " + isbn))
                .getUri();
        System.out.println("Delete Book uri: " + uri);

        WebTarget target = client.target(uri);
        Response response = target.request(MediaType.APPLICATION_JSON).delete();
        System.out.println("Delete Book ISBN: " + isbn);
        System.out.println("Delete Book ISBN: response " + response);
    }

    @Override
    public Book saveBook(Book book) {
        throw new UnsupportedOperationException();
    }

    /**
     * Extracts the author list from the json array.
     *
     * @param authorArray the JSON array that contains the author list
     * @return an unmodifiable list of authors; missing fields default to ""
     */
    public List<Author> extractAuthors(JsonArray authorArray) {
        List<Author> authors = new ArrayList<>();
        for (int j = 0; j < authorArray.size(); j++) {
            JsonObject jObject = authorArray.getJsonObject(j);
            String id = jObject.getString("id", "");
            String firstName = jObject.getString("firstName", "");
            String lastName = jObject.getString("lastName", "");
            String blogURL = jObject.getString("blogURL", "");
            authors.add(new Author(id, firstName, lastName, blogURL));
        }
        return Collections.unmodifiableList(authors);
    }

    /**
     * Extracts the hypermedia links from the json array.
     *
     * @param linkArray the JSON array that contains the link list
     * @return an unmodifiable list of links; missing fields default to ""
     */
    private List<LinkResource> extractLinks(JsonArray linkArray) {
        List<LinkResource> links = new ArrayList<>();
        for (int j = 0; j < linkArray.size(); j++) {
            JsonObject jObject = linkArray.getJsonObject(j);
            String rel = jObject.getString("rel", "");
            String type = jObject.getString("type", "");
            String uri = jObject.getString("uri", "");
            links.add(new LinkResource(rel, type, uri));
        }
        return Collections.unmodifiableList(links);
    }

    @PreDestroy
    private void destroy() {
        // Guard against the container never having invoked @PostConstruct.
        if (client != null) {
            client.close();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.dataflow;
import static org.apache.beam.sdk.options.ExperimentalOptions.hasExperiment;
import static org.hamcrest.MatcherAssert.assertThat;
import com.google.api.services.dataflow.model.JobMessage;
import com.google.api.services.dataflow.model.JobMetrics;
import com.google.api.services.dataflow.model.MetricUpdate;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.List;
import java.util.concurrent.Callable;
import javax.annotation.Nullable;
import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions;
import org.apache.beam.runners.dataflow.util.MonitoringUtil;
import org.apache.beam.runners.dataflow.util.MonitoringUtil.JobMessagesHandler;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.PipelineResult.State;
import org.apache.beam.sdk.PipelineRunner;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.testing.TestPipelineOptions;
import org.joda.time.Duration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * {@link TestDataflowRunner} is a pipeline runner that wraps a {@link DataflowRunner} when running
 * tests against the {@link TestPipeline}.
 *
 * @see TestPipeline
 */
public class TestDataflowRunner extends PipelineRunner<DataflowPipelineJob> {
  private static final String TENTATIVE_COUNTER = "tentative";
  private static final Logger LOG = LoggerFactory.getLogger(TestDataflowRunner.class);

  private final TestDataflowPipelineOptions options;
  private final DataflowClient dataflowClient;
  private final DataflowRunner runner;
  private int expectedNumberOfAssertions = 0;

  TestDataflowRunner(TestDataflowPipelineOptions options, DataflowClient client) {
    this.options = options;
    this.dataflowClient = client;
    this.runner = DataflowRunner.fromOptions(options);
  }

  /** Constructs a runner from the provided options. */
  public static TestDataflowRunner fromOptions(PipelineOptions options) {
    TestDataflowPipelineOptions dataflowOptions = options.as(TestDataflowPipelineOptions.class);
    String tempLocation =
        Joiner.on("/")
            .join(dataflowOptions.getTempRoot(), dataflowOptions.getJobName(), "output", "results");
    dataflowOptions.setTempLocation(tempLocation);
    return new TestDataflowRunner(
        dataflowOptions, DataflowClient.create(options.as(DataflowPipelineOptions.class)));
  }

  @VisibleForTesting
  static TestDataflowRunner fromOptionsAndClient(
      TestDataflowPipelineOptions options, DataflowClient client) {
    return new TestDataflowRunner(options, client);
  }

  @Override
  public DataflowPipelineJob run(Pipeline pipeline) {
    return run(pipeline, runner);
  }

  /**
   * Runs the pipeline with the given runner, waits for termination, and throws an
   * {@link AssertionError} for a definite PAssert failure or a {@link RuntimeException}
   * for any other job failure.
   */
  DataflowPipelineJob run(Pipeline pipeline, DataflowRunner runner) {
    updatePAssertCount(pipeline);
    TestPipelineOptions testPipelineOptions = options.as(TestPipelineOptions.class);
    final DataflowPipelineJob job;
    job = runner.run(pipeline);
    LOG.info(
        "Running Dataflow job {} with {} expected assertions.",
        job.getJobId(),
        expectedNumberOfAssertions);
    assertThat(job, testPipelineOptions.getOnCreateMatcher());

    // Primitive boolean: the value is always assigned, and a boxed Boolean invites
    // accidental null unboxing.
    boolean jobSuccess;
    Optional<Boolean> allAssertionsPassed;

    ErrorMonitorMessagesHandler messageHandler =
        new ErrorMonitorMessagesHandler(job, new MonitoringUtil.LoggingHandler());

    if (options.isStreaming()) {
      jobSuccess = waitForStreamingJobTermination(job, messageHandler);
      // No metrics in streaming
      allAssertionsPassed = Optional.absent();
    } else {
      jobSuccess = waitForBatchJobTermination(job, messageHandler);
      allAssertionsPassed = checkForPAssertSuccess(job);
    }

    // If there is a certain assertion failure, throw the most precise exception we can.
    // There are situations where the metric will not be available, but as long as we recover
    // the actionable message from the logs it is acceptable.
    if (!allAssertionsPassed.isPresent()) {
      LOG.warn("Dataflow job {} did not output a success or failure metric.", job.getJobId());
    } else if (!allAssertionsPassed.get()) {
      throw new AssertionError(errorMessage(job, messageHandler));
    }

    // Other failures, or jobs where metrics fell through for some reason, will manifest
    // as simply job failures.
    if (!jobSuccess) {
      throw new RuntimeException(errorMessage(job, messageHandler));
    }

    // If there is no reason to immediately fail, run the success matcher.
    assertThat(job, testPipelineOptions.getOnSuccessMatcher());
    return job;
  }

  /**
   * Return {@code true} if the job succeeded or {@code false} if it terminated in any other
   * manner, including timeout, cancellation, or interruption.
   */
  private boolean waitForStreamingJobTermination(
      final DataflowPipelineJob job, ErrorMonitorMessagesHandler messageHandler) {
    // In streaming, there are infinite retries, so rather than timeout
    // we try to terminate early by polling and canceling if we see
    // an error message
    options.getExecutorService().submit(new CancelOnError(job, messageHandler));

    // Whether we canceled or not, this gets the final state of the job or times out
    State finalState;
    try {
      finalState =
          job.waitUntilFinish(
              Duration.standardSeconds(options.getTestTimeoutSeconds()), messageHandler);
    } catch (IOException e) {
      throw new RuntimeException(e);
    } catch (InterruptedException e) {
      // Restore the interrupt flag for callers further up the stack.
      // (The previous Thread.interrupted() call *cleared* it instead.)
      Thread.currentThread().interrupt();
      return false;
    }

    // Getting the final state may have timed out; it may not indicate a failure.
    // This cancellation may be the second
    if (finalState == null || !finalState.isTerminal()) {
      LOG.info(
          "Dataflow job {} took longer than {} seconds to complete, cancelling.",
          job.getJobId(),
          options.getTestTimeoutSeconds());
      try {
        job.cancel();
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
      return false;
    } else {
      return finalState == State.DONE && !messageHandler.hasSeenError();
    }
  }

  /** Return {@code true} if job state is {@code State.DONE}. {@code false} otherwise. */
  private boolean waitForBatchJobTermination(
      DataflowPipelineJob job, ErrorMonitorMessagesHandler messageHandler) {
    // (The redundant nested block that previously wrapped this body has been removed.)
    try {
      // A negative duration means wait indefinitely; batch jobs always terminate.
      job.waitUntilFinish(Duration.standardSeconds(-1), messageHandler);
    } catch (IOException e) {
      throw new RuntimeException(e);
    } catch (InterruptedException e) {
      // Restore the interrupt flag rather than clearing it with Thread.interrupted().
      Thread.currentThread().interrupt();
      return false;
    }
    return job.getState() == State.DONE;
  }

  /** Builds a failure message, with a generic fallback when no error text was captured. */
  private static String errorMessage(
      DataflowPipelineJob job, ErrorMonitorMessagesHandler messageHandler) {
    return Strings.isNullOrEmpty(messageHandler.getErrorMessage())
        ? String.format(
            "Dataflow job %s terminated in state %s but did not return a failure reason.",
            job.getJobId(), job.getState())
        : messageHandler.getErrorMessage();
  }

  @VisibleForTesting
  void updatePAssertCount(Pipeline pipeline) {
    if (hasExperiment(options, "beam_fn_api")) {
      // TODO[BEAM-1866]: FnAPI does not support metrics, so expect 0 assertions.
      expectedNumberOfAssertions = 0;
    } else {
      expectedNumberOfAssertions = PAssert.countAsserts(pipeline);
    }
  }

  /**
   * Check that PAssert expectations were met.
   *
   * <p>If the pipeline is not in a failed/cancelled state and no PAsserts were used within the
   * pipeline, then this method will state that all PAsserts succeeded.
   *
   * @return Optional.of(false) if we are certain a PAssert failed. Optional.of(true) if we are
   *     certain all PAsserts passed. Optional.absent() if the evidence is inconclusive, including
   *     when the pipeline may have failed for other reasons.
   */
  @VisibleForTesting
  Optional<Boolean> checkForPAssertSuccess(DataflowPipelineJob job) {
    JobMetrics metrics = getJobMetrics(job);
    if (metrics == null || metrics.getMetrics() == null) {
      LOG.warn("Metrics not present for Dataflow job {}.", job.getJobId());
      return Optional.absent();
    }

    int successes = 0;
    int failures = 0;
    for (MetricUpdate metric : metrics.getMetrics()) {
      if (metric.getName() == null
          || metric.getName().getContext() == null
          || !metric.getName().getContext().containsKey(TENTATIVE_COUNTER)) {
        // Don't double count using the non-tentative version of the metric.
        continue;
      }
      if (PAssert.SUCCESS_COUNTER.equals(metric.getName().getName())) {
        successes += ((BigDecimal) metric.getScalar()).intValue();
      } else if (PAssert.FAILURE_COUNTER.equals(metric.getName().getName())) {
        failures += ((BigDecimal) metric.getScalar()).intValue();
      }
    }

    if (failures > 0) {
      LOG.info(
          "Failure result for Dataflow job {}. Found {} success, {} failures out of "
              + "{} expected assertions.",
          job.getJobId(),
          successes,
          failures,
          expectedNumberOfAssertions);
      return Optional.of(false);
    } else if (successes >= expectedNumberOfAssertions) {
      LOG.info(
          "Success result for Dataflow job {}."
              + " Found {} success, {} failures out of {} expected assertions.",
          job.getJobId(),
          successes,
          failures,
          expectedNumberOfAssertions);
      return Optional.of(true);
    }

    // If the job failed, this is a definite failure. We only cancel jobs when they fail.
    State state = job.getState();
    if (state == State.FAILED || state == State.CANCELLED) {
      LOG.info(
          "Dataflow job {} terminated in failure state {} without reporting a failed assertion",
          job.getJobId(),
          state);
      return Optional.absent();
    }

    LOG.info(
        "Inconclusive results for Dataflow job {}."
            + " Found {} success, {} failures out of {} expected assertions.",
        job.getJobId(),
        successes,
        failures,
        expectedNumberOfAssertions);
    return Optional.absent();
  }

  /** Fetches job metrics, returning {@code null} (and logging) on I/O failure. */
  @Nullable
  @VisibleForTesting
  JobMetrics getJobMetrics(DataflowPipelineJob job) {
    JobMetrics metrics = null;
    try {
      metrics = dataflowClient.getJobMetrics(job.getJobId());
    } catch (IOException e) {
      LOG.warn("Failed to get job metrics: ", e);
    }
    return metrics;
  }

  @Override
  public String toString() {
    return "TestDataflowRunner#" + options.getAppName();
  }

  /**
   * Monitors job log output messages for errors.
   *
   * <p>Creates an error message representing the concatenation of all error messages seen.
   */
  private static class ErrorMonitorMessagesHandler implements JobMessagesHandler {
    private final DataflowPipelineJob job;
    private final JobMessagesHandler messageHandler;
    private final StringBuffer errorMessage;
    private volatile boolean hasSeenError;

    private ErrorMonitorMessagesHandler(
        DataflowPipelineJob job, JobMessagesHandler messageHandler) {
      this.job = job;
      this.messageHandler = messageHandler;
      this.errorMessage = new StringBuffer();
      this.hasSeenError = false;
    }

    @Override
    public void process(List<JobMessage> messages) {
      messageHandler.process(messages);
      for (JobMessage message : messages) {
        if ("JOB_MESSAGE_ERROR".equals(message.getMessageImportance())) {
          LOG.info(
              "Dataflow job {} threw exception. Failure message was: {}",
              job.getJobId(),
              message.getMessageText());
          errorMessage.append(message.getMessageText());
          hasSeenError = true;
        }
      }
    }

    boolean hasSeenError() {
      return hasSeenError;
    }

    String getErrorMessage() {
      return errorMessage.toString();
    }
  }

  /** Polls a streaming job and cancels it as soon as an error message is observed. */
  private static class CancelOnError implements Callable<Void> {
    private final DataflowPipelineJob job;
    private final ErrorMonitorMessagesHandler messageHandler;

    public CancelOnError(DataflowPipelineJob job, ErrorMonitorMessagesHandler messageHandler) {
      this.job = job;
      this.messageHandler = messageHandler;
    }

    @Override
    public Void call() throws Exception {
      while (true) {
        State jobState = job.getState();
        // Use the state we just polled; the original issued a second job.getState()
        // RPC here, racing against the job reaching a terminal state in between.
        if (messageHandler.hasSeenError() && !jobState.isTerminal()) {
          job.cancel();
          LOG.info("Cancelling Dataflow job {}", job.getJobId());
          return null;
        }
        if (jobState.isTerminal()) {
          return null;
        }
        Thread.sleep(3000L);
      }
    }
  }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/networkmanagement/v1/trace.proto
package com.google.cloud.networkmanagement.v1;
/**
*
*
* <pre>
* Details of the final state "deliver" and associated resource.
* </pre>
*
* Protobuf type {@code google.cloud.networkmanagement.v1.DeliverInfo}
*/
public final class DeliverInfo extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.networkmanagement.v1.DeliverInfo)
DeliverInfoOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeliverInfo.newBuilder() to construct.
// NOTE(review): generated protobuf code ("DO NOT EDIT") — only comments added here.
// Use DeliverInfo.newBuilder() to construct.
private DeliverInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Proto3 defaults: target is TARGET_UNSPECIFIED (0), resource_uri is the empty string.
private DeliverInfo() {
  target_ = 0;
  resourceUri_ = "";
}

// Used by the runtime to create fresh instances reflectively.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new DeliverInfo();
}

// Fields that were present on the wire but unknown to this schema version.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor used by the generated PARSER.
// Reads tag/value pairs until end of stream, preserving unrecognized fields.
private DeliverInfo(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // tag 0 signals end of the message
          done = true;
          break;
        case 8: // field 1 (target): varint-encoded enum
          {
            int rawValue = input.readEnum();
            target_ = rawValue;
            break;
          }
        case 18: // field 2 (resource_uri): length-delimited UTF-8 string
          {
            java.lang.String s = input.readStringRequireUtf8();
            resourceUri_ = s;
            break;
          }
        default:
          {
            // Stash unrecognized fields so the message round-trips losslessly.
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    // Always freeze whatever was parsed, even when parsing failed partway.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Message descriptor generated into TraceProto from google/cloud/networkmanagement/v1/trace.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.networkmanagement.v1.TraceProto
      .internal_static_google_cloud_networkmanagement_v1_DeliverInfo_descriptor;
}

// Maps the generated field accessors onto the descriptor's fields for reflection support.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.networkmanagement.v1.TraceProto
      .internal_static_google_cloud_networkmanagement_v1_DeliverInfo_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.networkmanagement.v1.DeliverInfo.class,
          com.google.cloud.networkmanagement.v1.DeliverInfo.Builder.class);
}
/**
 * Deliver target types.
 *
 * <p>Protobuf enum {@code google.cloud.networkmanagement.v1.DeliverInfo.Target}
 */
public enum Target implements com.google.protobuf.ProtocolMessageEnum {
  /** Target not specified. <code>TARGET_UNSPECIFIED = 0;</code> */
  TARGET_UNSPECIFIED(0),
  /** Target is a Compute Engine instance. <code>INSTANCE = 1;</code> */
  INSTANCE(1),
  /** Target is the internet. <code>INTERNET = 2;</code> */
  INTERNET(2),
  /** Target is a Google API. <code>GOOGLE_API = 3;</code> */
  GOOGLE_API(3),
  /** Target is a Google Kubernetes Engine cluster master. <code>GKE_MASTER = 4;</code> */
  GKE_MASTER(4),
  /** Target is a Cloud SQL instance. <code>CLOUD_SQL_INSTANCE = 5;</code> */
  CLOUD_SQL_INSTANCE(5),
  /** A wire value not known to this generated schema version. */
  UNRECOGNIZED(-1),
  ;

  /** Numeric wire value of TARGET_UNSPECIFIED. <code>TARGET_UNSPECIFIED = 0;</code> */
  public static final int TARGET_UNSPECIFIED_VALUE = 0;
  /** Numeric wire value of INSTANCE. <code>INSTANCE = 1;</code> */
  public static final int INSTANCE_VALUE = 1;
  /** Numeric wire value of INTERNET. <code>INTERNET = 2;</code> */
  public static final int INTERNET_VALUE = 2;
  /** Numeric wire value of GOOGLE_API. <code>GOOGLE_API = 3;</code> */
  public static final int GOOGLE_API_VALUE = 3;
  /** Numeric wire value of GKE_MASTER. <code>GKE_MASTER = 4;</code> */
  public static final int GKE_MASTER_VALUE = 4;
  /** Numeric wire value of CLOUD_SQL_INSTANCE. <code>CLOUD_SQL_INSTANCE = 5;</code> */
  public static final int CLOUD_SQL_INSTANCE_VALUE = 5;

  // UNRECOGNIZED carries no defined wire number, so asking for one is an error.
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static Target valueOf(int value) {
    return forNumber(value);
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value, or {@code null} if unknown.
   */
  public static Target forNumber(int value) {
    switch (value) {
      case 0:
        return TARGET_UNSPECIFIED;
      case 1:
        return INSTANCE;
      case 2:
        return INTERNET;
      case 3:
        return GOOGLE_API;
      case 4:
        return GKE_MASTER;
      case 5:
        return CLOUD_SQL_INSTANCE;
      default:
        return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<Target> internalGetValueMap() {
    return internalValueMap;
  }

  // Lite-runtime lookup table delegating to forNumber().
  private static final com.google.protobuf.Internal.EnumLiteMap<Target> internalValueMap =
      new com.google.protobuf.Internal.EnumLiteMap<Target>() {
        public Target findValueByNumber(int number) {
          return Target.forNumber(number);
        }
      };

  // Descriptor-based reflection support below.
  public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }

  public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
    return getDescriptor();
  }

  // First (index 0) nested enum type declared on the DeliverInfo message.
  public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
    return com.google.cloud.networkmanagement.v1.DeliverInfo.getDescriptor()
        .getEnumTypes()
        .get(0);
  }

  private static final Target[] VALUES = values();

  public static Target valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  // Numeric wire value backing this constant (-1 for UNRECOGNIZED).
  private final int value;

  private Target(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.cloud.networkmanagement.v1.DeliverInfo.Target)
}
public static final int TARGET_FIELD_NUMBER = 1;
// Stored as the raw wire integer so values unknown to this schema still round-trip.
private int target_;

/**
 * Target type where the packet is delivered to.
 *
 * <code>.google.cloud.networkmanagement.v1.DeliverInfo.Target target = 1;</code>
 *
 * @return The enum numeric value on the wire for target.
 */
@java.lang.Override
public int getTargetValue() {
  return target_;
}

/**
 * Target type where the packet is delivered to.
 *
 * <code>.google.cloud.networkmanagement.v1.DeliverInfo.Target target = 1;</code>
 *
 * @return The target, or {@code UNRECOGNIZED} if the stored wire value is unknown.
 */
@java.lang.Override
public com.google.cloud.networkmanagement.v1.DeliverInfo.Target getTarget() {
  @SuppressWarnings("deprecation")
  com.google.cloud.networkmanagement.v1.DeliverInfo.Target result =
      com.google.cloud.networkmanagement.v1.DeliverInfo.Target.valueOf(target_);
  return result == null
      ? com.google.cloud.networkmanagement.v1.DeliverInfo.Target.UNRECOGNIZED
      : result;
}

public static final int RESOURCE_URI_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; decoded lazily and cached on first access.
private volatile java.lang.Object resourceUri_;

/**
 * URI of the resource that the packet is delivered to.
 *
 * <code>string resource_uri = 2;</code>
 *
 * @return The resourceUri.
 */
@java.lang.Override
public java.lang.String getResourceUri() {
  java.lang.Object ref = resourceUri_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so subsequent calls skip the UTF-8 decode.
    resourceUri_ = s;
    return s;
  }
}

/**
 * URI of the resource that the packet is delivered to.
 *
 * <code>string resource_uri = 2;</code>
 *
 * @return The bytes for resourceUri.
 */
@java.lang.Override
public com.google.protobuf.ByteString getResourceUriBytes() {
  java.lang.Object ref = resourceUri_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString so subsequent calls skip the UTF-8 encode.
    resourceUri_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized initialization check: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields in proto3, so the message is always initialized.
  memoizedIsInitialized = 1;
  return true;
}

// Writes only non-default fields, per proto3 serialization semantics.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (target_
      != com.google.cloud.networkmanagement.v1.DeliverInfo.Target.TARGET_UNSPECIFIED
          .getNumber()) {
    output.writeEnum(1, target_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceUri_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, resourceUri_);
  }
  unknownFields.writeTo(output);
}

// Computes the serialized byte size once and memoizes it in memoizedSize.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (target_
      != com.google.cloud.networkmanagement.v1.DeliverInfo.Target.TARGET_UNSPECIFIED
          .getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, target_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceUri_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, resourceUri_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}

// Field-by-field equality, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.networkmanagement.v1.DeliverInfo)) {
    return super.equals(obj);
  }
  com.google.cloud.networkmanagement.v1.DeliverInfo other =
      (com.google.cloud.networkmanagement.v1.DeliverInfo) obj;
  if (target_ != other.target_) return false;
  if (!getResourceUri().equals(other.getResourceUri())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}

// Hash over descriptor, both fields, and unknown fields; memoized, consistent with equals().
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + TARGET_FIELD_NUMBER;
  hash = (53 * hash) + target_;
  hash = (37 * hash) + RESOURCE_URI_FIELD_NUMBER;
  hash = (53 * hash) + getResourceUri().hashCode();
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Static parse entry points for the supported input kinds; all delegate to PARSER
// or to the GeneratedMessageV3 I/O helpers.
public static com.google.cloud.networkmanagement.v1.DeliverInfo parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.networkmanagement.v1.DeliverInfo parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.networkmanagement.v1.DeliverInfo parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.networkmanagement.v1.DeliverInfo parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.networkmanagement.v1.DeliverInfo parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.networkmanagement.v1.DeliverInfo parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.networkmanagement.v1.DeliverInfo parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.networkmanagement.v1.DeliverInfo parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a leading varint length prefix before the message bytes.
public static com.google.cloud.networkmanagement.v1.DeliverInfo parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.networkmanagement.v1.DeliverInfo parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.networkmanagement.v1.DeliverInfo parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.networkmanagement.v1.DeliverInfo parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Creates a new builder by cloning the (empty) default instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Creates a new builder pre-populated with the fields of {@code prototype}. */
  public static Builder newBuilder(com.google.cloud.networkmanagement.v1.DeliverInfo prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless mergeFrom when this is the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Details of the final state "deliver" and associated resource.
   * </pre>
   *
   * Protobuf type {@code google.cloud.networkmanagement.v1.DeliverInfo}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.networkmanagement.v1.DeliverInfo)
      com.google.cloud.networkmanagement.v1.DeliverInfoOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.networkmanagement.v1.TraceProto
          .internal_static_google_cloud_networkmanagement_v1_DeliverInfo_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.networkmanagement.v1.TraceProto
          .internal_static_google_cloud_networkmanagement_v1_DeliverInfo_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.networkmanagement.v1.DeliverInfo.class,
              com.google.cloud.networkmanagement.v1.DeliverInfo.Builder.class);
    }

    // Construct using com.google.cloud.networkmanagement.v1.DeliverInfo.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // No nested-message fields, so there are no field builders to pre-create.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    /** Resets both fields to their proto3 defaults (enum 0, empty string). */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      target_ = 0;

      resourceUri_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.networkmanagement.v1.TraceProto
          .internal_static_google_cloud_networkmanagement_v1_DeliverInfo_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.networkmanagement.v1.DeliverInfo getDefaultInstanceForType() {
      return com.google.cloud.networkmanagement.v1.DeliverInfo.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.networkmanagement.v1.DeliverInfo build() {
      com.google.cloud.networkmanagement.v1.DeliverInfo result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    /** Copies the builder state into a new immutable message without an initialization check. */
    @java.lang.Override
    public com.google.cloud.networkmanagement.v1.DeliverInfo buildPartial() {
      com.google.cloud.networkmanagement.v1.DeliverInfo result =
          new com.google.cloud.networkmanagement.v1.DeliverInfo(this);
      result.target_ = target_;
      result.resourceUri_ = resourceUri_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dispatches to the type-specific merge when possible; otherwise falls back
    // to the reflective field-by-field merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.networkmanagement.v1.DeliverInfo) {
        return mergeFrom((com.google.cloud.networkmanagement.v1.DeliverInfo) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Proto3 merge semantics: a field from `other` overwrites ours only when it
    // is set to a non-default value.
    public Builder mergeFrom(com.google.cloud.networkmanagement.v1.DeliverInfo other) {
      if (other == com.google.cloud.networkmanagement.v1.DeliverInfo.getDefaultInstance())
        return this;
      if (other.target_ != 0) {
        setTargetValue(other.getTargetValue());
      }
      if (!other.getResourceUri().isEmpty()) {
        resourceUri_ = other.resourceUri_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields in proto3, so a DeliverInfo is always initialized.
      return true;
    }

    // Parses from the wire and merges into this builder. On a parse failure the
    // partially parsed message (if any) is still merged in before rethrowing.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.networkmanagement.v1.DeliverInfo parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.networkmanagement.v1.DeliverInfo) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // Stored as the raw wire value so unrecognized enum numbers round-trip.
    private int target_ = 0;
    /**
     *
     *
     * <pre>
     * Target type where the packet is delivered to.
     * </pre>
     *
     * <code>.google.cloud.networkmanagement.v1.DeliverInfo.Target target = 1;</code>
     *
     * @return The enum numeric value on the wire for target.
     */
    @java.lang.Override
    public int getTargetValue() {
      return target_;
    }
    /**
     *
     *
     * <pre>
     * Target type where the packet is delivered to.
     * </pre>
     *
     * <code>.google.cloud.networkmanagement.v1.DeliverInfo.Target target = 1;</code>
     *
     * @param value The enum numeric value on the wire for target to set.
     * @return This builder for chaining.
     */
    public Builder setTargetValue(int value) {

      target_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Target type where the packet is delivered to.
     * </pre>
     *
     * <code>.google.cloud.networkmanagement.v1.DeliverInfo.Target target = 1;</code>
     *
     * @return The target.
     */
    @java.lang.Override
    public com.google.cloud.networkmanagement.v1.DeliverInfo.Target getTarget() {
      @SuppressWarnings("deprecation")
      com.google.cloud.networkmanagement.v1.DeliverInfo.Target result =
          com.google.cloud.networkmanagement.v1.DeliverInfo.Target.valueOf(target_);
      // Wire values with no matching enum constant map to UNRECOGNIZED.
      return result == null
          ? com.google.cloud.networkmanagement.v1.DeliverInfo.Target.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * Target type where the packet is delivered to.
     * </pre>
     *
     * <code>.google.cloud.networkmanagement.v1.DeliverInfo.Target target = 1;</code>
     *
     * @param value The target to set.
     * @return This builder for chaining.
     */
    public Builder setTarget(com.google.cloud.networkmanagement.v1.DeliverInfo.Target value) {
      if (value == null) {
        throw new NullPointerException();
      }

      target_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Target type where the packet is delivered to.
     * </pre>
     *
     * <code>.google.cloud.networkmanagement.v1.DeliverInfo.Target target = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearTarget() {

      target_ = 0;
      onChanged();
      return this;
    }

    // Holds either a String or a ByteString; lazily converted on access.
    private java.lang.Object resourceUri_ = "";
    /**
     *
     *
     * <pre>
     * URI of the resource that the packet is delivered to.
     * </pre>
     *
     * <code>string resource_uri = 2;</code>
     *
     * @return The resourceUri.
     */
    public java.lang.String getResourceUri() {
      java.lang.Object ref = resourceUri_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent calls skip the UTF-8 decode.
        resourceUri_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * URI of the resource that the packet is delivered to.
     * </pre>
     *
     * <code>string resource_uri = 2;</code>
     *
     * @return The bytes for resourceUri.
     */
    public com.google.protobuf.ByteString getResourceUriBytes() {
      java.lang.Object ref = resourceUri_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString for subsequent byte-oriented access.
        resourceUri_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * URI of the resource that the packet is delivered to.
     * </pre>
     *
     * <code>string resource_uri = 2;</code>
     *
     * @param value The resourceUri to set.
     * @return This builder for chaining.
     */
    public Builder setResourceUri(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      resourceUri_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * URI of the resource that the packet is delivered to.
     * </pre>
     *
     * <code>string resource_uri = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearResourceUri() {

      resourceUri_ = getDefaultInstance().getResourceUri();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * URI of the resource that the packet is delivered to.
     * </pre>
     *
     * <code>string resource_uri = 2;</code>
     *
     * @param value The bytes for resourceUri to set.
     * @return This builder for chaining.
     */
    public Builder setResourceUriBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; reject invalid bytes up front.
      checkByteStringIsUtf8(value);

      resourceUri_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.networkmanagement.v1.DeliverInfo)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.networkmanagement.v1.DeliverInfo)
  // Singleton empty instance; shared by all callers and used as the builder prototype.
  private static final com.google.cloud.networkmanagement.v1.DeliverInfo DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.networkmanagement.v1.DeliverInfo();
  }

  public static com.google.cloud.networkmanagement.v1.DeliverInfo getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser instance backing all of the static parseFrom entry points.
  private static final com.google.protobuf.Parser<DeliverInfo> PARSER =
      new com.google.protobuf.AbstractParser<DeliverInfo>() {
        @java.lang.Override
        public DeliverInfo parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new DeliverInfo(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<DeliverInfo> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<DeliverInfo> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.networkmanagement.v1.DeliverInfo getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.ExistingShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.ShardAllocationDecision;
import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.NodeVersionAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.RebalanceOnlyWhenActiveAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ResizeAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.RestoreInProgressAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.ModuleTestCase;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.gateway.GatewayAllocator;
import org.elasticsearch.plugins.ClusterPlugin;
import org.elasticsearch.test.gateway.TestGatewayAllocator;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
/**
 * Unit tests for {@code ClusterModule}: registration of dynamic settings, allocation
 * deciders, shards allocators and existing-shards allocators contributed by plugins,
 * plus the fixed ordering of the built-in allocation deciders.
 */
public class ClusterModuleTests extends ModuleTestCase {
    private ClusterInfoService clusterInfoService = EmptyClusterInfoService.INSTANCE;
    private ClusterService clusterService;
    private ThreadContext threadContext;

    @Override
    public void setUp() throws Exception {
        super.setUp();
        threadContext = new ThreadContext(Settings.EMPTY);
        clusterService = new ClusterService(Settings.EMPTY,
            new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), null);
    }

    @Override
    public void tearDown() throws Exception {
        super.tearDown();
        clusterService.close();
    }

    /** Minimal decider used to verify plugin-contributed deciders are registered. */
    static class FakeAllocationDecider extends AllocationDecider {
        protected FakeAllocationDecider() {
        }
    }

    /** No-op allocator used to verify plugin-contributed shards allocators are registered. */
    static class FakeShardsAllocator implements ShardsAllocator {
        @Override
        public void allocate(RoutingAllocation allocation) {
            // noop
        }

        @Override
        public ShardAllocationDecision decideShardAllocation(ShardRouting shard, RoutingAllocation allocation) {
            throw new UnsupportedOperationException("explain API not supported on FakeShardsAllocator");
        }
    }

    public void testRegisterClusterDynamicSettingDuplicate() {
        // expectThrows (rather than try/catch) ensures the test fails if no exception is thrown,
        // and matches the style of the other duplicate-registration tests in this class.
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
            new SettingsModule(Settings.EMPTY, EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING));
        assertEquals(
            "Cannot register setting [" + EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey() + "] twice",
            e.getMessage());
    }

    public void testRegisterClusterDynamicSetting() {
        SettingsModule module = new SettingsModule(Settings.EMPTY,
            Setting.boolSetting("foo.bar", false, Property.Dynamic, Property.NodeScope));
        assertInstanceBinding(module, ClusterSettings.class, service -> service.isDynamicSetting("foo.bar"));
    }

    public void testRegisterIndexDynamicSettingDuplicate() {
        // expectThrows (rather than try/catch) ensures the test fails if no exception is thrown.
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
            new SettingsModule(Settings.EMPTY, EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING));
        assertEquals(
            "Cannot register setting [" + EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey() + "] twice",
            e.getMessage());
    }

    public void testRegisterIndexDynamicSetting() {
        SettingsModule module = new SettingsModule(Settings.EMPTY,
            Setting.boolSetting("index.foo.bar", false, Property.Dynamic, Property.IndexScope));
        assertInstanceBinding(module, IndexScopedSettings.class, service -> service.isDynamicSetting("index.foo.bar"));
    }

    public void testRegisterAllocationDeciderDuplicate() {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
            new ClusterModule(Settings.EMPTY, clusterService,
                Collections.<ClusterPlugin>singletonList(new ClusterPlugin() {
                    @Override
                    public Collection<AllocationDecider> createAllocationDeciders(Settings settings, ClusterSettings clusterSettings) {
                        return Collections.singletonList(new EnableAllocationDecider(settings, clusterSettings));
                    }
                }), clusterInfoService, null, threadContext));
        assertEquals(e.getMessage(),
            "Cannot specify allocation decider [" + EnableAllocationDecider.class.getName() + "] twice");
    }

    public void testRegisterAllocationDecider() {
        ClusterModule module = new ClusterModule(Settings.EMPTY, clusterService,
            Collections.singletonList(new ClusterPlugin() {
                @Override
                public Collection<AllocationDecider> createAllocationDeciders(Settings settings, ClusterSettings clusterSettings) {
                    return Collections.singletonList(new FakeAllocationDecider());
                }
            }), clusterInfoService, null, threadContext);
        assertTrue(module.deciderList.stream().anyMatch(d -> d.getClass().equals(FakeAllocationDecider.class)));
    }

    /** Builds a ClusterModule whose single plugin registers {@code supplier} under {@code name}. */
    private ClusterModule newClusterModuleWithShardsAllocator(Settings settings, String name, Supplier<ShardsAllocator> supplier) {
        return new ClusterModule(settings, clusterService, Collections.singletonList(
            new ClusterPlugin() {
                @Override
                public Map<String, Supplier<ShardsAllocator>> getShardsAllocators(Settings settings, ClusterSettings clusterSettings) {
                    return Collections.singletonMap(name, supplier);
                }
            }
        ), clusterInfoService, null, threadContext);
    }

    public void testRegisterShardsAllocator() {
        Settings settings = Settings.builder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), "custom").build();
        ClusterModule module = newClusterModuleWithShardsAllocator(settings, "custom", FakeShardsAllocator::new);
        assertEquals(FakeShardsAllocator.class, module.shardsAllocator.getClass());
    }

    public void testRegisterShardsAllocatorAlreadyRegistered() {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
            newClusterModuleWithShardsAllocator(Settings.EMPTY, ClusterModule.BALANCED_ALLOCATOR, FakeShardsAllocator::new));
        assertEquals("ShardsAllocator [" + ClusterModule.BALANCED_ALLOCATOR + "] already defined", e.getMessage());
    }

    public void testUnknownShardsAllocator() {
        Settings settings = Settings.builder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), "dne").build();
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
            new ClusterModule(settings, clusterService, Collections.emptyList(), clusterInfoService, null, threadContext));
        assertEquals("Unknown ShardsAllocator [dne]", e.getMessage());
    }

    public void testShardsAllocatorFactoryNull() {
        Settings settings = Settings.builder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), "bad").build();
        expectThrows(NullPointerException.class, () -> newClusterModuleWithShardsAllocator(settings, "bad", () -> null));
    }

    // makes sure that the allocation deciders are setup in the correct order, such that the
    // slower allocation deciders come last and we can exit early if there is a NO decision without
    // running them. If the order of the deciders is changed for a valid reason, the order should be
    // changed in the test too.
    public void testAllocationDeciderOrder() {
        List<Class<? extends AllocationDecider>> expectedDeciders = Arrays.asList(
            MaxRetryAllocationDecider.class,
            ResizeAllocationDecider.class,
            ReplicaAfterPrimaryActiveAllocationDecider.class,
            RebalanceOnlyWhenActiveAllocationDecider.class,
            ClusterRebalanceAllocationDecider.class,
            ConcurrentRebalanceAllocationDecider.class,
            EnableAllocationDecider.class,
            NodeVersionAllocationDecider.class,
            SnapshotInProgressAllocationDecider.class,
            RestoreInProgressAllocationDecider.class,
            FilterAllocationDecider.class,
            SameShardAllocationDecider.class,
            DiskThresholdDecider.class,
            ThrottlingAllocationDecider.class,
            ShardsLimitAllocationDecider.class,
            AwarenessAllocationDecider.class);
        Collection<AllocationDecider> deciders = ClusterModule.createAllocationDeciders(Settings.EMPTY,
            new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), Collections.emptyList());
        Iterator<AllocationDecider> iter = deciders.iterator();
        int idx = 0;
        while (iter.hasNext()) {
            AllocationDecider decider = iter.next();
            assertSame(decider.getClass(), expectedDeciders.get(idx++));
        }
    }

    public void testRejectsReservedExistingShardsAllocatorName() {
        final ClusterModule clusterModule = new ClusterModule(Settings.EMPTY, clusterService,
            List.of(existingShardsAllocatorPlugin(GatewayAllocator.ALLOCATOR_NAME)), clusterInfoService, null, threadContext);
        expectThrows(IllegalArgumentException.class, () -> clusterModule.setExistingShardsAllocators(new TestGatewayAllocator()));
    }

    public void testRejectsDuplicateExistingShardsAllocatorName() {
        final ClusterModule clusterModule = new ClusterModule(Settings.EMPTY, clusterService,
            List.of(existingShardsAllocatorPlugin("duplicate"), existingShardsAllocatorPlugin("duplicate")), clusterInfoService, null,
            threadContext);
        expectThrows(IllegalArgumentException.class, () -> clusterModule.setExistingShardsAllocators(new TestGatewayAllocator()));
    }

    private static ClusterPlugin existingShardsAllocatorPlugin(final String allocatorName) {
        return new ClusterPlugin() {
            @Override
            public Map<String, ExistingShardsAllocator> getExistingShardsAllocators() {
                return Collections.singletonMap(allocatorName, new TestGatewayAllocator());
            }
        };
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gemstone.gemfire.internal.cache;
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import com.gemstone.gemfire.cache.EntryEvent;
import com.gemstone.gemfire.internal.cache.lru.EnableLRU;
import com.gemstone.gemfire.internal.cache.persistence.DiskRecoveryStore;
import com.gemstone.gemfire.internal.cache.lru.LRUClockNode;
import com.gemstone.gemfire.internal.cache.lru.NewLRUClockHand;
import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember;
import com.gemstone.gemfire.internal.cache.versions.VersionSource;
import com.gemstone.gemfire.internal.cache.versions.VersionStamp;
import com.gemstone.gemfire.internal.cache.versions.VersionTag;
import com.gemstone.gemfire.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;
// macros whose definition changes this class:
// disk: DISK
// lru: LRU
// stats: STATS
// versioned: VERSIONED
// offheap: OFFHEAP
// One of the following key macros must be defined:
// key object: KEY_OBJECT
// key int: KEY_INT
// key long: KEY_LONG
// key uuid: KEY_UUID
// key string1: KEY_STRING1
// key string2: KEY_STRING2
/**
* Do not modify this class. It was generated.
* Instead modify LeafRegionEntry.cpp and then run
* bin/generateRegionEntryClasses.sh from the directory
* that contains your build.xml.
*/
public class VersionedThinDiskLRURegionEntryHeapObjectKey extends VersionedThinDiskLRURegionEntryHeap {
  // Constructs an entry for the given key. When the value is a RecoveredEntry
  // (disk recovery in progress) the superclass gets null; the real value is
  // wired up by initialize() below.
  public VersionedThinDiskLRURegionEntryHeapObjectKey (RegionEntryContext context, Object key,
      Object value
      ) {
    super(context,
          (value instanceof RecoveredEntry ? null : value)
        );
    // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
    initialize(context, value);
    this.key = key;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // common code
  // Cached hash of the key, used by the custom concurrent hash map.
  protected int hash;
  // Next entry in the same hash bucket chain.
  private HashEntry<Object, Object> next;
  @SuppressWarnings("unused")
  private volatile long lastModified;
  // Field updater avoids a per-entry AtomicLong object for lastModified.
  private static final AtomicLongFieldUpdater<VersionedThinDiskLRURegionEntryHeapObjectKey> lastModifiedUpdater
    = AtomicLongFieldUpdater.newUpdater(VersionedThinDiskLRURegionEntryHeapObjectKey.class, "lastModified");
  private volatile Object value;
  @Override
  protected final Object getValueField() {
    return this.value;
  }
  @Override
  protected void setValueField(Object v) {
    this.value = v;
  }
  protected long getlastModifiedField() {
    return lastModifiedUpdater.get(this);
  }
  // CAS on lastModified; returns false if another thread updated it first.
  protected boolean compareAndSetLastModifiedField(long expectedValue, long newValue) {
    return lastModifiedUpdater.compareAndSet(this, expectedValue, newValue);
  }
  /**
   * @see HashEntry#getEntryHash()
   */
  public final int getEntryHash() {
    return this.hash;
  }
  protected void setEntryHash(int v) {
    this.hash = v;
  }
  /**
   * @see HashEntry#getNextEntry()
   */
  public final HashEntry<Object, Object> getNextEntry() {
    return this.next;
  }
  /**
   * @see HashEntry#setNextEntry
   */
  public final void setNextEntry(final HashEntry<Object, Object> n) {
    this.next = n;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // disk code
  // Eagerly creates the DiskId only for persistent ("backup") regions; for
  // overflow-only regions DiskId creation is deferred (see setDelayedDiskId).
  protected void initialize(RegionEntryContext drs, Object value) {
    boolean isBackup;
    if (drs instanceof LocalRegion) {
      isBackup = ((LocalRegion)drs).getDiskRegion().isBackup();
    } else if (drs instanceof PlaceHolderDiskRegion) {
      isBackup = true;
    } else {
      throw new IllegalArgumentException("expected a LocalRegion or PlaceHolderDiskRegion");
    }
    // Delay the initialization of DiskID if overflow only
    if (isBackup) {
      diskInitialize(drs, value);
    }
  }
  // Recomputes this entry's size (value assumed absent, hence null) and returns
  // the delta relative to the previously recorded size.
  @Override
  public final synchronized int updateAsyncEntrySize(EnableLRU capacityController) {
    int oldSize = getEntrySize();
    int newSize = capacityController.entrySize( getKeyForSizing(), null);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
    return delta;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  private void diskInitialize(RegionEntryContext context, Object value) {
    DiskRecoveryStore drs = (DiskRecoveryStore)context;
    DiskStoreImpl ds = drs.getDiskStore();
    long maxOplogSize = ds.getMaxOplogSize();
    //get appropriate instance of DiskId implementation based on maxOplogSize
    this.id = DiskId.createDiskId(maxOplogSize, true/* is persistence */, ds.needsLinkedList());
    Helper.initialize(this, drs, value);
  }
  /**
   * DiskId
   *
   * @since 5.1
   */
  protected DiskId id;//= new DiskId();
  public DiskId getDiskId() {
    return this.id;
  }
  // Transfers the DiskId from a prior entry (e.g. when an entry is re-created).
  @Override
  void setDiskId(RegionEntry old) {
    this.id = ((AbstractDiskRegionEntry)old).getDiskId();
  }
// // inlining DiskId
// // always have these fields
// /**
// * id consists of
// * most significant
// * 1 byte = users bits
// * 2-8 bytes = oplog id
// * least significant.
// *
// * The highest bit in the oplog id part is set to 1 if the oplog id
// * is negative.
// * @todo this field could be an int for an overflow only region
// */
// private long id;
// /**
// * Length of the bytes on disk.
// * This is always set. If the value is invalid then it will be set to 0.
// * The most significant bit is used by overflow to mark it as needing to be written.
// */
// protected int valueLength = 0;
// // have intOffset or longOffset
// // intOffset
// /**
// * The position in the oplog (the oplog offset) where this entry's value is
// * stored
// */
// private volatile int offsetInOplog;
// // longOffset
// /**
// * The position in the oplog (the oplog offset) where this entry's value is
// * stored
// */
// private volatile long offsetInOplog;
// // have overflowOnly or persistence
// // overflowOnly
// // no fields
// // persistent
// /** unique entry identifier * */
// private long keyId;
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // lru code
  // Overflow-only path: creates the DiskId lazily (non-persistent variant).
  @Override
  public void setDelayedDiskId(LocalRegion r) {
    DiskStoreImpl ds = r.getDiskStore();
    long maxOplogSize = ds.getMaxOplogSize();
    this.id = DiskId.createDiskId(maxOplogSize, false /* over flow only */, ds.needsLinkedList());
  }
  public final synchronized int updateEntrySize(EnableLRU capacityController) {
    return updateEntrySize(capacityController, _getValue()); // OFHEAP: _getValue ok w/o incing refcount because we are synced and only getting the size
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Recomputes the entry size from key + value and returns the delta relative
  // to the previously recorded size (used to adjust LRU accounting).
  public final synchronized int updateEntrySize(EnableLRU capacityController,
                                                Object value) {
    int oldSize = getEntrySize();
    int newSize = capacityController.entrySize( getKeyForSizing(), value);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
    //   if ( debug ) log( "updateEntrySize key=" + getKey()
    //       + (_getValue() == Token.INVALID ? " invalid" :
    //       (_getValue() == Token.LOCAL_INVALID ? "local_invalid" :
    //       (_getValue()==null ? " evicted" : " valid")))
    //       + " oldSize=" + oldSize
    //       + " newSize=" + this.size );
    return delta;
  }
  // LRU state flags stored in the shared bit field (see setBits/clearBits).
  public final boolean testRecentlyUsed() {
    return areAnyBitsSet(RECENTLY_USED);
  }
  @Override
  public final void setRecentlyUsed() {
    setBits(RECENTLY_USED);
  }
  public final void unsetRecentlyUsed() {
    clearBits(~RECENTLY_USED);
  }
  public final boolean testEvicted() {
    return areAnyBitsSet(EVICTED);
  }
  public final void setEvicted() {
    setBits(EVICTED);
  }
  public final void unsetEvicted() {
    clearBits(~EVICTED);
  }
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// Intrusive doubly-linked LRU clock list: each entry carries its own
// next/prev pointers plus the size last computed by updateEntrySize.
private LRUClockNode nextLRU;
private LRUClockNode prevLRU;
private int size;
public final void setNextLRUNode( LRUClockNode next ) {
this.nextLRU = next;
}
public final LRUClockNode nextLRUNode() {
return this.nextLRU;
}
public final void setPrevLRUNode( LRUClockNode prev ) {
this.prevLRU = prev;
}
public final LRUClockNode prevLRUNode() {
return this.prevLRU;
}
// Cached entry size in LRU-accounting units; written by updateEntrySize.
public final int getEntrySize() {
return this.size;
}
protected final void setEntrySize(int size) {
this.size = size;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
//@Override
//public StringBuilder appendFieldsToString(final StringBuilder sb) {
// StringBuilder result = super.appendFieldsToString(sb);
// result.append("; prev=").append(this.prevLRU==null?"null":"not null");
// result.append("; next=").append(this.nextLRU==null?"null":"not null");
// return result;
//}
// Key object handed to EnableLRU.entrySize() when sizing this entry.
@Override
public Object getKeyForSizing() {
// default implementation.
return getKey();
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// versioned code
// Version vector state, packed to save per-entry memory: the 24-bit entry
// version is split into a high byte + low short, the 48-bit region version
// into a high short + low int.
private VersionSource memberID;
private short entryVersionLowBytes;
private short regionVersionHighBytes;
private int regionVersionLowBytes;
private byte entryVersionHighByte;
private byte distributedSystemId;
// Reassemble the 24-bit entry version from its packed halves.
public int getEntryVersion() {
return ((entryVersionHighByte << 16) & 0xFF0000) | (entryVersionLowBytes & 0xFFFF);
}
// Reassemble the region version. NOTE(review): a negative high short
// sign-extends into bits 48-63 before the shift — confirm the high bytes are
// always non-negative or that callers mask the result.
public long getRegionVersion() {
return (((long)regionVersionHighBytes) << 32) | (regionVersionLowBytes & 0x00000000FFFFFFFFL);
}
// The version timestamp is aliased onto the entry's lastModified field.
public long getVersionTimeStamp() {
return getLastModified();
}
public void setVersionTimeStamp(long time) {
setLastModified(time);
}
public VersionSource getMemberID() {
return this.memberID;
}
public int getDistributedSystemId() {
return this.distributedSystemId;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// Copies the tag's version data into this entry. For a non-gateway tag from
// the same distributed system, the later of the two timestamps wins and is
// written back to whichever side (entry or tag) was behind; otherwise the
// tag's timestamp is adopted unconditionally.
public void setVersions(VersionTag tag) {
this.memberID = tag.getMemberID();
int eVersion = tag.getEntryVersion();
this.entryVersionLowBytes = (short)(eVersion & 0xffff);
this.entryVersionHighByte = (byte)((eVersion & 0xff0000) >> 16);
this.regionVersionHighBytes = tag.getRegionVersionHighBytes();
this.regionVersionLowBytes = tag.getRegionVersionLowBytes();
if (!(tag.isGatewayTag()) && this.distributedSystemId == tag.getDistributedSystemId()) {
if (getVersionTimeStamp() <= tag.getVersionTimeStamp()) {
setVersionTimeStamp(tag.getVersionTimeStamp());
} else {
tag.setVersionTimeStamp(getVersionTimeStamp());
}
} else {
setVersionTimeStamp(tag.getVersionTimeStamp());
}
this.distributedSystemId = (byte)(tag.getDistributedSystemId() & 0xff);
}
public void setMemberID(VersionSource memberID) {
this.memberID = memberID;
}
// This entry acts as its own VersionStamp (no separate stamp object).
@Override
public VersionStamp getVersionStamp() {
return this;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// Snapshot this entry's version state into a standalone VersionTag.
public VersionTag asVersionTag() {
VersionTag tag = VersionTag.create(memberID);
tag.setEntryVersion(getEntryVersion());
tag.setRegionVersion(this.regionVersionHighBytes, this.regionVersionLowBytes);
tag.setVersionTimeStamp(getVersionTimeStamp());
tag.setDistributedSystemId(this.distributedSystemId);
return tag;
}
// Delegates straight to the shared conflict-checking implementation.
public void processVersionTag(LocalRegion r, VersionTag tag,
boolean isTombstoneFromGII, boolean hasDelta,
VersionSource thisVM, InternalDistributedMember sender, boolean checkForConflicts) {
basicProcessVersionTag(r, tag, isTombstoneFromGII, hasDelta, thisVM, sender, checkForConflicts);
}
@Override
public void processVersionTag(EntryEvent cacheEvent) {
// this keeps Eclipse happy. without it the sender chain becomes confused
// while browsing this code
super.processVersionTag(cacheEvent);
}
/** get rvv internal high byte. Used by region entries for transferring to storage */
public short getRegionVersionHighBytes() {
return this.regionVersionHighBytes;
}
/** get rvv internal low bytes. Used by region entries for transferring to storage */
public int getRegionVersionLowBytes() {
return this.regionVersionLowBytes;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// key code
// Immutable reference to the entry's key, assigned at construction.
private final Object key;
@Override
public final Object getKey() {
return this.key;
}
}
| |
/*
* Copyright 2010 Vodafone Group Services Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.onesocialweb.model.relation;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.onesocialweb.model.acl.AclRule;
/**
 * Default in-memory implementation of {@link Relation}.
 *
 * <p>A plain mutable bean: every field may be {@code null}, and each has a
 * matching {@code has*()} presence check. The ACL rule list is initialised
 * empty so {@link #addAclRule} / {@link #removeAclRule} work without a prior
 * {@link #setAclRules} call. Not thread-safe.
 */
public class DefaultRelation implements Relation {

    /** Access rules guarding this relation; non-null unless setAclRules(null) is called. */
    private List<AclRule> aclRules = new ArrayList<AclRule>();

    private String comment;

    private String from;

    private String id;

    private String message;

    private String nature;

    private Date published;

    private String status;

    private String to;

    @Override
    public void addAclRule(AclRule rule) {
        this.aclRules.add(rule);
    }

    @Override
    public List<AclRule> getAclRules() {
        return aclRules;
    }

    @Override
    public String getComment() {
        return comment;
    }

    @Override
    public String getFrom() {
        return from;
    }

    @Override
    public String getId() {
        return id;
    }

    @Override
    public String getMessage() {
        return message;
    }

    @Override
    public String getNature() {
        return nature;
    }

    @Override
    public Date getPublished() {
        return published;
    }

    @Override
    public String getStatus() {
        return status;
    }

    @Override
    public String getTo() {
        return to;
    }

    @Override
    public boolean hasAclRules() {
        // Defensive null check: setAclRules(null) can null the list out.
        return (aclRules != null && !aclRules.isEmpty());
    }

    @Override
    public boolean hasComment() {
        return (comment != null);
    }

    @Override
    public boolean hasFrom() {
        return (from != null);
    }

    @Override
    public boolean hasId() {
        return (id != null);
    }

    @Override
    public boolean hasMessage() {
        return (message != null);
    }

    @Override
    public boolean hasNature() {
        return (nature != null);
    }

    @Override
    public boolean hasPublished() {
        return (published != null);
    }

    @Override
    public boolean hasStatus() {
        return (status != null);
    }

    @Override
    public boolean hasTo() {
        return (to != null);
    }

    @Override
    public void removeAclRule(AclRule rule) {
        this.aclRules.remove(rule);
    }

    @Override
    public void setAclRules(List<AclRule> rules) {
        this.aclRules = rules;
    }

    @Override
    public void setComment(String comment) {
        this.comment = comment;
    }

    @Override
    public void setFrom(String from) {
        this.from = from;
    }

    @Override
    public void setId(String id) {
        this.id = id;
    }

    @Override
    public void setMessage(String message) {
        this.message = message;
    }

    @Override
    public void setNature(String nature) {
        this.nature = nature;
    }

    @Override
    public void setPublished(Date published) {
        this.published = published;
    }

    @Override
    public void setStatus(String status) {
        this.status = status;
    }

    @Override
    public void setTo(String to) {
        this.to = to;
    }

    /**
     * Debug representation: "[Relation id:... from:... ]", listing only the
     * fields that are set. Output is identical to the previous version.
     */
    @Override
    public String toString() {
        // StringBuilder replaces the legacy synchronized StringBuffer (the
        // builder never escapes this method), and chained appends avoid the
        // intermediate String concatenations the old code created per field.
        StringBuilder buffer = new StringBuilder();
        buffer.append("[Relation ");
        if (id != null) {
            buffer.append("id:").append(id).append(" ");
        }
        if (from != null) {
            buffer.append("from:").append(from).append(" ");
        }
        if (to != null) {
            buffer.append("to:").append(to).append(" ");
        }
        if (nature != null) {
            buffer.append("nature:").append(nature).append(" ");
        }
        if (status != null) {
            buffer.append("status:").append(status).append(" ");
        }
        if (message != null) {
            buffer.append("message:").append(message).append(" ");
        }
        if (comment != null) {
            buffer.append("comment:").append(comment).append(" ");
        }
        buffer.append("]");
        return buffer.toString();
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.lang;
import org.drools.compiler.compiler.DrlExprParser;
import org.drools.compiler.lang.descr.AtomicExprDescr;
import org.drools.compiler.lang.descr.BaseDescr;
import org.drools.compiler.lang.descr.BindingDescr;
import org.drools.compiler.lang.descr.ConnectiveType;
import org.drools.compiler.lang.descr.ConstraintConnectiveDescr;
import org.drools.compiler.lang.descr.ExprConstraintDescr;
import org.drools.compiler.lang.descr.OperatorDescr;
import org.drools.compiler.lang.descr.RelationalExprDescr;
import org.drools.compiler.rule.builder.RuleBuildContext;
import org.drools.core.base.EvaluatorWrapper;
import org.drools.core.base.evaluators.Operator;
import org.drools.core.rule.XpathBackReference;
import org.drools.core.util.ReflectiveVisitor;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import static org.drools.compiler.rule.builder.dialect.DialectUtil.findClassByName;
import static org.drools.core.util.ClassUtils.findClass;
import static org.drools.core.util.StringUtils.indexOfOutOfQuotes;
public class MVELDumper extends ReflectiveVisitor implements ExpressionRewriter {
// Matches a leading "eval(" so processEval can strip redundant eval wrappers.
// Compiled once, as regex compilation is comparatively expensive.
private static final java.util.regex.Pattern evalRegexp = java.util.regex.Pattern.compile( "^eval\\s*\\(", Pattern.MULTILINE );
// Operators emitted verbatim by rewriteBasicOperator; sorted once so
// lookupBasicOperator can use Arrays.binarySearch.
private static final String[] standard;
static {
standard = new String[]{ "==", "<", ">", ">=", "<=", "!=", "~=", "instanceof" };
Arrays.sort( standard );
}
/**
 * Dumps a parsed constraint descriptor tree to an executable MVEL string.
 * Convenience overload with a fresh context, no parent and default priority.
 */
public String dump( BaseDescr base ) {
return dump( new StringBuilder(),
base,
0,
false,
createContext() ).toString();
}
/** As {@link #dump(BaseDescr)} but reusing a caller-supplied context. */
public String dump( BaseDescr base,
MVELDumperContext context ) {
return dump( new StringBuilder(),
base,
0,
false,
context ).toString();
}
/** Dump with an explicit parent CCD (parent index and priority default to 0). */
public String dump( BaseDescr base,
ConstraintConnectiveDescr parent,
MVELDumperContext context ) {
return dump( new StringBuilder(),
base,
parent,
0,
0,
false,
context ).toString();
}
/** Dump honouring the enclosing operator precedence (for parenthesisation). */
public String dump( BaseDescr base,
int parentPrecedence ) {
return dump( new StringBuilder(),
base,
parentPrecedence,
false,
createContext() ).toString();
}
// NOTE(review): this overload forwards a literal 'false' instead of its
// isInsideRelCons argument (and parentIndex 0) — confirm that is intentional;
// all current callers appear to pass false anyway.
public StringBuilder dump( StringBuilder sbuilder,
BaseDescr base,
int parentPriority,
boolean isInsideRelCons,
MVELDumperContext context ) {
return dump(sbuilder,
base,
null,
0,
parentPriority,
false,
context);
}
/**
 * Core entry point: dispatches on the concrete descriptor type. Connectives
 * (and/or), atomic expressions, bindings, relational expressions and raw
 * unparsed constraints each get their own rewrite path; unknown types are
 * silently ignored. Returns the same builder for chaining.
 */
public StringBuilder dump( StringBuilder sbuilder,
BaseDescr base,
ConstraintConnectiveDescr parent,
int parentIndex,
int parentPriority,
boolean isInsideRelCons,
MVELDumperContext context ) {
if ( context == null ) {
context = createContext();
}
if ( base instanceof ConstraintConnectiveDescr ) {
processConnectiveDescr( sbuilder, base, parent, parentPriority, isInsideRelCons, context );
} else if ( base instanceof AtomicExprDescr ) {
processAtomicExpression(sbuilder, context, (AtomicExprDescr) base, parent, parentIndex);
} else if ( base instanceof BindingDescr ) {
processBinding(sbuilder, (BindingDescr) base, parent, isInsideRelCons, context);
} else if ( base instanceof RelationalExprDescr ) {
processRelationalExpression(sbuilder, (RelationalExprDescr) base, parent, context);
} else if ( base instanceof ExprConstraintDescr ) {
processConstraint(sbuilder, (ExprConstraintDescr) base, isInsideRelCons, context);
}
return sbuilder;
}
/**
 * A raw, still-unparsed constraint: parse its text with the DRL expression
 * parser (at the configured language level) and dump the result. A result
 * with a single child is unwrapped so no redundant connective is emitted.
 */
private void processConstraint(StringBuilder sbuilder, ExprConstraintDescr base, boolean isInsideRelCons, MVELDumperContext context) {
DrlExprParser expr = new DrlExprParser( context.getRuleContext().getConfiguration().getLanguageLevel() );
ConstraintConnectiveDescr result = expr.parse( base.getExpression() );
if ( result.getDescrs().size() == 1 ) {
dump( sbuilder,
result.getDescrs().get( 0 ),
0,
isInsideRelCons,
context );
} else {
dump( sbuilder,
result,
0,
isInsideRelCons,
context );
}
}
/**
 * Rewrites a standalone atomic expression, expanding inline casts ('#') and
 * null-safe dereferences ('!.'). Returns the { preconditions, expression }
 * pair so callers (e.g. relational dumps) can reuse the split parts.
 */
private String[] processAtomicExpression( StringBuilder sbuilder, MVELDumperContext context, AtomicExprDescr atomicExpr, ConstraintConnectiveDescr parent, int parentIdx ) {
String expr = atomicExpr.getExpression().trim();
expr = processEval(expr);
String[] constrAndExpr = processImplicitConstraints( expr, atomicExpr, parent, parentIdx, context );
// top-level, implicit constraints will be processed in different nodes.
// Nested CCDs require all constraints to be evaluated locally, as a complex constraints
sbuilder.append( context.isCcdNested() ? constrAndExpr[ 0 ] + constrAndExpr[ 1 ] : constrAndExpr[ 1 ] );
return constrAndExpr;
}
/**
 * Rewrites a binding ("$x : expr"): the bound expression goes through the
 * same implicit-constraint expansion, the binding is registered on the
 * context, and any extracted preconditions are emitted here.
 */
private void processBinding(StringBuilder sbuilder, BindingDescr bind, ConstraintConnectiveDescr parent, boolean isInsideRelCons, MVELDumperContext context) {
String expr = bind.getExpression().trim();
AtomicExprDescr atomicExpr = new AtomicExprDescr(expr);
String[] constrAndExpr = processImplicitConstraints(expr, atomicExpr, parent, parent.getDescrs().indexOf( bind ), context );
if ( isInsideRelCons ) {
sbuilder.append( constrAndExpr[0] ).append( constrAndExpr[1] );
// The 'length() > 4' check strips the trailing " && " (4 chars) that
// processImplicitConstraints leaves on a non-empty precondition string.
} else if ( constrAndExpr[0].length() > 4 ) {
sbuilder.append( constrAndExpr[ 0 ].substring( 0, constrAndExpr[ 0 ].length() - 4 ) );
}
// Keep expression and binding field in sync when they started out identical.
if (bind.getExpression().equals(bind.getBindingField())) {
bind.setExpressionAndBindingField( constrAndExpr[1] );
} else {
bind.setExpression( constrAndExpr[1] );
}
context.addBinding(bind);
}
/**
 * Rewrites "left OP right". Both sides are dumped with maximum parent
 * priority so any nested connective gets parenthesised; an atomic right side
 * is handled specially so its preconditions are hoisted in front of 'left'.
 */
private void processRelationalExpression(StringBuilder sbuilder, RelationalExprDescr red, ConstraintConnectiveDescr parent, MVELDumperContext context) {
// maximum precedence, so wrap any child connective in parenthesis
int idx = parent.getDescrs().indexOf( red );
StringBuilder left = dump(new StringBuilder(), red.getLeft(), parent, idx, Integer.MAX_VALUE, true, context);
String right = red.getRight() instanceof AtomicExprDescr ?
processRightAtomicExpr(left, (AtomicExprDescr)red.getRight(), parent, idx, context) :
dump( new StringBuilder(), red.getRight(), parent, idx, Integer.MAX_VALUE, true, context).toString();
processRestriction( context,
sbuilder,
left.toString(),
red.getOperatorDescr(),
right );// maximum precedence, so wrap any child connective in parenthesis
}
/**
 * Handles the atomic right-hand side of a relational expression: expands its
 * implicit constraints, prefixes the resulting preconditions onto the LEFT
 * side's builder (so they are evaluated first), and resolves xpath back
 * references before returning the cleaned-up right-hand text.
 */
private String processRightAtomicExpr( StringBuilder left, AtomicExprDescr atomicExpr, ConstraintConnectiveDescr parent, int parentIdx, MVELDumperContext context ) {
String expr = atomicExpr.getExpression().trim();
expr = processEval( expr );
String[] constrAndExpr = processImplicitConstraints(expr, atomicExpr, parent, parentIdx, context);
left.insert( 0, constrAndExpr[0] );
return processBackReference( context, atomicExpr, constrAndExpr[1] );
}
/**
 * Rewrites leading "../" segments (only legal inside an xpath) into the
 * synthetic back-reference prefix, e.g. "../../f" -> BACK_REFERENCE_HEAD2.f,
 * and records the rewritten form on the descriptor.
 */
private String processBackReference(MVELDumperContext context, AtomicExprDescr atomicExpr, String expr) {
if (!context.isInXpath()) {
return expr; // this is not an xpath and back references are allowed only there
}
int i = 0;
while (expr.startsWith( "../" )) {
i++;
expr = expr.substring( 3 ).trim();
}
if (i > 0) {
expr = XpathBackReference.BACK_REFERENCE_HEAD + i + "." + expr;
atomicExpr.setRewrittenExpression( expr );
}
return expr;
}
/**
 * Expands the two implicit-constraint markers an expression may contain:
 * '#'  — inline cast  ("f#Type.g"   -> instanceof check + cast), and
 * '!.' — null-safe dereference ("f!.g" -> null check + plain deref).
 * Markers inside string literals are ignored (indexOfOutOfQuotes). Each
 * expansion may splice a synthetic check into the parent CCD. Returns
 * { accumulated precondition text (each ending in " && "), rewritten expr }.
 * Note the loop condition is '> 0', not '>= 0': a marker at position 0 would
 * have an empty field in front of it and is left alone.
 */
String[] processImplicitConstraints(String expr, AtomicExprDescr atomicExpr, ConstraintConnectiveDescr parent, int parentIdx, MVELDumperContext context) {
boolean hasQuotes = expr.indexOf('"') >= 0;
String[] constrAndExpr = new String[] { "", expr };
int sharpPos = hasQuotes ? indexOfOutOfQuotes(expr, '#') : expr.indexOf('#');
int nullSafePos = hasQuotes ? indexOfOutOfQuotes(expr, "!.") : expr.indexOf("!.");
int j = 0;
while (sharpPos > 0 || nullSafePos > 0) {
// Process whichever marker occurs first, left to right.
if ( nullSafePos < 0 || ( sharpPos > 0 && sharpPos < nullSafePos ) ) {
String[] castAndExpr = processInlineCast(expr, atomicExpr, parent, context, sharpPos, parentIdx, j++);
expr = castAndExpr[1];
constrAndExpr = new String[] { constrAndExpr[0] + castAndExpr[0], expr };
} else {
String[] nullCheckAndExpr = processNullSafeDereferencing(expr, atomicExpr, parent, nullSafePos, parentIdx, j++ );
expr = nullCheckAndExpr[1];
constrAndExpr = new String[] { constrAndExpr[0] + nullCheckAndExpr[0], expr };
}
// Re-scan: the rewrite may have shifted or removed marker positions.
sharpPos = hasQuotes ? indexOfOutOfQuotes(expr, '#') : expr.indexOf('#');
nullSafePos = hasQuotes ? indexOfOutOfQuotes(expr, "!.") : expr.indexOf("!.");
}
return new String[] { constrAndExpr[0], processInferredCast(constrAndExpr[1], atomicExpr, context) };
}
/**
 * Expands one inline cast marker. Besides returning the rewritten pair, it
 * splices the synthetic "field1 instanceof Class" check into the descriptor
 * tree: directly into an AND parent, or wrapped in a local AND when the
 * parent is an OR (so the check guards only its own operand).
 */
private String[] processInlineCast(String expr, AtomicExprDescr atomicExpr, ConstraintConnectiveDescr ccd, MVELDumperContext context, int sharpPos, int parentIdx, int childIdx ) {
// convert "field1#Class.field2" in ["field1 instanceof Class && ", "((Class)field1).field2"]
String field1 = expr.substring(0, sharpPos).trim();
// A second '#' means another inline cast follows; only consume up to it.
int sharpPos2 = expr.indexOf('#', sharpPos+1);
String part2 = sharpPos2 < 0 ? expr.substring(sharpPos+1).trim() : expr.substring(sharpPos+1, sharpPos2).trim();
String[] classAndField = splitInClassAndField(part2, context);
BaseDescr desc = parentIdx >= 0 ? ccd.getDescrs().get( parentIdx ) : null;
if (classAndField == null) {
// Unresolvable class name: leave the expression untouched.
return new String[] { "", expr };
} else if ( desc instanceof AtomicExprDescr && classAndField.length == 1 ) {
// Bare "field1#Class" with no trailing field: a pure type test.
return new String[] { "", field1 + " instanceof " + classAndField[ 0 ] };
}
String className = classAndField[0];
String castedExpression = classAndField.length == 1 ?
"((" + className + ")" + field1 + ")" :
"((" + className + ")" + field1 + ")." + classAndField[1] + (sharpPos2 > 0 ? expr.substring(sharpPos2) : "");
RelationalExprDescr check = new RelationalExprDescr( "instanceof",
false,
null,
new AtomicExprDescr( field1 ),
new AtomicExprDescr( className ) );
if ( ccd.getConnective() == ConnectiveType.AND || ccd.getConnective() == ConnectiveType.INC_AND ) {
// childIdx counts checks already inserted for this expression, keeping
// them in source order ahead of the original operand.
ccd.getDescrs().add( childIdx, check );
} else {
if ( desc instanceof ConstraintConnectiveDescr ) {
((ConstraintConnectiveDescr) desc).getDescrs().add( childIdx, check );
} else {
// OR parent with an atomic operand: replace the operand with a local
// AND of (check && original) so the guard stays scoped to it.
ConstraintConnectiveDescr localAnd = new ConstraintConnectiveDescr( ConnectiveType.AND );
BaseDescr original = ccd.getDescrs().remove( parentIdx );
localAnd.getDescrs().add( check );
localAnd.getDescrs().add( original );
ccd.getDescrs().add( parentIdx, localAnd );
}
}
atomicExpr.setRewrittenExpression(castedExpression);
String innerCheck = check.toString() + " && ";
return new String[] { innerCheck, castedExpression };
}
/**
 * Applies a cast previously inferred from an "instanceof" constraint (see
 * processRestriction): when the expression starts with a variable the context
 * knows the narrowed type of, rewrite "var.rest" as "((Type)var).rest".
 * Returns the expression unchanged when no inferred cast applies.
 */
private String processInferredCast(String expr, AtomicExprDescr atomicExpr, MVELDumperContext context) {
    Map.Entry<String, String> cast = context == null ? null : context.getInferredCast(expr);
    if (cast == null) {
        return expr;
    }
    String var = cast.getKey();
    String type = cast.getValue();
    String castedExpr = "((" + type + ")" + var + ")" + expr.substring(var.length());
    atomicExpr.setRewrittenExpression(castedExpr);
    return castedExpr;
}
/**
 * Expands one '!.' marker into an explicit null check plus a plain deref, and
 * splices the synthetic "field1 != null" check into the descriptor tree using
 * the same AND/OR placement strategy as processInlineCast.
 */
private String[] processNullSafeDereferencing( String expr, AtomicExprDescr atomicExpr, ConstraintConnectiveDescr ccd, int nullSafePos, int parentIdx, int childIdx ) {
// convert "field1!.field2" in ["field1 != null && ", "field1.field2"]
String field1 = expr.substring( 0, nullSafePos ).trim();
expr = field1 + "." + expr.substring( nullSafePos + 2 ).trim();
// getPreconditionsToAppend trims field1 down to the sub-expression the null
// check should actually apply to (e.g. inside parentheses or arg lists).
RelationalExprDescr check = new RelationalExprDescr( "!=",
false,
null,
new AtomicExprDescr( getPreconditionsToAppend( field1 ) ),
new AtomicExprDescr( "null" ) );
if ( ccd.getConnective() == ConnectiveType.AND || ccd.getConnective() == ConnectiveType.INC_AND ) {
ccd.getDescrs().add( childIdx, check );
} else {
BaseDescr desc = ccd.getDescrs().get( parentIdx );
if ( desc instanceof ConstraintConnectiveDescr ) {
((ConstraintConnectiveDescr) desc).getDescrs().add( childIdx, check );
} else {
// OR parent: wrap (check && original) in a local AND so the null
// check guards only its own operand.
ConstraintConnectiveDescr localAnd = new ConstraintConnectiveDescr( ConnectiveType.AND );
BaseDescr original = ccd.getDescrs().remove( parentIdx );
localAnd.getDescrs().add( check );
localAnd.getDescrs().add( original );
ccd.getDescrs().add( parentIdx, localAnd );
}
}
String innerCheck = check.toString() + " && ";
String[] nullCheckAndExpr = new String[] { innerCheck, expr };
atomicExpr.setRewrittenExpression( expr );
return nullCheckAndExpr;
}
/**
 * Trims an expression down to the trailing sub-expression that a null check
 * should apply to. Scanning right to left, it stops at the first '(' or '['
 * that was never closed within the scan, or at a ',' at depth zero (i.e. the
 * expression is the last argument of a call or index), and returns everything
 * after that boundary. Returns the whole string when no boundary is found.
 */
private String getPreconditionsToAppend(String field1) {
    int parens = 0;
    int squares = 0;
    for (int pos = field1.length() - 1; pos >= 0; pos--) {
        char c = field1.charAt(pos);
        if (c == '(') {
            parens--;
            if (parens < 0) {
                return field1.substring(pos + 1).trim();
            }
        } else if (c == ')') {
            parens++;
        } else if (c == '[') {
            squares--;
            if (squares < 0) {
                return field1.substring(pos + 1).trim();
            }
        } else if (c == ']') {
            squares++;
        } else if (c == ',' && parens == 0 && squares == 0) {
            return field1.substring(pos + 1).trim();
        }
    }
    return field1;
}
/**
 * Strips a redundant leading "eval( ... )" wrapper, keeping only the text
 * between the first '(' and the last ')'. Expressions that do not start with
 * "eval(" are returned unchanged.
 */
private String processEval(String expr) {
    // stripping "eval" as it is no longer necessary
    if ( !evalRegexp.matcher( expr ).find() ) {
        return expr;
    }
    int open = expr.indexOf( '(' );
    int close = expr.lastIndexOf( ')' );
    return expr.substring( open + 1, close );
}
/**
 * Splits an inline-cast target like "Class.field" or "pkg.Class.field.sub"
 * into { className, remainingFieldPath }.
 *
 * Resolution order: a one-fragment input is returned as-is; a two-fragment
 * input is assumed to already be { class, field }; otherwise the first
 * fragment is tried as a simple (imported) class name, and failing that the
 * longest package-qualified prefix that resolves on the kbase class loader
 * wins. Returns null when nothing resolves.
 */
private String[] splitInClassAndField(String expr, MVELDumperContext context) {
    String[] split = expr.split("\\.");
    if (split.length < 2) {
        return new String[] { expr };
    }
    // A trailing '!' on the first fragment is a null-safe marker; strip it.
    if (split[0].endsWith("!")) {
        split[0] = split[0].substring(0, split[0].length()-1);
    }
    if (split.length < 3) {
        return split;
    }
    // check non-FQN case first
    if ( context == null || findClassByName(context.getRuleContext(), split[0]) != null ) {
        return new String[] { split[0], concatDotSeparated(split, 1, split.length) };
    }
    ClassLoader cl = context.getRuleContext().getKnowledgeBuilder().getRootClassLoader();
    // Try progressively SHORTER package-qualified prefixes, longest first.
    // BUG FIX: this loop previously incremented i ("i++"), which walked past
    // the end of the array (ArrayIndexOutOfBoundsException in
    // concatDotSeparated) instead of shrinking the candidate class name.
    for (int i = split.length-1; i > 1; i--) {
        String className = concatDotSeparated(split, 0, i);
        if (className.endsWith("!")) {
            className = className.substring(0, className.length()-1);
        }
        if (findClass(className, cl) != null) {
            return new String[] { className, concatDotSeparated(split, i, split.length) };
        }
    }
    return null;
}
/**
 * Re-joins parts[start .. end-1] with '.' separators. The range must contain
 * at least one element (parts[start] seeds the builder, as before).
 */
private String concatDotSeparated(String[] parts, int start, int end) {
    StringBuilder joined = new StringBuilder( parts[start] );
    for (int i = start + 1; i < end; i++) {
        joined.append( '.' ).append( parts[i] );
    }
    return joined.toString();
}
/**
 * Dumps a connective (and/or) node: children are joined with the connective's
 * textual form, parenthesised when the parent binds more tightly. Bindings do
 * not emit a connective of their own; a node whose children were ALL bindings
 * degenerates to the literal "true".
 */
protected void processConnectiveDescr( StringBuilder sbuilder,
BaseDescr base,
ConstraintConnectiveDescr parent,
int parentPriority,
boolean isInsideRelCons,
MVELDumperContext context ) {
ConstraintConnectiveDescr ccd = (ConstraintConnectiveDescr) base;
boolean wrapParenthesis = parentPriority > ccd.getConnective().getPrecedence();
if ( wrapParenthesis ) {
sbuilder.append( "( " );
}
boolean first = true;
// Iterate a COPY: dump() may splice synthetic checks (inline casts,
// null checks) into ccd.getDescrs() while we walk it.
List<BaseDescr> descrs = new ArrayList<BaseDescr>( ccd.getDescrs() );
for ( BaseDescr constr : descrs ) {
if ( !( constr instanceof BindingDescr ) ) {
if ( first ) {
first = false;
} else {
sbuilder.append( " " );
sbuilder.append( ccd.getConnective().toString() );
sbuilder.append( " " );
}
}
// Track nesting depth so atomic expressions know whether their implicit
// preconditions must be evaluated inline (see processAtomicExpression).
context.incOpenCcd();
dump( sbuilder,
constr,
ccd,
ccd.getDescrs().indexOf( constr ),
ccd.getConnective().getPrecedence(),
isInsideRelCons,
context );
context.decOpenCcd();
}
if( first ) {
// means all children were actually only bindings, replace by just true
sbuilder.append( "true" );
}
if ( wrapParenthesis ) {
sbuilder.append( " )" );
}
}
/**
 * Rewrites one "left OP right" restriction into executable MVEL:
 * memberOf/contains/excludes/matches get dedicated rewrites, operators in the
 * sorted "standard" table are emitted verbatim, and anything else becomes an
 * aliased evaluator call (see rewriteOperator). An "instanceof" also records
 * an inferred cast on the context for later expressions on the same variable.
 */
public void processRestriction( MVELDumperContext context,
StringBuilder sbuilder,
String left,
OperatorDescr operator,
String right ) {
Operator op = Operator.determineOperator( operator.getOperator(),
operator.isNegated() );
if ( op == Operator.determineOperator( "memberOf",
operator.isNegated() ) ) {
// "left memberOf right" becomes "right contains left". Preconditions
// already joined into 'left' with "&&" must be hoisted out first, or the
// operand swap would drag them to the wrong side.
int lastAndPos = left.lastIndexOf("&&");
if ( lastAndPos > 0 ) {
sbuilder.append( left.substring(0, lastAndPos).trim() ).append( " && " );
left = left.substring(lastAndPos + 2).trim();
}
sbuilder.append( evaluatorPrefix( operator.isNegated() ) )
.append( right )
.append( " contains " )
.append( left )
.append( evaluatorSufix( operator.isNegated() ) );
} else if ( op == Operator.determineOperator( "contains",
operator.isNegated() ) ) {
sbuilder.append( evaluatorPrefix( operator.isNegated() ) )
.append( left )
.append( " contains " )
.append( right )
.append( evaluatorSufix( operator.isNegated() ) );
} else if ( op == Operator.determineOperator( "excludes",
operator.isNegated() ) ) {
// "excludes" is just a negated "contains": note the inverted flags.
sbuilder.append( evaluatorPrefix( !operator.isNegated() ) )
.append( left )
.append( " contains " )
.append( right )
.append( evaluatorSufix( !operator.isNegated() ) );
} else if ( op == Operator.determineOperator( "matches",
operator.isNegated() ) ) {
sbuilder.append( evaluatorPrefix( operator.isNegated() ) )
.append( left )
.append( " ~= " )
.append( right )
.append( evaluatorSufix( operator.isNegated() ) );
} else if ( lookupBasicOperator( operator.getOperator() ) ) {
if (operator.getOperator().equals("instanceof")) {
context.addInferredCast(left, right);
}
rewriteBasicOperator( sbuilder, left, operator, right );
} else {
// rewrite operator as a function call
rewriteOperator( context, sbuilder, left, operator, right );
}
}
/** Emits "left op right", wrapped in "!( ... )" when the operator is negated. */
protected void rewriteBasicOperator( StringBuilder sbuilder,
                                     String left,
                                     OperatorDescr operator,
                                     String right) {
    boolean negated = operator.isNegated();
    sbuilder.append( evaluatorPrefix( negated ) );
    sbuilder.append( left ).append( ' ' ).append( operator.getOperator() ).append( ' ' ).append( right );
    sbuilder.append( evaluatorSufix( negated ) );
}

/** True when the operator is one of the natively supported MVEL operators. */
protected boolean lookupBasicOperator( String op ) {
    int pos = Arrays.binarySearch( standard, op );
    return pos >= 0;
}

/**
 * Rewrites a custom operator as an evaluator call: an alias is registered on
 * the context and the restriction becomes "alias.evaluate( left, right )",
 * negated with "!( ... )" when required.
 */
protected void rewriteOperator( MVELDumperContext context,
                                StringBuilder sbuilder,
                                String left,
                                OperatorDescr operator,
                                String right ) {
    String alias = context.createAlias( operator );
    operator.setLeftString( left );
    operator.setRightString( right );
    boolean negated = operator.isNegated();
    sbuilder.append( evaluatorPrefix( negated ) )
            .append( alias )
            .append( ".evaluate( " ).append( left ).append( ", " ).append( right ).append( " )" )
            .append( evaluatorSufix( negated ) );
}

/** Opening half of the negation wrapper; empty when not negated. */
protected String evaluatorPrefix(final boolean isNegated) {
    return isNegated ? "!( " : "";
}

/** Closing half of the negation wrapper; empty when not negated. */
protected String evaluatorSufix(final boolean isNegated) {
    return isNegated ? " )" : "";
}

/** Factory hook: subclasses may supply a specialised context. */
protected MVELDumperContext createContext() {
    return new MVELDumperContext();
}

/** The wrapper class custom-operator aliases are bound to at runtime. */
public Class<?> getEvaluatorWrapperClass() {
    return EvaluatorWrapper.class;
}
/**
 * Mutable state threaded through one dump of a constraint tree: custom
 * operator aliases, extracted bindings, casts inferred from "instanceof"
 * constraints, the current CCD nesting depth and the in-xpath flag.
 */
public static class MVELDumperContext {
    protected Map<String, OperatorDescr> aliases;
    protected int counter;
    protected List<BindingDescr> bindings;
    private RuleBuildContext ruleContext;
    private Map<String, String> inferredCasts;
    private int openCcd;
    private boolean inXpath;

    public MVELDumperContext() {
        this.aliases = new HashMap<String, OperatorDescr>();
        this.counter = 0;
        this.bindings = null;
        this.openCcd = 0;
    }

    /**
     * Resets aliases, alias counter, bindings and nesting depth.
     * NOTE(review): inferredCasts and inXpath are NOT reset here — confirm
     * that is intended before reusing one context across independent dumps.
     */
    public void clear() {
        this.aliases.clear();
        this.counter = 0;
        this.bindings = null;
        this.openCcd = 0;
    }

    /** Records that {@code var} has been narrowed to {@code cast} by an instanceof. */
    public void addInferredCast(String var, String cast) {
        if (inferredCasts == null) {
            inferredCasts = new HashMap<String, String>();
        }
        inferredCasts.put(var, cast);
    }

    /**
     * Returns the (variable, type) entry whose variable prefixes the given
     * dotted expression, or null when no inferred cast applies.
     */
    public Map.Entry<String, String> getInferredCast(String expr) {
        if (inferredCasts != null) {
            for (Map.Entry<String, String> entry : inferredCasts.entrySet()) {
                if (expr.matches(entry.getKey() + "\\s*\\..+")) {
                    return entry;
                }
            }
        }
        return null;
    }

    /**
     * @return the aliases
     */
    public Map<String, OperatorDescr> getAliases() {
        return aliases;
    }

    /**
     * @param aliases the aliases to set
     */
    public void setAliases( Map<String, OperatorDescr> aliases ) {
        this.aliases = aliases;
    }

    /**
     * Creates a new alias for the operator, setting it in the descriptor
     * class, adding it to the internal Map and returning it as a String
     */
    public String createAlias( OperatorDescr operator ) {
        String alias = operator.getOperator() + counter++;
        operator.setAlias(alias);
        this.aliases.put( alias,
                          operator );
        return alias;
    }

    /**
     * Adds a binding to the list of bindings on this context
     */
    public void addBinding( BindingDescr bind ) {
        if( this.bindings == null ) {
            this.bindings = new ArrayList<BindingDescr>();
        }
        this.bindings.add( bind );
    }

    /**
     * Returns the bindings collected so far, or an immutable empty list.
     * Collections.<BindingDescr>emptyList() is type-safe, which removes the
     * raw Collections.EMPTY_LIST constant and the @SuppressWarnings the
     * previous version needed.
     */
    public List<BindingDescr> getBindings() {
        return this.bindings == null ? Collections.<BindingDescr>emptyList() : this.bindings;
    }

    public RuleBuildContext getRuleContext() {
        return ruleContext;
    }

    public MVELDumperContext setRuleContext(RuleBuildContext ruleContext) {
        this.ruleContext = ruleContext;
        return this;
    }

    public void incOpenCcd() {
        openCcd++;
    }

    public void decOpenCcd() {
        openCcd--;
    }

    /** True while dumping a CCD nested inside another CCD. */
    public boolean isCcdNested() {
        return openCcd > 0;
    }

    public boolean isInXpath() {
        return inXpath;
    }

    public void setInXpath( boolean inXpath ) {
        this.inXpath = inXpath;
    }
}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Author: max
* Date: Oct 9, 2001
* Time: 8:43:17 PM
*/
package com.intellij.codeInspection.ex;
import com.intellij.codeInspection.*;
import com.intellij.icons.AllIcons;
import com.intellij.ide.impl.ContentManagerWatcher;
import com.intellij.ide.ui.search.SearchableOptionsRegistrar;
import com.intellij.lang.Language;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.NotNullLazyValue;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowAnchor;
import com.intellij.openapi.wm.ToolWindowId;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.profile.codeInspection.ui.header.InspectionToolsConfigurable;
import com.intellij.psi.PsiElement;
import com.intellij.ui.content.ContentFactory;
import com.intellij.ui.content.ContentManager;
import com.intellij.ui.content.TabbedPaneContentUI;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import jakarta.inject.Singleton;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.TestOnly;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import jakarta.inject.Inject;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Pattern;
@Singleton
public class InspectionManagerEx extends InspectionManagerBase {
private static final Pattern HTML_PATTERN = Pattern.compile("<[^<>]*>");
private final NotNullLazyValue<ContentManager> myContentManager;
private final Set<GlobalInspectionContextImpl> myRunningContexts = new HashSet<GlobalInspectionContextImpl>();
private final AtomicBoolean myToolsAreInitialized = new AtomicBoolean(false);
private GlobalInspectionContextImpl myGlobalInspectionContext;
@Inject
public InspectionManagerEx(final Project project) {
super(project);
// The inspection-results ContentManager is created lazily so no tool window
// work happens until results are actually shown.
if (ApplicationManager.getApplication().isHeadlessEnvironment()) {
// Headless (tests/CI): build a standalone tabbed ContentManager.
// NOTE(review): this branch still registers the INSPECTION tool window
// before creating an unrelated ContentManager via the factory — confirm
// the registration is really needed in headless mode.
myContentManager = new NotNullLazyValue<ContentManager>() {
@Nonnull
@Override
protected ContentManager compute() {
ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(project);
toolWindowManager.registerToolWindow(ToolWindowId.INSPECTION, true, ToolWindowAnchor.BOTTOM, project);
return ContentFactory.getInstance().createContentManager(new TabbedPaneContentUI(), true, project);
}
};
}
else {
// Normal IDE: use the registered tool window's own content manager.
myContentManager = new NotNullLazyValue<ContentManager>() {
@Nonnull
@Override
protected ContentManager compute() {
ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(project);
ToolWindow toolWindow =
toolWindowManager.registerToolWindow(ToolWindowId.INSPECTION, true, ToolWindowAnchor.BOTTOM, project);
ContentManager contentManager = toolWindow.getContentManager();
toolWindow.setIcon(AllIcons.Toolwindows.ToolWindowInspection);
// Presumably keeps tool window availability in sync with its contents
// (hide when empty) — confirm against ContentManagerWatcher.
new ContentManagerWatcher(toolWindow, contentManager);
return contentManager;
}
};
}
}
@Nullable
public static SuppressIntentionAction[] getSuppressActions(@Nonnull InspectionToolWrapper toolWrapper) {
final InspectionProfileEntry tool = toolWrapper.getTool();
if (tool instanceof CustomSuppressableInspectionTool) {
return ((CustomSuppressableInspectionTool)tool).getSuppressActions(null);
}
final List<LocalQuickFix> actions = new ArrayList<LocalQuickFix>(Arrays.asList(tool.getBatchSuppressActions(null)));
if (actions.isEmpty()) {
final Language language = Language.findLanguageByID(toolWrapper.getLanguage());
if (language != null) {
final List<InspectionSuppressor> suppressors = LanguageInspectionSuppressors.INSTANCE.allForLanguage(language);
for (InspectionSuppressor suppressor : suppressors) {
final SuppressQuickFix[] suppressActions = suppressor.getSuppressActions(null, tool.getShortName());
Collections.addAll(actions, suppressActions);
}
}
}
return ContainerUtil.map2Array(actions, SuppressIntentionAction.class, new Function<LocalQuickFix, SuppressIntentionAction>() {
@Override
public SuppressIntentionAction fun(final LocalQuickFix fix) {
return SuppressIntentionActionFromFix.convertBatchToSuppressIntentionAction((SuppressQuickFix)fix);
}
});
}
private static void processText(@Nonnull @NonNls String descriptionText,
@Nonnull InspectionToolWrapper tool,
@Nonnull SearchableOptionsRegistrar myOptionsRegistrar) {
if (ApplicationManager.getApplication().isDisposed()) return;
final Set<String> words = myOptionsRegistrar.getProcessedWordsWithoutStemming(descriptionText);
for (String word : words) {
myOptionsRegistrar.addOption(word, tool.getShortName(), tool.getDisplayName(), InspectionToolsConfigurable.ID, InspectionToolsConfigurable.DISPLAY_NAME);
}
}
@Nonnull
public ProblemDescriptor createProblemDescriptor(@Nonnull final PsiElement psiElement,
@Nonnull final String descriptionTemplate,
@Nonnull final ProblemHighlightType highlightType,
@Nullable final HintAction hintAction,
boolean onTheFly,
final LocalQuickFix... fixes) {
return new ProblemDescriptorImpl(psiElement, psiElement, descriptionTemplate, fixes, highlightType, false, null, hintAction, onTheFly);
}
@Override
@Nonnull
public GlobalInspectionContextImpl createNewGlobalContext(boolean reuse) {
final GlobalInspectionContextImpl inspectionContext;
if (reuse) {
if (myGlobalInspectionContext == null) {
myGlobalInspectionContext = inspectionContext = new GlobalInspectionContextImpl(getProject(), myContentManager);
}
else {
inspectionContext = myGlobalInspectionContext;
}
}
else {
inspectionContext = new GlobalInspectionContextImpl(getProject(), myContentManager);
}
myRunningContexts.add(inspectionContext);
return inspectionContext;
}
public void setProfile(final String name) {
myCurrentProfileName = name;
}
public void closeRunningContext(GlobalInspectionContextImpl globalInspectionContext){
myRunningContexts.remove(globalInspectionContext);
}
@Nonnull
public Set<GlobalInspectionContextImpl> getRunningContexts() {
return myRunningContexts;
}
@Nonnull
@Deprecated
public ProblemDescriptor createProblemDescriptor(@Nonnull final PsiElement psiElement,
@Nonnull final String descriptionTemplate,
@Nonnull final ProblemHighlightType highlightType,
@Nullable final HintAction hintAction,
final LocalQuickFix... fixes) {
return new ProblemDescriptorImpl(psiElement, psiElement, descriptionTemplate, fixes, highlightType, false, null, hintAction, true);
}
@TestOnly
public NotNullLazyValue<ContentManager> getContentManager() {
return myContentManager;
}
public void buildInspectionSearchIndexIfNecessary() {
if (!myToolsAreInitialized.getAndSet(true)) {
final SearchableOptionsRegistrar myOptionsRegistrar = SearchableOptionsRegistrar.getInstance();
final InspectionToolRegistrar toolRegistrar = InspectionToolRegistrar.getInstance();
final Application app = ApplicationManager.getApplication();
if (app.isUnitTestMode() || app.isHeadlessEnvironment()) return;
app.executeOnPooledThread(new Runnable(){
@Override
public void run() {
List<InspectionToolWrapper> tools = toolRegistrar.createTools();
for (InspectionToolWrapper toolWrapper : tools) {
processText(toolWrapper.getDisplayName().toLowerCase(), toolWrapper, myOptionsRegistrar);
final String description = toolWrapper.loadDescription();
if (description != null) {
@NonNls String descriptionText = HTML_PATTERN.matcher(description).replaceAll(" ");
processText(descriptionText, toolWrapper, myOptionsRegistrar);
}
}
}
});
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avalon.excalibur.pool;
import java.util.Iterator;
import java.util.LinkedList;
import org.apache.avalon.framework.activity.Disposable;
import org.apache.avalon.framework.logger.AbstractLogEnabled;
import org.apache.avalon.framework.logger.LogEnabled;
import org.apache.avalon.framework.thread.ThreadSafe;
import org.apache.excalibur.instrument.CounterInstrument;
import org.apache.excalibur.instrument.Instrument;
import org.apache.excalibur.instrument.Instrumentable;
import org.apache.excalibur.instrument.ValueInstrument;
/**
 * General Pool implementation which supports; weak and strong pool size limits,
 * optional blocking gets when poolables are not available, and automatic pool
 * trimming of unused poolables.
 * <p>
 * Whenever get() is called, the pool tests to see whether it is time to trim old
 * poolables from the pool. If any old poolables exist then they are removed at
 * this time. This means that old poolables will not be removed if get() is never
 * called. Applications can optionally call trim() to force old objects to be
 * trimmed. See the {@link #trim()} method for details of how trimming works.
 *
 * @author <a href="mailto:dev@avalon.apache.org">Avalon Development Team</a>
 * @version CVS $Revision: 1.1 $ $Date: 2004/03/29 16:50:37 $
 * @since 4.1
 */
public class InstrumentedResourceLimitingPool
    extends AbstractLogEnabled
    implements Pool, LogEnabled, Disposable, ThreadSafe, Instrumentable
{
    public static final String DEFAULT_INSTRUMENTABLE_NAME = "pool";
    public static final String INSTRUMENT_SIZE_NAME = "size";
    public static final String INSTRUMENT_READY_SIZE_NAME = "ready-size";
    public static final String INSTRUMENT_GETS_NAME = "gets";
    public static final String INSTRUMENT_PUTS_NAME = "puts";
    public static final String INSTRUMENT_BLOCKS_NAME = "blocks";
    public static final String INSTRUMENT_CREATES_NAME = "creates";
    public static final String INSTRUMENT_DECOMMISSIONS_NAME = "decommissions";

    /*---------------------------------------------------------------
     * Protected Fields
     *-------------------------------------------------------------*/
    /**
     * Object used to synchronize access to the get and put methods.
     * Also serves as the monitor that blocked get() callers wait on.
     */
    protected final Object m_semaphore = new Object();

    /*---------------------------------------------------------------
     * Private Fields
     *-------------------------------------------------------------*/
    /**
     * Keeps track of whether or not the Pool has been disposed.
     */
    private boolean m_disposed = false;

    /**
     * The Object Factory used to generate new Poolable instances for the pool.
     */
    private final ObjectFactory m_factory;

    /**
     * The maximum size of the pool.
     */
    private final int m_max;

    /**
     * Whether or not the pool allows for the creation of objects beyond the maximum pool size.
     */
    private final boolean m_maxStrict;

    /**
     * Whether or not the pool should cause threads requesting a Poolable to block when m_maxStrict
     * is true, the pool size is equal to m_max and there are no Poolable instances available.
     */
    private final boolean m_blocking;

    /**
     * The maximum amount of time in milliseconds that the pool will block. If 0, blocking will
     * wait indefinitely.
     */
    private final long m_blockTimeout;

    /**
     * The minimum interval with which old unused poolables will be removed from the pool.
     */
    private final long m_trimInterval;

    /**
     * The last time that the pool was trimmed.
     */
    private long m_lastTrim;

    /**
     * List of the Poolable instances which are available for use.
     */
    private LinkedList m_ready;

    /**
     * Store the size of the ready list to optimize operations which require this value.
     */
    private int m_readySize;

    /**
     * List of the Poolable instance which are available for use but have been idle for a while.
     * Null when trimming is disabled (trimInterval == 0); see trim() for the two-list scheme.
     */
    private LinkedList m_oldReady;

    /**
     * Store the size of the old ready list to optimize operations which require this value.
     */
    private int m_oldReadySize;

    /**
     * Total number of Poolable instances in the pool (checked out plus ready).
     */
    private int m_size;

    /** Instrumentable Name assigned to this Instrumentable */
    private String m_instrumentableName = DEFAULT_INSTRUMENTABLE_NAME;

    /** Instrument used to profile the size of the pool. */
    private ValueInstrument m_sizeInstrument;

    /** Instrument used to profile the number of available poolables. */
    private ValueInstrument m_readySizeInstrument;

    /** Instrument used to profile the number of gets. */
    private CounterInstrument m_getsInstrument;

    /** Instrument used to profile the number of puts. */
    private CounterInstrument m_putsInstrument;

    /** Instrument used to profile the number of blocks. */
    private CounterInstrument m_blocksInstrument;

    /** Instrument used to profile the number of created poolables. */
    private CounterInstrument m_createsInstrument;

    /** Instrument used to profile the number of decommissioned poolables. */
    private CounterInstrument m_decommissionsInstrument;

    /*---------------------------------------------------------------
     * Constructors
     *-------------------------------------------------------------*/
    /**
     * Creates a new InstrumentedResourceLimitingPool
     *
     * @param factory The ObjectFactory which will be used to create new Poolables as needed by
     *  the pool.
     * @param max Maximum number of Poolables which can be stored in the pool, 0 implies no limit.
     * @param maxStrict true if the pool should never allow more than max Poolable to be created.
     *  Will cause an exception to be thrown if more than max Poolables are requested and blocking
     *  is false.
     * @param blocking true if the pool should cause a thread calling get() to block when Poolables
     *  are not currently available in the pool.
     * @param blockTimeout The maximum amount of time, in milliseconds, that a call to get() will
     *  block before an exception is thrown. A value of 0 implies an indefinite wait.
     * @param trimInterval The minimum interval with which old unused poolables will be removed
     *  from the pool. A value of 0 will cause the pool to never trim poolables.
     */
    public InstrumentedResourceLimitingPool( final ObjectFactory factory,
                                             int max,
                                             boolean maxStrict,
                                             boolean blocking,
                                             long blockTimeout,
                                             long trimInterval )
    {
        m_factory = factory;
        // max <= 0 means "unbounded".
        m_max = ( max <= 0 ? Integer.MAX_VALUE : max );
        m_maxStrict = maxStrict;
        m_blocking = blocking;
        m_blockTimeout = blockTimeout;
        m_trimInterval = trimInterval;

        // Create the pool lists.
        m_ready = new LinkedList();
        if( m_trimInterval > 0 )
        {
            // The old-ready list only exists when trimming is enabled.
            m_oldReady = new LinkedList();
        }

        // Initialize the Instrumentable elements.
        m_sizeInstrument = new ValueInstrument( INSTRUMENT_SIZE_NAME );
        m_readySizeInstrument = new ValueInstrument( INSTRUMENT_READY_SIZE_NAME );
        m_getsInstrument = new CounterInstrument( INSTRUMENT_GETS_NAME );
        m_putsInstrument = new CounterInstrument( INSTRUMENT_PUTS_NAME );
        m_blocksInstrument = new CounterInstrument( INSTRUMENT_BLOCKS_NAME );
        m_createsInstrument = new CounterInstrument( INSTRUMENT_CREATES_NAME );
        m_decommissionsInstrument = new CounterInstrument( INSTRUMENT_DECOMMISSIONS_NAME );
    }

    /*---------------------------------------------------------------
     * Pool Methods
     *-------------------------------------------------------------*/
    /**
     * Gets a Poolable from the pool. If there is room in the pool, a new Poolable will be
     * created. Depending on the parameters to the constructor, the method may block or throw
     * an exception if a Poolable is not available on the pool.
     *
     * @return Always returns a Poolable. Contract requires that put must always be called with
     *  the Poolable returned.
     * @throws Exception An exception may be thrown as described above or if there is an exception
     *  thrown by the ObjectFactory's newInstance() method.
     */
    public Poolable get() throws Exception
    {
        if( m_disposed ) throw new IllegalStateException( "Already Disposed" );

        Poolable poolable;
        int readySize;
        synchronized( m_semaphore )
        {
            // If trimming is enabled then trim if it is time
            if( ( m_oldReady != null ) &&
                ( System.currentTimeMillis() - m_lastTrim >= m_trimInterval ) )
            {
                trimInner();
            }

            // Look for a Poolable at the end of the m_ready list
            if( m_readySize > 0 )
            {
                // A poolable is ready and waiting in the pool
                poolable = (Poolable)m_ready.removeLast();
                m_readySize--;
            }
            else if( m_oldReadySize > 0 )
            {
                // An old poolable is ready and waiting in the pool
                poolable = (Poolable)m_oldReady.removeLast();
                m_oldReadySize--;
            }
            else
            {
                // Are we allowed to create a new poolable here?
                if( ( m_size >= m_max ) && m_maxStrict )
                {
                    // The pool has as many active Poolables as it is allowed and
                    //  we are not allowed to create any more.
                    // Are we allowed to wait for a Poolable to become available?
                    if( m_blocking )
                    {
                        long blockStart = System.currentTimeMillis();

                        if( getLogger().isDebugEnabled() )
                        {
                            getLogger().debug( "Blocking until a Poolable is available. "
                                               + "Thread: " + Thread.currentThread().getName() );
                        }

                        // Notify the InstrumentManager
                        m_blocksInstrument.increment();

                        if( m_blockTimeout > 0 )
                        {
                            // Wait for a limited amount of time for a poolable is made
                            //  available.
                            // Other threads may grab a connection before this thread gets the
                            //  semaphore, so be careful.  The loop re-checks m_readySize after
                            //  every wake-up and recomputes the remaining wait budget.
                            long blockWait = m_blockTimeout;
                            do
                            {
                                if( blockWait > 0 )
                                {
                                    try
                                    {
                                        m_semaphore.wait( blockWait );
                                    }
                                    catch( InterruptedException e )
                                    {
                                        // NOTE(review): the interrupt is swallowed and the
                                        //  thread's interrupt status is not restored, so a
                                        //  blocked get() cannot be interrupted — confirm
                                        //  this is intended.
                                    }

                                    // The dispose() method might have woken us up.
                                    if( m_disposed )
                                    {
                                        throw new IllegalStateException( "Already Disposed" );
                                    }

                                    if( m_readySize == 0 )
                                    {
                                        // Not available yet, calculate how much longer to wait.
                                        long now = System.currentTimeMillis();
                                        blockWait = m_blockTimeout - ( now - blockStart );
                                    }
                                }
                                else
                                {
                                    // We timed out waiting.
                                    long now = System.currentTimeMillis();
                                    if( getLogger().isDebugEnabled() )
                                    {
                                        getLogger().debug(
                                            "Timed out waiting for a Poolable to become "
                                            + "available.  Blocked for " + ( now - blockStart )
                                            + "ms.  Thread: " + Thread.currentThread().getName() );
                                    }
                                    throw new Exception
                                        ( "Could not create enough Components to service your "
                                          + "request (Timed out)." );
                                }
                            } while( m_readySize == 0 );
                        }
                        else
                        {
                            // Wait until we get a poolable no matter how long it takes.
                            // Other threads may grab a connection before this thread gets the
                            //  semaphore, so be careful.
                            do
                            {
                                try
                                {
                                    m_semaphore.wait();
                                }
                                catch( InterruptedException e )
                                {
                                    // NOTE(review): interrupt swallowed here as well; see
                                    //  the timed branch above.
                                }

                                // The dispose() method might have woken us up.
                                if( m_disposed )
                                {
                                    throw new IllegalStateException( "Already Disposed" );
                                }
                            } while( m_readySize == 0 );
                        }

                        // A poolable is ready and waiting in the pool.  put() always returns
                        //  poolables to m_ready, so only m_ready needs to be checked here.
                        poolable = (Poolable)m_ready.removeLast();
                        m_readySize--;

                        if( getLogger().isDebugEnabled() )
                        {
                            long now = System.currentTimeMillis();
                            getLogger().debug( "Blocked for " + ( now - blockStart ) + "ms "
                                               + "waiting for a Poolable to become available.  "
                                               + "Thread: " + Thread.currentThread().getName() );
                        }
                    }
                    else
                    {
                        // We must fail.
                        throw new Exception
                            ( "Could not create enough Components to service your request." );
                    }
                }
                else
                {
                    // Create a new poolable.  May throw an exception if the poolable can not be
                    //  instantiated.
                    poolable = newPoolable();
                    m_size++;

                    if( getLogger().isDebugEnabled() )
                    {
                        getLogger().debug( "Created a new " + poolable.getClass().getName()
                                           + " from the object factory." );
                    }
                }
            }

            readySize = getReadySizeSync();
        }

        if( getLogger().isDebugEnabled() )
        {
            getLogger().debug( "Got a " + poolable.getClass().getName() + " from the pool." );
        }

        // Notify the InstrumentManager
        m_getsInstrument.increment();
        m_readySizeInstrument.setValue( readySize );

        return poolable;
    }

    /**
     * Returns a poolable to the pool and notifies any thread blocking.
     *
     * @param poolable Poolable to return to the pool.
     */
    public void put( Poolable poolable )
    {
        // Handle Recyclable objects
        if( poolable instanceof Recyclable )
        {
            ( (Recyclable)poolable ).recycle();
        }

        int readySize;
        synchronized( m_semaphore )
        {
            if( m_size <= m_max )
            {
                if( m_disposed )
                {
                    // The pool has already been disposed.
                    if( getLogger().isDebugEnabled() )
                    {
                        getLogger().debug( "Put called for a " + poolable.getClass().getName()
                                           + " after the pool was disposed." );
                    }

                    permanentlyRemovePoolable( poolable );
                }
                else
                {
                    // There is room in the pool to keep this poolable.
                    if( getLogger().isDebugEnabled() )
                    {
                        getLogger().debug( "Put a " + poolable.getClass().getName()
                                           + " back into the pool." );
                    }

                    // Returned poolables always go on the "new" list; trimInner() later
                    //  demotes them to the old list if they stay idle.
                    m_ready.addLast( poolable );
                    m_readySize++;

                    // Let any waiting threads know that a poolable has become available.
                    if( m_blocking )
                    {
                        m_semaphore.notify();
                    }
                }
            }
            else
            {
                // More Poolables were created than can be held in the pool, so remove.
                if( getLogger().isDebugEnabled() )
                {
                    getLogger().debug( "No room to put a " + poolable.getClass().getName()
                                       + " back into the pool, so remove it." );
                }

                permanentlyRemovePoolable( poolable );
            }

            readySize = getReadySizeSync();
        }

        // Notify the InstrumentManager
        m_putsInstrument.increment();
        m_readySizeInstrument.setValue( readySize );
    }

    /*---------------------------------------------------------------
     * Disposable Methods
     *-------------------------------------------------------------*/
    /**
     * The dispose operation is called at the end of a components lifecycle.
     * This method will be called after Startable.stop() method (if implemented
     * by component). Components use this method to release and destroy any
     * resources that the Component owns.
     * <p>
     * Poolables still checked out are not reclaimed here; they are destroyed
     * when (and if) put() is later called for them.
     */
    public void dispose()
    {
        m_disposed = true;

        // Any Poolables in the m_ready list need to be disposed of
        int size;
        int readySize;
        synchronized( m_semaphore )
        {
            // Remove objects in the ready list.
            for( Iterator iter = m_ready.iterator(); iter.hasNext(); )
            {
                Poolable poolable = (Poolable)iter.next();
                iter.remove();
                m_readySize--;
                permanentlyRemovePoolable( poolable );
            }

            // Remove objects in the old ready list.
            if( m_oldReady != null )
            {
                for( Iterator iter = m_oldReady.iterator(); iter.hasNext(); )
                {
                    Poolable poolable = (Poolable)iter.next();
                    iter.remove();
                    m_oldReadySize--;
                    permanentlyRemovePoolable( poolable );
                }
            }

            // Notify any threads currently waiting for objects so they can abort
            //  (they re-check m_disposed after waking and throw IllegalStateException).
            if( m_blocking )
            {
                m_semaphore.notifyAll();
            }

            if( ( m_size > 0 ) && getLogger().isDebugEnabled() )
            {
                getLogger().debug( "There were " + m_size
                                   + " outstanding objects when the pool was disposed." );
            }

            size = getSize();
            readySize = getReadySizeSync();
        }

        // Notify the InstrumentManager
        m_sizeInstrument.setValue( size );
        m_readySizeInstrument.setValue( readySize );
    }

    /*---------------------------------------------------------------
     * Instrumentable Methods
     *-------------------------------------------------------------*/
    /**
     * Sets the name for the Instrumentable.  The Instrumentable Name is used
     *  to uniquely identify the Instrumentable during the configuration of
     *  the InstrumentManager and to gain access to an InstrumentableDescriptor
     *  through the InstrumentManager.  The value should be a string which does
     *  not contain spaces or periods.
     * <p>
     * This value may be set by a parent Instrumentable, or by the
     *  InstrumentManager using the value of the 'instrumentable' attribute in
     *  the configuration of the component.
     *
     * @param name The name used to identify a Instrumentable.
     */
    public void setInstrumentableName( String name )
    {
        m_instrumentableName = name;
    }

    /**
     * Gets the name of the Instrumentable.
     *
     * @return The name used to identify a Instrumentable.
     */
    public String getInstrumentableName()
    {
        return m_instrumentableName;
    }

    /**
     * Obtain a reference to all the Instruments that the Instrumentable object
     *  wishes to expose.  All sampling is done directly through the
     *  Instruments as opposed to the Instrumentable interface.
     *
     * @return An array of the Instruments available for profiling.  Should
     *         never be null.  If there are no Instruments, then
     *         EMPTY_INSTRUMENT_ARRAY can be returned.  This should never be
     *         the case though unless there are child Instrumentables with
     *         Instruments.
     */
    public Instrument[] getInstruments()
    {
        return new Instrument[]
        {
            m_sizeInstrument,
            m_readySizeInstrument,
            m_getsInstrument,
            m_putsInstrument,
            m_blocksInstrument,
            m_createsInstrument,
            m_decommissionsInstrument
        };
    }

    /**
     * Any Object which implements Instrumentable can also make use of other
     *  Instrumentable child objects.  This method is used to tell the
     *  InstrumentManager about them.
     *
     * @return An array of child Instrumentables.  This method should never
     *         return null.  If there are no child Instrumentables, then
     *         EMPTY_INSTRUMENTABLE_ARRAY can be returned.
     */
    public Instrumentable[] getChildInstrumentables()
    {
        return Instrumentable.EMPTY_INSTRUMENTABLE_ARRAY;
    }

    /*---------------------------------------------------------------
     * Methods
     *-------------------------------------------------------------*/
    /**
     * Permanently removes a poolable from the pool's active list and
     *  destroys it so that it will not ever be reused.
     * <p>
     * This method is only called by threads that have m_semaphore locked.
     */
    protected void permanentlyRemovePoolable( Poolable poolable )
    {
        m_size--;
        removePoolable( poolable );
    }

    /**
     * Returns the total number of Poolables created by the pool.  Includes active and ready.
     *
     * @return The total size.
     */
    public int getSize()
    {
        return m_size;
    }

    /**
     * Returns the number of available Poolables waiting in the pool
     *  (new plus old ready lists).
     * Only called when synchronized.
     *
     * @return The ready size.
     */
    private int getReadySizeSync()
    {
        return m_readySize + m_oldReadySize;
    }

    /**
     * Returns the number of available Poolables waiting in the pool.
     *
     * @return The ready size.
     */
    public int getReadySize()
    {
        synchronized( m_semaphore )
        {
            return getReadySizeSync();
        }
    }

    /**
     * Create a new poolable instance by calling the newInstance method
     *  on the pool's ObjectFactory.
     * <p>
     * This is the method to override when you need to enforce creational
     *  policies.
     * <p>
     * This method is only called by threads that have m_semaphore locked.
     */
    protected Poolable newPoolable() throws Exception
    {
        Object obj = m_factory.newInstance();

        // Notify the InstrumentManager
        m_createsInstrument.increment();
        // The size is incremented after this call in case an error is thrown.
        m_sizeInstrument.setValue( getSize() + 1 );

        return (Poolable)obj;
    }

    /**
     * Called when an object is being removed permanently from the pool.
     * This is the method to override when you need to enforce destructional
     * policies.
     * <p>
     * This method is only called by threads that have m_semaphore locked.
     *
     * @param poolable Poolable to be completely removed from the pool.
     */
    protected void removePoolable( Poolable poolable )
    {
        try
        {
            m_factory.decommission( poolable );

            // Notify the InstrumentManager
            m_decommissionsInstrument.increment();
            m_sizeInstrument.setValue( getSize() );
        }
        catch( Exception e )
        {
            // Decommission failures are logged but deliberately not propagated.
            if( getLogger().isDebugEnabled() )
            {
                getLogger().debug( "Error decommissioning object", e );
            }
        }
    }

    /**
     * Forces the pool to trim, remove, old Poolables from the pool.  If the Pool
     *  was created with a non-zero value for trimInterval, then this method will
     *  be called at that interval when get() is called.  If get() is not called
     *  for long periods of time then if may be necessary to call this method
     *  manually.
     * <p>
     * Trimming is done by maintaining two lists of objects.  The first is a ready list
     *  of new poolables.  The second is a list of old poolables.  Each time trim() is
     *  called, the contents of the old list are removed from the pool.  Then the
     *  contents of the new list is moved into the old list.
     * <p>
     * Each time get() is called on the pool, the new list is checked first, then the
     *  old list is checked, finally a new poolable may be created if both lists are
     *  empty.  Then whenever put() is called, the poolables are always returned to
     *  the new list.  In this way, the need for maintaining time stamps for each poolable
     *  can be avoided while at the same time avoiding unnecessary removal and creation
     *  on poolables.
     * <p>
     * This works out to a poolable having a maximum idle time of two calls to trim() or
     *  twice the value of trimInterval.
     * <p>
     * NOTE - The trimming feature does not harm performance because pools with high
     *  load will not have old poolables to be trimmed, and the benefits to system
     *  resources from not keeping around unused poolables makes up for any hit.
     *
     * @return the number of Poolables that were trimmed.
     */
    public int trim()
    {
        if( m_oldReady != null )
        {
            synchronized( m_semaphore )
            {
                return trimInner();
            }
        }
        else
        {
            throw new IllegalStateException( "This pool is not configured to do trimming." );
        }
    }

    /**
     * See trim() for details.
     *
     * This method is only called by threads that have m_semaphore locked.
     */
    private int trimInner()
    {
        int trimCount = 0;

        // Remove any poolables in the m_oldReady list.
        if( m_oldReadySize > 0 )
        {
            if( getLogger().isDebugEnabled() )
            {
                getLogger().debug( "Trimming " + m_oldReadySize + " idle objects from pool." );
            }

            trimCount = m_oldReadySize;
            for( Iterator iter = m_oldReady.iterator(); iter.hasNext(); )
            {
                Poolable poolable = (Poolable)iter.next();
                iter.remove();
                m_oldReadySize--;
                permanentlyRemovePoolable( poolable );
            }
        }

        // Move the poolables in m_ready into m_oldReady (swap lists)
        if( getLogger().isDebugEnabled() )
        {
            getLogger().debug( "Marking " + m_readySize + " objects as old in pool." );
        }
        LinkedList tempList = m_oldReady;
        m_oldReady = m_ready;
        m_oldReadySize = m_readySize;
        m_ready = tempList;
        m_readySize = 0;

        m_lastTrim = System.currentTimeMillis();

        return trimCount;
    }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package hydra;
import com.gemstone.gemfire.LogWriter;
import java.io.File;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.prefs.BackingStoreException;
import java.util.prefs.Preferences;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.helpers.DefaultHandler;
/**
* Manages the VM running GFMonMgr and WindowTester.
*/
public class GFMonMgr {
protected static final String GFMON_DIR = "gfmon";
private static int PID = -1;
/**
* Starts the GFMon/WindowTester VM and waits for it to live.
*
* @throws HydraTimeoutException if the VM does not start within {@link
* GFMonPrms#maxStartupWaitSec} seconds.
*/
protected static void start() {
if (GFMonPrms.getTestClassName() != null)
{
// set preferences
log().info("Setting preferences for GFMon/WindowTester VM..." );
try {
setCurrentPreferences();
} catch (BackingStoreException e) {
throw new HydraRuntimeException("While setting preferences", e);
}
log().info("Set preferences for GFMon/WindowTester VM..." );
// start vm
log().info("Starting GFMon/WindowTester VM..." );
PID = Java.javaGFMon();
// wait for vm to start
int maxWaitSec = GFMonPrms.getMaxStartupWaitSec();
log().info("Waiting " + maxWaitSec + " seconds for process with pid="
+ PID + " to start...");
if (ProcessMgr.waitForLife(HostHelper.getLocalHost(), PID, maxWaitSec))
{
log().info("Started GFMon/WindowTester VM with pid=" + PID);
}
else
{
String s = "Failed to start GFMon/WindowTester VM with pid=" + PID
+ " within " + maxWaitSec + " seconds.";
throw new HydraTimeoutException(s);
}
}
}
/**
* Waits for the GFMon/WindowTester VM stop. Reports the test outcome.
*
* @throws HydraTimeoutException if the VM does not stop within {@link
* GFMonPrms#maxShutdownWaitSec} seconds.
*/
protected static void waitForStop() {
if (PID != -1)
{
log().info("Waiting for GFMon/WindowTester VM pid=" + PID
+ " to stop..." );
int maxWaitSec = GFMonPrms.getMaxShutdownWaitSec();
log().info("Waiting " + maxWaitSec + " seconds for process with pid="
+ PID + " to stop...");
if (ProcessMgr.waitForDeath(HostHelper.getLocalHost(), PID, maxWaitSec))
{
log().info("GFMon/WindowTester VM with pid=" + PID + " has stopped");
HostDescription hd = TestConfig.getInstance().getMasterDescription()
.getVmDescription().getHostDescription();
Nuker.getInstance().removePID(hd, PID);
String resultFileName = System.getProperty("user.dir") + File.separator
+ "wintest/" + GFMonPrms.getTestClassName()
+ "-result.xml";
parseXml(resultFileName, PID);
PID = -1;
}
else
{
String s = "GFMon/WindowTester VM with pid=" + PID
+ " failed to stop within " + maxWaitSec + " seconds.";
throw new HydraTimeoutException(s);
}
}
}
/**
* Set the current user preferences for this test run.
*/
private static void setCurrentPreferences()
throws BackingStoreException
{
String currDir = System.getProperty("user.dir");
// look up current user preferences, which needs forward slashes
String modDir = currDir.replace('\\', '/');
Preferences currentUser =
Preferences.userRoot().node("GemFire Monitor 2.0").node(modDir);
// override the log directory, which needs backslashes
currentUser.put("prefs_dir", currDir + File.separator + GFMON_DIR);
// make it write them out right away
currentUser.flush();
currentUser.sync();
log().info("Set current preferences: " + preferencesToString(currentUser));
}
/**
* Returns the preferences with key-value pairs as a string.
*/
private static String preferencesToString(Preferences prefs)
throws BackingStoreException
{
StringBuffer buf = new StringBuffer();
buf.append(prefs);
String[] keys = prefs.keys();
for (int i = 0; i < keys.length; i++) {
buf.append("\n" + keys[i] + "=" + prefs.get(keys[i], null));
}
return buf.toString();
}
  /** Convenience accessor for the shared hydra log writer. */
  private static LogWriter log() {
    return Log.getLogWriter();
  }
/**
* Parses the WindowTester XML result file and writes error files as needed.
*/
public static void parseXml(String fn, int pid) {
File f = new File(fn);
SAXParser parser = null;
try {
parser = SAXParserFactory.newInstance().newSAXParser();
} catch (javax.xml.parsers.ParserConfigurationException e) {
throw new HydraRuntimeException("While creating SAX parser", e);
} catch (SAXException e) {
throw new HydraRuntimeException("While creating SAX parser", e);
}
XMLHandler handler = new XMLHandler(pid);
try {
parser.parse(f, handler);
} catch (HydraRuntimeException e) {
throw new HydraRuntimeException("While parsing " + f, e);
} catch (SAXException e) {
throw new HydraRuntimeException("While parsing " + f, e);
} catch (java.io.IOException e) {
throw new HydraRuntimeException("While parsing " + f, e);
}
}
/**
 * SAX handler for WindowTester/GFMon JUnit-style XML result files.
 * Accumulates the type, message and stack-trace text of each
 * &lt;error&gt;/&lt;failure&gt; element and reports it via ResultLogger
 * when the element closes.
 */
static class XMLHandler extends DefaultHandler {
  private List stack = null; // accumulates type/message/trace text for the current error or failure
  int pid = -1;              // pid of the windowtester/gfmon vm being reported on

  public XMLHandler(int pid) {
    this.pid = pid;
  }

  public InputSource resolveEntity(String publicId, String systemId)
  throws SAXException {
    // BUG FIX: this previously invoked this.resolveEntity(publicId, systemId),
    // recursing unconditionally until StackOverflowError. Returning null asks
    // the parser to use its default entity resolution, which matches
    // DefaultHandler's documented behavior.
    return null;
  }

  public void setDocumentLocator(Locator locator) {
  }

  public void startDocument() throws SAXException {
  }

  public void endDocument() throws SAXException {
  }

  public void startPrefixMapping(String prefix, String uri)
  throws SAXException {
  }

  public void endPrefixMapping(String prefix) throws SAXException {
  }

  /**
   * Begins accumulating an &lt;error&gt; element: records its "type" and
   * "message" attributes, in that order, ahead of any character data.
   */
  public void startError(Attributes attributes) {
    if (stack == null) {
      stack = new ArrayList();
    }
    // Record the exception type first so the final report reads "Type: message".
    for (int i = 0; i < attributes.getLength(); i++) {
      String qName = attributes.getQName(i);
      if (qName.equals("type")) {
        String s = attributes.getValue(i);
        stack.add(s + ": ");
      }
    }
    for (int i = 0; i < attributes.getLength(); i++) {
      String qName = attributes.getQName(i);
      if (qName.equals("message")) {
        String s = attributes.getValue(i);
        stack.add(s + "\n\n");
      }
    }
  }

  /**
   * Begins accumulating a &lt;failure&gt; element. The attribute handling is
   * identical to {@link #startError}, so delegate instead of duplicating it.
   */
  public void startFailure(Attributes attributes) {
    startError(attributes);
  }

  /**
   * Logs the summary line (error/failure counts) for a &lt;testsuite&gt;.
   */
  public void startTestSuite(Attributes attributes) {
    String errors = attributes.getValue("errors");
    String failures = attributes.getValue("failures");
    String name = attributes.getValue("name");
    String report = "WINDOWTESTER/GFMON REPORT for"
                  + " " + name + ":"
                  + " errors=" + errors
                  + " failures=" + failures;
    Log.getLogWriter().info(report);
  }

  public void startElement(String uri, String localName, String qName,
                           Attributes attributes) throws SAXException {
    if (qName.equals("testsuite")) {
      startTestSuite(attributes);
    } else if (qName.equals("error")) {
      startError(attributes);
    } else if (qName.equals("failure")) {
      startFailure(attributes);
    }
  }

  public void endElement(String uri, String localName, String qName)
  throws SAXException {
    // Only elements that opened an accumulator (error/failure) get reported.
    if (stack != null) {
      processStack(qName);
    }
  }

  /**
   * Flushes the accumulated text for the element that just closed:
   * errors are reported as errors, everything else as hangs.
   */
  private void processStack(String qName) {
    StringBuffer buf = new StringBuffer();
    if (stack.size() == 0) {
      buf.append("no information available");
    } else {
      for (Iterator i = stack.iterator(); i.hasNext();) {
        buf.append((String)i.next());
      }
    }
    if (qName.equals("error")) {
      ResultLogger.reportErr(this.pid, buf.toString());
    } else if (qName.equals("failure")) {
      ResultLogger.reportHang(this.pid, buf.toString());
    } else {
      ResultLogger.reportHang(this.pid, qName + ": " + buf.toString());
    }
    stack = null; // ready for the next error/failure element
  }

  public void characters(char[] ch, int start, int length)
  throws SAXException {
    // Character data (the stack trace body) only matters inside an
    // error/failure element, i.e. while an accumulator is open.
    if (stack != null) {
      StringBuffer buf = new StringBuffer();
      buf.append(ch, start, length);
      stack.add(buf.toString());
    }
  }

  public void ignorableWhitespace(char[] ch, int start, int length)
  throws SAXException {
  }

  public void processingInstruction(String target, String data)
  throws SAXException {
  }

  public void skippedEntity(String name) throws SAXException {
  }

  public void warning(SAXParseException e) throws SAXException {
    Log.getLogWriter().warning("While parsing", e);
  }

  public void error(SAXParseException e) throws SAXException {
    throw e;
  }

  public void fatalError(SAXParseException e) throws SAXException {
    throw e;
  }
}
}
| |
package org.jgroups.stack;
import org.jgroups.Address;
import org.jgroups.PhysicalAddress;
import org.jgroups.protocols.PingData;
import org.jgroups.annotations.ManagedAttribute;
import org.jgroups.annotations.ManagedOperation;
import org.jgroups.annotations.Property;
import org.jgroups.jmx.JmxConfigurator;
import org.jgroups.logging.Log;
import org.jgroups.logging.LogFactory;
import org.jgroups.util.*;
import org.jgroups.util.UUID;
import javax.management.MBeanServer;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Router for TCP based group communication (using layer TCP instead of UDP). Instead of the TCP
* layer sending packets point-to-point to each other member, it sends the packet to the router
* which - depending on the target address - multicasts or unicasts it to the group / or single member.
* <p/>
* This class is especially interesting for applets which cannot directly make connections (neither
* UDP nor TCP) to a host different from the one they were loaded from. Therefore, an applet would
* create a normal channel plus protocol stack, but the bottom layer would have to be the TCP layer
* which sends all packets point-to-point (over a TCP connection) to the router, which in turn
* forwards them to their end location(s) (also over TCP). A centralized router would therefore have
* to be running on the host the applet was loaded from.
* <p/>
 * An alternative for running JGroups in an applet (IP multicast is not allowed in applets as of
 * 1.2) is to use point-to-point UDP communication via the gossip server. However, the applet then
 * has to be signed, which involves additional administrative effort on the part of the user.
* <p/>
* Note that a GossipRouter is also a good way of running JGroups in Amazon's EC2 environment which (as of summer 09)
* doesn't support IP multicasting.
* @author Bela Ban
* @author Vladimir Blagojevic
* @author Ovidiu Feodorov <ovidiuf@users.sourceforge.net>
* @since 2.1.1
*/
public class GossipRouter {
    // Wire-protocol request/response type codes exchanged with RouterStub clients.
    public static final byte CONNECT=1;       // CONNECT(group, addr) --> local address
    public static final byte DISCONNECT=2;    // DISCONNECT(group, addr)
    public static final byte GOSSIP_GET=4;    // GET(group) --> List<addr> (members)
    public static final byte MESSAGE=10;
    public static final byte SUSPECT=11;
    public static final byte PING=12;
    public static final byte CLOSE=13;
    public static final byte CONNECT_OK=14;
    public static final byte OP_FAIL=15;
    public static final byte DISCONNECT_OK=16;

    // Default server port when none is given on the command line / constructor.
    public static final int PORT=12001;

    @ManagedAttribute(description="server port on which the GossipRouter accepts client connections", writable=true)
    private int port;

    @ManagedAttribute(description="address to which the GossipRouter should bind", writable=true, name="bind_address")
    private String bindAddressString;

    // 0 disables expiry; otherwise sweep() closes connections idle longer than this.
    @ManagedAttribute(description="time (in msecs) until gossip entry expires", writable=true)
    private long expiryTime=0;

    // Maintains associations between groups and their members
    private final ConcurrentMap<String, ConcurrentMap<Address, ConnectionHandler>> routingTable=new ConcurrentHashMap<String, ConcurrentMap<Address, ConnectionHandler>>();

    /**
     * Store physical address(es) associated with a logical address. Used mainly by TCPGOSSIP
     */
    private final Map<Address, Set<PhysicalAddress>> address_mappings=new ConcurrentHashMap<Address,Set<PhysicalAddress>>();

    private ServerSocket srvSock=null;
    private InetAddress bindAddress=null;

    @Property(description="Time (in ms) for setting SO_LINGER on sockets returned from accept(). 0 means do not set SO_LINGER")
    private long linger_timeout=2000L;

    @Property(description="Time (in ms) for SO_TIMEOUT on sockets returned from accept(). 0 means don't set SO_TIMEOUT")
    private long sock_read_timeout=0L;

    @Property(description="The max queue size of backlogged connections")
    private int backlog=1000;

    // Flipped by start()/stop(); guards against double start/stop.
    private final AtomicBoolean running = new AtomicBoolean(false);

    @ManagedAttribute(description="whether to discard message sent to self", writable=true)
    private boolean discard_loopbacks=false;

    // Notified when a client connection dies abnormally (see FailureDetectionListener).
    protected List<ConnectionTearListener> connectionTearListeners=new CopyOnWriteArrayList<ConnectionTearListener>();

    protected ThreadFactory default_thread_factory=new DefaultThreadFactory("gossip-handlers", true, true);

    // Daemon timer driving sweep(); only created when expiryTime > 0.
    protected Timer timer=null;

    protected final Log log=LogFactory.getLog(this.getClass());

    private boolean jmx=false;        // whether to register this router with JMX on start()
    private boolean registered=false; // true once the MBean has been registered (done at most once)

    public GossipRouter() {
        this(PORT);
    }

    public GossipRouter(int port) {
        this(port, null);
    }

    public GossipRouter(int port, String bindAddressString) {
        this(port,bindAddressString,false,0);
    }

    public GossipRouter(int port, String bindAddressString, boolean jmx) {
        this(port, bindAddressString,jmx,0);
    }

    /** Designated constructor; all other constructors delegate here. */
    public GossipRouter(int port, String bindAddressString, boolean jmx, long expiryTime) {
        this.port = port;
        this.bindAddressString = bindAddressString;
        this.jmx = jmx;
        this.expiryTime = expiryTime;
        this.connectionTearListeners.add(new FailureDetectionListener());
    }
    // ---------------- plain accessors for the managed attributes ----------------

    public void setPort(int port) {
        this.port=port;
    }

    public int getPort() {
        return port;
    }

    public void setBindAddress(String bindAddress) {
        bindAddressString=bindAddress;
    }

    public String getBindAddress() {
        return bindAddressString;
    }

    public int getBacklog() {
        return backlog;
    }

    public void setBacklog(int backlog) {
        this.backlog=backlog;
    }

    public void setExpiryTime(long expiryTime) {
        this.expiryTime = expiryTime;
    }

    public long getExpiryTime() {
        return expiryTime;
    }

    @ManagedAttribute(description="status")
    public boolean isStarted() {
        return isRunning();
    }

    public boolean isDiscardLoopbacks() {
        return discard_loopbacks;
    }

    public void setDiscardLoopbacks(boolean discard_loopbacks) {
        this.discard_loopbacks=discard_loopbacks;
    }

    public long getLingerTimeout() {
        return linger_timeout;
    }

    public void setLingerTimeout(long linger_timeout) {
        this.linger_timeout=linger_timeout;
    }

    public long getSocketReadTimeout() {
        return sock_read_timeout;
    }

    public void setSocketReadTimeout(long sock_read_timeout) {
        this.sock_read_timeout=sock_read_timeout;
    }

    public ThreadFactory getDefaultThreadPoolThreadFactory() {
        return default_thread_factory;
    }

    /** Returns a human-readable name for a wire-protocol type code (for logging). */
    public static String type2String(int type) {
        switch (type) {
            case CONNECT:
                return "CONNECT";
            case DISCONNECT:
                return "DISCONNECT";
            case GOSSIP_GET:
                return "GOSSIP_GET";
            case MESSAGE:
                return "MESSAGE";
            case SUSPECT:
                return "SUSPECT";
            case PING:
                return "PING";
            case CLOSE:
                return "CLOSE";
            case CONNECT_OK:
                return "CONNECT_OK";
            case DISCONNECT_OK:
                return "DISCONNECT_OK";
            case OP_FAIL:
                return "OP_FAIL";
            default:
                return "unknown (" + type + ")";
        }
    }
    /**
     * Lifecycle operation. Called after create(). When this method is called, the managed attributes
     * have already been set.<br>
     * Brings the Router into a fully functional state.
     */
    @ManagedOperation(description="Lifecycle operation. Called after create(). When this method is called, "
            + "the managed attributes have already been set. Brings the Router into a fully functional state.")
    public void start() throws Exception {
        if(running.compareAndSet(false, true)) {
            // Register with JMX at most once per instance, on first successful start.
            if(jmx && !registered) {
                MBeanServer server=Util.getMBeanServer();
                JmxConfigurator.register(this, server, "jgroups:name=GossipRouter");
                registered=true;
            }
            if(bindAddressString != null) {
                bindAddress=InetAddress.getByName(bindAddressString);
                srvSock=new ServerSocket(port, backlog, bindAddress);
            }
            else {
                srvSock=new ServerSocket(port, backlog);
            }
            // Stop cleanly when the JVM exits (closes sockets, clears routing table).
            Runtime.getRuntime().addShutdownHook(new Thread() {
                public void run() {
                    GossipRouter.this.stop();
                }
            });
            // start the main server thread
            new Thread(new Runnable() {
                public void run() {
                    mainLoop();
                }
            }, "GossipRouter").start();
            // Periodically expire idle connections when an expiry time is configured.
            long expiryTime = getExpiryTime();
            if (expiryTime > 0) {
                timer = new Timer(true);
                timer.schedule(new TimerTask() {
                    public void run() {
                        sweep();
                    }
                }, expiryTime, expiryTime);
            }
        } else {
            throw new Exception("Router already started.");
        }
    }
    /**
     * Always called before destroy(). Close connections and frees resources.
     */
    @ManagedOperation(description="Always called before destroy(). Closes connections and frees resources")
    public void stop() {
        // clear() is a no-op once running is false, so call it first.
        clear();
        if(running.compareAndSet(true, false)){
            Util.close(srvSock);
            if(log.isDebugEnabled())
                log.debug("router stopped");
        }
    }

    @ManagedOperation(description="Closes all connections and clears routing table (leave the server socket open)")
    public void clear() {
        if(running.get()) {
            // Closing a handler also removes its entries (see ConnectionHandler.close()).
            for(ConcurrentMap<Address,ConnectionHandler> map: routingTable.values()) {
                for(ConnectionHandler ce: map.values())
                    ce.close();
            }
            routingTable.clear();
        }
    }

    public void destroy() {
    }

    @ManagedAttribute(description="operational status", name="running")
    public boolean isRunning() {
        return running.get();
    }
    @ManagedOperation(description="dumps the contents of the routing table")
    public String dumpRoutingTable() {
        String label="routing";
        StringBuilder sb=new StringBuilder();
        if(routingTable.isEmpty()) {
            sb.append("empty ").append(label).append(" table");
        }
        else {
            // One "group: member, member, ..." line per group.
            boolean first=true;
            for(Map.Entry<String, ConcurrentMap<Address, ConnectionHandler>> entry : routingTable.entrySet()) {
                String gname=entry.getKey();
                if(!first)
                    sb.append("\n");
                else
                    first=false;
                sb.append(gname + ": ");
                Map<Address,ConnectionHandler> map=entry.getValue();
                if(map == null || map.isEmpty()) {
                    sb.append("null");
                }
                else {
                    sb.append(Util.printListWithDelimiter(map.keySet(), ", "));
                }
            }
        }
        return sb.toString();
    }
    @ManagedOperation(description="dumps the contents of the routing table")
    public String dumpRoutingTableDetailed() {
        String label="routing";
        StringBuilder sb=new StringBuilder();
        if(routingTable.isEmpty()) {
            sb.append("empty ").append(label).append(" table");
        }
        else {
            // Like dumpRoutingTable(), but also shows each member's socket.
            boolean first=true;
            for(Map.Entry<String, ConcurrentMap<Address, ConnectionHandler>> entry : routingTable.entrySet()) {
                String gname=entry.getKey();
                if(!first)
                    sb.append("\n");
                else
                    first=false;
                sb.append(gname + ":\n");
                Map<Address,ConnectionHandler> map=entry.getValue();
                if(map == null || map.isEmpty()) {
                    sb.append("null");
                }
                else {
                    for(Map.Entry<Address,ConnectionHandler> en: map.entrySet()) {
                        sb.append(en.getKey() + ": ");
                        ConnectionHandler handler=en.getValue();
                        sb.append("sock=" +handler.sock).append("\n");
                    }
                }
                sb.append("\n");
            }
        }
        return sb.toString();
    }
@ManagedOperation(description="dumps the mappings between logical and physical addresses")
public String dumpAddresssMappings() {
StringBuilder sb=new StringBuilder();
for(Map.Entry<Address,Set<PhysicalAddress>> entry: address_mappings.entrySet()) {
sb.append(entry.getKey()).append(": ").append(entry.getValue()).append("\n");
}
return sb.toString();
}
    /**
     * Accept loop: accepts client sockets and hands each one to a new
     * ConnectionHandler thread. Runs until stop() closes the server socket.
     */
    private void mainLoop() {
        if(bindAddress == null)
            bindAddress=srvSock.getInetAddress();

        printStartupInfo();

        while(isRunning()) {
            Socket sock=null;
            try {
                sock=srvSock.accept();
                if(linger_timeout > 0) {
                    // SO_LINGER takes seconds; round up so small ms values still linger.
                    int linger=Math.max(1, (int)(linger_timeout / 1000));
                    sock.setSoLinger(true, linger);
                }
                if(sock_read_timeout > 0)
                    sock.setSoTimeout((int)sock_read_timeout);

                if(log.isDebugEnabled())
                    log.debug("Accepted connection, socket is " + sock);

                ConnectionHandler ch=new ConnectionHandler(sock);
                getDefaultThreadPoolThreadFactory().newThread(ch).start();
            }
            catch(IOException e) {
                //only consider this exception if GR is not shutdown
                if(isRunning()) {
                    log.error("failure handling connection from " + sock, e);
                    Util.close(sock);
                }
            }
        }
    }
    /**
     * Removes expired gossip entries (entries older than EXPIRY_TIME msec).
     * Closing a victim handler removes its routing-table entries as a side effect.
     * @since 2.2.1
     */
    private void sweep() {
        long diff, currentTime = System.currentTimeMillis();
        List <ConnectionHandler> victims = new ArrayList<ConnectionHandler>();
        for (Iterator<Entry<String, ConcurrentMap<Address, ConnectionHandler>>> it = routingTable.entrySet().iterator(); it.hasNext();) {
            Map<Address, ConnectionHandler> map = it.next().getValue();
            if (map == null || map.isEmpty()) {
                it.remove();
                continue;
            }
            for (Iterator<Entry<Address, ConnectionHandler>> it2 = map.entrySet().iterator(); it2.hasNext();) {
                ConnectionHandler ch = it2.next().getValue();
                diff = currentTime - ch.timestamp;
                if (diff > expiryTime) {
                    victims.add(ch);
                }
            }
        }
        // Close outside the iteration: close() mutates the routing table.
        for (ConnectionHandler v : victims) {
            v.close();
        }
    }
    /**
     * Routes a message: null dest means multicast to all members of the group,
     * otherwise unicast to the single member. Send failures on unicast remove
     * the stale entry (which closes its socket).
     */
    private void route(Address dest, String group, byte[] msg) {
        if(dest == null) { // send to all members in group
            if(group == null) {
                if(log.isErrorEnabled())
                    log.error("group is null");
            }
            else {
                sendToAllMembersInGroup(group, msg);
            }
        }
        else { // send unicast
            ConnectionHandler handler=findAddressEntry(group, dest);
            if(handler == null) {
                if(log.isTraceEnabled())
                    log.trace("cannot find " + dest + " in the routing table, \nrouting table=\n" + dumpRoutingTable());
                return;
            }
            if(handler.output == null) {
                if(log.isErrorEnabled())
                    log.error(dest + " is associated with a null output stream");
                return;
            }
            try {
                sendToMember(dest, handler.output, msg);
            }
            catch(Exception e) {
                if(log.isErrorEnabled())
                    log.error("failed sending message to " + dest + ": " + e.getMessage());
                removeEntry(group, dest); // will close socket
            }
        }
    }
    /**
     * Removes addr from the given group (or from every group when group is null),
     * pruning groups that become empty, and drops the address's physical-address
     * and logical-name mappings.
     */
    private void removeEntry(String group, Address addr) {
        // Remove from routing table
        ConcurrentMap<Address, ConnectionHandler> map;
        if(group != null) {
            map=routingTable.get(group);
            if(map != null && map.remove(addr) != null) {
                if(log.isTraceEnabled())
                    log.trace("Removed " +addr + " from group " + group);
                if(map.isEmpty()) {
                    boolean removed=removeGroupIfEmpty(group);
                    if(removed && log.isTraceEnabled())
                        log.trace("Removed group " + group);
                }
            }
        }
        else {
            // group unknown: scan every group for the address
            for(Map.Entry<String,ConcurrentMap<Address,ConnectionHandler>> entry: routingTable.entrySet()) {
                map=entry.getValue();
                if(map != null && map.remove(addr) != null && map.isEmpty()) {
                    boolean removed=removeGroupIfEmpty(entry.getKey());
                    if(removed && log.isTraceEnabled())
                        log.trace("Removed " + entry.getKey() + " from group " + group);
                }
            }
        }

        address_mappings.remove(addr);
        UUID.remove(addr);
    }
    /**
     * Atomically removes the group's entry from the routing table if it has no
     * members. Synchronized so a concurrent addGroup() cannot be lost between
     * the emptiness check and the removal.
     *
     * @return true if the group entry was removed
     */
    protected boolean removeGroupIfEmpty(String group) {
        if(group == null)
            return false;
        synchronized(routingTable) {
            ConcurrentMap<Address,ConnectionHandler> val=routingTable.get(group);
            if(val != null && val.isEmpty()) {
                routingTable.remove(group);
                return true;
            }
            return false;
        }
    }
/**
* @return null if not found
*/
private ConnectionHandler findAddressEntry(String group, Address addr) {
if(group == null || addr == null)
return null;
ConcurrentMap<Address,ConnectionHandler> map=routingTable.get(group);
if(map == null)
return null;
return map.get(addr);
}
    /**
     * Multicasts msg to every member of the group; per-member send failures are
     * logged and skipped so one dead member does not block the rest.
     */
    private void sendToAllMembersInGroup(String group, byte[] msg) {
        final ConcurrentMap<Address,ConnectionHandler> map=routingTable.get(group);
        if(map == null || map.isEmpty()) {
            if(log.isWarnEnabled())
                log.warn("didn't find any members for group " + group);
            return;
        }

        synchronized(map) {
            for(Map.Entry<Address,ConnectionHandler> entry: map.entrySet()) {
                ConnectionHandler handler=entry.getValue();
                DataOutputStream dos=handler.output;

                if(dos != null) {
                    try {
                        sendToMember(null, dos, msg);
                    }
                    catch(Exception e) {
                        if(log.isWarnEnabled())
                            log.warn("cannot send to " + entry.getKey() + ": " + e.getMessage());
                    }
                }
            }
        }
    }
    /**
     * Writes a MESSAGE frame to a single member's output stream. Synchronizes on
     * the stream so concurrent senders cannot interleave partial frames.
     */
    private static void sendToMember(Address dest, final DataOutputStream out, byte[] msg) throws Exception {
        if(out == null)
            return;
        synchronized(out) {
            GossipData request=new GossipData(GossipRouter.MESSAGE, null, dest, msg);
            request.writeTo(out);
            out.flush();
        }
    }
    /** Notifies all registered listeners that a client connection died abnormally. */
    private void notifyAbnormalConnectionTear(final ConnectionHandler ch, final Exception e) {
        for (ConnectionTearListener l : connectionTearListeners) {
            l.connectionTorn(ch, e);
        }
    }

    /** Callback for abnormal client-connection termination. */
    public interface ConnectionTearListener {
        public void connectionTorn(ConnectionHandler ch, Exception e);
    }
    /*
     * https://jira.jboss.org/jira/browse/JGRP-902
     */
    /**
     * On abnormal disconnect, broadcasts a SUSPECT frame for each of the dead
     * connection's logical addresses to the remaining members of its groups,
     * so failure detection works even though the router owns the connections.
     */
    class FailureDetectionListener implements ConnectionTearListener {

        public void connectionTorn(ConnectionHandler ch, Exception e) {
            Set<String> groups = ch.known_groups;
            for (String group : groups) {
                if(group == null)
                    continue;
                Map<Address, ConnectionHandler> map = routingTable.get(group);
                if (map != null && !map.isEmpty()) {
                    for (Iterator<Entry<Address, ConnectionHandler>> i = map.entrySet().iterator(); i.hasNext();) {
                        ConnectionHandler entry = i.next().getValue();
                        DataOutputStream stream = entry.output;
                        try {
                            for (Address a : ch.logical_addrs) {
                                GossipData suspect = new GossipData(GossipRouter.SUSPECT);
                                suspect.writeTo(stream);
                                Util.writeAddress(a, stream);
                                stream.flush();
                            }
                        } catch (Exception ioe) {
                            // intentionally ignored: best-effort notification; the
                            // peer may itself be gone already
                        }
                    }
                }
            }
        }
    }
/**
* Prints startup information.
*/
private void printStartupInfo() {
System.out.println("GossipRouter started at " + new Date());
System.out.print("Listening on port " + port);
System.out.println(" bound on address " + bindAddress);
System.out.print("Backlog is " + backlog);
System.out.print(", linger timeout is " + linger_timeout);
System.out.println(", and read timeout is " + sock_read_timeout);
}
    /**
     * Handles the requests from a client (RouterStub)
     */
    class ConnectionHandler implements Runnable {
        // true while this handler serves its socket; close() flips it to false exactly once
        private final AtomicBoolean active = new AtomicBoolean(false);
        private final Socket sock;
        private final DataOutputStream output;
        private final DataInputStream input;
        // logical addresses registered over this connection; used for cleanup on close()
        private final List<Address> logical_addrs=new ArrayList<Address>();
        // groups this connection has touched; used by FailureDetectionListener
        Set<String> known_groups = new HashSet<String>();
        // time of the last request, read by sweep() to expire idle connections
        private long timestamp;

        public ConnectionHandler(Socket sock) throws IOException {
            this.sock=sock;
            this.input=new DataInputStream(sock.getInputStream());
            this.output=new DataOutputStream(sock.getOutputStream());
        }

        /**
         * Closes streams/socket once and removes this connection's logical
         * addresses from every group.
         */
        void close() {
            if(active.compareAndSet(true, false)) {
                if(log.isDebugEnabled())
                    log.debug(this + " is being closed");
                Util.close(input);
                Util.close(output);
                Util.close(sock);

                for(Address addr: logical_addrs) {
                    removeEntry(null, addr);
                }
            }
        }
        /** Thread entry point: marks the handler active and serves requests until close. */
        public void run() {
            if(active.compareAndSet(false, true)) {
                try {
                    if(log.isDebugEnabled())
                        log.debug(this + " entering receive loop");
                    readLoop();
                }
                finally {
                    // always release the socket, whatever terminated the loop
                    close();
                }
            }
        }

        public boolean isRunning() {
            return active.get();
        }
        /**
         * Request loop: reads GossipData frames off the socket and dispatches on
         * their type code until the handler is closed or the connection breaks.
         */
        private void readLoop() {
            while(isRunning()) {
                GossipData request;
                Address addr;
                String group;
                try {
                    request=new GossipData();
                    request.readFrom(input);
                    byte command=request.getType();
                    addr=request.getAddress();
                    group=request.getGroup();
                    known_groups.add(group);
                    // refresh the idle timestamp so sweep() does not expire us
                    timestamp = System.currentTimeMillis();
                    if(log.isTraceEnabled())
                        log.trace(this + " received " + request);

                    switch(command) {

                        case GossipRouter.CONNECT:
                            handleConnect(request, addr, group);
                            break;

                        case GossipRouter.PING:
                            // do nothing here - client doesn't expect response data
                            break;

                        case GossipRouter.MESSAGE:
                            if(request.buffer == null || request.buffer.length == 0) {
                                if(log.isWarnEnabled())
                                    log.warn(this +" received null message");
                                break;
                            }

                            try {
                                route(addr, request.getGroup(), request.getBuffer());
                            }
                            catch(Exception e) {
                                if(log.isErrorEnabled())
                                    log.error(this +" failed in routing request to " + addr, e);
                            }
                            break;

                        case GossipRouter.GOSSIP_GET:
                            // reply with the membership (logical + physical addresses) of the group
                            Set<PhysicalAddress> physical_addrs;
                            List<PingData> mbrs=new ArrayList<PingData>();
                            ConcurrentMap<Address,ConnectionHandler> map=routingTable.get(group);
                            if(map != null) {
                                for(Address logical_addr: map.keySet()) {
                                    physical_addrs=address_mappings.get(logical_addr);
                                    PingData rsp=new PingData(logical_addr, null, true, UUID.get(logical_addr),
                                                              physical_addrs != null? new ArrayList<PhysicalAddress>(physical_addrs) : null);
                                    mbrs.add(rsp);
                                }
                            }
                            output.writeShort(mbrs.size());
                            for(PingData data: mbrs)
                                data.writeTo(output);
                            output.flush();
                            if(log.isDebugEnabled())
                                log.debug(this + " responded to GOSSIP_GET with " + mbrs);
                            break;

                        case GossipRouter.DISCONNECT:
                            try {
                                removeEntry(group, addr);
                                sendData(new GossipData(DISCONNECT_OK));
                                if(log.isDebugEnabled())
                                    log.debug(this + " disconnect completed");
                            }
                            catch(Exception e) {
                                sendData(new GossipData(OP_FAIL));
                            }
                            break;

                        case GossipRouter.CLOSE:
                            close();
                            break;

                        case -1: // EOF
                            notifyAbnormalConnectionTear(this, new EOFException("Connection broken"));
                            break;
                    }
                    if(log.isTraceEnabled())
                        log.trace(this + " processed " + request);
                }
                catch(SocketTimeoutException ste) {
                    // read timed out (SO_TIMEOUT); simply wait for the next request
                }
                catch(IOException ioex) {
                    notifyAbnormalConnectionTear(this, ioex);
                    break;
                }
                catch(Exception ex) {
                    // only log if we were not closed concurrently
                    if (active.get()) {
                        if (log.isWarnEnabled())
                            log.warn("Exception in ConnectionHandler thread", ex);
                    }
                    break;
                }
            }
        }
private void handleConnect(GossipData request, Address addr, String group) throws Exception {
try {
checkExistingConnection(addr,group);
String logical_name = request.getLogicalName();
if (logical_name != null && addr instanceof org.jgroups.util.UUID)
org.jgroups.util.UUID.add(addr, logical_name);
// group name, logical address, logical name, physical addresses (could be null)
logical_addrs.add(addr); // allows us to remove the entries for this connection on socket close
addGroup(group, addr, this);
Set<PhysicalAddress> physical_addrs;
if (request.getPhysicalAddresses() != null) {
physical_addrs = address_mappings.get(addr);
if (physical_addrs == null) {
physical_addrs = new HashSet<PhysicalAddress>();
address_mappings.put(addr, physical_addrs);
}
physical_addrs.addAll(request.getPhysicalAddresses());
}
sendStatus(CONNECT_OK);
if(log.isDebugEnabled())
log.debug(this + " connection handshake completed, added " +addr + " to group "+ group);
} catch (Exception e) {
removeEntry(group, addr);
sendStatus(OP_FAIL);
throw new Exception("Unsuccessful connection setup handshake for " + this);
}
}
protected void addGroup(String group, Address addr, ConnectionHandler handler) {
if(group == null || handler == null)
return;
synchronized(routingTable) {
ConcurrentMap<Address,ConnectionHandler> map=routingTable.get(group);
if(map == null) {
map=new ConcurrentHashMap<Address, ConnectionHandler>();
routingTable.put(group, map);
}
map.put(addr, this);
}
}
        /**
         * If addr already has a registered connection (in the given group, or in
         * any group when group is null), closes the old connection so the new
         * one can take its place.
         *
         * @return true if an old connection existed and was closed
         */
        private boolean checkExistingConnection(Address addr, String group) throws Exception {
            boolean isOldExists = false;
            if (address_mappings.containsKey(addr)) {
                ConcurrentMap<Address, ConnectionHandler> map = null;
                ConnectionHandler oldConnectionH = null;
                if (group != null) {
                    map = routingTable.get(group);
                    if (map != null) {
                        oldConnectionH = map.get(addr);
                    }
                } else {
                    // no group given: scan all groups; the last match wins
                    for (Map.Entry<String, ConcurrentMap<Address, ConnectionHandler>> entry : routingTable
                                    .entrySet()) {
                        map = entry.getValue();
                        if (map != null) {
                            oldConnectionH = map.get(addr);
                        }
                    }
                }

                if (oldConnectionH != null) {
                    isOldExists = true;
                    if (log.isDebugEnabled()) {
                        log.debug("Found old connection[" + oldConnectionH + "] for addr[" + addr
                                        + "]. Closing old connection ...");
                    }
                    oldConnectionH.close();
                } else {
                    if (log.isDebugEnabled()) {
                        log.debug("No old connection for addr[" + addr + "] exists");
                    }
                }
            }
            return isOldExists;
        }
        /** Best-effort: writes a single status byte to the client; I/O errors are ignored. */
        private void sendStatus(byte status) {
            try {
                output.writeByte(status);
                output.flush();
            } catch (IOException e1) {
                //ignored
            }
        }

        /** Best-effort: writes a GossipData frame to the client; errors are ignored. */
        private void sendData(GossipData data) {
            try {
                data.writeTo(output);
                output.flush();
            } catch (Exception e1) {
                //ignored
            }
        }

        public String toString() {
            StringBuilder sb=new StringBuilder();
            sb.append("ConnectionHandler[peer: " + sock.getInetAddress());
            if(!logical_addrs.isEmpty())
                sb.append(", logical_addrs: " + Util.printListWithDelimiter(logical_addrs, ", "));
            sb.append("]");
            return sb.toString();
        }
    }
    /**
     * Command-line entry point. Recognized options: -port, -bindaddress/-bind_addr,
     * -backlog, -expiry, -jmx, -solinger, -sotimeout; -timeout and -rtimeout are
     * accepted but deprecated and ignored. Any unknown option prints usage and exits.
     */
    public static void main(String[] args) throws Exception {
        int port=12001;
        int backlog=0;
        long soLinger=-1;
        long soTimeout=-1;
        long expiry_time=60000;

        GossipRouter router=null;
        String bind_addr=null;
        boolean jmx=true;

        for(int i=0; i < args.length; i++) {
            String arg=args[i];
            if("-port".equals(arg)) {
                port=Integer.parseInt(args[++i]);
                continue;
            }
            if("-bindaddress".equals(arg) || "-bind_addr".equals(arg)) {
                bind_addr=args[++i];
                continue;
            }
            if("-backlog".equals(arg)) {
                backlog=Integer.parseInt(args[++i]);
                continue;
            }
            if("-expiry".equals(arg)) {
                expiry_time=Long.parseLong(args[++i]);
                continue;
            }
            if("-jmx".equals(arg)) {
                jmx=Boolean.valueOf(args[++i]);
                continue;
            }
            // this option is not used and should be deprecated/removed in a future release
            if("-timeout".equals(arg)) {
                System.out.println("    -timeout is deprecated and will be ignored");
                ++i;
                continue;
            }
            // this option is not used and should be deprecated/removed in a future release
            if("-rtimeout".equals(arg)) {
                System.out.println("    -rtimeout is deprecated and will be ignored");
                ++i;
                continue;
            }
            if("-solinger".equals(arg)) {
                soLinger=Long.parseLong(args[++i]);
                continue;
            }
            if("-sotimeout".equals(arg)) {
                soTimeout=Long.parseLong(args[++i]);
                continue;
            }
            help();
            return;
        }
        System.out.println("GossipRouter is starting. CTRL-C to exit JVM");

        try {
            router=new GossipRouter(port, bind_addr, jmx);

            // only override the defaults when a valid value was given
            if(backlog > 0)
                router.setBacklog(backlog);

            if(soTimeout >= 0)
                router.setSocketReadTimeout(soTimeout);

            if(soLinger >= 0)
                router.setLingerTimeout(soLinger);

            if(expiry_time > 0)
                router.setExpiryTime(expiry_time);

            router.start();
        }
        catch(Exception e) {
            System.err.println(e);
        }
    }
    /** Prints command-line usage to stdout. */
    static void help() {
        System.out.println();
        System.out.println("GossipRouter [-port <port>] [-bind_addr <address>] [options]");
        System.out.println();
        System.out.println("Options:");
        System.out.println();

        System.out.println("    -backlog <backlog>    - Max queue size of backlogged connections. Must be");
        System.out.println("                            greater than zero or the default of 1000 will be");
        System.out.println("                            used.");
        System.out.println();
        System.out.println("    -jmx <true|false>     - Expose attributes and operations via JMX.");
        System.out.println();
        System.out.println("    -solinger <msecs>     - Time for setting SO_LINGER on connections. 0");
        System.out.println("                            means do not set SO_LINGER. Must be greater than");
        System.out.println("                            or equal to zero or the default of 2000 will be");
        System.out.println("                            used.");
        System.out.println();
        System.out.println("    -sotimeout <msecs>    - Time for setting SO_TIMEOUT on connections. 0");
        System.out.println("                            means don't set SO_TIMEOUT. Must be greater than");
        System.out.println("                            or equal to zero or the default of 3000 will be");
        System.out.println("                            used.");
        System.out.println();
        System.out.println("    -expiry <msecs>       - Time for closing idle connections. 0");
        System.out.println("                            means don't expire.");
        System.out.println();
    }
}
| |
package org.bouncycastle.x509.util;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.security.Principal;
import java.security.cert.CertificateParsingException;
import java.security.cert.X509CRL;
import java.security.cert.X509Certificate;
import java.sql.Date;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import javax.naming.Context;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.naming.directory.SearchControls;
import javax.naming.directory.SearchResult;
import javax.security.auth.x500.X500Principal;
import org.bouncycastle.asn1.ASN1InputStream;
import org.bouncycastle.asn1.x509.Certificate;
import org.bouncycastle.asn1.x509.CertificatePair;
import org.bouncycastle.jce.X509LDAPCertStoreParameters;
import org.bouncycastle.jce.provider.X509AttrCertParser;
import org.bouncycastle.jce.provider.X509CRLParser;
import org.bouncycastle.jce.provider.X509CertPairParser;
import org.bouncycastle.jce.provider.X509CertParser;
import org.bouncycastle.util.StoreException;
import org.bouncycastle.x509.X509AttributeCertStoreSelector;
import org.bouncycastle.x509.X509AttributeCertificate;
import org.bouncycastle.x509.X509CRLStoreSelector;
import org.bouncycastle.x509.X509CertPairStoreSelector;
import org.bouncycastle.x509.X509CertStoreSelector;
import org.bouncycastle.x509.X509CertificatePair;
/**
* This is a general purpose implementation to get X.509 certificates, CRLs,
* attribute certificates and cross certificates from a LDAP location.
* <p/>
* At first a search is performed in the ldap*AttributeNames of the
* {@link org.bouncycastle.jce.X509LDAPCertStoreParameters} with the given
* information of the subject (for all kind of certificates) or issuer (for
* CRLs), respectively, if a {@link org.bouncycastle.x509.X509CertStoreSelector} or
 * {@link org.bouncycastle.x509.X509AttributeCertificate} is given with those
 * details.
* <p/>
* For the used schemes see:
* <ul>
* <li><a href="http://www.ietf.org/rfc/rfc2587.txt">RFC 2587</a>
* <li><a
* href="http://www3.ietf.org/proceedings/01mar/I-D/pkix-ldap-schema-01.txt">Internet
* X.509 Public Key Infrastructure Additional LDAP Schema for PKIs and PMIs</a>
* </ul>
*/
public class LDAPStoreHelper
{
// NOTE: search results ARE cached below (see addToCache/getFromCache)
// the parameters describing the LDAP server, base DN and attribute names
private X509LDAPCertStoreParameters params;
/**
 * Creates a helper bound to the given LDAP cert store parameters.
 *
 * @param params connection and schema parameters for the LDAP directory.
 */
public LDAPStoreHelper(X509LDAPCertStoreParameters params)
{
this.params = params;
}
/**
 * Initial Context Factory used to create the JNDI LDAP context.
 */
private static final String LDAP_PROVIDER = "com.sun.jndi.ldap.LdapCtxFactory";
/**
 * Referral handling policy for LDAP searches: referrals are ignored.
 */
private static final String REFERRALS_IGNORE = "ignore";
/**
 * Security level to be used for LDAP connections (anonymous bind).
 */
private static final String SEARCH_SECURITY_LEVEL = "none";
/**
 * Package Prefix for loading URL context factories.
 */
private static final String URL_CONTEXT_PREFIX = "com.sun.jndi.url";
/**
 * Opens a JNDI directory context to the LDAP server configured in
 * {@code params}, using anonymous authentication and ignoring referrals.
 *
 * @return a connected {@link DirContext}; callers must close it.
 * @throws NamingException if the connection cannot be established.
 */
private DirContext connectLDAP() throws NamingException
{
    Properties env = new Properties();
    env.setProperty(Context.INITIAL_CONTEXT_FACTORY, LDAP_PROVIDER);
    env.setProperty(Context.BATCHSIZE, "0");
    env.setProperty(Context.PROVIDER_URL, params.getLdapURL());
    env.setProperty(Context.URL_PKG_PREFIXES, URL_CONTEXT_PREFIX);
    env.setProperty(Context.REFERRAL, REFERRALS_IGNORE);
    env.setProperty(Context.SECURITY_AUTHENTICATION, SEARCH_SECURITY_LEVEL);
    return new InitialDirContext(env);
}
/**
 * Extracts the value of the given DN attribute (e.g. "CN", "O", "OU")
 * from an RFC 1779 style distinguished name string.
 * <p/>
 * Matching is case-insensitive. Commas escaped as "\," are treated as
 * part of the value; one leading space and surrounding double quotes
 * are stripped from the result.
 *
 * @param subject the distinguished name to parse.
 * @param dNAttributeName the attribute name whose value is wanted.
 * @return the attribute value, or "" if the attribute is not present.
 */
private String parseDN(String subject, String dNAttributeName)
{
String temp = subject;
// locate "<name>=" case-insensitively
int begin = temp.toLowerCase().indexOf(
dNAttributeName.toLowerCase() + "=");
if (begin == -1)
{
return "";
}
temp = temp.substring(begin + dNAttributeName.length());
// find the comma that terminates this RDN
int end = temp.indexOf(',');
if (end == -1)
{
end = temp.length();
}
// a comma preceded by '\' is escaped and part of the value: keep scanning
while (temp.charAt(end - 1) == '\\')
{
end = temp.indexOf(',', end + 1);
if (end == -1)
{
end = temp.length();
}
}
temp = temp.substring(0, end);
// drop everything up to and including the '='
begin = temp.indexOf('=');
temp = temp.substring(begin + 1);
// strip a single leading space, then surrounding quotes, if present
if (temp.charAt(0) == ' ')
{
temp = temp.substring(1);
}
if (temp.startsWith("\""))
{
temp = temp.substring(1);
}
if (temp.endsWith("\""))
{
temp = temp.substring(0, temp.length() - 1);
}
return temp;
}
/**
 * Decodes the DER encoded certificates in {@code list} and keeps those
 * accepted by the selector. Entries that fail to decode or to match are
 * silently skipped (best-effort decoding).
 *
 * @param list list of byte arrays with DER encoded certificates.
 * @param xselector selector the certificates must match.
 * @return a set of matching certificates.
 * @throws StoreException declared for uniformity with the other helpers.
 */
private Set createCerts(List list, X509CertStoreSelector xselector)
    throws StoreException
{
    Set matches = new HashSet();
    X509CertParser certParser = new X509CertParser();
    for (Iterator entries = list.iterator(); entries.hasNext();)
    {
        try
        {
            certParser.engineInit(new ByteArrayInputStream((byte[])entries.next()));
            X509Certificate candidate = (X509Certificate)certParser.engineRead();
            if (xselector.match((Object)candidate))
            {
                matches.add(candidate);
            }
        }
        catch (Exception e)
        {
            // ignore undecodable or non-matching entries
        }
    }
    return matches;
}
/**
 * Can use the subject and serial and the subject and serialNumber of the
 * certificate of the given of the X509CertStoreSelector. If a certificate
 * for checking is given this has higher precedence.
 *
 * @param xselector The selector with the search criteria.
 * @param attrs Attributes which contain the certificates in the LDAP
 *              directory.
 * @param attrNames Attribute names in the LDAP directory which correspond to the
 *                  subjectAttributeNames.
 * @param subjectAttributeNames Subject attribute names (like "CN", "O", "OU") to use to
 *                              search in the LDAP directory
 * @return A list of found DER encoded certificates.
 * @throws StoreException if an error occurs while searching.
 */
private List certSubjectSerialSearch(X509CertStoreSelector xselector,
String[] attrs, String attrNames[], String subjectAttributeNames[])
throws StoreException
{
// TODO: support also subjectAltNames?
List list = new ArrayList();
String subject = null;
String serial = null;
subject = getSubjectAsString(xselector);
if (xselector.getSerialNumber() != null)
{
serial = xselector.getSerialNumber().toString();
}
// a concrete certificate on the selector overrides the criteria above
if (xselector.getCertificate() != null)
{
subject = xselector.getCertificate().getSubjectX500Principal().getName("RFC1779");
serial = xselector.getCertificate().getSerialNumber().toString();
}
String attrValue = null;
if (subject != null)
{
// one substring search per configured subject attribute (CN, O, ...)
for (int i = 0; i < subjectAttributeNames.length; i++)
{
attrValue = parseDN(subject, subjectAttributeNames[i]);
list
.addAll(search(attrNames, "*" + attrValue + "*",
attrs));
}
}
if (serial != null && params.getSearchForSerialNumberIn() != null)
{
attrValue = serial;
list.addAll(search(
splitString(params.getSearchForSerialNumberIn()),
attrValue, attrs));
}
// no usable criteria at all: fetch everything and filter locally later
if (serial == null && subject == null)
{
list.addAll(search(attrNames, "*", attrs));
}
return list;
}
/**
 * Searches the directory for cross certificate pairs. The subject used as
 * search criterion is taken from the forward selector or, with higher
 * precedence, from the forward certificate of a concrete pair set on the
 * selector.
 *
 * @param xselector The selector with the search criteria.
 * @param attrs Attributes which contain the certificate pairs in the
 *              LDAP directory.
 * @param attrNames Attribute names in the LDAP directory which correspond to
 *                  the subjectAttributeNames.
 * @param subjectAttributeNames Subject attribute names (like "CN", "O", "OU")
 *                              to use to search in the LDAP directory.
 * @return A list of found DER encoded certificate pairs.
 * @throws StoreException if an error occurs while searching.
 */
private List crossCertificatePairSubjectSearch(
    X509CertPairStoreSelector xselector, String[] attrs,
    String attrNames[], String subjectAttributeNames[])
    throws StoreException
{
    List result = new ArrayList();

    // derive the subject DN from the forward selector ...
    String subject = null;
    if (xselector.getForwardSelector() != null)
    {
        subject = getSubjectAsString(xselector.getForwardSelector());
    }
    // ... or, with precedence, from a concrete forward certificate
    if (xselector.getCertPair() != null
        && xselector.getCertPair().getForward() != null)
    {
        subject = xselector.getCertPair().getForward()
            .getSubjectX500Principal().getName("RFC1779");
    }

    if (subject != null)
    {
        // one substring search per configured subject attribute
        for (int i = 0; i < subjectAttributeNames.length; i++)
        {
            String dnValue = parseDN(subject, subjectAttributeNames[i]);
            result.addAll(search(attrNames, "*" + dnValue + "*", attrs));
        }
    }
    else
    {
        // no criteria: fetch everything, the caller filters locally
        result.addAll(search(attrNames, "*", attrs));
    }
    return result;
}
/**
 * Can use the entityName of the holder of the attribute certificate, the
 * serialNumber of attribute certificate and the serialNumber of the
 * associated certificate of the given of the X509AttributeCertSelector.
 *
 * @param xselector The selector with the search criteria.
 * @param attrs Attributes which contain the attribute certificates in the
 *              LDAP directory.
 * @param attrNames Attribute names in the LDAP directory which correspond to the
 *                  subjectAttributeNames.
 * @param subjectAttributeNames Subject attribute names (like "CN", "O", "OU") to use to
 *                              search in the LDAP directory
 * @return A list of found DER encoded attribute certificates.
 * @throws StoreException if an error occurs while searching.
 */
private List attrCertSubjectSerialSearch(
X509AttributeCertStoreSelector xselector, String[] attrs,
String attrNames[], String subjectAttributeNames[])
throws StoreException
{
List list = new ArrayList();
// search for serialNumber of associated cert,
// serialNumber of the attribute certificate or DN in the entityName
// of the holder
String subject = null;
String serial = null;
Collection serials = new HashSet();
Principal principals[] = null;
if (xselector.getHolder() != null)
{
// serialNumber of associated cert
if (xselector.getHolder().getSerialNumber() != null)
{
serials.add(xselector.getHolder().getSerialNumber()
.toString());
}
// DN in the entityName of the holder
if (xselector.getHolder().getEntityNames() != null)
{
principals = xselector.getHolder().getEntityNames();
}
}
// a concrete attribute certificate on the selector overrides the holder
if (xselector.getAttributeCert() != null)
{
if (xselector.getAttributeCert().getHolder().getEntityNames() != null)
{
principals = xselector.getAttributeCert().getHolder()
.getEntityNames();
}
// serialNumber of the attribute certificate
serials.add(xselector.getAttributeCert().getSerialNumber()
.toString());
}
if (principals != null)
{
// only first should be relevant
if (principals[0] instanceof X500Principal)
{
subject = ((X500Principal)principals[0])
.getName("RFC1779");
}
else
{
// strange ... fall back to the generic Principal name
subject = principals[0].getName();
}
}
if (xselector.getSerialNumber() != null)
{
serials.add(xselector.getSerialNumber().toString());
}
String attrValue = null;
if (subject != null)
{
// one substring search per configured subject attribute (CN, O, ...)
for (int i = 0; i < subjectAttributeNames.length; i++)
{
attrValue = parseDN(subject, subjectAttributeNames[i]);
list
.addAll(search(attrNames, "*" + attrValue + "*",
attrs));
}
}
if (serials.size() > 0
&& params.getSearchForSerialNumberIn() != null)
{
// additionally search every collected serial number in the
// attributes configured for serial number lookup
Iterator it = serials.iterator();
while (it.hasNext())
{
serial = (String)it.next();
list.addAll(search(splitString(params.getSearchForSerialNumberIn()), serial, attrs));
}
}
// no usable criteria at all: fetch everything and filter locally later
if (serials.size() == 0 && subject == null)
{
list.addAll(search(attrNames, "*", attrs));
}
return list;
}
/**
 * Can use the issuer of the given of the X509CRLStoreSelector.
 *
 * @param xselector The selector with the search criteria.
 * @param attrs Attributes which contain the CRLs in the LDAP directory.
 * @param attrNames Attribute names in the LDAP directory which correspond to the
 *                  issuerAttributeNames.
 * @param issuerAttributeNames Issuer attribute names (like "CN", "O", "OU") to use to search
 *                             in the LDAP directory
 * @return A list of found DER encoded CRLs.
 * @throws StoreException if an error occurs while searching.
 */
private List cRLIssuerSearch(X509CRLStoreSelector xselector,
String[] attrs, String attrNames[], String issuerAttributeNames[])
throws StoreException
{
List list = new ArrayList();
String issuer = null;
// collect candidate issuers from the selector itself and from the
// certificates/attribute certificates being checked
Collection issuers = new HashSet();
if (xselector.getIssuers() != null)
{
issuers.addAll(xselector.getIssuers());
}
if (xselector.getCertificateChecking() != null)
{
issuers.add(getCertificateIssuer(xselector.getCertificateChecking()));
}
if (xselector.getAttrCertificateChecking() != null)
{
// only X500Principal issuers can be turned into a DN search
Principal principals[] = xselector.getAttrCertificateChecking().getIssuer().getPrincipals();
for (int i=0; i<principals.length; i++)
{
if (principals[i] instanceof X500Principal)
{
issuers.add(principals[i]);
}
}
}
Iterator it = issuers.iterator();
while (it.hasNext())
{
issuer = ((X500Principal)it.next()).getName("RFC1779");
String attrValue = null;
// one substring search per configured issuer attribute (CN, O, ...)
for (int i = 0; i < issuerAttributeNames.length; i++)
{
attrValue = parseDN(issuer, issuerAttributeNames[i]);
list
.addAll(search(attrNames, "*" + attrValue + "*",
attrs));
}
}
// no issuer criteria could be derived: fetch everything
if (issuer == null)
{
list.addAll(search(attrNames, "*", attrs));
}
return list;
}
/**
 * Returns a <code>List</code> of encodings of the certificates, attribute
 * certificates, CRLs or certificate pairs.
 *
 * @param attributeNames The attribute names to look for in the LDAP; may be
 *                       <code>null</code>, in which case only the presence
 *                       filter over <code>attrs</code> is used.
 * @param attributeValue The value the attribute name must have.
 * @param attrs The attributes in the LDAP which hold the certificate,
 *              attribute certificate, certificate pair or CRL in a found
 *              entry.
 * @return A <code>List</code> of byte arrays with the encodings.
 * @throws StoreException if an error occurs getting the results from the LDAP
 *                        directory.
 */
private List search(String attributeNames[], String attributeValue,
    String[] attrs) throws StoreException
{
    // build "(|(name1=value)(name2=value)...)" over the requested names
    String filter;
    if (attributeNames == null)
    {
        filter = null;
    }
    else
    {
        if (attributeValue.equals("**"))
        {
            attributeValue = "*";
        }
        StringBuilder nameFilter = new StringBuilder("(|");
        for (int i = 0; i < attributeNames.length; i++)
        {
            nameFilter.append('(').append(attributeNames[i]).append('=')
                .append(attributeValue).append(')');
        }
        filter = nameFilter.append(')').toString();
    }
    // presence filter: the entry must carry at least one value attribute
    StringBuilder presenceFilter = new StringBuilder("(|");
    for (int i = 0; i < attrs.length; i++)
    {
        presenceFilter.append('(').append(attrs[i]).append("=*)");
    }
    String filter2 = presenceFilter.append(')').toString();
    // combined filter; without attribute names only the presence filter
    // applies (the original built a bogus "(&null..." string first and
    // then discarded it)
    String filter3 = (filter == null) ? filter2 : "(&" + filter + filter2 + ")";
    List list;
    list = getFromCache(filter3);
    if (list != null)
    {
        return list;
    }
    DirContext ctx = null;
    list = new ArrayList();
    try
    {
        ctx = connectLDAP();
        SearchControls constraints = new SearchControls();
        constraints.setSearchScope(SearchControls.SUBTREE_SCOPE);
        constraints.setCountLimit(0);
        constraints.setReturningAttributes(attrs);
        NamingEnumeration results = ctx.search(params.getBaseDN(), filter3,
            constraints);
        while (results.hasMoreElements())
        {
            SearchResult sr = (SearchResult)results.next();
            // only the first attribute of each entry is inspected; all of
            // its values are collected as raw encodings
            NamingEnumeration enumeration = ((Attribute)(sr
                .getAttributes().getAll().next())).getAll();
            while (enumeration.hasMore())
            {
                list.add(enumeration.next());
            }
        }
        addToCache(filter3, list);
    }
    catch (NamingException e)
    {
        // skip exception, unfortunately if an attribute type is not
        // supported an exception is thrown
    }
    finally
    {
        try
        {
            if (null != ctx)
            {
                ctx.close();
            }
        }
        catch (Exception e)
        {
            // ignore failures while closing the directory context
        }
    }
    return list;
}
/**
 * Decodes the DER encoded CRLs in {@code list} and keeps those accepted by
 * the selector. Entries that cannot be parsed are silently skipped.
 *
 * @param list list of byte arrays with DER encoded CRLs.
 * @param xselector selector the CRLs must match.
 * @return a set of matching CRLs.
 * @throws StoreException declared for uniformity with the other helpers.
 */
private Set createCRLs(List list, X509CRLStoreSelector xselector)
    throws StoreException
{
    Set matches = new HashSet();
    X509CRLParser crlParser = new X509CRLParser();
    for (Iterator entries = list.iterator(); entries.hasNext();)
    {
        try
        {
            crlParser.engineInit(new ByteArrayInputStream((byte[])entries.next()));
            X509CRL candidate = (X509CRL)crlParser.engineRead();
            if (xselector.match((Object)candidate))
            {
                matches.add(candidate);
            }
        }
        catch (StreamParsingException e)
        {
            // skip entries that cannot be parsed as a CRL
        }
    }
    return matches;
}
/**
 * Decodes cross certificate pairs from the raw LDAP values and keeps those
 * accepted by the selector.
 * <p/>
 * Each entry is first decoded as a DER encoded certificate pair; if that
 * fails the entry and its successor are treated as the forward and reverse
 * certificates of one pair (hence the extra index increment inside the
 * catch block). Undecodable entries are skipped.
 *
 * @param list list of byte arrays, either encoded pairs or alternating
 *             forward/reverse certificates.
 * @param xselector selector the pairs must match.
 * @return a set of matching {@link X509CertificatePair}s.
 * @throws StoreException declared for uniformity with the other helpers.
 */
private Set createCrossCertificatePairs(List list,
X509CertPairStoreSelector xselector) throws StoreException
{
Set certPairSet = new HashSet();
int i = 0;
while (i < list.size())
{
X509CertificatePair pair;
try
{
// first try to decode it as certificate pair
try
{
X509CertPairParser parser = new X509CertPairParser();
parser.engineInit(new ByteArrayInputStream(
(byte[])list.get(i)));
pair = (X509CertificatePair)parser.engineRead();
}
catch (StreamParsingException e)
{
// now try it to construct it the forward and reverse
// certificate; this consumes TWO list entries
byte[] forward = (byte[])list.get(i);
byte[] reverse = (byte[])list.get(i + 1);
pair = new X509CertificatePair(new CertificatePair(
Certificate
.getInstance(new ASN1InputStream(
forward).readObject()),
Certificate
.getInstance(new ASN1InputStream(
reverse).readObject())));
// skip the reverse certificate consumed above
i++;
}
if (xselector.match((Object)pair))
{
certPairSet.add(pair);
}
}
catch (CertificateParsingException e)
{
// try next
}
catch (IOException e)
{
// try next
}
i++;
}
return certPairSet;
}
/**
 * Decodes the DER encoded attribute certificates in {@code list} and keeps
 * those accepted by the selector. Unparsable entries are silently skipped.
 *
 * @param list list of byte arrays with DER encoded attribute certificates.
 * @param xselector selector the attribute certificates must match.
 * @return a set of matching {@link X509AttributeCertificate}s.
 * @throws StoreException declared for uniformity with the other helpers.
 */
private Set createAttributeCertificates(List list,
    X509AttributeCertStoreSelector xselector) throws StoreException
{
    Set matches = new HashSet();
    X509AttrCertParser attrCertParser = new X509AttrCertParser();
    for (Iterator entries = list.iterator(); entries.hasNext();)
    {
        try
        {
            attrCertParser.engineInit(new ByteArrayInputStream((byte[])entries.next()));
            X509AttributeCertificate candidate = (X509AttributeCertificate)attrCertParser
                .engineRead();
            if (xselector.match((Object)candidate))
            {
                matches.add(candidate);
            }
        }
        catch (StreamParsingException e)
        {
            // skip entries that cannot be parsed as an attribute certificate
        }
    }
    return matches;
}
/**
 * Returns the CRLs for issued certificates for other CAs matching the given
 * selector. The authorityRevocationList attribute includes revocation
 * information regarding certificates issued to other CAs.
 *
 * @param selector The CRL selector to use to find the CRLs.
 * @return A possibly empty collection with CRLs.
 * @throws StoreException if the LDAP search fails.
 */
public Collection getAuthorityRevocationLists(X509CRLStoreSelector selector)
    throws StoreException
{
    String[] valueAttrs = splitString(params.getAuthorityRevocationListAttribute());
    String[] ldapNames = splitString(params
        .getLdapAuthorityRevocationListAttributeName());
    String[] issuerNames = splitString(params
        .getAuthorityRevocationListIssuerAttributeName());

    Set crls = createCRLs(cRLIssuerSearch(selector, valueAttrs, ldapNames,
        issuerNames), selector);
    if (crls.isEmpty())
    {
        // nothing matched the specific criteria: fetch everything and
        // filter locally with the original selector
        crls.addAll(createCRLs(cRLIssuerSearch(new X509CRLStoreSelector(),
            valueAttrs, ldapNames, issuerNames), selector));
    }
    return crls;
}
/**
 * Returns the revocation list for revoked attribute certificates.
 * <p/>
 * The attributeCertificateRevocationList holds a list of attribute
 * certificates that have been revoked.
 *
 * @param selector The CRL selector to use to find the CRLs.
 * @return A possibly empty collection with CRLs.
 * @throws StoreException if the LDAP search fails.
 */
public Collection getAttributeCertificateRevocationLists(
    X509CRLStoreSelector selector) throws StoreException
{
    String[] valueAttrs = splitString(params
        .getAttributeCertificateRevocationListAttribute());
    String[] ldapNames = splitString(params
        .getLdapAttributeCertificateRevocationListAttributeName());
    String[] issuerNames = splitString(params
        .getAttributeCertificateRevocationListIssuerAttributeName());

    Set crls = createCRLs(cRLIssuerSearch(selector, valueAttrs, ldapNames,
        issuerNames), selector);
    if (crls.isEmpty())
    {
        // fall back: fetch everything and filter with the original selector
        crls.addAll(createCRLs(cRLIssuerSearch(new X509CRLStoreSelector(),
            valueAttrs, ldapNames, issuerNames), selector));
    }
    return crls;
}
/**
 * Returns the revocation list for revoked attribute certificates for an
 * attribute authority.
 * <p/>
 * The attributeAuthorityList holds a list of AA certificates that have been
 * revoked.
 *
 * @param selector The CRL selector to use to find the CRLs.
 * @return A possibly empty collection with CRLs.
 * @throws StoreException if the LDAP search fails.
 */
public Collection getAttributeAuthorityRevocationLists(
    X509CRLStoreSelector selector) throws StoreException
{
    String[] valueAttrs = splitString(params.getAttributeAuthorityRevocationListAttribute());
    String[] ldapNames = splitString(params
        .getLdapAttributeAuthorityRevocationListAttributeName());
    String[] issuerNames = splitString(params
        .getAttributeAuthorityRevocationListIssuerAttributeName());

    Set crls = createCRLs(cRLIssuerSearch(selector, valueAttrs, ldapNames,
        issuerNames), selector);
    if (crls.isEmpty())
    {
        // fall back: fetch everything and filter with the original selector
        crls.addAll(createCRLs(cRLIssuerSearch(new X509CRLStoreSelector(),
            valueAttrs, ldapNames, issuerNames), selector));
    }
    return crls;
}
/**
 * Returns cross certificate pairs matching the given selector.
 *
 * @param selector The selector to use to find the cross certificates.
 * @return A possibly empty collection with {@link X509CertificatePair}s.
 * @throws StoreException if the LDAP search fails.
 */
public Collection getCrossCertificatePairs(
    X509CertPairStoreSelector selector) throws StoreException
{
    String[] valueAttrs = splitString(params.getCrossCertificateAttribute());
    String[] ldapNames = splitString(params.getLdapCrossCertificateAttributeName());
    String[] subjectNames = splitString(params
        .getCrossCertificateSubjectAttributeName());

    Set pairs = createCrossCertificatePairs(
        crossCertificatePairSubjectSearch(selector, valueAttrs, ldapNames,
            subjectNames), selector);
    if (pairs.isEmpty())
    {
        // fall back: fetch all pairs from the directory, filter locally
        X509CertStoreSelector anyCert = new X509CertStoreSelector();
        X509CertPairStoreSelector anyPair = new X509CertPairStoreSelector();
        anyPair.setForwardSelector(anyCert);
        anyPair.setReverseSelector(anyCert);
        pairs.addAll(createCrossCertificatePairs(
            crossCertificatePairSubjectSearch(anyPair, valueAttrs, ldapNames,
                subjectNames), selector));
    }
    return pairs;
}
/**
 * Returns end (user) certificates matching the given selector, searched in
 * the userCertificate attribute of the directory.
 *
 * @param selector The selector to find the certificates.
 * @return A possibly empty collection with certificates.
 * @throws StoreException if the LDAP search fails.
 */
public Collection getUserCertificates(X509CertStoreSelector selector)
    throws StoreException
{
    String[] valueAttrs = splitString(params.getUserCertificateAttribute());
    String[] ldapNames = splitString(params.getLdapUserCertificateAttributeName());
    String[] subjectNames = splitString(params
        .getUserCertificateSubjectAttributeName());

    Set certs = createCerts(certSubjectSerialSearch(selector, valueAttrs,
        ldapNames, subjectNames), selector);
    if (certs.isEmpty())
    {
        // fall back: fetch everything and filter with the original selector
        certs.addAll(createCerts(certSubjectSerialSearch(
            new X509CertStoreSelector(), valueAttrs, ldapNames,
            subjectNames), selector));
    }
    return certs;
}
/**
 * Returns attribute certificates for an attribute authority.
 * <p/>
 * The aAcertificate holds the privileges of an attribute authority.
 *
 * @param selector The selector to find the attribute certificates.
 * @return A possibly empty collection with attribute certificates.
 * @throws StoreException if the LDAP search fails.
 */
public Collection getAACertificates(X509AttributeCertStoreSelector selector)
    throws StoreException
{
    String[] valueAttrs = splitString(params.getAACertificateAttribute());
    String[] ldapNames = splitString(params.getLdapAACertificateAttributeName());
    String[] subjectNames = splitString(params.getAACertificateSubjectAttributeName());

    Set attrCerts = createAttributeCertificates(attrCertSubjectSerialSearch(
        selector, valueAttrs, ldapNames, subjectNames), selector);
    if (attrCerts.isEmpty())
    {
        // fall back: fetch everything and filter with the original selector
        attrCerts.addAll(createAttributeCertificates(attrCertSubjectSerialSearch(
            new X509AttributeCertStoreSelector(), valueAttrs, ldapNames,
            subjectNames), selector));
    }
    return attrCerts;
}
/**
 * Returns an attribute certificate for an authority.
 * <p/>
 * The attributeDescriptorCertificate is self signed by a source of
 * authority and holds a description of the privilege and its delegation
 * rules.
 *
 * @param selector The selector to find the attribute certificates.
 * @return A possibly empty collection with attribute certificates.
 * @throws StoreException if the LDAP search fails.
 */
public Collection getAttributeDescriptorCertificates(
    X509AttributeCertStoreSelector selector) throws StoreException
{
    String[] valueAttrs = splitString(params.getAttributeDescriptorCertificateAttribute());
    String[] ldapNames = splitString(params
        .getLdapAttributeDescriptorCertificateAttributeName());
    String[] subjectNames = splitString(params
        .getAttributeDescriptorCertificateSubjectAttributeName());

    Set attrCerts = createAttributeCertificates(attrCertSubjectSerialSearch(
        selector, valueAttrs, ldapNames, subjectNames), selector);
    if (attrCerts.isEmpty())
    {
        // fall back: fetch everything and filter with the original selector
        attrCerts.addAll(createAttributeCertificates(attrCertSubjectSerialSearch(
            new X509AttributeCertStoreSelector(), valueAttrs, ldapNames,
            subjectNames), selector));
    }
    return attrCerts;
}
/**
 * Returns CA certificates.
 * <p/>
 * The cACertificate attribute of a CA's directory entry shall be used to
 * store self-issued certificates (if any) and certificates issued to this
 * CA by CAs in the same realm as this CA.
 *
 * @param selector The selector to find the certificates.
 * @return A possibly empty collection with certificates.
 * @throws StoreException if the LDAP search fails.
 */
public Collection getCACertificates(X509CertStoreSelector selector)
    throws StoreException
{
    String[] valueAttrs = splitString(params.getCACertificateAttribute());
    String[] ldapNames = splitString(params.getLdapCACertificateAttributeName());
    String[] subjectNames = splitString(params
        .getCACertificateSubjectAttributeName());

    Set certs = createCerts(certSubjectSerialSearch(selector, valueAttrs,
        ldapNames, subjectNames), selector);
    if (certs.isEmpty())
    {
        // fall back: fetch everything and filter with the original selector
        certs.addAll(createCerts(certSubjectSerialSearch(
            new X509CertStoreSelector(), valueAttrs, ldapNames,
            subjectNames), selector));
    }
    return certs;
}
/**
 * Returns the delta revocation list for revoked certificates.
 *
 * @param selector The CRL selector to use to find the CRLs.
 * @return A possibly empty collection with CRLs.
 * @throws StoreException if the LDAP search fails.
 */
public Collection getDeltaCertificateRevocationLists(
    X509CRLStoreSelector selector) throws StoreException
{
    String[] valueAttrs = splitString(params.getDeltaRevocationListAttribute());
    String[] ldapNames = splitString(params.getLdapDeltaRevocationListAttributeName());
    String[] issuerNames = splitString(params
        .getDeltaRevocationListIssuerAttributeName());

    Set crls = createCRLs(cRLIssuerSearch(selector, valueAttrs, ldapNames,
        issuerNames), selector);
    if (crls.isEmpty())
    {
        // fall back: fetch everything and filter with the original selector
        crls.addAll(createCRLs(cRLIssuerSearch(new X509CRLStoreSelector(),
            valueAttrs, ldapNames, issuerNames), selector));
    }
    return crls;
}
/**
 * Returns an attribute certificate for a user.
 * <p/>
 * The attributeCertificateAttribute holds the privileges of a user.
 *
 * @param selector The selector to find the attribute certificates.
 * @return A possibly empty collection with attribute certificates.
 * @throws StoreException if the LDAP search fails.
 */
public Collection getAttributeCertificateAttributes(
    X509AttributeCertStoreSelector selector) throws StoreException
{
    String[] valueAttrs = splitString(params.getAttributeCertificateAttributeAttribute());
    String[] ldapNames = splitString(params
        .getLdapAttributeCertificateAttributeAttributeName());
    String[] subjectNames = splitString(params
        .getAttributeCertificateAttributeSubjectAttributeName());

    Set attrCerts = createAttributeCertificates(attrCertSubjectSerialSearch(
        selector, valueAttrs, ldapNames, subjectNames), selector);
    if (attrCerts.isEmpty())
    {
        // fall back: fetch everything and filter with the original selector
        attrCerts.addAll(createAttributeCertificates(attrCertSubjectSerialSearch(
            new X509AttributeCertStoreSelector(), valueAttrs, ldapNames,
            subjectNames), selector));
    }
    return attrCerts;
}
/**
 * Returns the certificate revocation lists for revoked certificates.
 *
 * @param selector The CRL selector to use to find the CRLs.
 * @return A possibly empty collection with CRLs.
 * @throws StoreException if the LDAP search fails.
 */
public Collection getCertificateRevocationLists(
    X509CRLStoreSelector selector) throws StoreException
{
    String[] valueAttrs = splitString(params.getCertificateRevocationListAttribute());
    String[] ldapNames = splitString(params
        .getLdapCertificateRevocationListAttributeName());
    String[] issuerNames = splitString(params
        .getCertificateRevocationListIssuerAttributeName());

    Set crls = createCRLs(cRLIssuerSearch(selector, valueAttrs, ldapNames,
        issuerNames), selector);
    if (crls.isEmpty())
    {
        // fall back: fetch everything and filter with the original selector
        crls.addAll(createCRLs(cRLIssuerSearch(new X509CRLStoreSelector(),
            valueAttrs, ldapNames, issuerNames), selector));
    }
    return crls;
}
// cache of search results keyed by LDAP filter string; each value is a
// two-element list: [0] = insertion Date, [1] = the cached result List
private Map cacheMap = new HashMap(cacheSize);
// maximum number of cached filters before the oldest entry is evicted
private static int cacheSize = 32;
// cache entry lifetime in milliseconds (one minute)
private static long lifeTime = 60 * 1000;
/**
 * Stores a search result in the cache under the given filter string.
 * When the cache is full, the entry with the oldest timestamp is evicted.
 *
 * @param searchCriteria the LDAP filter string used as cache key.
 * @param list the search result to cache.
 */
private synchronized void addToCache(String searchCriteria, List list)
{
Date now = new Date(System.currentTimeMillis());
List cacheEntry = new ArrayList();
cacheEntry.add(now);
cacheEntry.add(list);
if (cacheMap.containsKey(searchCriteria))
{
// refresh an existing entry with the new timestamp and result
cacheMap.put(searchCriteria, cacheEntry);
}
else
{
if (cacheMap.size() >= cacheSize)
{
// replace oldest
Iterator it = cacheMap.entrySet().iterator();
long oldest = now.getTime();
Object replace = null;
while (it.hasNext())
{
Map.Entry entry = (Map.Entry)it.next();
long current = ((Date)((List)entry.getValue()).get(0))
.getTime();
if (current < oldest)
{
oldest = current;
replace = entry.getKey();
}
}
cacheMap.remove(replace);
}
cacheMap.put(searchCriteria, cacheEntry);
}
}
/**
 * Returns the cached search result for the given filter string, or
 * <code>null</code> if there is no entry or the entry has expired.
 * <p/>
 * Synchronized for consistency with {@link #addToCache(String, List)}:
 * the backing <code>HashMap</code> is not safe for a concurrent
 * read-while-write.
 *
 * @param searchCriteria the LDAP filter string used as cache key.
 * @return the cached list of encodings, or <code>null</code>.
 */
private synchronized List getFromCache(String searchCriteria)
{
    List entry = (List)cacheMap.get(searchCriteria);
    long now = System.currentTimeMillis();
    if (entry != null)
    {
        // entry layout: [0] = insertion Date, [1] = cached result list
        if (((Date)entry.get(0)).getTime() < (now - lifeTime))
        {
            // too old
            return null;
        }
        return (List)entry.get(1);
    }
    return null;
}
/*
 * split the string on runs of whitespace
 */
private String[] splitString(String str)
{
return str.split("\\s+");
}
/**
 * Returns the subject of the selector as an RFC 1779 formatted DN string,
 * or {@code null} when the selector carries no subject.
 *
 * @param xselector the selector to read the subject from.
 * @throws StoreException if the encoded subject cannot be obtained.
 */
private String getSubjectAsString(X509CertStoreSelector xselector)
{
    byte[] encSubject;
    try
    {
        encSubject = xselector.getSubjectAsBytes();
    }
    catch (IOException e)
    {
        // preserve the cause for callers diagnosing bad selector state
        throw new StoreException("exception processing name: " + e.getMessage(), e);
    }
    if (encSubject == null)
    {
        return null;
    }
    return new X500Principal(encSubject).getName("RFC1779");
}
/**
 * Returns the issuer distinguished name of the given certificate.
 *
 * @param cert the certificate to read the issuer from.
 * @return the issuer as an {@link X500Principal}.
 */
private X500Principal getCertificateIssuer(X509Certificate cert)
{
return cert.getIssuerX500Principal();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.net.ServerSocketUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.yarn.api.ContainerManagementProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.URL;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.security.NMTokenIdentifier;
import org.apache.hadoop.yarn.server.api.records.MasterKey;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.TestContainerManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class TestNodeManagerShutdown {
// all per-test artifacts live under target/<test-class-name>
static final File basedir =
new File("target", TestNodeManagerShutdown.class.getName());
// scratch directory holding the container's start marker file
static final File tmpDir = new File(basedir, "tmpDir");
static final File logsDir = new File(basedir, "logs");
static final File remoteLogsDir = new File(basedir, "remotelogs");
// NodeManager local dir used by the NM config
static final File nmLocalDir = new File(basedir, "nm0");
// marker file the launched container script creates once it is running
static final File processStartFile = new File(tmpDir, "start_file.txt")
.getAbsoluteFile();
static final RecordFactory recordFactory = RecordFactoryProvider
.getRecordFactory(null);
// user the test containers run as
static final String user = "nobody";
private FileContext localFS;
private ContainerId cId;
// the NodeManager under test; stopped in tearDown if still set
private NodeManager nm;
@Before
public void setup() throws UnsupportedFileSystemException {
localFS = FileContext.getLocalFSFileContext();
// create the per-test working directories; failures surface later when
// the directories are actually used
tmpDir.mkdirs();
logsDir.mkdirs();
remoteLogsDir.mkdirs();
nmLocalDir.mkdirs();
// Construct the Container-id
cId = createContainerId();
}
@After
public void tearDown() throws IOException, InterruptedException {
// stop the NodeManager if a test left it running
if (nm != null) {
nm.stop();
}
// recursively remove all per-test directories
localFS.delete(new Path(basedir.getPath()), true);
}
@Test
public void testStateStoreRemovalOnDecommission() throws IOException {
final File recoveryDir = new File(basedir, "nm-recovery");
nm = new TestNodeManager();
YarnConfiguration conf = createNMConfig();
conf.setBoolean(YarnConfiguration.NM_RECOVERY_ENABLED, true);
conf.set(YarnConfiguration.NM_RECOVERY_DIR, recoveryDir.getAbsolutePath());
// verify state store is not removed on normal shutdown
nm.init(conf);
nm.start();
Assert.assertTrue(recoveryDir.exists());
Assert.assertTrue(recoveryDir.isDirectory());
nm.stop();
nm = null;
Assert.assertTrue(recoveryDir.exists());
Assert.assertTrue(recoveryDir.isDirectory());
// verify state store is removed on decommissioned shutdown
nm = new TestNodeManager();
nm.init(conf);
nm.start();
Assert.assertTrue(recoveryDir.exists());
Assert.assertTrue(recoveryDir.isDirectory());
nm.getNMContext().setDecommissioned(true);
nm.stop();
nm = null;
Assert.assertFalse(recoveryDir.exists());
}
@Test
public void testKillContainersOnShutdown() throws IOException,
YarnException {
nm = new TestNodeManager();
int port = ServerSocketUtil.getPort(49157, 10);
nm.init(createNMConfig(port));
nm.start();
startContainer(nm, cId, localFS, tmpDir, processStartFile, port);
final int MAX_TRIES=20;
int numTries = 0;
while (!processStartFile.exists() && numTries < MAX_TRIES) {
try {
Thread.sleep(500);
} catch (InterruptedException ex) {ex.printStackTrace();}
numTries++;
}
nm.stop();
// Now verify the contents of the file. Script generates a message when it
// receives a sigterm so we look for that. We cannot perform this check on
// Windows, because the process is not notified when killed by winutils.
// There is no way for the process to trap and respond. Instead, we can
// verify that the job object with ID matching container ID no longer exists.
if (Shell.WINDOWS) {
Assert.assertFalse("Process is still alive!",
DefaultContainerExecutor.containerIsAlive(cId.toString()));
} else {
BufferedReader reader =
new BufferedReader(new FileReader(processStartFile));
boolean foundSigTermMessage = false;
while (true) {
String line = reader.readLine();
if (line == null) {
break;
}
if (line.contains("SIGTERM")) {
foundSigTermMessage = true;
break;
}
}
Assert.assertTrue("Did not find sigterm message", foundSigTermMessage);
reader.close();
}
}
public static void startContainer(NodeManager nm, ContainerId cId,
FileContext localFS, File scriptFileDir, File processStartFile,
final int port)
throws IOException, YarnException {
File scriptFile =
createUnhaltingScriptFile(cId, scriptFileDir, processStartFile);
ContainerLaunchContext containerLaunchContext =
recordFactory.newRecordInstance(ContainerLaunchContext.class);
NodeId nodeId = BuilderUtils.newNodeId(InetAddress.getByName("localhost")
.getCanonicalHostName(), port);
URL localResourceUri =
URL.fromPath(localFS
.makeQualified(new Path(scriptFile.getAbsolutePath())));
LocalResource localResource =
recordFactory.newRecordInstance(LocalResource.class);
localResource.setResource(localResourceUri);
localResource.setSize(-1);
localResource.setVisibility(LocalResourceVisibility.APPLICATION);
localResource.setType(LocalResourceType.FILE);
localResource.setTimestamp(scriptFile.lastModified());
String destinationFile = "dest_file";
Map<String, LocalResource> localResources =
new HashMap<String, LocalResource>();
localResources.put(destinationFile, localResource);
containerLaunchContext.setLocalResources(localResources);
List<String> commands = Arrays.asList(Shell.getRunScriptCommand(scriptFile));
containerLaunchContext.setCommands(commands);
final InetSocketAddress containerManagerBindAddress =
NetUtils.createSocketAddrForHost("127.0.0.1", port);
UserGroupInformation currentUser = UserGroupInformation
.createRemoteUser(cId.toString());
org.apache.hadoop.security.token.Token<NMTokenIdentifier> nmToken =
ConverterUtils.convertFromYarn(
nm.getNMContext().getNMTokenSecretManager()
.createNMToken(cId.getApplicationAttemptId(), nodeId, user),
containerManagerBindAddress);
currentUser.addToken(nmToken);
ContainerManagementProtocol containerManager =
currentUser.doAs(new PrivilegedAction<ContainerManagementProtocol>() {
@Override
public ContainerManagementProtocol run() {
Configuration conf = new Configuration();
YarnRPC rpc = YarnRPC.create(conf);
InetSocketAddress containerManagerBindAddress =
NetUtils.createSocketAddrForHost("127.0.0.1", port);
return (ContainerManagementProtocol) rpc.getProxy(ContainerManagementProtocol.class,
containerManagerBindAddress, conf);
}
});
StartContainerRequest scRequest =
StartContainerRequest.newInstance(containerLaunchContext,
TestContainerManager.createContainerToken(cId, 0,
nodeId, user, nm.getNMContext().getContainerTokenSecretManager()));
List<StartContainerRequest> list = new ArrayList<StartContainerRequest>();
list.add(scRequest);
StartContainersRequest allRequests =
StartContainersRequest.newInstance(list);
containerManager.startContainers(allRequests);
List<ContainerId> containerIds = new ArrayList<ContainerId>();
containerIds.add(cId);
GetContainerStatusesRequest request =
GetContainerStatusesRequest.newInstance(containerIds);
ContainerStatus containerStatus =
containerManager.getContainerStatuses(request).getContainerStatuses().get(0);
Assert.assertTrue(
EnumSet.of(ContainerState.RUNNING, ContainerState.SCHEDULED)
.contains(containerStatus.getState()));
}
public static ContainerId createContainerId() {
ApplicationId appId = ApplicationId.newInstance(0, 0);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
ContainerId containerId = ContainerId.newContainerId(appAttemptId, 0);
return containerId;
}
private YarnConfiguration createNMConfig(int port) throws IOException {
YarnConfiguration conf = new YarnConfiguration();
conf.setInt(YarnConfiguration.NM_PMEM_MB, 5*1024); // 5GB
conf.set(YarnConfiguration.NM_ADDRESS, "127.0.0.1:" + port);
conf.set(YarnConfiguration.NM_LOCALIZER_ADDRESS, "127.0.0.1:"
+ ServerSocketUtil.getPort(49158, 10));
conf.set(YarnConfiguration.NM_WEBAPP_ADDRESS,
"127.0.0.1:" + ServerSocketUtil
.getPort(YarnConfiguration.DEFAULT_NM_WEBAPP_PORT, 10));
conf.set(YarnConfiguration.NM_LOG_DIRS, logsDir.getAbsolutePath());
conf.set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR, remoteLogsDir.getAbsolutePath());
conf.set(YarnConfiguration.NM_LOCAL_DIRS, nmLocalDir.getAbsolutePath());
conf.setLong(YarnConfiguration.NM_LOG_RETAIN_SECONDS, 1);
return conf;
}
private YarnConfiguration createNMConfig() throws IOException {
return createNMConfig(ServerSocketUtil.getPort(49157, 10));
}
/**
* Creates a script to run a container that will run forever unless
* stopped by external means.
*/
private static File createUnhaltingScriptFile(ContainerId cId,
File scriptFileDir, File processStartFile) throws IOException {
File scriptFile = Shell.appendScriptExtension(scriptFileDir, "scriptFile");
PrintWriter fileWriter = new PrintWriter(scriptFile);
if (Shell.WINDOWS) {
fileWriter.println("@echo \"Running testscript for delayed kill\"");
fileWriter.println("@echo \"Writing pid to start file\"");
fileWriter.println("@echo " + cId + ">> " + processStartFile);
fileWriter.println("@pause");
} else {
fileWriter.write("#!/bin/bash\n\n");
fileWriter.write("echo \"Running testscript for delayed kill\"\n");
fileWriter.write("hello=\"Got SIGTERM\"\n");
fileWriter.write("umask 0\n");
fileWriter.write("trap \"echo $hello >> " + processStartFile +
"\" SIGTERM\n");
fileWriter.write("echo \"Writing pid to start file\"\n");
fileWriter.write("echo $$ >> " + processStartFile + "\n");
fileWriter.write("while true; do\ndate >> /dev/null;\n done\n");
}
fileWriter.close();
return scriptFile;
}
class TestNodeManager extends NodeManager {
@Override
protected NodeStatusUpdater createNodeStatusUpdater(Context context,
Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
MockNodeStatusUpdater myNodeStatusUpdater =
new MockNodeStatusUpdater(context, dispatcher, healthChecker, metrics);
return myNodeStatusUpdater;
}
public void setMasterKey(MasterKey masterKey) {
getNMContext().getContainerTokenSecretManager().setMasterKey(masterKey);
}
}
}
| |
/*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp.parsing;
import static com.google.javascript.jscomp.mozilla.rhino.Token.CommentType.JSDOC;
import com.google.common.collect.Sets;
import com.google.javascript.jscomp.mozilla.rhino.ErrorReporter;
import com.google.javascript.jscomp.mozilla.rhino.ast.ArrayLiteral;
import com.google.javascript.jscomp.mozilla.rhino.ast.Assignment;
import com.google.javascript.jscomp.mozilla.rhino.ast.AstNode;
import com.google.javascript.jscomp.mozilla.rhino.ast.AstRoot;
import com.google.javascript.jscomp.mozilla.rhino.ast.Block;
import com.google.javascript.jscomp.mozilla.rhino.ast.BreakStatement;
import com.google.javascript.jscomp.mozilla.rhino.ast.CatchClause;
import com.google.javascript.jscomp.mozilla.rhino.ast.Comment;
import com.google.javascript.jscomp.mozilla.rhino.ast.ConditionalExpression;
import com.google.javascript.jscomp.mozilla.rhino.ast.ContinueStatement;
import com.google.javascript.jscomp.mozilla.rhino.ast.DoLoop;
import com.google.javascript.jscomp.mozilla.rhino.ast.ElementGet;
import com.google.javascript.jscomp.mozilla.rhino.ast.EmptyExpression;
import com.google.javascript.jscomp.mozilla.rhino.ast.ExpressionStatement;
import com.google.javascript.jscomp.mozilla.rhino.ast.ForInLoop;
import com.google.javascript.jscomp.mozilla.rhino.ast.ForLoop;
import com.google.javascript.jscomp.mozilla.rhino.ast.FunctionCall;
import com.google.javascript.jscomp.mozilla.rhino.ast.FunctionNode;
import com.google.javascript.jscomp.mozilla.rhino.ast.IfStatement;
import com.google.javascript.jscomp.mozilla.rhino.ast.InfixExpression;
import com.google.javascript.jscomp.mozilla.rhino.ast.KeywordLiteral;
import com.google.javascript.jscomp.mozilla.rhino.ast.Label;
import com.google.javascript.jscomp.mozilla.rhino.ast.LabeledStatement;
import com.google.javascript.jscomp.mozilla.rhino.ast.Name;
import com.google.javascript.jscomp.mozilla.rhino.ast.NewExpression;
import com.google.javascript.jscomp.mozilla.rhino.ast.NumberLiteral;
import com.google.javascript.jscomp.mozilla.rhino.ast.ObjectLiteral;
import com.google.javascript.jscomp.mozilla.rhino.ast.ObjectProperty;
import com.google.javascript.jscomp.mozilla.rhino.ast.ParenthesizedExpression;
import com.google.javascript.jscomp.mozilla.rhino.ast.PropertyGet;
import com.google.javascript.jscomp.mozilla.rhino.ast.RegExpLiteral;
import com.google.javascript.jscomp.mozilla.rhino.ast.ReturnStatement;
import com.google.javascript.jscomp.mozilla.rhino.ast.Scope;
import com.google.javascript.jscomp.mozilla.rhino.ast.StringLiteral;
import com.google.javascript.jscomp.mozilla.rhino.ast.SwitchCase;
import com.google.javascript.jscomp.mozilla.rhino.ast.SwitchStatement;
import com.google.javascript.jscomp.mozilla.rhino.ast.ThrowStatement;
import com.google.javascript.jscomp.mozilla.rhino.ast.TryStatement;
import com.google.javascript.jscomp.mozilla.rhino.ast.UnaryExpression;
import com.google.javascript.jscomp.mozilla.rhino.ast.VariableDeclaration;
import com.google.javascript.jscomp.mozilla.rhino.ast.VariableInitializer;
import com.google.javascript.jscomp.mozilla.rhino.ast.WhileLoop;
import com.google.javascript.jscomp.mozilla.rhino.ast.WithStatement;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.Set;
/**
* IRFactory transforms the new AST to the old AST.
*
*/
public class IRFactory {
private final String sourceString;
private final String sourceName;
private final Config config;
private final ErrorReporter errorReporter;
private final TransformDispatcher transformDispatcher;
// non-static for thread safety
private final Set<String> ALLOWED_DIRECTIVES = Sets.newHashSet("use strict");
// @license text gets appended onto the fileLevelJsDocBuilder as found,
// and stored in JSDocInfo for placeholder node.
Node rootNodeJsDocHolder = new Node(Token.SCRIPT);
Node.FileLevelJsDocBuilder fileLevelJsDocBuilder =
rootNodeJsDocHolder.getJsDocBuilderForNode();
JSDocInfo fileOverviewInfo = null;
// Use a template node for properties set on all nodes to minimize the
// memory footprint associated with these.
private Node templateNode;
// TODO(johnlenz): Consider creating a template pool for ORIGINALNAME_PROP.
/**
 * Constructs a factory bound to a single source file.
 * Instances are created only through {@link #transformTree}.
 */
private IRFactory(String sourceString,
    String sourceName,
    Config config,
    ErrorReporter errorReporter) {
  this.config = config;
  this.errorReporter = errorReporter;
  this.sourceString = sourceString;
  this.sourceName = sourceName;
  this.transformDispatcher = new TransformDispatcher();
  // Shared attribute template applied to every node built from this file;
  // must be created after sourceName is assigned.
  this.templateNode = createTemplateNode();
}
// Builds the per-file template node: it carries the attributes (such as the
// source name) shared by every node from this file, letting the prop
// structure be shared so the cost drops from O(nodes) to O(files).
private Node createTemplateNode() {
  Node template = new Node(Token.SCRIPT); // the node type choice is arbitrary
  template.putProp(Node.SOURCENAME_PROP, sourceName);
  return template;
}
/**
 * Converts a parsed Rhino AST into the old-style AST, attaching any
 * file-level (@fileoverview / @license) JSDoc to the returned root.
 */
public static Node transformTree(AstRoot node,
    String sourceString,
    Config config,
    ErrorReporter errorReporter) {
  IRFactory factory = new IRFactory(
      sourceString, node.getSourceName(), config, errorReporter);
  Node root = factory.transform(node);
  if (node.getComments() != null) {
    // Sweep up JSDoc comments that were not consumed during the transform;
    // they may still contain file-level annotations.
    for (Comment comment : node.getComments()) {
      boolean unparsedJsDoc =
          comment.getCommentType() == JSDOC && !comment.isParsed();
      if (unparsedJsDoc) {
        factory.handlePossibleFileOverviewJsDoc(comment);
      }
    }
  }
  factory.setFileOverviewJsDoc(root);
  return root;
}
// Attaches the accumulated file-level JSDoc to the root node. Only after
// every @fileoverview entry has been seen do we attach the last one,
// carrying over any license string already collected.
private void setFileOverviewJsDoc(Node irNode) {
  irNode.setJSDocInfo(rootNodeJsDocHolder.getJSDocInfo());
  if (fileOverviewInfo == null) {
    return;
  }
  JSDocInfo attached = irNode.getJSDocInfo();
  if (attached != null && attached.getLicense() != null) {
    // Preserve the @license text when the fileoverview info replaces it.
    fileOverviewInfo.setLicense(attached.getLicense());
  }
  irNode.setJSDocInfo(fileOverviewInfo);
}
// Transforms a node and guarantees the result is a BLOCK: an EMPTY result is
// retyped in place (flagged as originally empty), anything else is wrapped.
private Node transformBlock(AstNode node) {
  Node result = transform(node);
  if (result.getType() == Token.BLOCK) {
    return result;
  }
  if (result.getType() == Token.EMPTY) {
    result.setType(Token.BLOCK);
    result.setWasEmptyNode(true);
    return result;
  }
  Node wrapper = newNode(Token.BLOCK, result);
  wrapper.setLineno(result.getLineno());
  wrapper.setCharno(result.getCharno());
  return wrapper;
}
/**
 * Records any newly seen file-overview JSDoc from the parser.
 *
 * @return true if the jsDocParser represents a fileoverview.
 */
private boolean handlePossibleFileOverviewJsDoc(
    JsDocInfoParser jsDocParser) {
  JSDocInfo overview = jsDocParser.getFileOverviewJSDocInfo();
  if (overview == fileOverviewInfo) {
    return false;
  }
  fileOverviewInfo = overview;
  return true;
}
// Parses a raw JSDoc comment, marks it consumed, and records any
// file-overview information it carried.
private void handlePossibleFileOverviewJsDoc(Comment comment) {
  JsDocInfoParser parser = createJsDocInfoParser(comment);
  comment.setParsed(true);
  handlePossibleFileOverviewJsDoc(parser);
}
// Parses the JSDoc attached to a node, if any. Returns the parsed info for
// attachment to the transformed node, or null when there is no JSDoc or the
// comment turned out to be file-level (@fileoverview) documentation.
private JSDocInfo handleJsDoc(AstNode node) {
  Comment comment = node.getJsDocNode();
  if (comment == null) {
    return null;
  }
  JsDocInfoParser parser = createJsDocInfoParser(comment);
  comment.setParsed(true);
  if (handlePossibleFileOverviewJsDoc(parser)) {
    return null;
  }
  return parser.retrieveAndResetParsedJSDocInfo();
}
// Central transform entry point: dispatches to the type-specific handler,
// attaches any node-level JSDoc, and fixes up line/char positions.
private Node transform(AstNode node) {
  JSDocInfo info = handleJsDoc(node);
  Node irNode = justTransform(node);
  if (info != null) {
    irNode.setJSDocInfo(info);
  }
  boolean namedFunction = irNode.getType() == Token.FUNCTION
      && irNode.getFirstChild().getLineno() != -1;
  if (namedFunction) {
    // For a named function, report the position of the name itself.
    Node functionName = irNode.getFirstChild();
    irNode.setLineno(functionName.getLineno());
    irNode.setCharno(functionName.getCharno());
  } else if (irNode.getLineno() == -1) {
    // If we didn't already set the line, then set it now. This avoids
    // cases like ParenthesizedExpression where we just return a previous
    // node, but don't want the new node to get its parent's line number.
    irNode.setLineno(node.getLineno());
    irNode.setCharno(position2charno(node.getAbsolutePosition()));
  }
  return irNode;
}
/**
 * Creates a JsDocInfoParser and parses the JsDoc string.
 *
 * Used both for handling individual JSDoc comments and for handling
 * file-level JSDoc comments (@fileoverview and @license).
 *
 * @param node The JsDoc Comment node to parse.
 * @return A JSDocInfoParser. Will contain either fileoverview jsdoc, or
 *     normal jsdoc, or no jsdoc (if the method parses to the wrong level).
 */
private JsDocInfoParser createJsDocInfoParser(Comment node) {
  // The JsDocInfoParser expects the comment without the initial '/**'.
  final int numOpeningChars = 3;
  String commentText = node.getValue().substring(numOpeningChars);
  int startCharno =
      position2charno(node.getAbsolutePosition()) + numOpeningChars;
  JsDocTokenStream stream =
      new JsDocTokenStream(commentText, node.getLineno(), startCharno);
  JsDocInfoParser parser = new JsDocInfoParser(
      stream,
      node,
      sourceName,
      config,
      errorReporter);
  parser.setFileLevelJsDocBuilder(fileLevelJsDocBuilder);
  parser.setFileOverviewJSDocInfo(fileOverviewInfo);
  parser.parse();
  return parser;
}
// Converts an absolute source offset into a 0-based column number by
// measuring the distance from the nearest preceding newline.
private int position2charno(int position) {
  int newlineIndex = sourceString.lastIndexOf('\n', position);
  // With no preceding newline the offset already is the column on line one;
  // otherwise subtract one because the first column is 0.
  return (newlineIndex == -1) ? position : position - newlineIndex - 1;
}
// Raw dispatch to the type-specific handler, without the JSDoc and
// position fix-ups performed by transform().
private Node justTransform(AstNode node) {
  Node transformed = transformDispatcher.process(node);
  return transformed;
}
private class TransformDispatcher extends TypeSafeDispatcher<Node> {
private Node processGeneric(
com.google.javascript.jscomp.mozilla.rhino.Node n) {
Node node = newNode(transformTokenType(n.getType()));
for (com.google.javascript.jscomp.mozilla.rhino.Node child : n) {
node.addChildToBack(transform((AstNode)child));
}
return node;
}
/**
* Transforms the given node and then sets its type to Token.STRING if it
* was Token.NAME. If its type was already Token.STRING, then quotes it.
* Used for properties, as the old AST uses String tokens, while the new one
* uses Name tokens for unquoted strings. For example, in
* var o = {'a' : 1, b: 2};
* the string 'a' is quoted, while the name b is turned into a string, but
* unquoted.
*/
private Node transformAsString(AstNode n) {
Node ret = transform(n);
if (ret.getType() == Token.STRING) {
ret.putBooleanProp(Node.QUOTED_PROP, true);
} else if (ret.getType() == Token.NAME) {
ret.setType(Token.STRING);
}
return ret;
}
@Override
Node processArrayLiteral(ArrayLiteral literalNode) {
if (literalNode.isDestructuring()) {
reportDestructuringAssign(literalNode);
}
Node node = newNode(Token.ARRAYLIT);
int skipCount = 0;
for (AstNode child : literalNode.getElements()) {
Node c = transform(child);
if (c.getType() == Token.EMPTY) {
skipCount++;
}
node.addChildToBack(c);
}
if (skipCount > 0) {
int[] skipIndexes = new int[skipCount];
int i = 0;
int j = 0;
for (Node child : node.children()) {
if (child.getType() == Token.EMPTY) {
node.removeChild(child);
skipIndexes[j] = i;
j++;
}
i++;
}
node.putProp(Node.SKIP_INDEXES_PROP, skipIndexes);
}
return node;
}
@Override
Node processAssignment(Assignment assignmentNode) {
return processInfixExpression(assignmentNode);
}
@Override
Node processAstRoot(AstRoot rootNode) {
Node node = newNode(Token.SCRIPT);
for (com.google.javascript.jscomp.mozilla.rhino.Node child : rootNode) {
node.addChildToBack(transform((AstNode)child));
}
parseDirectives(node);
return node;
}
/**
* Parse the directives, encode them in the AST, and remove their nodes.
*
* For information on ES5 directives, see section 14.1 of
* Ecma-262, Edition 5.
*
* It would be nice if Rhino would eventually take care of this for
* us, but right now their directive-processing is a one-off.
*/
private void parseDirectives(Node node) {
// Remove all the directives, and encode them in the AST.
Set<String> directives = null;
while (isDirective(node.getFirstChild())) {
String directive = node.removeFirstChild().getFirstChild().getString();
if (directives == null) {
directives = Sets.newHashSet(directive);
} else {
directives.add(directive);
}
}
if (directives != null) {
node.setDirectives(directives);
}
}
private boolean isDirective(Node n) {
if (n == null) return false;
int nType = n.getType();
return (nType == Token.EXPR_RESULT || nType == Token.EXPR_VOID) &&
n.getFirstChild().getType() == Token.STRING &&
ALLOWED_DIRECTIVES.contains(n.getFirstChild().getString());
}
@Override
Node processBlock(Block blockNode) {
return processGeneric(blockNode);
}
@Override
Node processBreakStatement(BreakStatement statementNode) {
Node node = newNode(Token.BREAK);
if (statementNode.getBreakLabel() != null) {
Node labelName = transform(statementNode.getBreakLabel());
// Change the NAME to LABEL_NAME
labelName.setType(Token.LABEL_NAME);
node.addChildToBack(labelName);
}
return node;
}
@Override
Node processCatchClause(CatchClause clauseNode) {
AstNode catchVar = clauseNode.getVarName();
Node node = newNode(Token.CATCH, transform(catchVar));
if (clauseNode.getCatchCondition() != null) {
errorReporter.error(
"Catch clauses are not supported",
sourceName,
clauseNode.getCatchCondition().getLineno(), "", 0);
}
node.addChildToBack(transformBlock(clauseNode.getBody()));
return node;
}
@Override
Node processConditionalExpression(ConditionalExpression exprNode) {
return newNode(
Token.HOOK,
transform(exprNode.getTestExpression()),
transform(exprNode.getTrueExpression()),
transform(exprNode.getFalseExpression()));
}
@Override
Node processContinueStatement(ContinueStatement statementNode) {
Node node = newNode(Token.CONTINUE);
if (statementNode.getLabel() != null) {
Node labelName = transform(statementNode.getLabel());
// Change the NAME to LABEL_NAME
labelName.setType(Token.LABEL_NAME);
node.addChildToBack(labelName);
}
return node;
}
@Override
Node processDoLoop(DoLoop loopNode) {
return newNode(
Token.DO,
transformBlock(loopNode.getBody()),
transform(loopNode.getCondition()));
}
@Override
Node processElementGet(ElementGet getNode) {
return newNode(
Token.GETELEM,
transform(getNode.getTarget()),
transform(getNode.getElement()));
}
@Override
Node processEmptyExpression(EmptyExpression exprNode) {
Node node = newNode(Token.EMPTY);
return node;
}
@Override
Node processExpressionStatement(ExpressionStatement statementNode) {
Node node = newNode(transformTokenType(statementNode.getType()));
node.addChildToBack(transform(statementNode.getExpression()));
return node;
}
@Override
Node processForInLoop(ForInLoop loopNode) {
return newNode(
Token.FOR,
transform(loopNode.getIterator()),
transform(loopNode.getIteratedObject()),
transformBlock(loopNode.getBody()));
}
@Override
Node processForLoop(ForLoop loopNode) {
Node node = newNode(
Token.FOR,
transform(loopNode.getInitializer()),
transform(loopNode.getCondition()),
transform(loopNode.getIncrement()));
node.addChildToBack(transformBlock(loopNode.getBody()));
return node;
}
@Override
Node processFunctionCall(FunctionCall callNode) {
Node node = newNode(transformTokenType(callNode.getType()),
transform(callNode.getTarget()));
for (AstNode child : callNode.getArguments()) {
node.addChildToBack(transform(child));
}
int leftParamPos = callNode.getAbsolutePosition() + callNode.getLp();
node.setLineno(callNode.getLineno());
node.setCharno(position2charno(leftParamPos));
return node;
}
@Override
Node processFunctionNode(FunctionNode functionNode) {
Name name = functionNode.getFunctionName();
Boolean isUnnamedFunction = false;
if (name == null) {
name = new Name();
name.setIdentifier("");
isUnnamedFunction = true;
}
Node node = newNode(Token.FUNCTION);
Node newName = transform(name);
if (isUnnamedFunction) {
// Old Rhino tagged the empty name node with the line number of the
// declaration.
newName.setLineno(functionNode.getLineno());
// TODO(bowdidge) Mark line number of paren correctly.
// Same problem as below - the left paren might not be on the
// same line as the function keyword.
int lpColumn = functionNode.getAbsolutePosition() +
functionNode.getLp();
newName.setCharno(position2charno(lpColumn));
}
node.addChildToBack(newName);
Node lp = newNode(Token.LP);
// The left paren's complicated because it's not represented by an
// AstNode, so there's nothing that has the actual line number that it
// appeared on. We know the paren has to appear on the same line as the
// function name (or else a semicolon will be inserted.) If there's no
// function name, assume the paren was on the same line as the function.
// TODO(bowdidge): Mark line number of paren correctly.
Name fnName = functionNode.getFunctionName();
if (fnName != null) {
lp.setLineno(fnName.getLineno());
} else {
lp.setLineno(functionNode.getLineno());
}
int lparenCharno = functionNode.getLp() +
functionNode.getAbsolutePosition();
lp.setCharno(position2charno(lparenCharno));
for (AstNode param : functionNode.getParams()) {
lp.addChildToBack(transform(param));
}
node.addChildToBack(lp);
Node bodyNode = transform(functionNode.getBody());
parseDirectives(bodyNode);
node.addChildToBack(bodyNode);
return node;
}
@Override
Node processIfStatement(IfStatement statementNode) {
Node node = newNode(Token.IF);
node.addChildToBack(transform(statementNode.getCondition()));
node.addChildToBack(transformBlock(statementNode.getThenPart()));
if (statementNode.getElsePart() != null) {
node.addChildToBack(transformBlock(statementNode.getElsePart()));
}
return node;
}
@Override
Node processInfixExpression(InfixExpression exprNode) {
Node n = newNode(
transformTokenType(exprNode.getType()),
transform(exprNode.getLeft()),
transform(exprNode.getRight()));
// Set the line number here so we can fine-tune it in ways transform
// doesn't do.
n.setLineno(exprNode.getLineno());
// Position in new ASTNode is to start of expression, but old-fashioned
// line numbers from Node reference the operator token. Add the offset
// to the operator to get the correct character number.
n.setCharno(position2charno(exprNode.getAbsolutePosition() +
exprNode.getOperatorPosition()));
return n;
}
@Override
Node processKeywordLiteral(KeywordLiteral literalNode) {
return newNode(transformTokenType(literalNode.getType()));
}
@Override
Node processLabel(Label labelNode) {
return newStringNode(Token.LABEL_NAME, labelNode.getName());
}
@Override
Node processLabeledStatement(LabeledStatement statementNode) {
Node node = newNode(Token.LABEL);
Node prev = null;
Node cur = node;
for (Label label : statementNode.getLabels()) {
if (prev != null) {
prev.addChildToBack(cur);
}
cur.addChildToBack(transform(label));
cur.setLineno(label.getLineno());
int clauseAbsolutePosition =
position2charno(label.getAbsolutePosition());
cur.setCharno(clauseAbsolutePosition);
prev = cur;
cur = newNode(Token.LABEL);
}
prev.addChildToBack(transform(statementNode.getStatement()));
return node;
}
@Override
Node processName(Name nameNode) {
return newStringNode(Token.NAME, nameNode.getIdentifier());
}
@Override
Node processNewExpression(NewExpression exprNode) {
return processFunctionCall(exprNode);
}
@Override
Node processNumberLiteral(NumberLiteral literalNode) {
return newNumberNode(literalNode.getNumber());
}
@Override
Node processObjectLiteral(ObjectLiteral literalNode) {
if (literalNode.isDestructuring()) {
reportDestructuringAssign(literalNode);
}
Node node = newNode(Token.OBJECTLIT);
for (ObjectProperty el : literalNode.getElements()) {
if (!config.acceptES5) {
if (el.isGetter()) {
reportGetter(el);
continue;
} else if (el.isSetter()) {
reportSetter(el);
continue;
}
}
Node key = transformAsString(el.getLeft());
if (el.isGetter()) {
key.setType(Token.GET);
} else if (el.isSetter()) {
key.setType(Token.SET);
}
key.addChildToFront(transform(el.getRight()));
node.addChildToBack(key);
}
return node;
}
@Override
Node processObjectProperty(ObjectProperty propertyNode) {
return processInfixExpression(propertyNode);
}
@Override
Node processParenthesizedExpression(ParenthesizedExpression exprNode) {
Node node = transform(exprNode.getExpression());
node.putProp(Node.PARENTHESIZED_PROP, Boolean.TRUE);
return node;
}
@Override
Node processPropertyGet(PropertyGet getNode) {
return newNode(
Token.GETPROP,
transform(getNode.getTarget()),
transformAsString(getNode.getProperty()));
}
@Override
Node processRegExpLiteral(RegExpLiteral literalNode) {
Node literalStringNode = newStringNode(literalNode.getValue());
// assume it's on the same line.
literalStringNode.setLineno(literalNode.getLineno());
Node node = newNode(Token.REGEXP, literalStringNode);
String flags = literalNode.getFlags();
if (flags != null && !flags.isEmpty()) {
Node flagsNode = newStringNode(flags);
// Assume the flags are on the same line as the literal node.
flagsNode.setLineno(literalNode.getLineno());
node.addChildToBack(flagsNode);
}
return node;
}
@Override
Node processReturnStatement(ReturnStatement statementNode) {
Node node = newNode(Token.RETURN);
if (statementNode.getReturnValue() != null) {
node.addChildToBack(transform(statementNode.getReturnValue()));
}
return node;
}
@Override
Node processScope(Scope scopeNode) {
return processGeneric(scopeNode);
}
@Override
Node processStringLiteral(StringLiteral literalNode) {
Node n = newStringNode(literalNode.getValue());
return n;
}
@Override
Node processSwitchCase(SwitchCase caseNode) {
Node node;
if (caseNode.isDefault()) {
node = newNode(Token.DEFAULT);
} else {
AstNode expr = caseNode.getExpression();
node = newNode(Token.CASE, transform(expr));
}
Node block = newNode(Token.BLOCK);
block.putBooleanProp(Node.SYNTHETIC_BLOCK_PROP, true);
block.setLineno(caseNode.getLineno());
block.setCharno(position2charno(caseNode.getAbsolutePosition()));
if (caseNode.getStatements() != null) {
for (AstNode child : caseNode.getStatements()) {
block.addChildToBack(transform(child));
}
}
node.addChildToBack(block);
return node;
}
@Override
Node processSwitchStatement(SwitchStatement statementNode) {
Node node = newNode(Token.SWITCH,
transform(statementNode.getExpression()));
for (AstNode child : statementNode.getCases()) {
node.addChildToBack(transform(child));
}
return node;
}
@Override
Node processThrowStatement(ThrowStatement statementNode) {
return newNode(Token.THROW,
transform(statementNode.getExpression()));
}
@Override
Node processTryStatement(TryStatement statementNode) {
Node node = newNode(Token.TRY,
transformBlock(statementNode.getTryBlock()));
Node block = newNode(Token.BLOCK);
boolean lineSet = false;
for (CatchClause cc : statementNode.getCatchClauses()) {
// Mark the enclosing block at the same line as the first catch
// clause.
if (lineSet == false) {
block.setLineno(cc.getLineno());
lineSet = true;
}
block.addChildToBack(transform(cc));
}
node.addChildToBack(block);
AstNode finallyBlock = statementNode.getFinallyBlock();
if (finallyBlock != null) {
node.addChildToBack(transformBlock(finallyBlock));
}
// If we didn't set the line on the catch clause, then
// we've got an empty catch clause. Set its line to be the same
// as the finally block (to match Old Rhino's behavior.)
if ((lineSet == false) && (finallyBlock != null)) {
block.setLineno(finallyBlock.getLineno());
}
return node;
}
@Override
Node processUnaryExpression(UnaryExpression exprNode) {
  int op = transformTokenType(exprNode.getType());
  Node operand = transform(exprNode.getOperand());
  // Fold unary minus applied to a numeric literal directly into the
  // literal instead of emitting a NEG node.
  if (op == Token.NEG && operand.getType() == Token.NUMBER) {
    operand.setDouble(-operand.getDouble());
    return operand;
  }
  Node unary = newNode(op, operand);
  // Postfix ++/-- is distinguished from the prefix form via INCRDECR_PROP.
  if (exprNode.isPostfix()) {
    unary.putBooleanProp(Node.INCRDECR_PROP, true);
  }
  return unary;
}
@Override
Node processVariableDeclaration(VariableDeclaration declarationNode) {
  // VAR node with one child per declared variable, in declaration order.
  Node varNode = newNode(Token.VAR);
  for (VariableInitializer declaredVar : declarationNode.getVariables()) {
    varNode.addChildToBack(transform(declaredVar));
  }
  return varNode;
}
@Override
Node processVariableInitializer(VariableInitializer initializerNode) {
  // The declared target (a NAME); when an initializer expression is
  // present it becomes the target's child.
  Node node = transform(initializerNode.getTarget());
  AstNode initializer = initializerNode.getInitializer();
  if (initializer != null) {
    node.addChildToBack(transform(initializer));
    // NOTE(review): the original code called node.setLineno(node.getLineno())
    // here — a no-op, presumably intended to copy a line number from the
    // initializer. Removed as dead code; confirm no line-number fix was meant.
  }
  return node;
}
@Override
Node processWhileLoop(WhileLoop loopNode) {
  // WHILE node: the condition first, then the loop body block.
  Node condition = transform(loopNode.getCondition());
  Node body = transformBlock(loopNode.getBody());
  return newNode(Token.WHILE, condition, body);
}
@Override
Node processWithStatement(WithStatement statementNode) {
  // WITH node: the scope-object expression first, then the statement block.
  Node scopeObject = transform(statementNode.getExpression());
  Node body = transformBlock(statementNode.getStatement());
  return newNode(Token.WITH, scopeObject, body);
}
@Override
Node processIllegalToken(AstNode node) {
  // Report the unsupported construct and substitute an EMPTY node so the
  // rest of the transform can proceed.
  String tokenName =
      com.google.javascript.jscomp.mozilla.rhino.Token.typeToName(node.getType());
  errorReporter.error(
      "Unsupported syntax: " + tokenName,
      sourceName,
      node.getLineno(), "", 0);
  return newNode(Token.EMPTY);
}
// Flags a destructuring assignment, which this transform does not support.
void reportDestructuringAssign(AstNode node) {
  String message = "destructuring assignment forbidden";
  errorReporter.error(message, sourceName, node.getLineno(), "", 0);
}
// Flags a getter definition, rejected for Internet Explorer compatibility.
void reportGetter(AstNode node) {
  String message = "getters are not supported in Internet Explorer";
  errorReporter.error(message, sourceName, node.getLineno(), "", 0);
}
// Flags a setter definition, rejected for Internet Explorer compatibility.
void reportSetter(AstNode node) {
  String message = "setters are not supported in Internet Explorer";
  errorReporter.error(message, sourceName, node.getLineno(), "", 0);
}
}
// Maps a Rhino parser token type to the corresponding internal Token
// constant. The mapping is one-to-one by name, with a single exception:
// EXPR_VOID is collapsed into EXPR_RESULT (see the fall-through below).
// Any token not listed here is a programming error and throws.
private static int transformTokenType(int token) {
  switch (token) {
    case com.google.javascript.jscomp.mozilla.rhino.Token.ERROR:
      return Token.ERROR;
    case com.google.javascript.jscomp.mozilla.rhino.Token.EOF:
      return Token.EOF;
    case com.google.javascript.jscomp.mozilla.rhino.Token.EOL:
      return Token.EOL;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ENTERWITH:
      return Token.ENTERWITH;
    case com.google.javascript.jscomp.mozilla.rhino.Token.LEAVEWITH:
      return Token.LEAVEWITH;
    case com.google.javascript.jscomp.mozilla.rhino.Token.RETURN:
      return Token.RETURN;
    case com.google.javascript.jscomp.mozilla.rhino.Token.GOTO:
      return Token.GOTO;
    case com.google.javascript.jscomp.mozilla.rhino.Token.IFEQ:
      return Token.IFEQ;
    case com.google.javascript.jscomp.mozilla.rhino.Token.IFNE:
      return Token.IFNE;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SETNAME:
      return Token.SETNAME;
    case com.google.javascript.jscomp.mozilla.rhino.Token.BITOR:
      return Token.BITOR;
    case com.google.javascript.jscomp.mozilla.rhino.Token.BITXOR:
      return Token.BITXOR;
    case com.google.javascript.jscomp.mozilla.rhino.Token.BITAND:
      return Token.BITAND;
    case com.google.javascript.jscomp.mozilla.rhino.Token.EQ:
      return Token.EQ;
    case com.google.javascript.jscomp.mozilla.rhino.Token.NE:
      return Token.NE;
    case com.google.javascript.jscomp.mozilla.rhino.Token.LT:
      return Token.LT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.LE:
      return Token.LE;
    case com.google.javascript.jscomp.mozilla.rhino.Token.GT:
      return Token.GT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.GE:
      return Token.GE;
    case com.google.javascript.jscomp.mozilla.rhino.Token.LSH:
      return Token.LSH;
    case com.google.javascript.jscomp.mozilla.rhino.Token.RSH:
      return Token.RSH;
    case com.google.javascript.jscomp.mozilla.rhino.Token.URSH:
      return Token.URSH;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ADD:
      return Token.ADD;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SUB:
      return Token.SUB;
    case com.google.javascript.jscomp.mozilla.rhino.Token.MUL:
      return Token.MUL;
    case com.google.javascript.jscomp.mozilla.rhino.Token.DIV:
      return Token.DIV;
    case com.google.javascript.jscomp.mozilla.rhino.Token.MOD:
      return Token.MOD;
    case com.google.javascript.jscomp.mozilla.rhino.Token.NOT:
      return Token.NOT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.BITNOT:
      return Token.BITNOT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.POS:
      return Token.POS;
    case com.google.javascript.jscomp.mozilla.rhino.Token.NEG:
      return Token.NEG;
    case com.google.javascript.jscomp.mozilla.rhino.Token.NEW:
      return Token.NEW;
    case com.google.javascript.jscomp.mozilla.rhino.Token.DELPROP:
      return Token.DELPROP;
    case com.google.javascript.jscomp.mozilla.rhino.Token.TYPEOF:
      return Token.TYPEOF;
    case com.google.javascript.jscomp.mozilla.rhino.Token.GETPROP:
      return Token.GETPROP;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SETPROP:
      return Token.SETPROP;
    case com.google.javascript.jscomp.mozilla.rhino.Token.GETELEM:
      return Token.GETELEM;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SETELEM:
      return Token.SETELEM;
    case com.google.javascript.jscomp.mozilla.rhino.Token.CALL:
      return Token.CALL;
    case com.google.javascript.jscomp.mozilla.rhino.Token.NAME:
      return Token.NAME;
    case com.google.javascript.jscomp.mozilla.rhino.Token.NUMBER:
      return Token.NUMBER;
    case com.google.javascript.jscomp.mozilla.rhino.Token.STRING:
      return Token.STRING;
    case com.google.javascript.jscomp.mozilla.rhino.Token.NULL:
      return Token.NULL;
    case com.google.javascript.jscomp.mozilla.rhino.Token.THIS:
      return Token.THIS;
    case com.google.javascript.jscomp.mozilla.rhino.Token.FALSE:
      return Token.FALSE;
    case com.google.javascript.jscomp.mozilla.rhino.Token.TRUE:
      return Token.TRUE;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SHEQ:
      return Token.SHEQ;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SHNE:
      return Token.SHNE;
    case com.google.javascript.jscomp.mozilla.rhino.Token.REGEXP:
      return Token.REGEXP;
    case com.google.javascript.jscomp.mozilla.rhino.Token.BINDNAME:
      return Token.BINDNAME;
    case com.google.javascript.jscomp.mozilla.rhino.Token.THROW:
      return Token.THROW;
    case com.google.javascript.jscomp.mozilla.rhino.Token.RETHROW:
      return Token.RETHROW;
    case com.google.javascript.jscomp.mozilla.rhino.Token.IN:
      return Token.IN;
    case com.google.javascript.jscomp.mozilla.rhino.Token.INSTANCEOF:
      return Token.INSTANCEOF;
    case com.google.javascript.jscomp.mozilla.rhino.Token.LOCAL_LOAD:
      return Token.LOCAL_LOAD;
    case com.google.javascript.jscomp.mozilla.rhino.Token.GETVAR:
      return Token.GETVAR;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SETVAR:
      return Token.SETVAR;
    case com.google.javascript.jscomp.mozilla.rhino.Token.CATCH_SCOPE:
      return Token.CATCH_SCOPE;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ENUM_INIT_KEYS:
      return Token.ENUM_INIT_KEYS;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ENUM_INIT_VALUES:
      return Token.ENUM_INIT_VALUES;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ENUM_NEXT:
      return Token.ENUM_NEXT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ENUM_ID:
      return Token.ENUM_ID;
    case com.google.javascript.jscomp.mozilla.rhino.Token.THISFN:
      return Token.THISFN;
    case com.google.javascript.jscomp.mozilla.rhino.Token.RETURN_RESULT:
      return Token.RETURN_RESULT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ARRAYLIT:
      return Token.ARRAYLIT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.OBJECTLIT:
      return Token.OBJECTLIT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.GET_REF:
      return Token.GET_REF;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SET_REF:
      return Token.SET_REF;
    case com.google.javascript.jscomp.mozilla.rhino.Token.DEL_REF:
      return Token.DEL_REF;
    case com.google.javascript.jscomp.mozilla.rhino.Token.REF_CALL:
      return Token.REF_CALL;
    case com.google.javascript.jscomp.mozilla.rhino.Token.REF_SPECIAL:
      return Token.REF_SPECIAL;
    case com.google.javascript.jscomp.mozilla.rhino.Token.DEFAULTNAMESPACE:
      return Token.DEFAULTNAMESPACE;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ESCXMLTEXT:
      return Token.ESCXMLTEXT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ESCXMLATTR:
      return Token.ESCXMLATTR;
    case com.google.javascript.jscomp.mozilla.rhino.Token.REF_MEMBER:
      return Token.REF_MEMBER;
    case com.google.javascript.jscomp.mozilla.rhino.Token.REF_NS_MEMBER:
      return Token.REF_NS_MEMBER;
    case com.google.javascript.jscomp.mozilla.rhino.Token.REF_NAME:
      return Token.REF_NAME;
    case com.google.javascript.jscomp.mozilla.rhino.Token.REF_NS_NAME:
      return Token.REF_NS_NAME;
    case com.google.javascript.jscomp.mozilla.rhino.Token.TRY:
      return Token.TRY;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SEMI:
      return Token.SEMI;
    case com.google.javascript.jscomp.mozilla.rhino.Token.LB:
      return Token.LB;
    case com.google.javascript.jscomp.mozilla.rhino.Token.RB:
      return Token.RB;
    case com.google.javascript.jscomp.mozilla.rhino.Token.LC:
      return Token.LC;
    case com.google.javascript.jscomp.mozilla.rhino.Token.RC:
      return Token.RC;
    case com.google.javascript.jscomp.mozilla.rhino.Token.LP:
      return Token.LP;
    case com.google.javascript.jscomp.mozilla.rhino.Token.RP:
      return Token.RP;
    case com.google.javascript.jscomp.mozilla.rhino.Token.COMMA:
      return Token.COMMA;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ASSIGN:
      return Token.ASSIGN;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ASSIGN_BITOR:
      return Token.ASSIGN_BITOR;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ASSIGN_BITXOR:
      return Token.ASSIGN_BITXOR;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ASSIGN_BITAND:
      return Token.ASSIGN_BITAND;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ASSIGN_LSH:
      return Token.ASSIGN_LSH;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ASSIGN_RSH:
      return Token.ASSIGN_RSH;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ASSIGN_URSH:
      return Token.ASSIGN_URSH;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ASSIGN_ADD:
      return Token.ASSIGN_ADD;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ASSIGN_SUB:
      return Token.ASSIGN_SUB;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ASSIGN_MUL:
      return Token.ASSIGN_MUL;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ASSIGN_DIV:
      return Token.ASSIGN_DIV;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ASSIGN_MOD:
      return Token.ASSIGN_MOD;
    case com.google.javascript.jscomp.mozilla.rhino.Token.HOOK:
      return Token.HOOK;
    case com.google.javascript.jscomp.mozilla.rhino.Token.COLON:
      return Token.COLON;
    case com.google.javascript.jscomp.mozilla.rhino.Token.OR:
      return Token.OR;
    case com.google.javascript.jscomp.mozilla.rhino.Token.AND:
      return Token.AND;
    case com.google.javascript.jscomp.mozilla.rhino.Token.INC:
      return Token.INC;
    case com.google.javascript.jscomp.mozilla.rhino.Token.DEC:
      return Token.DEC;
    case com.google.javascript.jscomp.mozilla.rhino.Token.DOT:
      return Token.DOT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.FUNCTION:
      return Token.FUNCTION;
    case com.google.javascript.jscomp.mozilla.rhino.Token.EXPORT:
      return Token.EXPORT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.IMPORT:
      return Token.IMPORT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.IF:
      return Token.IF;
    case com.google.javascript.jscomp.mozilla.rhino.Token.ELSE:
      return Token.ELSE;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SWITCH:
      return Token.SWITCH;
    case com.google.javascript.jscomp.mozilla.rhino.Token.CASE:
      return Token.CASE;
    case com.google.javascript.jscomp.mozilla.rhino.Token.DEFAULT:
      return Token.DEFAULT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.WHILE:
      return Token.WHILE;
    case com.google.javascript.jscomp.mozilla.rhino.Token.DO:
      return Token.DO;
    case com.google.javascript.jscomp.mozilla.rhino.Token.FOR:
      return Token.FOR;
    case com.google.javascript.jscomp.mozilla.rhino.Token.BREAK:
      return Token.BREAK;
    case com.google.javascript.jscomp.mozilla.rhino.Token.CONTINUE:
      return Token.CONTINUE;
    case com.google.javascript.jscomp.mozilla.rhino.Token.VAR:
      return Token.VAR;
    case com.google.javascript.jscomp.mozilla.rhino.Token.WITH:
      return Token.WITH;
    case com.google.javascript.jscomp.mozilla.rhino.Token.CATCH:
      return Token.CATCH;
    case com.google.javascript.jscomp.mozilla.rhino.Token.FINALLY:
      return Token.FINALLY;
    case com.google.javascript.jscomp.mozilla.rhino.Token.VOID:
      return Token.VOID;
    case com.google.javascript.jscomp.mozilla.rhino.Token.RESERVED:
      return Token.RESERVED;
    case com.google.javascript.jscomp.mozilla.rhino.Token.EMPTY:
      return Token.EMPTY;
    case com.google.javascript.jscomp.mozilla.rhino.Token.BLOCK:
      return Token.BLOCK;
    case com.google.javascript.jscomp.mozilla.rhino.Token.LABEL:
      return Token.LABEL;
    case com.google.javascript.jscomp.mozilla.rhino.Token.TARGET:
      return Token.TARGET;
    case com.google.javascript.jscomp.mozilla.rhino.Token.LOOP:
      return Token.LOOP;
    // Deliberate fall-through: EXPR_VOID and EXPR_RESULT both map to
    // EXPR_RESULT in the internal representation.
    case com.google.javascript.jscomp.mozilla.rhino.Token.EXPR_VOID:
    case com.google.javascript.jscomp.mozilla.rhino.Token.EXPR_RESULT:
      return Token.EXPR_RESULT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.JSR:
      return Token.JSR;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SCRIPT:
      return Token.SCRIPT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.TYPEOFNAME:
      return Token.TYPEOFNAME;
    case com.google.javascript.jscomp.mozilla.rhino.Token.USE_STACK:
      return Token.USE_STACK;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SETPROP_OP:
      return Token.SETPROP_OP;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SETELEM_OP:
      return Token.SETELEM_OP;
    case com.google.javascript.jscomp.mozilla.rhino.Token.LOCAL_BLOCK:
      return Token.LOCAL_BLOCK;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SET_REF_OP:
      return Token.SET_REF_OP;
    case com.google.javascript.jscomp.mozilla.rhino.Token.DOTDOT:
      return Token.DOTDOT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.COLONCOLON:
      return Token.COLONCOLON;
    case com.google.javascript.jscomp.mozilla.rhino.Token.XML:
      return Token.XML;
    case com.google.javascript.jscomp.mozilla.rhino.Token.DOTQUERY:
      return Token.DOTQUERY;
    case com.google.javascript.jscomp.mozilla.rhino.Token.XMLATTR:
      return Token.XMLATTR;
    case com.google.javascript.jscomp.mozilla.rhino.Token.XMLEND:
      return Token.XMLEND;
    case com.google.javascript.jscomp.mozilla.rhino.Token.TO_OBJECT:
      return Token.TO_OBJECT;
    case com.google.javascript.jscomp.mozilla.rhino.Token.TO_DOUBLE:
      return Token.TO_DOUBLE;
    case com.google.javascript.jscomp.mozilla.rhino.Token.GET:
      return Token.GET;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SET:
      return Token.SET;
    case com.google.javascript.jscomp.mozilla.rhino.Token.CONST:
      return Token.CONST;
    case com.google.javascript.jscomp.mozilla.rhino.Token.SETCONST:
      return Token.SETCONST;
    case com.google.javascript.jscomp.mozilla.rhino.Token.DEBUGGER:
      return Token.DEBUGGER;
  }
  // Token without name
  throw new IllegalStateException(String.valueOf(token));
}
// Simple helper to create nodes and set the initial node properties.
// Creates a childless node of the given type, copying the template node's
// shared properties onto it.
private Node newNode(int type) {
  return new Node(type).clonePropsFrom(templateNode);
}
// One-child variant of newNode(int).
private Node newNode(int type, Node child1) {
  return new Node(type, child1).clonePropsFrom(templateNode);
}
// Two-child variant of newNode(int).
private Node newNode(int type, Node child1, Node child2) {
  return new Node(type, child1, child2).clonePropsFrom(templateNode);
}
// Three-child variant of newNode(int).
private Node newNode(int type, Node child1, Node child2, Node child3) {
  return new Node(type, child1, child2, child3).clonePropsFrom(templateNode);
}
// Creates a STRING node carrying the given value, with template props.
private Node newStringNode(String value) {
  return Node.newString(value).clonePropsFrom(templateNode);
}
// Creates a string-valued node of an explicit type (e.g. NAME), with
// template props.
private Node newStringNode(int type, String value) {
  return Node.newString(type, value).clonePropsFrom(templateNode);
}
// Creates a NUMBER node carrying the given value, with template props.
private Node newNumberNode(Double value) {
  return Node.newNumber(value).clonePropsFrom(templateNode);
}
}
| |
package eu.ehealth.db.wservices.measurements;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import javax.xml.datatype.DatatypeFactory;
import org.hibernate.Session;
import eu.ehealth.Globals;
import eu.ehealth.SystemDictionary;
import eu.ehealth.db.DbStorageComponent;
import eu.ehealth.db.xsd.Measurement;
import eu.ehealth.db.xsd.SystemParameter;
/**
*
* @author a572832
*
* @param <R>
* @param <P>
*/
public abstract class BaseMeasurementsOperations<R, P> extends DbStorageComponent<R, P>
{
/**
*
* @param session
*/
public BaseMeasurementsOperations(Session session)
{
super(session);
}
/**
* Store measurement
*
* @param xMeasurement measurement
* @param patientAssessmentId id of the patientAssessment
* @return id of the stored data
*/
public Integer importMeasurement(Measurement xMeasurement, Integer patientAssessmentId)
{
long timeInMillis = 0;
eu.ehealth.db.db.Measurement dMeasurement = new eu.ehealth.db.db.Measurement();
if (patientAssessmentId != null)
{
dMeasurement.setPatientassessment(patientAssessmentId);
}
String type = xMeasurement.getType().getCode();
if (type.equals(Globals.ClientAppValue_DiastolicBloodPressure) || type.equals(Globals.ClientAppValue_SystolicBloodPressure))
{
type = Globals.ServerAppValue_BloodPressure;
}
dMeasurement.setType(type);
dMeasurement.setValue(new BigDecimal(xMeasurement.getValue()));
if (xMeasurement.getDateTime() != null)
{
timeInMillis = xMeasurement.getDateTime().toGregorianCalendar().getTimeInMillis();
}
dMeasurement.setDatetime(new Timestamp(timeInMillis));
dMeasurement.setUnits(xMeasurement.getUnits());
dMeasurement.setLowerlimit(new BigDecimal(xMeasurement.getLowerLimit()));
dMeasurement.setUpperlimit(new BigDecimal(xMeasurement.getUpperLimit()));
if (xMeasurement.getTaskID() != null)
{
dMeasurement.setTask(new Integer(xMeasurement.getTaskID()));
}
_session.save(dMeasurement);
return dMeasurement.getId();
}
/**
* Export measurement
*
* @param dMeasurement measurement for export
* @return XSD conform
*/
protected Measurement exportMeasurement(eu.ehealth.db.db.Measurement dMeasurement)
{
Measurement xMeasurement = new Measurement();
SystemParameter rmeasurementType = new SystemParameter();
rmeasurementType.setCode(dMeasurement.getType());
xMeasurement.setType(rmeasurementType);
xMeasurement.setValue(dMeasurement.getValue().doubleValue());
Timestamp datetime = dMeasurement.getDatetime();
GregorianCalendar c = new GregorianCalendar();
c.setTimeInMillis(datetime.getTime());
try
{
xMeasurement.setDateTime(DatatypeFactory.newInstance().newXMLGregorianCalendar(c));
}
catch (Exception ex) {}
xMeasurement.setUnits(dMeasurement.getUnits());
xMeasurement.setLowerLimit(dMeasurement.getLowerlimit().doubleValue());
xMeasurement.setUpperLimit(dMeasurement.getUpperlimit().doubleValue());
return xMeasurement;
}
/**
*
* @param patientId
* @param _fromDate
* @param _toDate
* @param measurementType
* @return
*/
protected List<Measurement> getPatientMeasurement(Integer patientId, Calendar _fromDate, Calendar _toDate, String measurementType)
{
ArrayList<Measurement> export = new ArrayList<Measurement>();
for (eu.ehealth.db.db.Measurement m : getPatientMeasurementX(patientId, _fromDate, _toDate, measurementType))
{
export.add(exportMeasurement(m));
}
return export;
}
/**
*
* @param patientId
* @param _fromDate
* @param _toDate
* @param measurementType
* @return
*/
@SuppressWarnings("deprecation")
private List<eu.ehealth.db.db.Measurement> getPatientMeasurementX(Integer patientId, Calendar _fromDate, Calendar _toDate, String measurementType)
{
String fromDate = _fromDate.toString();
String fromDateSQLFormat = _fromDate.get(Calendar.YEAR) + "-" +
(_fromDate.get(Calendar.MONTH) + 1) + "-" +
_fromDate.get(Calendar.DAY_OF_MONTH) + " " +
"00:00:00";
String toDate = _toDate.toString();
String toDateSQLFormat = _toDate.get(Calendar.YEAR) + "-" +
(_toDate.get(Calendar.MONTH) + 1) + "-" +
_toDate.get(Calendar.DAY_OF_MONTH) + " " +
"23:59:59";
if (fromDate.compareTo(toDate) == 0)
{
Date time = _fromDate.getTime();
time.setHours(time.getHours() + 23);
time.setMinutes(time.getMinutes() + 59);
time.setSeconds(time.getSeconds() + 59);
toDate = time.toString();
}
else
{
Date time1 = _toDate.getTime();
time1.setHours(23);
time1.setMinutes(59);
time1.setSeconds(59);
toDate = time1.toString();
Date time2 = _fromDate.getTime();
time2.setHours(0);
time2.setMinutes(0);
time2.setSeconds(0);
fromDate = time2.toString();
}
String sql = "";
if (SystemDictionary.DATABASE == SystemDictionary.DataBase.MySQL)
{
// compare dates : example ... STR_TO_DATE('2013-12-31 00:00:01', '%Y-%m-%d %H:%i:%s')
sql = "SELECT m.id FROM measurement as m inner join task as t on (t.id = m.task) inner join aladdinuser as u on (u.id = t.object) WHERE u.personid = '"
+ patientId.toString()
+ "' AND m.datetime BETWEEN STR_TO_DATE('"
+ fromDateSQLFormat
+ "', '%Y-%m-%d %H:%i:%s') AND STR_TO_DATE('"
+ toDateSQLFormat
+ "', '%Y-%m-%d %H:%i:%s') AND m.type = '"
+ measurementType.toString() + "'";
}
else
{
sql = "SELECT m.id FROM measurement as m inner join task as t on (t.id = m.task) inner join aladdinuser as u on (u.id = t.object) WHERE u.personid = '"
+ patientId.toString()
+ "' AND m.datetime BETWEEN '"
+ fromDate
+ "' AND '"
+ toDate
+ "' AND m.type = '"
+ measurementType.toString() + "'";
}
SystemDictionary.webguiLog("DEBUG", sql);
Object[] ml = _session.createSQLQuery(sql).list().toArray();
ArrayList<eu.ehealth.db.db.Measurement> export = new ArrayList<eu.ehealth.db.db.Measurement>();
for (int i = 0; i < ml.length; i++)
{
Integer id = (Integer) ml[i];
eu.ehealth.db.db.Measurement m = (eu.ehealth.db.db.Measurement) _session.load(eu.ehealth.db.db.Measurement.class, id);
export.add(m);
}
return export;
}
}
| |
/*******************************************************************************
* Copyright 2015 Maximilian Stark | Dakror <mail@dakror.de>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package de.dakror.vloxlands.game;
import java.util.Random;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input.Buttons;
import com.badlogic.gdx.Input.Keys;
import com.badlogic.gdx.graphics.Camera;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.PerspectiveCamera;
import com.badlogic.gdx.graphics.g3d.Environment;
import com.badlogic.gdx.graphics.g3d.Material;
import com.badlogic.gdx.graphics.g3d.ModelBatch;
import com.badlogic.gdx.graphics.g3d.ModelInstance;
import com.badlogic.gdx.graphics.g3d.attributes.BlendingAttribute;
import com.badlogic.gdx.graphics.g3d.attributes.ColorAttribute;
import com.badlogic.gdx.graphics.g3d.environment.DirectionalLight;
import com.badlogic.gdx.graphics.g3d.environment.DirectionalShadowLight;
import com.badlogic.gdx.graphics.g3d.utils.CameraInputController;
import com.badlogic.gdx.graphics.g3d.utils.DepthShaderProvider;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer.ShapeType;
import com.badlogic.gdx.math.Interpolation;
import com.badlogic.gdx.math.Intersector;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.math.Matrix4;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.math.collision.BoundingBox;
import com.badlogic.gdx.math.collision.Ray;
import com.badlogic.gdx.utils.Array;
import de.dakror.vloxlands.Config;
import de.dakror.vloxlands.Vloxlands;
import de.dakror.vloxlands.ai.path.BFS;
import de.dakror.vloxlands.ai.path.node.BFSNode;
import de.dakror.vloxlands.game.entity.Entity;
import de.dakror.vloxlands.game.entity.creature.Creature;
import de.dakror.vloxlands.game.entity.creature.Human;
import de.dakror.vloxlands.game.entity.statics.StaticEntity;
import de.dakror.vloxlands.game.entity.structure.Structure;
import de.dakror.vloxlands.game.entity.structure.Towncenter;
import de.dakror.vloxlands.game.item.Item;
import de.dakror.vloxlands.game.item.ItemStack;
import de.dakror.vloxlands.game.query.VoxelPos;
import de.dakror.vloxlands.game.voxel.Voxel;
import de.dakror.vloxlands.game.world.Chunk;
import de.dakror.vloxlands.game.world.Island;
import de.dakror.vloxlands.game.world.World;
import de.dakror.vloxlands.layer.Layer;
import de.dakror.vloxlands.render.DDirectionalShadowLight;
import de.dakror.vloxlands.render.MeshingThread;
import de.dakror.vloxlands.util.Direction;
import de.dakror.vloxlands.util.event.SelectionListener;
import de.dakror.vloxlands.util.event.VoxelSelection;
import de.dakror.vloxlands.util.math.CustomizableFrustum;
/**
* @author Dakror
*/
@SuppressWarnings("deprecation")
public class Game extends Layer {
// World-generation seed; randomized per run and logged in show().
public static long seed = (long) (Math.random() * Long.MAX_VALUE);
public static final float velocity = 10;
public static final float rotateSpeed = 0.2f;
// Far plane distance, also used as the maximum pick-ray length.
public static float pickRayMaxDistance = 150f;
public static final int dayInTicks = 72020; // 1 ingame day = 72020 ticks = 1200s = 20min
// Singleton-style access; assigned in show().
public static Game instance;
public static World world;
public static Camera camera;
public static float time = 0.99999999999f;
// Main and minimap rendering environments/cameras/batches.
public Environment env;
public Array<SelectionListener> listeners = new Array<SelectionListener>();
public Environment minimapEnv;
public Camera minimapCamera;
public ModelBatch minimapBatch;
// Ghost entity following the cursor while the player places a structure.
public StaticEntity cursorEntity;
boolean cursorEntityPlacable;
boolean cursorEntityContinousPlacing;
Array<Material> defaultCursorEntityMaterials;
public String activeAction = "";
public Island activeIsland;
// Lighting: shadow-casting light plus a plain directional fill light.
public DirectionalShadowLight shadowLight;
DirectionalLight directionalLight;
public CameraInputController controller;
ModelBatch modelBatch;
ModelBatch shadowBatch;
boolean middleDown;
// Set by doneLoading(); render() is a no-op until then.
boolean doneLoading;
ModelInstance sky;
// Tick bookkeeping for the camera travel animation (see focusIsland()).
int tick;
int ticksForTravel;
int startTick;
public boolean regionSelectionMode = false;
boolean regionSelectionLMB;
// Voxel selection state; x == -1 is used as the "nothing selected" marker.
public Vector3 hoveredVoxel = new Vector3();
public Vector3 selectedVoxel = new Vector3();
public Vector3 selectionStartVoxel = new Vector3(-1, 0, 0);
// Camera interpolation endpoints used while traveling between islands.
Vector3 controllerTarget = new Vector3();
Vector3 cameraPos = new Vector3();
Vector3 target = new Vector3();
Vector3 targetDirection = new Vector3();
Vector3 targetUp = new Vector3();
Vector2 mouseDown = new Vector2();
// -- temp -- //
// Pre-allocated scratch objects to avoid per-frame allocations.
public final Vector3 tmp = new Vector3();
public final Vector3 tmp1 = new Vector3();
public final Vector3 tmp2 = new Vector3();
public final Vector3 tmp3 = new Vector3();
public final Vector3 tmp4 = new Vector3();
public final Vector3 tmp5 = new Vector3();
public final Vector3 tmp6 = new Vector3();
public final Vector3 tmp7 = new Vector3();
public final Vector3 tmp8 = new Vector3();
public final Matrix4 m4 = new Matrix4();
public final BoundingBox bb = new BoundingBox();
public final BoundingBox bb2 = new BoundingBox();
public final BoundingBox bb3 = new BoundingBox();
@Override
public void show() {
  // One-time scene setup: seeded RNG, batches, cameras, input controller,
  // lighting/shadows, and world creation. Order matters: the camera must
  // exist before the controller and the shadow light are built from it.
  modal = true;
  instance = this;
  Gdx.app.log("GameLayer.show", "Seed: " + seed + "");
  // Seed libgdx's shared RNG so world generation is reproducible per seed.
  MathUtils.random = new Random(seed);
  modelBatch = new ModelBatch(Gdx.files.internal("shader/shader.vs"), Gdx.files.internal("shader/shader.fs"));
  minimapBatch = new ModelBatch(Gdx.files.internal("shader/shader.vs"), Gdx.files.internal("shader/shader.fs"));
  camera = new PerspectiveCamera(Config.fov, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
  camera.near = 0.1f;
  camera.far = pickRayMaxDistance;
  // Custom controller: clamps vertical rotation so the camera cannot flip
  // under the scene, and prevents zooming closer than 5 units to the target.
  controller = new CameraInputController(camera) {
    private final Vector3 tmpV1 = new Vector3();
    private final Vector3 tmpV2 = new Vector3();
    @Override
    protected boolean process(float deltaX, float deltaY, int button) {
      // CTRL + rotate button is reserved (handled elsewhere); ignore here.
      if (button == rotateButton && Gdx.input.isKeyPressed(Keys.CONTROL_LEFT)) return false;
      if (button == rotateButton) {
        tmpV1.set(camera.direction).crs(camera.up).y = 0f;
        camera.rotateAround(target, tmpV1.nor(), deltaY * rotateAngle);
        // Undo the vertical rotation if it would tip the camera almost
        // straight down.
        float dot = camera.direction.dot(Vector3.Y);
        if (dot < -0.95f) camera.rotateAround(target, tmpV1.nor(), -deltaY * rotateAngle);
        camera.rotateAround(target, Vector3.Y, deltaX * -rotateAngle);
      } else if (button == translateButton) {
        camera.translate(tmpV1.set(camera.direction).crs(camera.up).nor().scl(-deltaX * translateUnits));
        camera.translate(tmpV2.set(camera.up).scl(-deltaY * translateUnits));
        if (translateTarget) target.add(tmpV1).add(tmpV2);
      } else if (button == forwardButton) {
        camera.translate(tmpV1.set(camera.direction).scl(deltaY * translateUnits));
        if (forwardTarget) target.add(tmpV1);
      }
      if (autoUpdate) camera.update();
      return true;
    }
    @Override
    public boolean zoom(float amount) {
      if (!alwaysScroll && activateKey != 0 && !activatePressed) return false;
      tmpV1.set(camera.direction).scl(amount);
      tmpV2.set(camera.position).add(tmpV1);
      // Only zoom if the camera stays more than 5 units from the target.
      if (tmpV2.dst(target) > 5) {
        camera.translate(tmpV1);
        if (scrollTarget) target.add(tmpV1);
        if (autoUpdate) camera.update();
        return true;
      }
      return false;
    }
  };
  controller.translateUnits = 20;
  // Disable keyboard rotate/translate; only mouse control is wanted.
  controller.rotateLeftKey = -1;
  controller.rotateRightKey = -1;
  controller.forwardKey = -1;
  controller.backwardKey = -1;
  controller.translateButton = -1;
  controller.rotateButton = Buttons.MIDDLE;
  Vloxlands.instance.getMultiplexer().addProcessor(controller);
  minimapCamera = new OrthographicCamera(Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
  minimapCamera.near = 0.1f;
  minimapCamera.far = pickRayMaxDistance;
  minimapEnv = new Environment();
  minimapEnv.set(new ColorAttribute(ColorAttribute.AmbientLight, 0.4f, 0.4f, 0.4f, 1.f));
  minimapEnv.add(new DirectionalLight().set(1f, 1f, 1f, -0.5f, -0.5f, -0.5f));
  minimapEnv.add(new DirectionalLight().set(0.5f, 0.5f, 0.5f, -0.5f, -0.5f, -0.5f));
  shadowBatch = new ModelBatch(new DepthShaderProvider());
  Vloxlands.shapeRenderer = new ShapeRenderer();
  // Starts the background chunk-meshing worker.
  new MeshingThread();
  env = new Environment();
  env.set(new ColorAttribute(ColorAttribute.AmbientLight, 0.4f, 0.4f, 0.4f, 1.f), new ColorAttribute(ColorAttribute.Fog, 0.5f, 0.8f, 0.85f, 1.f));
  env.add(directionalLight = new DirectionalLight().set(0.8f, 0.8f, 0.8f, -0.5f, -0.5f, -0.5f));
  env.add((shadowLight = new DDirectionalShadowLight(Config.shadowQuality, 128, 128, camera.near, camera.far)).set(0.6f, 0.6f, 0.6f, 0, -0.5f, time));
  env.shadowMap = shadowLight;
  // int w = MathUtils.random(1, 5);
  // int d = MathUtils.random(1, 5);
  world = new World(1, 1);
  // world = new World(w, d);
  // Gdx.app.log("GameLayer.show", "World size: " + w + "x" + d);
}
/** Finishes startup once assets are loaded: spawns the starting entities and stocks the towncenter. */
public void doneLoading() {
  // Give every registered item a chance to finish loading its resources.
  for (Item item : Item.getAll()) {
    item.onLoaded();
  }
  focusIsland(world.getIslands()[0], true);
  // Spawn the two starting humans side by side near the island center.
  for (int i = 0; i < 2; i++) {
    Human settler = new Human(Island.SIZE / 2 - 5 + i, Island.SIZE / 4 * 3, Island.SIZE / 2);
    activeIsland.addEntity(settler, false, false);
  }
  // Pre-built towncenter stocked with the starter tools and materials.
  Towncenter towncenter = new Towncenter(Island.SIZE / 2 - 2, Island.SIZE / 4 * 3, Island.SIZE / 2 - 2);
  activeIsland.addEntity(towncenter, false, true);
  towncenter.setBuilt(true);
  for (String tool : new String[] { "AXE", "PICKAXE", "SHOVEL", "HAMMER", "HOE" }) {
    towncenter.getInnerInventory().add(new ItemStack(Item.get(tool), 5));
  }
  towncenter.getInnerInventory().add(new ItemStack(Item.get("WOODEN_LOG"), 40));
  towncenter.getInnerInventory().add(new ItemStack(Item.get("IRON_INGOT"), 5));
  doneLoading = true;
}
// Makes the given island the active one and points the camera at it. When
// initial == true the camera snaps immediately; otherwise a fly-over animation
// is started, which tick() advances while startTick > 0.
public void focusIsland(Island island, boolean initial) {
// Anchor point: horizontal center of the island, three quarters up its height.
Vector3 islandCenter = new Vector3(island.pos.x + Island.SIZE / 2, island.pos.y + Island.SIZE / 4 * 3, island.pos.z + Island.SIZE / 2);
activeIsland = island;
// x == -1 is the sentinel for "no voxel selected".
selectedVoxel.set(-1, 0, 0);
if (!initial) {
// Destination: diagonal offset from the island center.
target.set(islandCenter).add(-Island.SIZE / 3, Island.SIZE / 3, -Island.SIZE / 3);
if (target.equals(camera.position)) {
// Camera is already at the destination: snap orientation, skip animation.
camera.position.set(islandCenter).add(-Island.SIZE / 3, Island.SIZE / 3, -Island.SIZE / 3);
controller.target.set(islandCenter);
camera.lookAt(islandCenter);
controller.update();
camera.update();
return;
}
// Travel duration scales with distance.
// NOTE(review): tick() multiplies ticksForTravel by Config.getGameSpeed()
// again when computing the interpolation alpha — confirm the double
// game-speed factor is intentional.
ticksForTravel = (int) camera.position.dst(target) * Config.getGameSpeed();
// Temporarily jump the camera to the destination to capture the final
// direction/up vectors, then restore its pose so tick() can interpolate.
Vector3 pos = camera.position.cpy();
Vector3 dir = camera.direction.cpy();
Vector3 up = camera.up.cpy();
camera.position.set(islandCenter).add(-Island.SIZE / 3, Island.SIZE / 3, -Island.SIZE / 3);
controller.target.set(islandCenter);
camera.lookAt(islandCenter);
targetDirection.set(camera.direction);
targetUp.set(camera.up);
camera.position.set(pos);
camera.direction.set(dir);
camera.up.set(up);
// Non-zero startTick enables the fly-over interpolation in tick().
startTick = tick;
} else {
// Initial focus: place and orient the camera immediately.
camera.position.set(islandCenter).add(-Island.SIZE / 3, Island.SIZE / 3, -Island.SIZE / 3);
controller.target.set(islandCenter);
camera.lookAt(islandCenter);
controller.update();
camera.update();
}
}
// Per-frame draw: shadow depth pass, main world pass, then the voxel-region
// selection box and optional pathfinding-debug overlays.
@Override
public void render(float delta) {
if (!doneLoading) return;
controller.update();
// Apply a possibly user-changed field of view every frame.
((PerspectiveCamera) camera).fieldOfView = Config.fov;
Gdx.gl.glViewport(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
// Shadow pass: render depth from the shadow light's camera (no environment).
shadowLight.begin(controller.target, camera.direction);
shadowBatch.begin(shadowLight.getCamera());
world.render(shadowBatch, null);
shadowBatch.end();
shadowLight.end();
// Sets the sky color consumed by the NEXT frame's glClear call.
Gdx.gl.glClearColor(0.5f, 0.8f, 0.85f, 1);
// Main pass: world geometry with full environment (lights, fog, shadow map).
modelBatch.begin(camera);
world.render(modelBatch, env);
// Simulation is advanced from the render loop unless the game is paused.
if (!Config.paused) world.update(delta);
// modelBatch.render(sky, env);
if (cursorEntity != null) {
// Placement ghost that follows the mouse (see mouseMoved/tap).
cursorEntity.update(delta);
cursorEntity.render(modelBatch, env, false);
}
modelBatch.end();
// Translucent green box over the current two-click voxel region selection.
if (selectionStartVoxel.x > -1 && selectedVoxel.x > -1) {
Gdx.gl.glEnable(GL20.GL_DEPTH_TEST);
Gdx.gl.glEnable(GL20.GL_BLEND);
float minX = Math.min(selectionStartVoxel.x, selectedVoxel.x);
float maxX = Math.max(selectionStartVoxel.x, selectedVoxel.x);
float minY = Math.min(selectionStartVoxel.y, selectedVoxel.y);
float maxY = Math.max(selectionStartVoxel.y, selectedVoxel.y);
float minZ = Math.min(selectionStartVoxel.z, selectedVoxel.z);
float maxZ = Math.max(selectionStartVoxel.z, selectedVoxel.z);
Vloxlands.shapeRenderer.begin(ShapeType.Filled);
Vloxlands.shapeRenderer.setProjectionMatrix(camera.combined);
Vloxlands.shapeRenderer.identity();
// NOTE(review): x and y translate by the min corner but z uses maxZ while
// the box still spans (maxZ - minZ) — confirm the z offset is intentional.
Vloxlands.shapeRenderer.translate(activeIsland.pos.x + minX, activeIsland.pos.y + minY, activeIsland.pos.z + maxZ + 1.01f);
Vloxlands.shapeRenderer.setColor(0, 1, 0, 0.3f);
Vloxlands.shapeRenderer.box(-0.005f, -0.005f, -0.005f, (maxX - minX) + 1.01f, (maxY - minY) + 1.01f, (maxZ - minZ) + 1.01f);
Vloxlands.shapeRenderer.end();
}
// Visualize breadth-first-search visited nodes for pathfinding debugging.
if (Vloxlands.showPathDebug) {
Gdx.gl.glEnable(GL20.GL_DEPTH_TEST);
Gdx.gl.glEnable(GL20.GL_BLEND);
Vloxlands.shapeRenderer.begin(ShapeType.Filled);
Vloxlands.shapeRenderer.setProjectionMatrix(camera.combined);
for (BFSNode node : BFS.visited) {
Vloxlands.shapeRenderer.identity();
Vloxlands.shapeRenderer.translate(activeIsland.pos.x + node.x, activeIsland.pos.y + node.y, activeIsland.pos.z + node.z + 1.01f);
Vloxlands.shapeRenderer.setColor(1, 1, 1, 0.3f);
Vloxlands.shapeRenderer.box(-0.005f, -0.005f, -0.005f, 1.01f, 1.01f, 1.01f);
}
Vloxlands.shapeRenderer.end();
}
}
// Fixed-rate game tick: advances the day/night light cycle and any camera
// fly-over started by focusIsland().
@Override
public void tick(int tick) {
this.tick = tick;
if (!Config.paused) {
// Day/night cycle: time runs from ~1 down to ~-1 and wraps around.
time -= 0.00002777f;
if (time <= -0.99999999999f) time = 0.99999999999f;
float t = time * MathUtils.PI;
// Sun direction moves on an arc; light intensity follows its height.
float x = MathUtils.sin(t) * 0.5f;
float z = MathUtils.cos(t);
float light = MathUtils.cos(t - MathUtils.PI / 2) * 0.5f + 0.3f;
shadowLight.set(light - 0.1f, light, light, x, -0.5f, z);
directionalLight.set(light, light, light, x, -0.5f, z);
world.tick(tick);
}
if (cursorEntity != null) cursorEntity.tick(tick);
// startTick > 0 means a focusIsland() fly-over is in progress.
if (activeIsland != null && startTick > 0) {
// NOTE(review): ticksForTravel already includes Config.getGameSpeed()
// (set in focusIsland) and it is multiplied in again here — confirm the
// double factor is intended.
camera.position.interpolate(target, (tick - startTick) / (float) (ticksForTravel * Config.getGameSpeed()), Interpolation.linear);
camera.direction.interpolate(targetDirection, (tick - startTick) / (float) (ticksForTravel * Config.getGameSpeed()), Interpolation.linear);
camera.up.interpolate(new Vector3(0, 1, 0), (tick - startTick) / (float) (ticksForTravel * Config.getGameSpeed()), Interpolation.linear);
// Snap to the destination when time runs out or we are close enough.
if (tick >= startTick + ticksForTravel || camera.position.dst(target) < 0.1f) {
Vector3 islandCenter = new Vector3(activeIsland.pos.x + Island.SIZE / 2, activeIsland.pos.y + Island.SIZE / 4 * 3, activeIsland.pos.z + Island.SIZE / 2);
controller.target.set(islandCenter);
camera.position.set(islandCenter).add(-Island.SIZE / 3, Island.SIZE / 3, -Island.SIZE / 3);
camera.lookAt(islandCenter);
startTick = 0;
}
controller.update();
camera.update();
}
}
// Propagates a window resize to both the main and the minimap camera.
@Override
public void resize(int width, int height) {
	camera.viewportWidth = width;
	minimapCamera.viewportWidth = width;
	camera.viewportHeight = height;
	minimapCamera.viewportHeight = height;
	camera.update();
	minimapCamera.update();
}
// Casts a pick ray from screen coordinates (x, y). With hover == true only the
// entity hover flags are refreshed; otherwise the ray is resolved into a voxel
// hit, an entity hit, or no hit, and the selection listeners are notified.
// lmb tells listeners which mouse button triggered the selection.
public void pickRay(boolean hover, boolean lmb, int x, int y) {
Ray ray = camera.getPickRay(x, y);
if (hover) {
// Hover pass: mark only the closest visible in-frustum entity under the ray.
Entity hovered = null;
float distance = 0;
for (Entity e : activeIsland.getEntities()) {
e.hovered = false;
if (!e.isVisible()) continue;
if (!e.inFrustum) continue;
e.getWorldBoundingBox(bb);
if (Intersector.intersectRayBounds(ray, bb, tmp)) {
float dst = ray.origin.dst(tmp);
if (hovered == null || dst < distance) {
hovered = e;
distance = dst;
}
}
}
if (hovered != null) hovered.hovered = true;
} else {
// Selection pass: find the closest hit among entities and voxels; a closer
// voxel hit wins over an entity hit because 'distance' is shared below.
Entity selectedEntity = null;
Chunk selectedChunk = null;
Vector3 selVoxel = new Vector3();
float distance = 0;
for (Entity e : activeIsland.getEntities()) {
e.wasSelected = e.selected;
// Left click clears the previous selection; other buttons keep it.
if (lmb) e.selected = false;
float dst = ray.origin.dst(e.posCache);
// NOTE(review): entities use a strict '<' range check, voxels below use
// '<=' — confirm the asymmetry is intentional.
if (e.isVisible() && e.inFrustum && e.hovered && (distance == 0 || dst < distance) && dst < pickRayMaxDistance) {
distance = dst;
selectedEntity = e;
}
}
for (Chunk c : activeIsland.getChunks()) {
if (c == null) continue;
if (c.inFrustum && !c.isEmpty()) {
// Cheap chunk-level AABB rejection before the per-voxel pick.
tmp1.set(activeIsland.pos.x + c.pos.x, activeIsland.pos.y + c.pos.y, activeIsland.pos.z + c.pos.z);
tmp2.set(tmp1.cpy().add(Chunk.SIZE, Chunk.SIZE, Chunk.SIZE));
bb.set(tmp1, tmp2);
if (Intersector.intersectRayBounds(ray, bb, null) && c.pickVoxel(ray, tmp5, tmp6)) {
float dst = ray.origin.dst(tmp5);
if ((distance == 0 || dst < distance) && dst <= pickRayMaxDistance) {
distance = dst;
selVoxel.set(tmp6);
selectedChunk = c;
}
}
}
}
if (selectedChunk != null) {
// -- determine selectedVoxelFace -- //
// The picked face is the AIR neighbor whose unit box the ray enters first.
Direction dir = null;
float distanc = 0;
Vector3 is2 = new Vector3();
byte air = Voxel.get("AIR").getId();
for (Direction d : Direction.values()) {
tmp7.set(activeIsland.pos.x + selectedChunk.pos.x + selVoxel.x + d.dir.x, activeIsland.pos.y + selectedChunk.pos.y + selVoxel.y + d.dir.y, activeIsland.pos.z + selectedChunk.pos.z + selVoxel.z + d.dir.z);
tmp8.set(tmp7.cpy().add(1, 1, 1));
bb3.set(tmp7, tmp8);
// Solid neighbors are skipped: only exposed faces can be selected.
if (activeIsland.get(selectedChunk.pos.x + selVoxel.x + d.dir.x, selectedChunk.pos.y + selVoxel.y + d.dir.y, selectedChunk.pos.z + selVoxel.z + d.dir.z) != air) continue;
if (Intersector.intersectRayBounds(ray, bb3, is2)) {
float dist = ray.origin.dst(is2);
if (dir == null || dist < distanc) {
distanc = dist;
dir = d;
}
}
}
selectedVoxel.set(selVoxel).add(selectedChunk.pos);
for (SelectionListener sl : listeners)
sl.onVoxelSelection(new VoxelSelection(activeIsland, new VoxelPos(selVoxel.cpy().add(selectedChunk.pos), selectedChunk.get((int) selVoxel.x, (int) selVoxel.y, (int) selVoxel.z)), dir), lmb);
} else if (selectedEntity != null) {
selVoxel.set(-1, 0, 0);
selectedEntity.selected = true;
if (selectedEntity instanceof Structure) {
for (SelectionListener sl : listeners)
sl.onStructureSelection((Structure) selectedEntity, lmb);
} else if (selectedEntity instanceof Creature) {
for (SelectionListener sl : listeners)
sl.onCreatureSelection((Creature) selectedEntity, lmb);
}
} else {
// Nothing hit: let listeners clear their selection state.
for (SelectionListener sl : listeners)
sl.onNoSelection(lmb);
}
}
}
// Picks the voxel under screen (x, y) on the given island. On success the
// chunk-space voxel coordinate plus the chunk offset is written to selVoxel
// and the owning chunk is returned; returns null if nothing was hit within
// pickRayMaxDistance.
// NOTE(review): the lmb parameter is not used inside this method.
public Chunk pickVoxelRay(Island island, Vector3 selVoxel, boolean lmb, int x, int y) {
Chunk selectedChunk = null;
Ray ray = camera.getPickRay(x, y);
float distance = 0;
for (Chunk c : island.getChunks()) {
if (c == null) continue;
if (c.inFrustum && !c.isEmpty()) {
// Chunk-level AABB rejection before the exact per-voxel test.
tmp1.set(island.pos.x + c.pos.x, island.pos.y + c.pos.y, island.pos.z + c.pos.z);
tmp2.set(tmp1.cpy().add(Chunk.SIZE, Chunk.SIZE, Chunk.SIZE));
bb.set(tmp1, tmp2);
if (Intersector.intersectRayBounds(ray, bb, null) && c.pickVoxel(ray, tmp5, tmp6)) {
float dst = ray.origin.dst(tmp5);
// Keep the closest voxel hit that is within picking range.
if ((distance == 0 || dst < distance) && dst <= pickRayMaxDistance) {
distance = dst;
selVoxel.set(tmp6).add(c.pos);
selectedChunk = c;
}
}
}
}
return selectedChunk;
}
// Rubber-band selection: selects every movable entity whose bounds lie inside
// the screen rectangle. If no movable entity matched, static entities on all
// islands are considered instead.
public void selectionBox(Rectangle rectangle) {
CustomizableFrustum frustum = new CustomizableFrustum(rectangle);
camera.update();
frustum.update(camera.invProjectionView);
// Unprojected screen center, used only for the max-distance range check.
Vector3 origin = camera.unproject(new Vector3(Gdx.graphics.getWidth() / 2, Gdx.graphics.getHeight() / 2, 0), 0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
boolean anyEntitySelected = false;
boolean dispatched = false;
for (Entity entity : activeIsland.getEntities()) {
// First pass considers movable entities only; statics handled below.
if (entity instanceof StaticEntity) continue;
if (!entity.isVisible()) continue;
entity.wasSelected = entity.selected;
entity.selected = false;
entity.getWorldBoundingBox(bb);
float dst = origin.dst(entity.posCache);
if (entity.inFrustum && frustum.boundsInFrustum(bb) && dst < pickRayMaxDistance) {
entity.selected = true;
anyEntitySelected = true;
// Listeners are notified once, for the first creature in the box.
if (!dispatched && entity instanceof Creature) {
for (SelectionListener sl : listeners)
sl.onCreatureSelection((Creature) entity, true);
dispatched = true;
}
}
}
if (!anyEntitySelected) {
// Fallback: select static entities across every island.
for (Island i : world.getIslands()) {
if (i == null) continue;
for (Entity e : i.getEntities()) {
if (!(e instanceof StaticEntity)) continue;
e.wasSelected = e.selected;
e.selected = false;
e.getWorldBoundingBox(bb);
float dst = origin.dst(e.posCache);
if (e.inFrustum && frustum.boundsInFrustum(bb) && dst < pickRayMaxDistance) e.selected = true;
}
}
}
}
// Ctrl + middle-drag pans the camera and its orbit target vertically,
// relative to where the drag started (see touchDown).
@Override
public boolean touchDragged(int screenX, int screenY, int pointer) {
	if (middleDown && Gdx.input.isKeyPressed(Keys.CONTROL_LEFT)) {
		float sensitivity = 0.1f;
		float deltaY = (screenY - mouseDown.y) * sensitivity;
		controller.target.y = controllerTarget.y + deltaY;
		camera.position.y = cameraPos.y + deltaY;
		camera.update();
		controller.update();
	}
	return false;
}
// Mouse-move routing: region selection mode updates the voxel cursor, entity
// placement mode moves and validates the ghost entity, and otherwise entities
// receive hover feedback via pickRay.
@Override
public boolean mouseMoved(int screenX, int screenY) {
if (regionSelectionMode) pickVoxelRay(activeIsland, selectedVoxel, false, screenX, screenY);
else if (cursorEntity != null) {
// Snap the placement ghost onto the hovered voxel. The block-centering
// offset is only applied on axes where the entity is at most one voxel wide.
pickVoxelRay(activeIsland, hoveredVoxel, false, screenX, screenY);
cursorEntity.getModelInstance().transform.setToTranslation(activeIsland.pos);
cursorEntity.getModelInstance().transform.translate(hoveredVoxel).translate(cursorEntity.getBoundingBox().getDimensions().x <= 1 ? cursorEntity.blockTrn.x : 0, cursorEntity.blockTrn.y, cursorEntity.getBoundingBox().getDimensions().z <= 1 ? cursorEntity.blockTrn.z : 0);
cursorEntity.setIsland(activeIsland);
cursorEntity.updateVoxelPos();
cursorEntityPlacable = cursorEntity.canBePlaced();
// Lazily snapshot the ghost's pristine materials so they can be restored.
if (defaultCursorEntityMaterials == null) {
defaultCursorEntityMaterials = new Array<Material>();
for (Material m : cursorEntity.getModelInstance().materials)
defaultCursorEntityMaterials.add(m.copy());
}
// Tint the ghost translucent red while it cannot be placed; otherwise
// restore the original diffuse/blending attributes from the snapshot.
for (int i = 0; i < cursorEntity.getModelInstance().materials.size; i++) {
Material m = cursorEntity.getModelInstance().materials.get(i);
if (!cursorEntityPlacable) {
m.set(new BlendingAttribute(0.8f), ColorAttribute.createDiffuse(Color.RED));
} else {
m.remove(ColorAttribute.Diffuse);
BlendingAttribute ba = (BlendingAttribute) defaultCursorEntityMaterials.get(i).get(BlendingAttribute.Type);
if (ba == null) m.remove(BlendingAttribute.Type);
else m.set(ba);
}
}
} else if (activeIsland != null) pickRay(true, false, screenX, screenY);
return false;
}
// Records the press position; a middle-button press additionally snapshots the
// camera state and captures the cursor to begin panning (see touchDragged).
@Override
public boolean touchDown(int screenX, int screenY, int pointer, int button) {
	mouseDown.set(screenX, screenY);
	if (button != Buttons.MIDDLE) return false;
	controllerTarget.set(controller.target);
	cameraPos.set(camera.position);
	middleDown = true;
	Gdx.input.setCursorCatched(true);
	return false;
}
// Tap/click handling: places the ghost entity, performs pick selection, or
// completes a two-click voxel region selection.
@Override
public boolean tap(float x, float y, int count, int button) {
if (!doneLoading) return false;
if (button != Buttons.MIDDLE) {
if (!regionSelectionMode) {
if (cursorEntity != null) {
if (button == Buttons.LEFT) {
if (cursorEntityPlacable) {
// Restore the pristine materials before handing the entity over.
for (int i = 0; i < defaultCursorEntityMaterials.size; i++) {
cursorEntity.getModelInstance().materials.set(i, defaultCursorEntityMaterials.get(i));
}
// Placed structures start unbuilt (the ghost was shown as built).
if (cursorEntity instanceof Structure) ((Structure) cursorEntity).setBuilt(false);
// The island applies its own offset, so strip it from the transform.
cursorEntity.getModelInstance().transform.translate(-activeIsland.pos.x, -activeIsland.pos.y, -activeIsland.pos.z);
activeIsland.addEntity(cursorEntity, true, false);
cursorEntity.updateVoxelPos();
if (!cursorEntityContinousPlacing) {
// Single placement: leave placement mode entirely.
cursorEntity = null;
defaultCursorEntityMaterials = null;
cursorEntityPlacable = false;
} else {
// Continuous placement: spawn a fresh ghost of the same type.
cursorEntity = (StaticEntity) Entity.getForId(cursorEntity.getId(), cursorEntity.posCache.x, cursorEntity.posCache.y, cursorEntity.posCache.z);
cursorEntity.setIsland(activeIsland);
cursorEntity.getModelInstance().transform.translate(activeIsland.pos.x, activeIsland.pos.y, activeIsland.pos.z);
cursorEntity.updateVoxelPos();
if (cursorEntity instanceof Structure) ((Structure) cursorEntity).setBuilt(true);
cursorEntity.setVisible(true);
}
}
} else {
// Any non-left tap cancels placement mode.
cursorEntity = null;
defaultCursorEntityMaterials = null;
cursorEntityPlacable = false;
}
} else {
// Plain click: discard any region anchor and resolve the pick.
selectionStartVoxel.set(-1, 0, 0);
pickRay(false, button == Buttons.LEFT, (int) x, (int) y);
}
} else {
if (selectionStartVoxel.x == -1) {
// First click of a region selection: store anchor voxel and button.
selectedVoxel.set(-1, 0, 0);
pickVoxelRay(activeIsland, selectionStartVoxel, regionSelectionLMB = button == Buttons.LEFT, (int) x, (int) y);
} else if (regionSelectionLMB == (button == Buttons.LEFT)) {
// Second click with the same button completes the region.
pickVoxelRay(activeIsland, selectedVoxel, button == Buttons.LEFT, (int) x, (int) y);
for (SelectionListener sl : listeners)
sl.onVoxelRangeSelection(activeIsland, selectionStartVoxel, selectedVoxel, regionSelectionLMB);
regionSelectionMode = false;
}
}
}
return false;
}
// Releasing the middle button ends camera panning and releases the cursor.
@Override
public boolean touchUp(int screenX, int screenY, int pointer, int button) {
	if (button != Buttons.MIDDLE) return false;
	middleDown = false;
	Gdx.input.setCursorCatched(false);
	return false;
}
// Registers a selection listener; it is inserted at the front, so the most
// recently added listeners are notified first.
public void addListener(SelectionListener value) {
listeners.insert(0, value);
}
// Unregisters a selection listener (compared by identity per the 'true' flag);
// returns true if it was registered.
public boolean removeListener(SelectionListener value) {
return listeners.removeValue(value, true);
}
/**
 * Handles a UI action descriptor, e.g. "entity:3|cont" or "dig|region".
 * A "|region" suffix arms two-click voxel region selection; an "entity:<id>"
 * prefix spawns a placement ghost that follows the cursor, optionally with a
 * "|cont" suffix for continuous placement.
 *
 * @param action the action descriptor; must not be null
 * @throws IllegalArgumentException if action is null (fail fast with a clear
 *         message instead of an unexplained NullPointerException below)
 */
public void action(String action) {
	if (action == null) throw new IllegalArgumentException("action must not be null");
	if (action.contains("|region")) {
		// Reset both region markers so the next two taps define a fresh region.
		selectionStartVoxel.set(-1, 0, 0);
		selectedVoxel.set(-1, 0, 0);
		regionSelectionMode = true;
	}
	if (action.contains("entity")) {
		String[] a = action.split("\\|");
		String s = a[0].replace("entity:", "");
		Entity e = Entity.getForId((byte) Integer.parseInt(s), 0, 0, 0);
		if (e instanceof Structure) {
			// The ghost is rendered as finished but must not request ticks yet.
			((Structure) e).setBuilt(true);
			((Structure) e).tickRequestsEnabled = false;
		}
		e.setVisible(true);
		cursorEntity = (StaticEntity) e;
		// Optional "|cont" suffix keeps placing copies after each placement.
		cursorEntityContinousPlacing = a.length > 1 && a[1].equals("cont");
	}
	activeAction = action;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.plugin.ml;
import com.google.common.collect.ImmutableList;
import io.prestosql.spi.PageBuilder;
import io.prestosql.spi.block.Block;
import io.prestosql.spi.block.BlockBuilder;
import io.prestosql.spi.function.ScalarFunction;
import io.prestosql.spi.function.SqlType;
import io.prestosql.spi.function.TypeParameter;
import io.prestosql.spi.type.BigintType;
import io.prestosql.spi.type.DoubleType;
import io.prestosql.spi.type.StandardTypes;
import io.prestosql.spi.type.Type;
import java.util.List;
/**
 * Scalar {@code features(...)} functions for the ML plugin: each call returns
 * a {@code map(bigint, double)} block mapping the 0-based feature index to its
 * value. One nested class is registered per arity (1 through 10 arguments),
 * since each arity is exposed as its own concrete function signature.
 */
public final class MLFeaturesFunctions
{
public static final List<Class<?>> ML_FEATURE_FUNCTIONS = ImmutableList.of(Features1.class, Features2.class, Features3.class, Features4.class, Features5.class, Features6.class, Features7.class, Features8.class, Features9.class, Features10.class);
private static final String MAP_BIGINT_DOUBLE = "map(bigint,double)";
// Utility holder: not instantiable.
private MLFeaturesFunctions() {}
@ScalarFunction("features")
public static class Features1
{
// Reused across invocations so map blocks are accumulated into pages
// (see featuresHelper, which resets it when full).
private final PageBuilder pageBuilder;
public Features1(@TypeParameter(MAP_BIGINT_DOUBLE) Type mapType)
{
pageBuilder = new PageBuilder(ImmutableList.of(mapType));
}
@SqlType(MAP_BIGINT_DOUBLE)
public Block features(@SqlType(StandardTypes.DOUBLE) double f1)
{
return featuresHelper(pageBuilder, f1);
}
}
// Features2..Features10 repeat the same pattern with increasing arity.
@ScalarFunction("features")
public static class Features2
{
private final PageBuilder pageBuilder;
public Features2(@TypeParameter(MAP_BIGINT_DOUBLE) Type mapType)
{
pageBuilder = new PageBuilder(ImmutableList.of(mapType));
}
@SqlType(MAP_BIGINT_DOUBLE)
public Block features(@SqlType(StandardTypes.DOUBLE) double f1, @SqlType(StandardTypes.DOUBLE) double f2)
{
return featuresHelper(pageBuilder, f1, f2);
}
}
@ScalarFunction("features")
public static class Features3
{
private final PageBuilder pageBuilder;
public Features3(@TypeParameter(MAP_BIGINT_DOUBLE) Type mapType)
{
pageBuilder = new PageBuilder(ImmutableList.of(mapType));
}
@SqlType(MAP_BIGINT_DOUBLE)
public Block features(@SqlType(StandardTypes.DOUBLE) double f1, @SqlType(StandardTypes.DOUBLE) double f2, @SqlType(StandardTypes.DOUBLE) double f3)
{
return featuresHelper(pageBuilder, f1, f2, f3);
}
}
@ScalarFunction("features")
public static class Features4
{
private final PageBuilder pageBuilder;
public Features4(@TypeParameter(MAP_BIGINT_DOUBLE) Type mapType)
{
pageBuilder = new PageBuilder(ImmutableList.of(mapType));
}
@SqlType(MAP_BIGINT_DOUBLE)
public Block features(@SqlType(StandardTypes.DOUBLE) double f1, @SqlType(StandardTypes.DOUBLE) double f2, @SqlType(StandardTypes.DOUBLE) double f3, @SqlType(StandardTypes.DOUBLE) double f4)
{
return featuresHelper(pageBuilder, f1, f2, f3, f4);
}
}
@ScalarFunction("features")
public static class Features5
{
private final PageBuilder pageBuilder;
public Features5(@TypeParameter(MAP_BIGINT_DOUBLE) Type mapType)
{
pageBuilder = new PageBuilder(ImmutableList.of(mapType));
}
@SqlType(MAP_BIGINT_DOUBLE)
public Block features(@SqlType(StandardTypes.DOUBLE) double f1, @SqlType(StandardTypes.DOUBLE) double f2, @SqlType(StandardTypes.DOUBLE) double f3, @SqlType(StandardTypes.DOUBLE) double f4, @SqlType(StandardTypes.DOUBLE) double f5)
{
return featuresHelper(pageBuilder, f1, f2, f3, f4, f5);
}
}
@ScalarFunction("features")
public static class Features6
{
private final PageBuilder pageBuilder;
public Features6(@TypeParameter(MAP_BIGINT_DOUBLE) Type mapType)
{
pageBuilder = new PageBuilder(ImmutableList.of(mapType));
}
@SqlType(MAP_BIGINT_DOUBLE)
public Block features(@SqlType(StandardTypes.DOUBLE) double f1, @SqlType(StandardTypes.DOUBLE) double f2, @SqlType(StandardTypes.DOUBLE) double f3, @SqlType(StandardTypes.DOUBLE) double f4, @SqlType(StandardTypes.DOUBLE) double f5, @SqlType(StandardTypes.DOUBLE) double f6)
{
return featuresHelper(pageBuilder, f1, f2, f3, f4, f5, f6);
}
}
@ScalarFunction("features")
public static class Features7
{
private final PageBuilder pageBuilder;
public Features7(@TypeParameter(MAP_BIGINT_DOUBLE) Type mapType)
{
pageBuilder = new PageBuilder(ImmutableList.of(mapType));
}
@SqlType(MAP_BIGINT_DOUBLE)
public Block features(@SqlType(StandardTypes.DOUBLE) double f1, @SqlType(StandardTypes.DOUBLE) double f2, @SqlType(StandardTypes.DOUBLE) double f3, @SqlType(StandardTypes.DOUBLE) double f4, @SqlType(StandardTypes.DOUBLE) double f5, @SqlType(StandardTypes.DOUBLE) double f6, @SqlType(StandardTypes.DOUBLE) double f7)
{
return featuresHelper(pageBuilder, f1, f2, f3, f4, f5, f6, f7);
}
}
@ScalarFunction("features")
public static class Features8
{
private final PageBuilder pageBuilder;
public Features8(@TypeParameter(MAP_BIGINT_DOUBLE) Type mapType)
{
pageBuilder = new PageBuilder(ImmutableList.of(mapType));
}
@SqlType(MAP_BIGINT_DOUBLE)
public Block features(@SqlType(StandardTypes.DOUBLE) double f1, @SqlType(StandardTypes.DOUBLE) double f2, @SqlType(StandardTypes.DOUBLE) double f3, @SqlType(StandardTypes.DOUBLE) double f4, @SqlType(StandardTypes.DOUBLE) double f5, @SqlType(StandardTypes.DOUBLE) double f6, @SqlType(StandardTypes.DOUBLE) double f7, @SqlType(StandardTypes.DOUBLE) double f8)
{
return featuresHelper(pageBuilder, f1, f2, f3, f4, f5, f6, f7, f8);
}
}
@ScalarFunction("features")
public static class Features9
{
private final PageBuilder pageBuilder;
public Features9(@TypeParameter(MAP_BIGINT_DOUBLE) Type mapType)
{
pageBuilder = new PageBuilder(ImmutableList.of(mapType));
}
@SqlType(MAP_BIGINT_DOUBLE)
public Block features(@SqlType(StandardTypes.DOUBLE) double f1, @SqlType(StandardTypes.DOUBLE) double f2, @SqlType(StandardTypes.DOUBLE) double f3, @SqlType(StandardTypes.DOUBLE) double f4, @SqlType(StandardTypes.DOUBLE) double f5, @SqlType(StandardTypes.DOUBLE) double f6, @SqlType(StandardTypes.DOUBLE) double f7, @SqlType(StandardTypes.DOUBLE) double f8, @SqlType(StandardTypes.DOUBLE) double f9)
{
return featuresHelper(pageBuilder, f1, f2, f3, f4, f5, f6, f7, f8, f9);
}
}
@ScalarFunction("features")
public static class Features10
{
private final PageBuilder pageBuilder;
public Features10(@TypeParameter(MAP_BIGINT_DOUBLE) Type mapType)
{
pageBuilder = new PageBuilder(ImmutableList.of(mapType));
}
@SqlType(MAP_BIGINT_DOUBLE)
public Block features(@SqlType(StandardTypes.DOUBLE) double f1, @SqlType(StandardTypes.DOUBLE) double f2, @SqlType(StandardTypes.DOUBLE) double f3, @SqlType(StandardTypes.DOUBLE) double f4, @SqlType(StandardTypes.DOUBLE) double f5, @SqlType(StandardTypes.DOUBLE) double f6, @SqlType(StandardTypes.DOUBLE) double f7, @SqlType(StandardTypes.DOUBLE) double f8, @SqlType(StandardTypes.DOUBLE) double f9, @SqlType(StandardTypes.DOUBLE) double f10)
{
return featuresHelper(pageBuilder, f1, f2, f3, f4, f5, f6, f7, f8, f9, f10);
}
}
/**
 * Writes the features as one map entry (index i -> features[i]) into the
 * shared page builder and returns that single map as a standalone Block.
 */
private static Block featuresHelper(PageBuilder pageBuilder, double... features)
{
// Recycle the builder once it fills up to bound retained memory.
if (pageBuilder.isFull()) {
pageBuilder.reset();
}
BlockBuilder mapBlockBuilder = pageBuilder.getBlockBuilder(0);
BlockBuilder blockBuilder = mapBlockBuilder.beginBlockEntry();
for (int i = 0; i < features.length; i++) {
BigintType.BIGINT.writeLong(blockBuilder, i);
DoubleType.DOUBLE.writeDouble(blockBuilder, features[i]);
}
mapBlockBuilder.closeEntry();
pageBuilder.declarePosition();
// Return only the most recently written position as the result map.
return mapBlockBuilder.getObject(mapBlockBuilder.getPositionCount() - 1, Block.class);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.FileChannel;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeReference;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetTestUtil;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Test;
/**
* Tests {@link DirectoryScanner} handling of differences
* between blocks on the disk and block in memory.
*/
public class TestDirectoryScanner {
private static final Log LOG = LogFactory.getLog(TestDirectoryScanner.class);
private static final Configuration CONF = new HdfsConfiguration();
// Generation stamp used when fabricating meta-file names on disk.
private static final int DEFAULT_GEN_STAMP = 9999;
private MiniDFSCluster cluster;
// Block pool id of the mini-cluster under test.
private String bpid;
private DFSClient client;
private FsDatasetSpi<? extends FsVolumeSpi> fds = null;
private DirectoryScanner scanner = null;
// Two RNGs: 'rand' picks block ids and volume indices; 'r' supplies the
// seed passed to DFSTestUtil.createFile.
private final Random rand = new Random();
private final Random r = new Random();
private static final int BLOCK_LENGTH = 100;
static {
// Tiny blocks and a 1s heartbeat keep the mini-cluster tests fast.
CONF.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_LENGTH);
CONF.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, 1);
CONF.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1L);
}
/**
 * Creates "/<fileNamePrefix>.dat" of {@code fileLen} bytes (replication 1)
 * and returns the located blocks reported for it.
 */
private List<LocatedBlock> createFile(String fileNamePrefix,
                                      long fileLen,
                                      boolean isLazyPersist) throws IOException {
	Path filePath = new Path("/" + fileNamePrefix + ".dat");
	FileSystem fileSystem = cluster.getFileSystem();
	DFSTestUtil.createFile(
		fileSystem, filePath, isLazyPersist, 1024, fileLen,
		BLOCK_LENGTH, (short) 1, r.nextLong(), false);
	return client.getLocatedBlocks(filePath.toString(), 0, fileLen).getLocatedBlocks();
}
/**
 * Truncates the first non-empty block file that still has its metadata file,
 * so the scanner sees a length mismatch rather than a missing block.
 *
 * @return the id of the truncated block, or 0 if no candidate was found
 */
private long truncateBlockFile() throws IOException {
	synchronized (fds) {
		for (ReplicaInfo b : FsDatasetTestUtil.getReplicas(fds, bpid)) {
			File f = b.getBlockFile();
			File mf = b.getMetaFile();
			// Truncate a block file that has a corresponding metadata file
			if (f.exists() && f.length() != 0 && mf.exists()) {
				// try-with-resources replaces the manual IOUtils.cleanup()
				// finally block; the channel is closed before the stream.
				try (FileOutputStream s = new FileOutputStream(f);
				     FileChannel channel = s.getChannel()) {
					channel.truncate(0);
					LOG.info("Truncated block file " + f.getAbsolutePath());
					return b.getBlockId();
				}
			}
		}
	}
	return 0;
}
/**
 * Deletes one block file whose metadata file still exists, so the scanner
 * reports a missing block file.
 *
 * @return the id of the deleted block, or 0 if nothing was deleted
 */
private long deleteBlockFile() {
	synchronized (fds) {
		for (ReplicaInfo replica : FsDatasetTestUtil.getReplicas(fds, bpid)) {
			File blockFile = replica.getBlockFile();
			File metaFile = replica.getMetaFile();
			// Only consider replicas where both files are present on disk.
			if (!blockFile.exists() || !metaFile.exists()) {
				continue;
			}
			if (blockFile.delete()) {
				LOG.info("Deleting block file " + blockFile.getAbsolutePath());
				return replica.getBlockId();
			}
		}
	}
	return 0;
}
/**
 * Deletes one replica's metadata file, so the scanner reports a missing
 * meta file.
 *
 * @return the id of the affected block, or 0 if nothing was deleted
 */
private long deleteMetaFile() {
	synchronized (fds) {
		for (ReplicaInfo replica : FsDatasetTestUtil.getReplicas(fds, bpid)) {
			File metaFile = replica.getMetaFile();
			if (metaFile.exists() && metaFile.delete()) {
				LOG.info("Deleting metadata file " + metaFile.getAbsolutePath());
				return replica.getBlockId();
			}
		}
	}
	return 0;
}
/**
 * Duplicate the given block on all volumes.
 * The copy keeps the same volume-relative path (subdirectory layout) so it
 * looks like the same block appearing on multiple volumes.
 * @param blockId id of an existing replica in the current block pool
 * @throws IOException if copying either file fails
 */
private void duplicateBlock(long blockId) throws IOException {
synchronized (fds) {
ReplicaInfo b = FsDatasetTestUtil.fetchReplicaInfo(fds, bpid, blockId);
try (FsDatasetSpi.FsVolumeReferences volumes =
fds.getFsVolumeReferences()) {
for (FsVolumeSpi v : volumes) {
// Skip the volume that already holds the original replica.
if (v.getStorageID().equals(b.getVolume().getStorageID())) {
continue;
}
// Volume without a copy of the block. Make a copy now.
File sourceBlock = b.getBlockFile();
File sourceMeta = b.getMetaFile();
String sourceRoot = b.getVolume().getBasePath();
String destRoot = v.getBasePath();
// Relativize against the source volume root to preserve the layout.
String relativeBlockPath =
new File(sourceRoot).toURI().relativize(sourceBlock.toURI())
.getPath();
String relativeMetaPath =
new File(sourceRoot).toURI().relativize(sourceMeta.toURI())
.getPath();
File destBlock = new File(destRoot, relativeBlockPath);
File destMeta = new File(destRoot, relativeMetaPath);
destBlock.getParentFile().mkdirs();
FileUtils.copyFile(sourceBlock, destBlock);
FileUtils.copyFile(sourceMeta, destMeta);
if (destBlock.exists() && destMeta.exists()) {
LOG.info("Copied " + sourceBlock + " ==> " + destBlock);
LOG.info("Copied " + sourceMeta + " ==> " + destMeta);
}
}
}
}
}
/**
 * Returns a random block id that is not present in the dataset yet.
 * Rewritten as a do-while: the original drew an initial random id that was
 * unconditionally overwritten inside the loop (dead store).
 */
private long getFreeBlockId() {
	long id;
	do {
		id = rand.nextLong();
	} while (FsDatasetTestUtil.fetchReplicaInfo(fds, bpid, id) != null);
	return id;
}
// On-disk block file name for the given id: the standard block prefix
// followed by the id.
private String getBlockFile(long id) {
return Block.BLOCK_FILE_PREFIX + id;
}
// On-disk metadata file name for the given id: block prefix, id, the fixed
// fabricated generation stamp, and the metadata extension.
private String getMetaFile(long id) {
return Block.BLOCK_FILE_PREFIX + id + "_" + DEFAULT_GEN_STAMP
+ Block.METADATA_EXTENSION;
}
/** Creates an orphan block file (no meta file) in a random volume; returns its id. */
private long createBlockFile() throws IOException {
	long id = getFreeBlockId();
	try (FsDatasetSpi.FsVolumeReferences volumes = fds.getFsVolumeReferences()) {
		int numVolumes = volumes.size();
		// nextInt(numVolumes) may choose any volume. The previous
		// nextInt(numVolumes - 1) could never select the last volume and threw
		// IllegalArgumentException on a single-volume datanode.
		int index = rand.nextInt(numVolumes);
		File finalizedDir = volumes.get(index).getFinalizedDir(bpid);
		File file = new File(finalizedDir, getBlockFile(id));
		if (file.createNewFile()) {
			LOG.info("Created block file " + file.getName());
		}
	}
	return id;
}
/** Creates an orphan metafile (no block file) in a random volume; returns its block id. */
private long createMetaFile() throws IOException {
	long id = getFreeBlockId();
	try (FsDatasetSpi.FsVolumeReferences refs = fds.getFsVolumeReferences()) {
		int numVolumes = refs.size();
		// nextInt(numVolumes) may choose any volume. The previous
		// nextInt(numVolumes - 1) could never select the last volume and threw
		// IllegalArgumentException on a single-volume datanode.
		int index = rand.nextInt(numVolumes);
		File finalizedDir = refs.get(index).getFinalizedDir(bpid);
		File file = new File(finalizedDir, getMetaFile(id));
		if (file.createNewFile()) {
			LOG.info("Created metafile " + file.getName());
		}
	}
	return id;
}
/** Creates a block file and corresponding metafile in a random volume; returns the id. */
private long createBlockMetaFile() throws IOException {
	long id = getFreeBlockId();
	try (FsDatasetSpi.FsVolumeReferences refs = fds.getFsVolumeReferences()) {
		int numVolumes = refs.size();
		// nextInt(numVolumes) may choose any volume. The previous
		// nextInt(numVolumes - 1) could never select the last volume and threw
		// IllegalArgumentException on a single-volume datanode.
		int index = rand.nextInt(numVolumes);
		File finalizedDir = refs.get(index).getFinalizedDir(bpid);
		File file = new File(finalizedDir, getBlockFile(id));
		if (file.createNewFile()) {
			LOG.info("Created block file " + file.getName());
			// Create files with same prefix as block file but extension names
			// such that during sorting, these files appear around meta file
			// to test how DirectoryScanner handles extraneous files
			String name1 = file.getAbsolutePath() + ".l";
			String name2 = file.getAbsolutePath() + ".n";
			file = new File(name1);
			if (file.createNewFile()) {
				LOG.info("Created extraneous file " + name1);
			}
			file = new File(name2);
			if (file.createNewFile()) {
				LOG.info("Created extraneous file " + name2);
			}
			file = new File(finalizedDir, getMetaFile(id));
			if (file.createNewFile()) {
				LOG.info("Created metafile " + file.getName());
			}
		}
	}
	return id;
}
/**
 * Convenience overload of
 * {@link #scan(long, int, long, long, long, long, long)} for the common
 * case where no duplicate blocks are expected.
 */
private void scan(long totalBlocks, int diffsize, long missingMetaFile, long missingBlockFile,
    long missingMemoryBlocks, long mismatchBlocks) throws IOException {
  // Delegate with duplicateBlocks fixed at zero.
  scan(totalBlocks, diffsize, missingMetaFile, missingBlockFile, missingMemoryBlocks,
      mismatchBlocks, 0L);
}
/**
 * Runs one reconcile pass of the directory scanner and asserts that the
 * recorded diff size and per-category statistics for this block pool match
 * the expected counts supplied by the caller.
 *
 * @throws IOException if the reconcile pass fails
 */
private void scan(long totalBlocks, int diffsize, long missingMetaFile, long missingBlockFile,
    long missingMemoryBlocks, long mismatchBlocks, long duplicateBlocks) throws IOException {
  scanner.reconcile();
  // The scanner retains its diffs/stats because setRetainDiffs(true) is set
  // by every caller before scanning.
  assertTrue(scanner.diffs.containsKey(bpid));
  LinkedList<DirectoryScanner.ScanInfo> diff = scanner.diffs.get(bpid);
  assertTrue(scanner.stats.containsKey(bpid));
  DirectoryScanner.Stats stats = scanner.stats.get(bpid);
  assertEquals(diffsize, diff.size());
  assertEquals(totalBlocks, stats.totalBlocks);
  assertEquals(missingMetaFile, stats.missingMetaFile);
  assertEquals(missingBlockFile, stats.missingBlockFile);
  assertEquals(missingMemoryBlocks, stats.missingMemoryBlocks);
  assertEquals(mismatchBlocks, stats.mismatchBlocks);
  assertEquals(duplicateBlocks, stats.duplicateBlocks);
}
/**
 * Verifies that when a block stored on persistent (DEFAULT) storage is
 * duplicated onto RAM_DISK, the scanner reports the duplicate and the
 * persistent copy is the one retained after reconciliation.
 */
@Test (timeout=300000)
public void testRetainBlockOnPersistentStorage() throws Exception {
  cluster = new MiniDFSCluster
      .Builder(CONF)
      .storageTypes(new StorageType[] { StorageType.RAM_DISK, StorageType.DEFAULT })
      .numDataNodes(1)
      .build();
  try {
    cluster.waitActive();
    DataNode dataNode = cluster.getDataNodes().get(0);
    bpid = cluster.getNamesystem().getBlockPoolId();
    fds = DataNodeTestUtils.getFSDataset(cluster.getDataNodes().get(0));
    client = cluster.getFileSystem().getClient();
    scanner = new DirectoryScanner(dataNode, fds, CONF);
    scanner.setRetainDiffs(true);
    // Stop the lazy writer so it cannot move replicas while we scan.
    FsDatasetTestUtil.stopLazyWriter(cluster.getDataNodes().get(0));
    // Add a file with 1 block
    List<LocatedBlock> blocks =
        createFile(GenericTestUtils.getMethodName(), BLOCK_LENGTH, false);
    // Ensure no difference between volumeMap and disk.
    scan(1, 0, 0, 0, 0, 0);
    // Make a copy of the block on RAM_DISK and ensure that it is
    // picked up by the scanner.
    duplicateBlock(blocks.get(0).getBlock().getBlockId());
    scan(2, 1, 0, 0, 0, 0, 1);
    // The surviving replica must be on persistent (non-transient) storage.
    verifyStorageType(blocks.get(0).getBlock().getBlockId(), false);
    // A follow-up scan confirms reconciliation left no diff behind.
    scan(1, 0, 0, 0, 0, 0);
  } finally {
    if (scanner != null) {
      scanner.shutdown();
      scanner = null;
    }
    cluster.shutdown();
    cluster = null;
  }
}
/**
 * Verifies that when a block created on transient (RAM_DISK) storage is
 * duplicated onto DEFAULT storage, the scanner reports the duplicate and
 * the RAM_DISK copy is the one deleted after reconciliation.
 */
@Test (timeout=300000)
public void testDeleteBlockOnTransientStorage() throws Exception {
  cluster = new MiniDFSCluster
      .Builder(CONF)
      .storageTypes(new StorageType[] { StorageType.RAM_DISK, StorageType.DEFAULT })
      .numDataNodes(1)
      .build();
  try {
    cluster.waitActive();
    bpid = cluster.getNamesystem().getBlockPoolId();
    DataNode dataNode = cluster.getDataNodes().get(0);
    fds = DataNodeTestUtils.getFSDataset(cluster.getDataNodes().get(0));
    client = cluster.getFileSystem().getClient();
    scanner = new DirectoryScanner(dataNode, fds, CONF);
    scanner.setRetainDiffs(true);
    // Stop the lazy writer so it cannot move replicas while we scan.
    FsDatasetTestUtil.stopLazyWriter(cluster.getDataNodes().get(0));
    // Create a file on RAM_DISK
    List<LocatedBlock> blocks =
        createFile(GenericTestUtils.getMethodName(), BLOCK_LENGTH, true);
    // Ensure no difference between volumeMap and disk.
    scan(1, 0, 0, 0, 0, 0);
    // Make a copy of the block on DEFAULT storage and ensure that it is
    // picked up by the scanner.
    duplicateBlock(blocks.get(0).getBlock().getBlockId());
    scan(2, 1, 0, 0, 0, 0, 1);
    // Ensure that the copy on RAM_DISK was deleted.
    verifyStorageType(blocks.get(0).getBlock().getBlockId(), false);
    // A follow-up scan confirms reconciliation left no diff behind.
    scan(1, 0, 0, 0, 0, 0);
  } finally {
    if (scanner != null) {
      scanner.shutdown();
      scanner = null;
    }
    cluster.shutdown();
    cluster = null;
  }
}
/**
 * Runs the full directory-scanner scenario suite twice: once with a single
 * scan thread and once with two, to cover the parallel-scanning path.
 */
@Test (timeout=600000)
public void testDirectoryScanner() throws Exception {
  final int maxParallelism = 2;
  for (int parallelism = 1; parallelism <= maxParallelism; parallelism++) {
    runTest(parallelism);
  }
}
/**
 * Drives the DirectoryScanner through a sequence of disk/memory
 * inconsistency scenarios (missing metafiles, missing block files, orphaned
 * files, truncated blocks, and combinations) and verifies reconcile()
 * detects and repairs each one. After every scenario, a second scan with
 * all-zero expected counts confirms the repair left no residual diff.
 *
 * @param parallelism number of directory-scanner threads to configure
 * @throws Exception on any cluster or assertion failure
 */
public void runTest(int parallelism) throws Exception {
  cluster = new MiniDFSCluster.Builder(CONF).build();
  try {
    cluster.waitActive();
    bpid = cluster.getNamesystem().getBlockPoolId();
    fds = DataNodeTestUtils.getFSDataset(cluster.getDataNodes().get(0));
    client = cluster.getFileSystem().getClient();
    CONF.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THREADS_KEY,
        parallelism);
    DataNode dataNode = cluster.getDataNodes().get(0);
    scanner = new DirectoryScanner(dataNode, fds, CONF);
    scanner.setRetainDiffs(true);
    // Add files with 100 blocks
    createFile(GenericTestUtils.getMethodName(), BLOCK_LENGTH * 100, false);
    long totalBlocks = 100;
    // Test1: No difference between volumeMap and disk
    scan(100, 0, 0, 0, 0, 0);
    // Test2: block metafile is missing
    long blockId = deleteMetaFile();
    scan(totalBlocks, 1, 1, 0, 0, 1);
    // Repair leaves the replica with the grandfather generation stamp.
    verifyGenStamp(blockId, HdfsConstants.GRANDFATHER_GENERATION_STAMP);
    scan(totalBlocks, 0, 0, 0, 0, 0);
    // Test3: block file is missing
    blockId = deleteBlockFile();
    scan(totalBlocks, 1, 0, 1, 0, 0);
    totalBlocks--;
    verifyDeletion(blockId);
    scan(totalBlocks, 0, 0, 0, 0, 0);
    // Test4: A block file exists for which there is no metafile and
    // a block in memory
    blockId = createBlockFile();
    totalBlocks++;
    scan(totalBlocks, 1, 1, 0, 1, 0);
    verifyAddition(blockId, HdfsConstants.GRANDFATHER_GENERATION_STAMP, 0);
    scan(totalBlocks, 0, 0, 0, 0, 0);
    // Test5: A metafile exists for which there is no block file and
    // a block in memory
    blockId = createMetaFile();
    scan(totalBlocks+1, 1, 0, 1, 1, 0);
    // The orphaned metafile should have been removed by reconciliation.
    File metafile = new File(getMetaFile(blockId));
    assertTrue(!metafile.exists());
    scan(totalBlocks, 0, 0, 0, 0, 0);
    // Test6: A block file and metafile exists for which there is no block in
    // memory
    blockId = createBlockMetaFile();
    totalBlocks++;
    scan(totalBlocks, 1, 0, 0, 1, 0);
    verifyAddition(blockId, DEFAULT_GEN_STAMP, 0);
    scan(totalBlocks, 0, 0, 0, 0, 0);
    // Test7: Delete bunch of metafiles
    for (int i = 0; i < 10; i++) {
      blockId = deleteMetaFile();
    }
    scan(totalBlocks, 10, 10, 0, 0, 10);
    scan(totalBlocks, 0, 0, 0, 0, 0);
    // Test8: Delete bunch of block files
    for (int i = 0; i < 10; i++) {
      blockId = deleteBlockFile();
    }
    scan(totalBlocks, 10, 0, 10, 0, 0);
    totalBlocks -= 10;
    scan(totalBlocks, 0, 0, 0, 0, 0);
    // Test9: create a bunch of blocks files
    for (int i = 0; i < 10 ; i++) {
      blockId = createBlockFile();
    }
    totalBlocks += 10;
    scan(totalBlocks, 10, 10, 0, 10, 0);
    scan(totalBlocks, 0, 0, 0, 0, 0);
    // Test10: create a bunch of metafiles
    for (int i = 0; i < 10 ; i++) {
      blockId = createMetaFile();
    }
    scan(totalBlocks+10, 10, 0, 10, 10, 0);
    scan(totalBlocks, 0, 0, 0, 0, 0);
    // Test11: create a bunch block files and meta files
    for (int i = 0; i < 10 ; i++) {
      blockId = createBlockMetaFile();
    }
    totalBlocks += 10;
    scan(totalBlocks, 10, 0, 0, 10, 0);
    scan(totalBlocks, 0, 0, 0, 0, 0);
    // Test12: truncate block files to test block length mismatch
    for (int i = 0; i < 10 ; i++) {
      truncateBlockFile();
    }
    scan(totalBlocks, 10, 0, 0, 0, 10);
    scan(totalBlocks, 0, 0, 0, 0, 0);
    // Test13: all the conditions combined
    createMetaFile();
    createBlockFile();
    createBlockMetaFile();
    deleteMetaFile();
    deleteBlockFile();
    truncateBlockFile();
    scan(totalBlocks+3, 6, 2, 2, 3, 2);
    scan(totalBlocks+1, 0, 0, 0, 0, 0);
    // Test14: validate clean shutdown of DirectoryScanner
    ////assertTrue(scanner.getRunStatus()); //assumes "real" FSDataset, not sim
    scanner.shutdown();
    assertFalse(scanner.getRunStatus());
  } finally {
    if (scanner != null) {
      scanner.shutdown();
      scanner = null;
    }
    cluster.shutdown();
  }
}
/**
 * Asserts that a block added by reconciliation is present in memory with the
 * expected file name, generation stamp, and size.
 *
 * @param blockId  id of the block expected to have been added
 * @param genStamp expected generation stamp of the replica
 * @param size     expected length in bytes of the replica
 */
private void verifyAddition(long blockId, long genStamp, long size) {
  final ReplicaInfo replica = FsDatasetTestUtil.fetchReplicaInfo(fds, bpid, blockId);
  assertNotNull(replica);
  // Added block has the same file as the one created by the test.
  File expectedFile = new File(getBlockFile(blockId));
  assertEquals(expectedFile.getName(),
      FsDatasetTestUtil.getFile(fds, bpid, blockId).getName());
  // Generation stamp matches that of the created file.
  assertEquals(genStamp, replica.getGenerationStamp());
  // File size matches.
  assertEquals(size, replica.getNumBytes());
}
/** Asserts that the given block id is no longer present in the in-memory replica map. */
private void verifyDeletion(long blockId) {
  assertNull(FsDatasetTestUtil.fetchReplicaInfo(fds, bpid, blockId));
}
/**
 * Asserts that the in-memory replica for the given block id exists and
 * carries the expected generation stamp.
 */
private void verifyGenStamp(long blockId, long genStamp) {
  final ReplicaInfo replica = FsDatasetTestUtil.fetchReplicaInfo(fds, bpid, blockId);
  assertNotNull(replica);
  assertEquals(genStamp, replica.getGenerationStamp());
}
/**
 * Asserts that the in-memory replica for the given block id exists and
 * resides on transient storage exactly when {@code expectTransient} is true.
 */
private void verifyStorageType(long blockId, boolean expectTransient) {
  final ReplicaInfo replica = FsDatasetTestUtil.fetchReplicaInfo(fds, bpid, blockId);
  assertNotNull(replica);
  assertThat(replica.getVolume().isTransientStorage(), is(expectTransient));
}
/**
 * Minimal stub FsVolumeSpi used only to give ScanInfo instances a volume in
 * the ScanInfo tests below. Path-returning methods yield fixed locations
 * under /base; iterator and dataset methods are unsupported; everything
 * else returns a neutral default.
 */
private static class TestFsVolumeSpi implements FsVolumeSpi {
  @Override
  public String[] getBlockPoolList() {
    return new String[0];
  }
  @Override
  public FsVolumeReference obtainReference() throws ClosedChannelException {
    // No reference counting needed for this stub.
    return null;
  }
  @Override
  public long getAvailable() throws IOException {
    return 0;
  }
  @Override
  public String getBasePath() {
    return (new File("/base")).getAbsolutePath();
  }
  @Override
  public String getPath(String bpid) throws IOException {
    return (new File("/base/current/" + bpid)).getAbsolutePath();
  }
  @Override
  public File getFinalizedDir(String bpid) throws IOException {
    return new File("/base/current/" + bpid + "/finalized");
  }
  @Override
  public StorageType getStorageType() {
    return StorageType.DEFAULT;
  }
  @Override
  public String getStorageID() {
    return "";
  }
  @Override
  public boolean isTransientStorage() {
    return false;
  }
  @Override
  public void reserveSpaceForRbw(long bytesToReserve) {
    // No-op: this stub tracks no space.
  }
  @Override
  public void releaseReservedSpace(long bytesToRelease) {
    // No-op: this stub tracks no space.
  }
  @Override
  public BlockIterator newBlockIterator(String bpid, String name) {
    throw new UnsupportedOperationException();
  }
  @Override
  public BlockIterator loadBlockIterator(String bpid, String name)
      throws IOException {
    throw new UnsupportedOperationException();
  }
  @Override
  public FsDatasetSpi getDataset() {
    throw new UnsupportedOperationException();
  }
}
// Shared stub volume for the ScanInfo tests below.
private final static TestFsVolumeSpi TEST_VOLUME = new TestFsVolumeSpi();
// Two distinct block-pool ids used to vary the directories ScanInfo sees.
private final static String BPID_1 = "BP-783049782-127.0.0.1-1370971773491";
private final static String BPID_2 = "BP-367845636-127.0.0.1-5895645674231";
/**
 * Constructs a ScanInfo from the given block id and files (either may be
 * null) on TEST_VOLUME and verifies every accessor returns exactly what
 * was passed in.
 */
void testScanInfoObject(long blockId, File blockFile, File metaFile)
    throws Exception {
  DirectoryScanner.ScanInfo info =
      new DirectoryScanner.ScanInfo(blockId, blockFile, metaFile, TEST_VOLUME);
  assertEquals(blockId, info.getBlockId());
  if (blockFile == null) {
    assertNull(info.getBlockFile());
  } else {
    assertEquals(blockFile.getAbsolutePath(),
        info.getBlockFile().getAbsolutePath());
  }
  if (metaFile == null) {
    assertNull(info.getMetaFile());
  } else {
    assertEquals(metaFile.getAbsolutePath(),
        info.getMetaFile().getAbsolutePath());
  }
  assertEquals(TEST_VOLUME, info.getVolume());
}
/**
 * Constructs a ScanInfo with null files and volume and verifies the id is
 * retained while both file accessors return null.
 */
void testScanInfoObject(long blockId) throws Exception {
  DirectoryScanner.ScanInfo info =
      new DirectoryScanner.ScanInfo(blockId, null, null, null);
  assertEquals(blockId, info.getBlockId());
  assertNull(info.getBlockFile());
  assertNull(info.getMetaFile());
}
/**
 * Exercises ScanInfo accessors across every combination of present/absent
 * block and meta files, two block pools, and the all-null constructor.
 */
@Test(timeout=120000)
public void TestScanInfo() throws Exception {
  // Both files present.
  testScanInfoObject(123,
      new File(TEST_VOLUME.getFinalizedDir(BPID_1).getAbsolutePath(),
          "blk_123"),
      new File(TEST_VOLUME.getFinalizedDir(BPID_1).getAbsolutePath(),
          "blk_123__1001.meta"));
  // Block file only. NOTE(review): the file name reuses "blk_123" although
  // the id is 464 — ScanInfo does not appear to validate name-vs-id here;
  // confirm this mismatch is intentional.
  testScanInfoObject(464,
      new File(TEST_VOLUME.getFinalizedDir(BPID_1).getAbsolutePath(),
          "blk_123"),
      null);
  // Meta file only.
  testScanInfoObject(523,
      null,
      new File(TEST_VOLUME.getFinalizedDir(BPID_1).getAbsolutePath(),
          "blk_123__1009.meta"));
  // Neither file, but a volume.
  testScanInfoObject(789,
      null,
      null);
  // Neither file nor volume.
  testScanInfoObject(456);
  // Both files present, in a different block pool.
  testScanInfoObject(123,
      new File(TEST_VOLUME.getFinalizedDir(BPID_2).getAbsolutePath(),
          "blk_567"),
      new File(TEST_VOLUME.getFinalizedDir(BPID_2).getAbsolutePath(),
          "blk_567__1004.meta"));
}
}
| |
/*
* Copyright 2014 WANdisco
*
* WANdisco licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package c5db.replication;
import c5db.ReplicatorConstants;
import c5db.interfaces.replication.GeneralizedReplicator;
import c5db.interfaces.replication.IndexCommitNotice;
import c5db.interfaces.replication.ReplicateSubmissionInfo;
import c5db.interfaces.replication.Replicator;
import c5db.interfaces.replication.ReplicatorInstanceEvent;
import c5db.interfaces.replication.ReplicatorReceipt;
import c5db.util.C5Futures;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import org.jetbrains.annotations.Nullable;
import org.jetlang.channels.ChannelSubscription;
import org.jetlang.fibers.Fiber;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ExecutionException;
/**
* A GeneralizedReplicator that makes use of a {@link c5db.interfaces.replication.Replicator},
* processing its ReplicatorReceipts and IndexCommitNotices to provide a more general-purpose
* interface.
*/
public class C5GeneralizedReplicator implements GeneralizedReplicator {
  // Identity of the local node; used to recognize our own leader election.
  private final long nodeId;
  // The wrapped replicator that supplies receipts and commit notices.
  private final Replicator replicator;
  // All mutable state in this class is confined to this fiber.
  private final Fiber fiber;
  // Pending future handed out by isAvailableFuture(); null when no caller is
  // currently waiting. Access only from the fiber.
  private SettableFuture<Void> availableFuture;

  /**
   * Queue of receipts for pending log requests and their futures; access this queue only
   * from the fiber. See {@link c5db.replication.C5GeneralizedReplicator.ReceiptWithCompletionFuture}
   */
  private final Queue<ReceiptWithCompletionFuture> receiptQueue =
      new ArrayDeque<>(ReplicatorConstants.REPLICATOR_MAXIMUM_SIMULTANEOUS_LOG_REQUESTS);

  /**
   * Both the fiber and replicator must be started by the user of this class, and the
   * user takes responsibility for their disposal.
   */
  public C5GeneralizedReplicator(Replicator replicator, Fiber fiber) {
    this.nodeId = replicator.getId();
    this.replicator = replicator;
    this.fiber = fiber;
    setupCommitNoticeSubscription();
    setupEventNoticeSubscription();
  }

  /**
   * Submits data for replication via the wrapped replicator. Fails fast with
   * InvalidReplicatorStateException when logData yields no receipt future
   * (which, per the null check below, indicates the replicator is not
   * currently leader). The returned future completes when a receipt is
   * available; the receipt's completionFuture completes on commit.
   */
  @Override
  public ListenableFuture<ReplicateSubmissionInfo> replicate(List<ByteBuffer> data) throws InterruptedException,
      InvalidReplicatorStateException {
    final ReceiptWithCompletionFuture receiptWithCompletionFuture =
        new ReceiptWithCompletionFuture(replicator.logData(data));
    if (receiptWithCompletionFuture.receiptFuture == null) {
      throw new InvalidReplicatorStateException("Replicator is not in the leader state");
    }
    // Add to the queue on the fiber for concurrency safety
    fiber.execute(
        () -> receiptQueue.add(receiptWithCompletionFuture));
    return Futures.transform(receiptWithCompletionFuture.receiptFuture,
        (ReplicatorReceipt receipt) ->
            new ReplicateSubmissionInfo(receipt.seqNum, receiptWithCompletionFuture.completionFuture));
  }

  /**
   * Returns a future that completes once this replicator becomes available
   * (i.e. once handleEventNotice observes our own leader election). Multiple
   * concurrent callers are chained onto a single pending availableFuture.
   */
  @Override
  public ListenableFuture<Void> isAvailableFuture() {
    SettableFuture<Void> returnedFuture = SettableFuture.create();
    fiber.execute(() -> {
      if (this.availableFuture == null) {
        // By placing this future here, the handleEventNotice method will know someone is waiting
        // for notification of availability; that method will handle setting this.
        this.availableFuture = returnedFuture;
      } else {
        // Some other invocation of this method is already waiting for notification of availability,
        // so setup that existing availableFuture to "forward" its result to the newly created one.
        C5Futures.addCallback(this.availableFuture, returnedFuture::set, returnedFuture::setException, fiber);
      }
    });
    return returnedFuture;
  }

  // Subscribes to commit notices, filtered down to those for our own node
  // and quorum; handleCommitNotice runs on the fiber.
  private void setupCommitNoticeSubscription() {
    final String quorumId = replicator.getQuorumId();
    final long serverNodeId = replicator.getId();
    replicator.getCommitNoticeChannel().subscribe(
        new ChannelSubscription<>(this.fiber, this::handleCommitNotice,
            (notice) ->
                notice.nodeId == serverNodeId
                    && notice.quorumId.equals(quorumId)));
  }

  // Subscribes to replicator lifecycle events; handleEventNotice runs on the fiber.
  private void setupEventNoticeSubscription() {
    replicator.getEventChannel().subscribe(fiber, this::handleEventNotice);
  }

  /**
   * The core of the logic is in this method. When we receive an IndexCommitNotice, we need
   * to find out which, if any, of the pending replicate requests are affected by it. We do
   * that by examining their receipts, if the receipts are available.
   *
   * @param notice An IndexCommitNotice received from the internal Replicator.
   */
  @FiberOnly
  private void handleCommitNotice(IndexCommitNotice notice) {
    // Drain receipts from the head of the queue while they are resolved;
    // stop at the first unresolved receipt (queue order mirrors submission
    // order, so later receipts cannot be decided before earlier ones).
    while (!receiptQueue.isEmpty() && receiptQueue.peek().receiptFuture.isDone()) {
      ReceiptWithCompletionFuture receiptWithCompletionFuture = receiptQueue.peek();
      SettableFuture<Void> completionFuture = receiptWithCompletionFuture.completionFuture;
      ReplicatorReceipt receipt = getReceiptOrSetException(receiptWithCompletionFuture);
      if (receipt != null) {
        if (notice.lastIndex < receipt.seqNum) {
          // old commit notice; leave the receipt queued for a later notice
          return;
        } else if (receipt.seqNum < notice.firstIndex) {
          completionFuture.setException(new IOException("commit notice skipped over the receipt's seqNum"));
        } else if (notice.term != receipt.term) {
          completionFuture.setException(new IOException("commit notice's term differs from that of receipt"));
        } else {
          // receipt.seqNum is within the range of the commit notice, and the terms match: replication is complete
          completionFuture.set(null);
        }
      }
      receiptQueue.poll();
    }
  }

  /**
   * This method runs whenever our wrapped Replicator emits an event notice. It checks to
   * see if the replicator is announcing that it has been elected leader of its quorum. If so,
   * then this GeneralizedReplicator is now available for submission of replication requests.
   * Therefore, check if anyone has been waiting to be notified of our availability, by having
   * left an unset availableFuture. If so, notify them by setting that pending future.
   */
  @FiberOnly
  private void handleEventNotice(ReplicatorInstanceEvent eventNotice) {
    if (availableFuture != null
        && eventNotice.instance == replicator
        && eventNotice.eventType == ReplicatorInstanceEvent.EventType.LEADER_ELECTED
        && eventNotice.newLeader == nodeId) {
      // Notify past callers of isAvailableFuture
      availableFuture.set(null);
      // Remove the future so that a subsequent invocation of this method will not try
      // to set it again.
      availableFuture = null;
    }
  }

  /**
   * This method assumes that the receiptFuture is "done". If the future is set with a value,
   * return its value, the ReplicatorReceipt. On the other hand, if it is an exception, set
   * the completionFuture with that exception and return null. A null return value guarantees
   * the completionFuture has been set with an exception.
   */
  @Nullable
  private ReplicatorReceipt getReceiptOrSetException(ReceiptWithCompletionFuture receiptWithCompletionFuture) {
    ListenableFuture<ReplicatorReceipt> receiptFuture = receiptWithCompletionFuture.receiptFuture;
    SettableFuture<?> completionFuture = receiptWithCompletionFuture.completionFuture;
    assert receiptFuture.isDone();
    try {
      ReplicatorReceipt receipt = C5Futures.getUninterruptibly(receiptFuture);
      if (receipt == null) {
        completionFuture.setException(new IOException("replicator returned a null receipt"));
      }
      return receipt;
    } catch (ExecutionException e) {
      completionFuture.setException(e);
      return null;
    }
  }

  /**
   * C5GeneralizedReplicator keeps track of all the times it's requested to replicate
   * data. Each of these replication requests, to its internal replicator, yields a
   * receipt. Later on, the internal replicator will issue a commit notice, indicating
   * that one or more earlier requests have been completed. The receipts are bundled
   * together with SettableFutures so that the user of this object can be notified
   * when that happens. ReceiptWithCompletionFuture is the class used to bundle the receipt and
   * the completion future together.
   */
  private static class ReceiptWithCompletionFuture {
    public final ListenableFuture<ReplicatorReceipt> receiptFuture;
    public final SettableFuture<Void> completionFuture = SettableFuture.create();

    private ReceiptWithCompletionFuture(ListenableFuture<ReplicatorReceipt> receiptFuture) {
      this.receiptFuture = receiptFuture;
    }
  }
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.extractor.ExtractorSampleSource;
import com.google.android.exoplayer.extractor.mp4.Mp4Extractor;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.content.Context;
import android.media.MediaExtractor;
import android.net.Uri;
import java.io.FileDescriptor;
import java.io.IOException;
import java.util.Map;
import java.util.UUID;
/**
* Extracts samples from a stream using Android's {@link MediaExtractor}.
* <p>
* Warning - This class is marked as deprecated because there are known device specific issues
* associated with its use, including playbacks not starting, playbacks stuttering and other
* miscellaneous failures. For mp4, m4a, mp3, webm, mpeg-ts and aac playbacks it is strongly
* recommended to use {@link ExtractorSampleSource} instead, along with the corresponding extractor
* (e.g. {@link Mp4Extractor} for mp4 playbacks). Where this is not possible this class can still be
* used, but please be aware of the associated risks. Valid use cases of this class that are not
* yet supported by {@link ExtractorSampleSource} include:
* <ul>
* <li>Playing a container format for which an ExoPlayer extractor does not yet exist (e.g. ogg).
* </li>
* <li>Playing media whose container format is unknown and so needs to be inferred automatically.
* </li>
* </ul>
* Over time we hope to enhance {@link ExtractorSampleSource} to support these use cases, and hence
* make use of this class unnecessary.
*/
// TODO: This implementation needs to be fixed so that its methods are non-blocking (either
// through use of a background thread, or through changes to the framework's MediaExtractor API).
@Deprecated
@TargetApi(16)
public final class FrameworkSampleSource implements SampleSource {

  // Only the sync and encrypted flags from the framework extractor are
  // surfaced to callers.
  private static final int ALLOWED_FLAGS_MASK = C.SAMPLE_FLAG_SYNC | C.SAMPLE_FLAG_ENCRYPTED;

  // Per-track lifecycle: disabled -> enabled -> format sent.
  private static final int TRACK_STATE_DISABLED = 0;
  private static final int TRACK_STATE_ENABLED = 1;
  private static final int TRACK_STATE_FORMAT_SENT = 2;

  // Parameters for a Uri data source.
  private final Context context;
  private final Uri uri;
  private final Map<String, String> headers;

  // Parameters for a FileDescriptor data source.
  private final FileDescriptor fileDescriptor;
  private final long fileDescriptorOffset;
  private final long fileDescriptorLength;

  private MediaExtractor extractor;
  private TrackInfo[] trackInfos;
  private boolean prepared;
  // Counts down as dependent renderers call release(); the extractor is
  // released when it reaches zero.
  private int remainingReleaseCount;
  private int[] trackStates;
  private boolean[] pendingDiscontinuities;
  // The position last passed to seekTo, or C.UNKNOWN_TIME_US once a sample
  // has been read since then; used to suppress duplicate seeks.
  private long seekPositionUs;

  /**
   * Instantiates a new sample extractor reading from the specified {@code uri}.
   *
   * @param context Context for resolving {@code uri}.
   * @param uri The content URI from which to extract data.
   * @param headers Headers to send with requests for data.
   * @param downstreamRendererCount Number of track renderers dependent on this sample source.
   */
  public FrameworkSampleSource(Context context, Uri uri, Map<String, String> headers,
      int downstreamRendererCount) {
    Assertions.checkState(Util.SDK_INT >= 16);
    this.remainingReleaseCount = downstreamRendererCount;
    this.context = Assertions.checkNotNull(context);
    this.uri = Assertions.checkNotNull(uri);
    this.headers = headers;
    // FileDescriptor fields are unused in the Uri variant.
    fileDescriptor = null;
    fileDescriptorOffset = 0;
    fileDescriptorLength = 0;
  }

  /**
   * Instantiates a new sample extractor reading from the specified seekable {@code fileDescriptor}.
   * The caller is responsible for releasing the file descriptor.
   *
   * @param fileDescriptor File descriptor from which to read.
   * @param offset The offset in bytes into the file where the data to be extracted starts.
   * @param length The length in bytes of the data to be extracted.
   * @param downstreamRendererCount Number of track renderers dependent on this sample source.
   */
  public FrameworkSampleSource(FileDescriptor fileDescriptor, long offset, long length,
      int downstreamRendererCount) {
    Assertions.checkState(Util.SDK_INT >= 16);
    this.remainingReleaseCount = downstreamRendererCount;
    // Uri fields are unused in the FileDescriptor variant.
    context = null;
    uri = null;
    headers = null;
    this.fileDescriptor = Assertions.checkNotNull(fileDescriptor);
    fileDescriptorOffset = offset;
    fileDescriptorLength = length;
  }

  /**
   * Creates the framework extractor on first call and builds the per-track
   * state and TrackInfo arrays. Always reports success; subsequent calls are
   * no-ops.
   */
  @Override
  public boolean prepare(long positionUs) throws IOException {
    if (!prepared) {
      extractor = new MediaExtractor();
      if (context != null) {
        extractor.setDataSource(context, uri, headers);
      } else {
        extractor.setDataSource(fileDescriptor, fileDescriptorOffset, fileDescriptorLength);
      }
      trackStates = new int[extractor.getTrackCount()];
      pendingDiscontinuities = new boolean[trackStates.length];
      trackInfos = new TrackInfo[trackStates.length];
      for (int i = 0; i < trackStates.length; i++) {
        android.media.MediaFormat format = extractor.getTrackFormat(i);
        long durationUs = format.containsKey(android.media.MediaFormat.KEY_DURATION)
            ? format.getLong(android.media.MediaFormat.KEY_DURATION) : C.UNKNOWN_TIME_US;
        String mime = format.getString(android.media.MediaFormat.KEY_MIME);
        trackInfos[i] = new TrackInfo(mime, durationUs);
      }
      prepared = true;
    }
    return true;
  }

  @Override
  public int getTrackCount() {
    Assertions.checkState(prepared);
    return trackStates.length;
  }

  @Override
  public TrackInfo getTrackInfo(int track) {
    Assertions.checkState(prepared);
    return trackInfos[track];
  }

  /**
   * Selects the track in the underlying extractor and seeks to the given
   * position. The seek is forced for a non-zero position so the extractor
   * repositions even if seekPositionUs already matches.
   */
  @Override
  public void enable(int track, long positionUs) {
    Assertions.checkState(prepared);
    Assertions.checkState(trackStates[track] == TRACK_STATE_DISABLED);
    trackStates[track] = TRACK_STATE_ENABLED;
    extractor.selectTrack(track);
    seekToUsInternal(positionUs, positionUs != 0);
  }

  @Override
  public boolean continueBuffering(long positionUs) {
    // MediaExtractor takes care of buffering and blocks until it has samples, so we can always
    // return true here. Although note that the blocking behavior is itself a bug, as per the
    // TODO further up this file. This method will need to return something else as part of fixing
    // the TODO.
    return true;
  }

  /**
   * Reads from the selected track: first any pending discontinuity, then the
   * track format (once), then samples, in that priority order. Returns one
   * of DISCONTINUITY_READ, NOTHING_READ, FORMAT_READ, SAMPLE_READ or
   * END_OF_STREAM.
   */
  @Override
  public int readData(int track, long positionUs, MediaFormatHolder formatHolder,
      SampleHolder sampleHolder, boolean onlyReadDiscontinuity) {
    Assertions.checkState(prepared);
    Assertions.checkState(trackStates[track] != TRACK_STATE_DISABLED);
    if (pendingDiscontinuities[track]) {
      pendingDiscontinuities[track] = false;
      return DISCONTINUITY_READ;
    }
    if (onlyReadDiscontinuity) {
      return NOTHING_READ;
    }
    if (trackStates[track] != TRACK_STATE_FORMAT_SENT) {
      formatHolder.format = MediaFormat.createFromFrameworkMediaFormatV16(
          extractor.getTrackFormat(track));
      formatHolder.drmInitData = Util.SDK_INT >= 18 ? getDrmInitDataV18() : null;
      trackStates[track] = TRACK_STATE_FORMAT_SENT;
      return FORMAT_READ;
    }
    int extractorTrackIndex = extractor.getSampleTrackIndex();
    if (extractorTrackIndex == track) {
      if (sampleHolder.data != null) {
        int offset = sampleHolder.data.position();
        sampleHolder.size = extractor.readSampleData(sampleHolder.data, offset);
        sampleHolder.data.position(offset + sampleHolder.size);
      } else {
        sampleHolder.size = 0;
      }
      sampleHolder.timeUs = extractor.getSampleTime();
      sampleHolder.flags = extractor.getSampleFlags() & ALLOWED_FLAGS_MASK;
      if (sampleHolder.isEncrypted()) {
        sampleHolder.cryptoInfo.setFromExtractorV16(extractor);
      }
      // A sample was consumed, so any identical subsequent seek must be honored.
      seekPositionUs = C.UNKNOWN_TIME_US;
      extractor.advance();
      return SAMPLE_READ;
    } else {
      // A negative index from the framework extractor indicates end of stream.
      return extractorTrackIndex < 0 ? END_OF_STREAM : NOTHING_READ;
    }
  }

  @Override
  public void disable(int track) {
    Assertions.checkState(prepared);
    Assertions.checkState(trackStates[track] != TRACK_STATE_DISABLED);
    extractor.unselectTrack(track);
    pendingDiscontinuities[track] = false;
    trackStates[track] = TRACK_STATE_DISABLED;
  }

  @Override
  public void seekToUs(long positionUs) {
    Assertions.checkState(prepared);
    seekToUsInternal(positionUs, false);
  }

  /**
   * Estimates the buffered position from the extractor's cached duration and
   * current sample time; returns UNKNOWN_TIME_US when the cached duration is
   * unavailable, or END_OF_TRACK_US when there is no current sample.
   */
  @Override
  public long getBufferedPositionUs() {
    Assertions.checkState(prepared);
    long bufferedDurationUs = extractor.getCachedDuration();
    if (bufferedDurationUs == -1) {
      return TrackRenderer.UNKNOWN_TIME_US;
    } else {
      long sampleTime = extractor.getSampleTime();
      return sampleTime == -1 ? TrackRenderer.END_OF_TRACK_US : sampleTime + bufferedDurationUs;
    }
  }

  @Override
  public void release() {
    Assertions.checkState(remainingReleaseCount > 0);
    // Only release the extractor once the last dependent renderer releases.
    if (--remainingReleaseCount == 0 && extractor != null) {
      extractor.release();
      extractor = null;
    }
  }

  @TargetApi(18)
  private DrmInitData getDrmInitDataV18() {
    // MediaExtractor only supports psshInfo for MP4, so it's ok to hard code the mimeType here.
    Map<UUID, byte[]> psshInfo = extractor.getPsshInfo();
    if (psshInfo == null || psshInfo.isEmpty()) {
      return null;
    }
    DrmInitData.Mapped drmInitData = new DrmInitData.Mapped(MimeTypes.VIDEO_MP4);
    drmInitData.putAll(psshInfo);
    return drmInitData;
  }

  private void seekToUsInternal(long positionUs, boolean force) {
    // Unless forced, avoid duplicate calls to the underlying extractor's seek method in the case
    // that there have been no interleaving calls to readSample.
    if (force || seekPositionUs != positionUs) {
      seekPositionUs = positionUs;
      extractor.seekTo(positionUs, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
      // Every enabled track must report a discontinuity after a seek.
      for (int i = 0; i < trackStates.length; ++i) {
        if (trackStates[i] != TRACK_STATE_DISABLED) {
          pendingDiscontinuities[i] = true;
        }
      }
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.common.util.collections;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import org.apache.pulsar.common.util.collections.ConcurrentOpenHashMap;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
public class ConcurrentOpenHashMapTest {
/**
 * Verifies that the constructor rejects invalid arguments: zero capacity,
 * zero concurrency level, and a capacity smaller than the concurrency level.
 */
@Test
public void testConstructor() {
  // Zero initial capacity must be rejected.
  try {
    new ConcurrentOpenHashMap<String, String>(0);
    fail("should have thrown exception");
  } catch (IllegalArgumentException expected) {
    // constructor correctly rejected the argument
  }
  // Zero concurrency level must be rejected.
  try {
    new ConcurrentOpenHashMap<String, String>(16, 0);
    fail("should have thrown exception");
  } catch (IllegalArgumentException expected) {
    // constructor correctly rejected the argument
  }
  // Capacity below the concurrency level must be rejected.
  try {
    new ConcurrentOpenHashMap<String, String>(4, 8);
    fail("should have thrown exception");
  } catch (IllegalArgumentException expected) {
    // constructor correctly rejected the argument
  }
}
@Test
public void simpleInsertions() {
ConcurrentOpenHashMap<String, String> map = new ConcurrentOpenHashMap<>(16);
assertTrue(map.isEmpty());
assertNull(map.put("1", "one"));
assertFalse(map.isEmpty());
assertNull(map.put("2", "two"));
assertNull(map.put("3", "three"));
assertEquals(map.size(), 3);
assertEquals(map.get("1"), "one");
assertEquals(map.size(), 3);
assertEquals(map.remove("1"), "one");
assertEquals(map.size(), 2);
assertEquals(map.get("1"), null);
assertEquals(map.get("5"), null);
assertEquals(map.size(), 2);
assertNull(map.put("1", "one"));
assertEquals(map.size(), 3);
assertEquals(map.put("1", "uno"), "one");
assertEquals(map.size(), 3);
}
@Test
public void testRemove() {
ConcurrentOpenHashMap<String, String> map = new ConcurrentOpenHashMap<>();
assertTrue(map.isEmpty());
assertNull(map.put("1", "one"));
assertFalse(map.isEmpty());
assertFalse(map.remove("0", "zero"));
assertFalse(map.remove("1", "uno"));
assertFalse(map.isEmpty());
assertTrue(map.remove("1", "one"));
assertTrue(map.isEmpty());
}
@Test
public void testRehashing() {
int n = 16;
ConcurrentOpenHashMap<String, Integer> map = new ConcurrentOpenHashMap<>(n / 2, 1);
assertEquals(map.capacity(), n);
assertEquals(map.size(), 0);
for (int i = 0; i < n; i++) {
map.put(Integer.toString(i), i);
}
assertEquals(map.capacity(), 2 * n);
assertEquals(map.size(), n);
}
@Test
public void testRehashingWithDeletes() {
int n = 16;
ConcurrentOpenHashMap<Integer, Integer> map = new ConcurrentOpenHashMap<>(n / 2, 1);
assertEquals(map.capacity(), n);
assertEquals(map.size(), 0);
for (int i = 0; i < n / 2; i++) {
map.put(i, i);
}
for (int i = 0; i < n / 2; i++) {
map.remove(i);
}
for (int i = n; i < (2 * n); i++) {
map.put(i, i);
}
assertEquals(map.capacity(), 2 * n);
assertEquals(map.size(), n);
}
@Test
public void concurrentInsertions() throws Throwable {
ConcurrentOpenHashMap<Long, String> map = new ConcurrentOpenHashMap<>(16, 1);
ExecutorService executor = Executors.newCachedThreadPool();
final int nThreads = 16;
final int N = 100_000;
String value = "value";
List<Future<?>> futures = new ArrayList<>();
for (int i = 0; i < nThreads; i++) {
final int threadIdx = i;
futures.add(executor.submit(() -> {
Random random = new Random();
for (int j = 0; j < N; j++) {
long key = random.nextLong();
// Ensure keys are uniques
key -= key % (threadIdx + 1);
map.put(key, value);
}
}));
}
for (Future<?> future : futures) {
future.get();
}
assertEquals(map.size(), N * nThreads);
executor.shutdown();
}
@Test
public void concurrentInsertionsAndReads() throws Throwable {
ConcurrentOpenHashMap<Long, String> map = new ConcurrentOpenHashMap<>();
ExecutorService executor = Executors.newCachedThreadPool();
final int nThreads = 16;
final int N = 100_000;
String value = "value";
List<Future<?>> futures = new ArrayList<>();
for (int i = 0; i < nThreads; i++) {
final int threadIdx = i;
futures.add(executor.submit(() -> {
Random random = new Random();
for (int j = 0; j < N; j++) {
long key = random.nextLong();
// Ensure keys are uniques
key -= key % (threadIdx + 1);
map.put(key, value);
}
}));
}
for (Future<?> future : futures) {
future.get();
}
assertEquals(map.size(), N * nThreads);
executor.shutdown();
}
@Test
public void testIteration() {
ConcurrentOpenHashMap<Long, String> map = new ConcurrentOpenHashMap<>();
assertEquals(map.keys(), Collections.emptyList());
assertEquals(map.values(), Collections.emptyList());
map.put(0l, "zero");
assertEquals(map.keys(), Lists.newArrayList(0l));
assertEquals(map.values(), Lists.newArrayList("zero"));
map.remove(0l);
assertEquals(map.keys(), Collections.emptyList());
assertEquals(map.values(), Collections.emptyList());
map.put(0l, "zero");
map.put(1l, "one");
map.put(2l, "two");
List<Long> keys = map.keys();
keys.sort(null);
assertEquals(keys, Lists.newArrayList(0l, 1l, 2l));
List<String> values = map.values();
values.sort(null);
assertEquals(values, Lists.newArrayList("one", "two", "zero"));
map.put(1l, "uno");
keys = map.keys();
keys.sort(null);
assertEquals(keys, Lists.newArrayList(0l, 1l, 2l));
values = map.values();
values.sort(null);
assertEquals(values, Lists.newArrayList("two", "uno", "zero"));
map.clear();
assertTrue(map.isEmpty());
}
@Test
public void testHashConflictWithDeletion() {
final int Buckets = 16;
ConcurrentOpenHashMap<Long, String> map = new ConcurrentOpenHashMap<>(Buckets, 1);
// Pick 2 keys that fall into the same bucket
long key1 = 1;
long key2 = 27;
int bucket1 = ConcurrentOpenHashMap.signSafeMod(ConcurrentOpenHashMap.hash(key1), Buckets);
int bucket2 = ConcurrentOpenHashMap.signSafeMod(ConcurrentOpenHashMap.hash(key2), Buckets);
assertEquals(bucket1, bucket2);
assertEquals(map.put(key1, "value-1"), null);
assertEquals(map.put(key2, "value-2"), null);
assertEquals(map.size(), 2);
assertEquals(map.remove(key1), "value-1");
assertEquals(map.size(), 1);
assertEquals(map.put(key1, "value-1-overwrite"), null);
assertEquals(map.size(), 2);
assertEquals(map.remove(key1), "value-1-overwrite");
assertEquals(map.size(), 1);
assertEquals(map.put(key2, "value-2-overwrite"), "value-2");
assertEquals(map.get(key2), "value-2-overwrite");
assertEquals(map.size(), 1);
assertEquals(map.remove(key2), "value-2-overwrite");
assertTrue(map.isEmpty());
}
@Test
public void testPutIfAbsent() {
ConcurrentOpenHashMap<Long, String> map = new ConcurrentOpenHashMap<>();
assertEquals(map.putIfAbsent(1l, "one"), null);
assertEquals(map.get(1l), "one");
assertEquals(map.putIfAbsent(1l, "uno"), "one");
assertEquals(map.get(1l), "one");
}
@Test
public void testComputeIfAbsent() {
ConcurrentOpenHashMap<Integer, Integer> map = new ConcurrentOpenHashMap<>(16, 1);
AtomicInteger counter = new AtomicInteger();
Function<Integer, Integer> provider = key -> counter.getAndIncrement();
assertEquals(map.computeIfAbsent(0, provider).intValue(), 0);
assertEquals(map.get(0).intValue(), 0);
assertEquals(map.computeIfAbsent(1, provider).intValue(), 1);
assertEquals(map.get(1).intValue(), 1);
assertEquals(map.computeIfAbsent(1, provider).intValue(), 1);
assertEquals(map.get(1).intValue(), 1);
assertEquals(map.computeIfAbsent(2, provider).intValue(), 2);
assertEquals(map.get(2).intValue(), 2);
}
@Test
public void testEqualsKeys() {
class T {
int value;
T(int value) {
this.value = value;
}
@Override
public int hashCode() {
return Integer.hashCode(value);
}
@Override
public boolean equals(Object obj) {
if (obj instanceof T) {
return value == ((T) obj).value;
}
return false;
}
}
ConcurrentOpenHashMap<T, String> map = new ConcurrentOpenHashMap<>();
T t1 = new T(1);
T t1_b = new T(1);
T t2 = new T(2);
assertEquals(t1, t1_b);
assertFalse(t1.equals(t2));
assertFalse(t1_b.equals(t2));
assertNull(map.put(t1, "t1"));
assertEquals(map.get(t1), "t1");
assertEquals(map.get(t1_b), "t1");
assertNull(map.get(t2));
assertEquals(map.remove(t1_b), "t1");
assertNull(map.get(t1));
assertNull(map.get(t1_b));
}
final static int Iterations = 1;
final static int ReadIterations = 1000;
final static int N = 1_000_000;
public void benchConcurrentOpenHashMap() throws Exception {
ConcurrentOpenHashMap<Long, String> map = new ConcurrentOpenHashMap<>(N, 1);
for (long i = 0; i < Iterations; i++) {
for (int j = 0; j < N; j++) {
map.put(i, "value");
}
for (long h = 0; h < ReadIterations; h++) {
for (int j = 0; j < N; j++) {
map.get(i);
}
}
for (long j = 0; j < N; j++) {
map.remove(i);
}
}
}
public void benchConcurrentHashMap() throws Exception {
ConcurrentHashMap<Long, String> map = new ConcurrentHashMap<Long, String>(N, 0.66f, 1);
for (long i = 0; i < Iterations; i++) {
for (int j = 0; j < N; j++) {
map.put(i, "value");
}
for (long h = 0; h < ReadIterations; h++) {
for (int j = 0; j < N; j++) {
map.get(i);
}
}
for (int j = 0; j < N; j++) {
map.remove(i);
}
}
}
void benchHashMap() throws Exception {
HashMap<Long, String> map = new HashMap<Long, String>(N, 0.66f);
for (long i = 0; i < Iterations; i++) {
for (int j = 0; j < N; j++) {
map.put(i, "value");
}
for (long h = 0; h < ReadIterations; h++) {
for (int j = 0; j < N; j++) {
map.get(i);
}
}
for (int j = 0; j < N; j++) {
map.remove(i);
}
}
}
public static void main(String[] args) throws Exception {
ConcurrentOpenHashMapTest t = new ConcurrentOpenHashMapTest();
long start = System.nanoTime();
t.benchHashMap();
long end = System.nanoTime();
System.out.println("HM: " + TimeUnit.NANOSECONDS.toMillis(end - start) + " ms");
start = System.nanoTime();
t.benchConcurrentHashMap();
end = System.nanoTime();
System.out.println("CHM: " + TimeUnit.NANOSECONDS.toMillis(end - start) + " ms");
start = System.nanoTime();
t.benchConcurrentOpenHashMap();
end = System.nanoTime();
System.out.println("CLHM: " + TimeUnit.NANOSECONDS.toMillis(end - start) + " ms");
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.s3a.auth;
import javax.annotation.Nullable;
import java.io.Closeable;
import java.io.IOException;
import java.net.URI;
import java.util.Arrays;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.EnvironmentVariableCredentialsProvider;
import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider;
import com.amazonaws.services.securitytoken.AWSSecurityTokenServiceClientBuilder;
import com.amazonaws.services.securitytoken.model.AWSSecurityTokenServiceException;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.util.Sets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.s3a.AWSCredentialProviderList;
import org.apache.hadoop.fs.s3a.CredentialInitializationException;
import org.apache.hadoop.fs.s3a.Retries;
import org.apache.hadoop.fs.s3a.S3AUtils;
import org.apache.hadoop.fs.s3a.Invoker;
import org.apache.hadoop.fs.s3a.S3ARetryPolicy;
import org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider;
import org.apache.hadoop.security.UserGroupInformation;
import static org.apache.hadoop.fs.s3a.Constants.*;
import static org.apache.hadoop.fs.s3a.S3AUtils.buildAWSProviderList;
/**
* Support IAM Assumed roles by instantiating an instance of
* {@code STSAssumeRoleSessionCredentialsProvider} from configuration
* properties, including wiring up the inner authenticator, and,
* unless overridden, creating a session name from the current user.
*
* Classname is used in configuration files; do not move.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class AssumedRoleCredentialProvider implements AWSCredentialsProvider,
    Closeable {

  private static final Logger LOG =
      LoggerFactory.getLogger(AssumedRoleCredentialProvider.class);

  /** Classname; referenced from configuration files, so do not change. */
  public static final String NAME
      = "org.apache.hadoop.fs.s3a.auth.AssumedRoleCredentialProvider";

  /** Error text raised when the role ARN option is unset. */
  public static final String E_NO_ROLE = "Unset property "
      + ASSUMED_ROLE_ARN;

  /** Inner provider which performs the actual AssumeRole calls. */
  private final STSAssumeRoleSessionCredentialsProvider stsProvider;

  /** Name of the STS session, from config or derived from the current user. */
  private final String sessionName;

  /** Duration of the assumed-role session, in seconds. */
  private final long duration;

  /** ARN of the IAM role being assumed. */
  private final String arn;

  /** Credential chain used to authenticate with STS itself. */
  private final AWSCredentialProviderList credentialsToSTS;

  /** Invoker wrapping credential requests with the S3A retry policy. */
  private final Invoker invoker;

  /**
   * Instantiate.
   * This calls {@link #getCredentials()} to fail fast on the inner
   * role credential retrieval.
   * @param fsUri possibly null URI of the filesystem.
   * @param conf configuration
   * @throws IOException on IO problems and some parameter checking
   * @throws IllegalArgumentException invalid parameters
   * @throws AWSSecurityTokenServiceException problems getting credentials
   */
  public AssumedRoleCredentialProvider(@Nullable URI fsUri, Configuration conf)
      throws IOException {

    arn = conf.getTrimmed(ASSUMED_ROLE_ARN, "");
    if (StringUtils.isEmpty(arn)) {
      throw new IOException(E_NO_ROLE);
    }

    // build up the base provider used to talk to STS; this class itself is
    // excluded from the chain to avoid recursive instantiation
    credentialsToSTS = buildAWSProviderList(fsUri, conf,
        ASSUMED_ROLE_CREDENTIALS_PROVIDER,
        Arrays.asList(
            SimpleAWSCredentialsProvider.class,
            EnvironmentVariableCredentialsProvider.class),
        Sets.newHashSet(this.getClass()));
    LOG.debug("Credentials to obtain role credentials: {}", credentialsToSTS);

    // then the STS binding
    sessionName = conf.getTrimmed(ASSUMED_ROLE_SESSION_NAME,
        buildSessionName());
    duration = conf.getTimeDuration(ASSUMED_ROLE_SESSION_DURATION,
        ASSUMED_ROLE_SESSION_DURATION_DEFAULT, TimeUnit.SECONDS);
    String policy = conf.getTrimmed(ASSUMED_ROLE_POLICY, "");

    LOG.debug("{}", this);
    STSAssumeRoleSessionCredentialsProvider.Builder builder
        = new STSAssumeRoleSessionCredentialsProvider.Builder(arn, sessionName);
    builder.withRoleSessionDurationSeconds((int) duration);
    if (StringUtils.isNotEmpty(policy)) {
      // an optional scope-down policy further restricts the role permissions
      LOG.debug("Scope down policy {}", policy);
      builder.withScopeDownPolicy(policy);
    }
    String endpoint = conf.getTrimmed(ASSUMED_ROLE_STS_ENDPOINT, "");
    String region = conf.getTrimmed(ASSUMED_ROLE_STS_ENDPOINT_REGION,
        ASSUMED_ROLE_STS_ENDPOINT_REGION_DEFAULT);
    AWSSecurityTokenServiceClientBuilder stsbuilder =
        STSClientFactory.builder(
            conf,
            fsUri != null ? fsUri.getHost() : "",
            credentialsToSTS,
            endpoint,
            region);
    // the STS client is not tracked for a shutdown in close(), because it
    // (currently) throws an UnsupportedOperationException in shutdown().
    builder.withStsClient(stsbuilder.build());

    // now build the provider
    stsProvider = builder.build();

    // to handle STS throttling by the AWS account, we
    // need to retry
    invoker = new Invoker(new S3ARetryPolicy(conf), this::operationRetried);

    // and force in a fail-fast check just to keep the stack traces less
    // convoluted
    getCredentials();
  }

  /**
   * Get credentials.
   * Retries (untranslated) via the invoker; STS throttling therefore
   * results in retried requests rather than an immediate failure.
   * @return the credentials
   * @throws AWSSecurityTokenServiceException if none could be obtained.
   */
  @Override
  @Retries.RetryRaw
  public AWSCredentials getCredentials() {
    try {
      return invoker.retryUntranslated("getCredentials",
          true,
          stsProvider::getCredentials);
    } catch (IOException e) {
      // this is in the signature of retryUntranslated;
      // its hard to see how this could be raised, but for
      // completeness, it is wrapped as an Amazon Client Exception
      // and rethrown.
      throw new CredentialInitializationException(
          "getCredentials failed: " + e,
          e);
    } catch (AWSSecurityTokenServiceException e) {
      LOG.error("Failed to get credentials for role {}",
          arn, e);
      throw e;
    }
  }

  /**
   * Propagate the refresh to the inner STS provider.
   */
  @Override
  public void refresh() {
    stsProvider.refresh();
  }

  /**
   * Propagate the close() call to the inner stsProvider and the
   * credential chain used to authenticate with STS.
   */
  @Override
  public void close() {
    S3AUtils.closeAutocloseables(LOG, stsProvider, credentialsToSTS);
  }

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder(
        "AssumedRoleCredentialProvider{");
    sb.append("role='").append(arn).append('\'');
    // fixed: previously emitted ", session'" (missing '='), producing
    // malformed output such as "session'name'"
    sb.append(", session='").append(sessionName).append('\'');
    sb.append(", duration=").append(duration);
    sb.append('}');
    return sb.toString();
  }

  /**
   * Build the session name from the current user's shortname.
   * @return a string for the session name.
   * @throws IOException failure to get the current user
   */
  static String buildSessionName() throws IOException {
    return sanitize(UserGroupInformation.getCurrentUser()
        .getShortUserName());
  }

  /**
   * Build a session name from the string, sanitizing it for the permitted
   * characters: any character outside [a-zA-Z0-9,.@-] is replaced by '-'.
   * @param session source session
   * @return a string for use in role requests.
   */
  @VisibleForTesting
  static String sanitize(String session) {
    StringBuilder r = new StringBuilder(session.length());
    for (char c: session.toCharArray()) {
      if ("abcdefghijklmnopqrstuvwxyz0123456789,.@-".contains(
          Character.toString(c).toLowerCase(Locale.ENGLISH))) {
        r.append(c);
      } else {
        r.append('-');
      }
    }
    return r.toString();
  }

  /**
   * Callback from {@link Invoker} when an operation is retried.
   * @param text text of the operation
   * @param ex exception
   * @param retries number of retries
   * @param idempotent is the method idempotent
   */
  public void operationRetried(
      String text,
      Exception ex,
      int retries,
      boolean idempotent) {
    if (retries == 0) {
      // log on the first retry attempt of the credential access.
      // At worst, this means one log entry every intermittent renewal
      // time.
      LOG.info("Retried {}", text);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.hbase.index.builder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.phoenix.hbase.index.Indexer;
import org.apache.phoenix.hbase.index.parallel.QuickFailingTaskRunner;
import org.apache.phoenix.hbase.index.parallel.Task;
import org.apache.phoenix.hbase.index.parallel.TaskBatch;
import org.apache.phoenix.hbase.index.parallel.ThreadPoolBuilder;
import org.apache.phoenix.hbase.index.parallel.ThreadPoolManager;
/**
* Manage the building of index updates from primary table updates.
* <p>
* Internally, parallelizes updates through a thread-pool to a delegate index builder. Underlying
* {@link IndexBuilder} <b>must be thread safe</b> for each index update.
*/
public class IndexBuildManager implements Stoppable {

  private static final Log LOG = LogFactory.getLog(IndexBuildManager.class);

  /** Delegate builder which performs the actual index-update computation. */
  private final IndexBuilder delegate;

  /** Task runner which fails the whole batch on the first task failure. */
  private QuickFailingTaskRunner pool;

  /** Set once {@link #stop(String)} has been called. */
  private boolean stopped;

  /**
   * Set the number of threads with which we can concurrently build index updates. Unused threads
   * will be released, but setting the number of threads too high could cause frequent swapping and
   * resource contention on the server - <i>tune with care</i>. However, if you are spending a lot
   * of time building index updates, it could be worthwhile to spend the time to tune this parameter
   * as it could lead to dramatic increases in speed.
   */
  public static final String NUM_CONCURRENT_INDEX_BUILDER_THREADS_CONF_KEY = "index.builder.threads.max";

  /** Default to a single thread. This is the safest course of action, but the slowest as well */
  private static final int DEFAULT_CONCURRENT_INDEX_BUILDER_THREADS = 10;

  /**
   * Amount of time to keep idle threads in the pool. After this time (seconds) we expire the
   * threads and will re-create them as needed, up to the configured max
   */
  private static final String INDEX_BUILDER_KEEP_ALIVE_TIME_CONF_KEY =
      "index.builder.threads.keepalivetime";

  /**
   * @param env environment in which <tt>this</tt> is running. Used to setup the
   *          {@link IndexBuilder} and executor
   * @throws IOException if an {@link IndexBuilder} cannot be correctly setup
   */
  public IndexBuildManager(RegionCoprocessorEnvironment env) throws IOException {
    this(getIndexBuilder(env), new QuickFailingTaskRunner(ThreadPoolManager.getExecutor(
        getPoolBuilder(env), env)));
  }

  /**
   * Instantiate the configured {@link IndexBuilder} implementation via reflection.
   * @param e coprocessor environment; supplies the configuration and region
   * @throws IOException if the builder cannot be instantiated or accessed
   */
  private static IndexBuilder getIndexBuilder(RegionCoprocessorEnvironment e) throws IOException {
    Configuration conf = e.getConfiguration();
    Class<? extends IndexBuilder> builderClass =
        conf.getClass(Indexer.INDEX_BUILDER_CONF_KEY, null, IndexBuilder.class);
    try {
      IndexBuilder builder = builderClass.newInstance();
      builder.setup(e);
      return builder;
    } catch (InstantiationException e1) {
      // preserve the reflection failure as the cause for diagnosis
      throw new IOException("Couldn't instantiate index builder:" + builderClass
          + ", disabling indexing on table " + e.getRegion().getTableDesc().getNameAsString(), e1);
    } catch (IllegalAccessException e1) {
      throw new IOException("Couldn't instantiate index builder:" + builderClass
          + ", disabling indexing on table " + e.getRegion().getTableDesc().getNameAsString(), e1);
    }
  }

  /**
   * Build the thread-pool configuration for this region server's index builders.
   */
  private static ThreadPoolBuilder getPoolBuilder(RegionCoprocessorEnvironment env) {
    String serverName = env.getRegionServerServices().getServerName().getServerName();
    return new ThreadPoolBuilder(serverName + "-index-builder", env.getConfiguration()).
        setCoreTimeout(INDEX_BUILDER_KEEP_ALIVE_TIME_CONF_KEY).
        setMaxThread(NUM_CONCURRENT_INDEX_BUILDER_THREADS_CONF_KEY,
          DEFAULT_CONCURRENT_INDEX_BUILDER_THREADS);
  }

  /**
   * Direct constructor, exposed for testing.
   * @param builder delegate that computes the index updates
   * @param pool runner used to parallelize the per-mutation work
   */
  public IndexBuildManager(IndexBuilder builder, QuickFailingTaskRunner pool) {
    this.delegate = builder;
    this.pool = pool;
  }

  /**
   * Compute the index updates for a batch of mutations, one parallel task
   * per mutation.
   * @param miniBatchOp the in-progress batch, forwarded to the delegate
   * @param mutations primary-table mutations to index
   * @return the combined index updates from all mutations
   * @throws Throwable the cause of the first failed task, or a
   *           {@link CancellationException} if the batch was cancelled
   */
  public Collection<Pair<Mutation, byte[]>> getIndexUpdate(
      MiniBatchOperationInProgress<Mutation> miniBatchOp,
      Collection<? extends Mutation> mutations) throws Throwable {
    // notify the delegate that we have started processing a batch
    this.delegate.batchStarted(miniBatchOp);

    // parallelize each mutation into its own task
    // each task is cancelable via two mechanisms: (1) underlying HRegion is closing (which would
    // fail lookups/scanning) and (2) by stopping this via the #stop method. Interrupts will only be
    // acknowledged on each thread before doing the actual lookup, but after that depends on the
    // underlying builder to look for the closed flag.
    TaskBatch<Collection<Pair<Mutation, byte[]>>> tasks =
        new TaskBatch<Collection<Pair<Mutation, byte[]>>>(mutations.size());
    for (final Mutation m : mutations) {
      tasks.add(new Task<Collection<Pair<Mutation, byte[]>>>() {

        @Override
        public Collection<Pair<Mutation, byte[]>> call() throws IOException {
          return delegate.getIndexUpdate(m);
        }

      });
    }
    List<Collection<Pair<Mutation, byte[]>>> allResults = null;
    try {
      allResults = pool.submitUninterruptible(tasks);
    } catch (CancellationException e) {
      throw e;
    } catch (ExecutionException e) {
      LOG.error("Found a failed index update!");
      // rethrow the underlying cause, not the executor wrapper
      throw e.getCause();
    }

    // we can only get here if we get successes from each of the tasks, so each of these must have a
    // correct result
    Collection<Pair<Mutation, byte[]>> results = new ArrayList<Pair<Mutation, byte[]>>();
    for (Collection<Pair<Mutation, byte[]>> result : allResults) {
      assert result != null : "Found an unsuccessful result, but didn't propagate a failure earlier";
      results.addAll(result);
    }

    return results;
  }

  /**
   * Compute the index updates for a single delete, synchronously.
   * @return the updates, or null if indexing is not enabled for the delete
   */
  public Collection<Pair<Mutation, byte[]>> getIndexUpdate(Delete delete) throws IOException {
    // all we get is a single update, so it would probably just go slower if we needed to queue it
    // up. It will increase underlying resource contention a little bit, but the mutation case is
    // far more common, so let's not worry about it for now.
    // short circuit so we don't waste time.
    if (!this.delegate.isEnabled(delete)) {
      return null;
    }

    return delegate.getIndexUpdate(delete);
  }

  /**
   * Compute index updates for rows filtered out during compaction/cleanup.
   */
  public Collection<Pair<Mutation, byte[]>> getIndexUpdateForFilteredRows(
      Collection<KeyValue> filtered) throws IOException {
    // this is run async, so we can take our time here
    return delegate.getIndexUpdateForFilteredRows(filtered);
  }

  /** Forward batch-completed notification to the delegate. */
  public void batchCompleted(MiniBatchOperationInProgress<Mutation> miniBatchOp) {
    delegate.batchCompleted(miniBatchOp);
  }

  /** Forward batch-started notification to the delegate. */
  public void batchStarted(MiniBatchOperationInProgress<Mutation> miniBatchOp)
      throws IOException {
    delegate.batchStarted(miniBatchOp);
  }

  /** @return true if indexing is enabled for the given mutation. */
  public boolean isEnabled(Mutation m) throws IOException {
    return delegate.isEnabled(m);
  }

  /** @return the batch id the delegate assigned to the mutation. */
  public byte[] getBatchId(Mutation m) {
    return delegate.getBatchId(m);
  }

  @Override
  public void stop(String why) {
    if (stopped) {
      return;
    }
    this.stopped = true;
    this.delegate.stop(why);
    this.pool.stop(why);
  }

  @Override
  public boolean isStopped() {
    return this.stopped;
  }

  /** Exposed for testing only. */
  public IndexBuilder getBuilderForTesting() {
    return this.delegate;
  }
}
| |
/*****************************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
****************************************************************************/
package org.apache.xmpbox.schema;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Iterator;
import java.util.List;
import javax.xml.XMLConstants;
import org.apache.xmpbox.XMPMetadata;
import org.apache.xmpbox.XmpConstants;
import org.apache.xmpbox.type.AbstractField;
import org.apache.xmpbox.type.AbstractSimpleProperty;
import org.apache.xmpbox.type.AbstractStructuredType;
import org.apache.xmpbox.type.ArrayProperty;
import org.apache.xmpbox.type.Attribute;
import org.apache.xmpbox.type.BadFieldValueException;
import org.apache.xmpbox.type.BooleanType;
import org.apache.xmpbox.type.Cardinality;
import org.apache.xmpbox.type.ComplexPropertyContainer;
import org.apache.xmpbox.type.DateType;
import org.apache.xmpbox.type.IntegerType;
import org.apache.xmpbox.type.TextType;
import org.apache.xmpbox.type.TypeMapping;
import org.apache.xmpbox.type.Types;
/**
* This class represents a metadata schema that can be stored in an XMP document. It handles all generic properties that
* are available. See subclasses for access to specific properties. MODIFIED TO INCLUDE OBJECT REPRESENTATION
*
*/
public class XMPSchema extends AbstractStructuredType
{
/**
 * Create a new blank schema that can be populated.
 *
 * @param metadata The parent XMP metadata that this schema will be part of.
 * @param namespaceURI The URI of the namespace, e.g. "http://ns.adobe.com/pdf/1.3/"
 * @param prefix The field prefix of the namespace.
 * @param name The property name.
 */
public XMPSchema(XMPMetadata metadata, String namespaceURI, String prefix, String name)
{
    super(metadata, namespaceURI, prefix, name);
    // Register this schema's own namespace/prefix pair on itself so the
    // pair is available when the schema is serialized.
    addNamespace(getNamespace(), getPrefix());
}
/**
 * Create a new blank schema that can be populated.
 *
 * Delegates to the full constructor with a null namespace URI, prefix
 * and name.
 *
 * @param metadata The parent XMP metadata that this schema will be part of.
 */
public XMPSchema(XMPMetadata metadata)
{
    this(metadata, null, null, null);
}
/**
 * Create a new blank schema that can be populated.
 *
 * Delegates to the full constructor with a null namespace URI and name.
 *
 * @param metadata The parent XMP metadata that this schema will be part of.
 * @param prefix The field prefix of the namespace.
 */
public XMPSchema(XMPMetadata metadata, String prefix)
{
    this(metadata, null, prefix, null);
}
/**
 * Create a new blank schema that can be populated.
 *
 * Delegates to the full constructor with a null property name.
 *
 * @param metadata The parent XMP metadata that this schema will be part of.
 * @param namespaceURI The URI of the namespace, e.g. "http://ns.adobe.com/pdf/1.3/"
 * @param prefix The field prefix of the namespace.
 */
public XMPSchema(XMPMetadata metadata, String namespaceURI, String prefix)
{
    this(metadata, namespaceURI, prefix, null);
}
/**
 * Retrieve a generic simple type property by name.
 *
 * @param qualifiedName
 *            Full qualified name of property wanted
 * @return The first property whose name matches, or null if none does
 */
public AbstractField getAbstractProperty(String qualifiedName)
{
    AbstractField match = null;
    for (AbstractField candidate : getContainer().getAllProperties())
    {
        if (candidate.getPropertyName().equals(qualifiedName))
        {
            match = candidate;
            break;
        }
    }
    return match;
}
/**
 * Get the RDF about attribute.
 *
 * @return The RDF 'about' attribute, or null if it has not been set.
 */
public Attribute getAboutAttribute()
{
    return getAttribute(XmpConstants.ABOUT_NAME);
}
/**
 * Get the RDF about value.
 *
 * @return The RDF 'about' value. If there is no rdf:about attribute, an empty string is returned.
 */
public String getAboutValue()
{
    Attribute aboutAttribute = getAttribute(XmpConstants.ABOUT_NAME);
    // PDFBOX-1685: a missing rdf:about is treated as the empty string
    return aboutAttribute == null ? "" : aboutAttribute.getValue();
}
/**
 * Set the RDF 'about' attribute.
 *
 * @param about
 *            the well-formed attribute; it must live in the RDF namespace
 *            and be named 'about'
 * @throws BadFieldValueException
 *             if the attribute is not a valid rdf:about attribute
 */
public void setAbout(Attribute about) throws BadFieldValueException
{
    boolean isAboutAttribute = XmpConstants.RDF_NAMESPACE.equals(about.getNamespace())
            && XmpConstants.ABOUT_NAME.equals(about.getName());
    if (!isAboutAttribute)
    {
        throw new BadFieldValueException("Attribute 'about' must be named 'rdf:about' or 'about'");
    }
    setAttribute(about);
}
/**
 * Set the RDF 'about' attribute from a plain string. Passing in null will
 * clear this attribute.
 *
 * @param about
 *            The new RDF about value, or null to remove the attribute.
 */
public void setAboutAsSimple(String about)
{
    if (about != null)
    {
        setAttribute(new Attribute(XmpConstants.RDF_NAMESPACE, XmpConstants.ABOUT_NAME, about));
    }
    else
    {
        // null clears the attribute entirely
        removeAttribute(XmpConstants.ABOUT_NAME);
    }
}
/**
 * Create (or remove) a simple property of the given type on this schema.
 *
 * A null value removes the first existing property with that name; a
 * non-null value replaces any existing property of the same name.
 *
 * @param type the simple type to instantiate
 * @param qualifiedName name of the property
 * @param propertyValue value, or null to delete the property
 */
private void setSpecifiedSimpleTypeProperty(Types type, String qualifiedName, Object propertyValue)
{
    if (propertyValue == null)
    {
        // A null value means the property is to be erased
        for (AbstractField child : getContainer().getAllProperties())
        {
            if (child.getPropertyName().equals(qualifiedName))
            {
                getContainer().removeProperty(child);
                return;
            }
        }
        // nothing to erase
        return;
    }

    AbstractSimpleProperty newProperty;
    try
    {
        TypeMapping tm = getMetadata().getTypeMapping();
        newProperty = tm.instanciateSimpleProperty(null, getPrefix(), qualifiedName,
                propertyValue, type);
    }
    catch (Exception e)
    {
        throw new IllegalArgumentException(
                "Failed to create property with the specified type given in parameters", e);
    }
    // attribute placement for simple property has been removed;
    // replace the first existing property of the same name, if any
    for (AbstractField child : getAllProperties())
    {
        if (child.getPropertyName().equals(qualifiedName))
        {
            removeProperty(child);
            break;
        }
    }
    addProperty(newProperty);
}
/**
 * Add a SimpleProperty to this schema, replacing any existing property
 * with the same name.
 *
 * @param prop
 *            The Property to add
 */
private void setSpecifiedSimpleTypeProperty(AbstractSimpleProperty prop)
{
    // attribute placement for simple property has been removed;
    // drop the first existing property of the same name before adding
    for (AbstractField existing : getAllProperties())
    {
        if (existing.getPropertyName().equals(prop.getPropertyName()))
        {
            removeProperty(existing);
            break;
        }
    }
    addProperty(prop);
}
/**
 * Set TextType property, replacing any existing property with the
 * same name.
 *
 * @param prop
 *            The text property to add
 */
public void setTextProperty(TextType prop)
{
    setSpecifiedSimpleTypeProperty(prop);
}
/**
* Set a simple text property on the schema.
*
* @param qualifiedName
* The name of the property, it must contain the namespace prefix, ie "pdf:Keywords"
* @param propertyValue
* The value for the property, can be any string. Passing null will remove the property.
*/
public void setTextPropertyValue(String qualifiedName, String propertyValue)
{
setSpecifiedSimpleTypeProperty(Types.Text, qualifiedName, propertyValue);
}
/**
* Set a simple text property on the schema, using the current prefix.
*
* @param simpleName
* the name of the property without prefix
* @param propertyValue
* The value for the property, can be any string. Passing null will remove the property.
*/
public void setTextPropertyValueAsSimple(String simpleName, String propertyValue)
{
this.setTextPropertyValue(simpleName, propertyValue);
}
/**
* Get a TextProperty Type from its name
*
* @param name The property name.
* @return The Text Type property wanted
*/
public TextType getUnqualifiedTextProperty(String name)
{
AbstractField prop = getAbstractProperty(name);
if (prop != null)
{
if (prop instanceof TextType)
{
return (TextType) prop;
}
else
{
throw new IllegalArgumentException("Property asked is not a Text Property");
}
}
return null;
}
/**
* Get the value of a simple text property.
*
* @param name The property name.
* @return The value of the text property or null if there is no value.
*
*/
public String getUnqualifiedTextPropertyValue(String name)
{
TextType tt = getUnqualifiedTextProperty(name);
return tt == null ? null : tt.getStringValue();
}
/**
* Get the Date property with its name
*
* @param qualifiedName
* The name of the property to get, it must include the namespace prefix, e.g. "pdf:Keywords".
* @return Date Type property
*
*/
public DateType getDateProperty(String qualifiedName)
{
AbstractField prop = getAbstractProperty(qualifiedName);
if (prop != null)
{
if (prop instanceof DateType)
{
return (DateType) prop;
}
else
{
throw new IllegalArgumentException("Property asked is not a Date Property");
}
}
return null;
}
/**
* Get a simple date property value on the schema, using the current prefix.
*
* @param simpleName
* the local name of the property to get
* @return The value of the property as a calendar.
*
*/
public Calendar getDatePropertyValueAsSimple(String simpleName)
{
return this.getDatePropertyValue(simpleName);
}
/**
* Get the value of the property as a date.
*
* @param qualifiedName
* The fully qualified property name for the date.
*
* @return The value of the property as a date.
*
*/
public Calendar getDatePropertyValue(String qualifiedName)
{
AbstractField prop = getAbstractProperty(qualifiedName);
if (prop != null)
{
if (prop instanceof DateType)
{
return ((DateType) prop).getValue();
}
else
{
throw new IllegalArgumentException("Property asked is not a Date Property");
}
}
return null;
}
/**
* Set a new DateProperty
*
* @param date
* The DateType Property
*/
public void setDateProperty(DateType date)
{
setSpecifiedSimpleTypeProperty(date);
}
/**
* Set a simple Date property on the schema, using the current prefix.
*
* @param simpleName
* the name of the property without prefix
* @param date
* The calendar value for the property, can be any string. Passing null will remove the property.
*/
public void setDatePropertyValueAsSimple(String simpleName, Calendar date)
{
this.setDatePropertyValue(simpleName, date);
}
/**
* Set the value of the property as a date.
*
* @param qualifiedName
* The fully qualified property name for the date.
* @param date
* The date to set, or null to clear.
*/
public void setDatePropertyValue(String qualifiedName, Calendar date)
{
setSpecifiedSimpleTypeProperty(Types.Date, qualifiedName, date);
}
/**
* Get a BooleanType property with its name
*
* @param qualifiedName
* the full qualified name of property wanted
* @return boolean Type property
*/
public BooleanType getBooleanProperty(String qualifiedName)
{
AbstractField prop = getAbstractProperty(qualifiedName);
if (prop != null)
{
if (prop instanceof BooleanType)
{
return (BooleanType) prop;
}
else
{
throw new IllegalArgumentException("Property asked is not a Boolean Property");
}
}
return null;
}
/**
* Get a simple boolean property value on the schema, using the current prefix.
*
* @param simpleName
* the local name of property wanted
* @return The value of the property as a boolean or null if the property does not exist.
*/
public Boolean getBooleanPropertyValueAsSimple(String simpleName)
{
return this.getBooleanPropertyValue(simpleName);
}
/**
* Get the value of the property as a Boolean. If you want to use this value
* like a condition, you <i>must</i> do a null check before.
*
* @param qualifiedName The fully qualified property name for the Boolean.
*
* @return The value of the property as a Boolean, or null if the property
* does not exist.
*/
public Boolean getBooleanPropertyValue(String qualifiedName)
{
AbstractField prop = getAbstractProperty(qualifiedName);
if (prop != null)
{
if (prop instanceof BooleanType)
{
return ((BooleanType) prop).getValue();
}
else
{
throw new IllegalArgumentException("Property asked is not a Boolean Property");
}
}
return null;
}
/**
* Set a BooleanType property
*
* @param bool
* the booleanType property
*/
public void setBooleanProperty(BooleanType bool)
{
setSpecifiedSimpleTypeProperty(bool);
}
/**
* Set a simple Boolean property on the schema, using the current prefix.
*
* @param simpleName
* the name of the property without prefix
* @param bool
* The value for the property, can be any string. Passing null will remove the property.
*/
public void setBooleanPropertyValueAsSimple(String simpleName, Boolean bool)
{
this.setBooleanPropertyValue(simpleName, bool);
}
/**
* Set the value of the property as a boolean.
*
* @param qualifiedName
* The fully qualified property name for the boolean.
* @param bool
* The boolean to set, or null to clear.
*/
public void setBooleanPropertyValue(String qualifiedName, Boolean bool)
{
setSpecifiedSimpleTypeProperty(Types.Boolean, qualifiedName, bool);
}
/**
* Get the Integer property with its name
*
* @param qualifiedName
* the full qualified name of property wanted
* @return Integer Type property
*/
public IntegerType getIntegerProperty(String qualifiedName)
{
AbstractField prop = getAbstractProperty(qualifiedName);
if (prop != null)
{
if (prop instanceof IntegerType)
{
return ((IntegerType) prop);
}
else
{
throw new IllegalArgumentException("Property asked is not an Integer Property");
}
}
return null;
}
/**
* Get a simple integer property value on the schema, using the current prefix.
*
* @param simpleName
* the local name of property wanted
* @return The value of the property as an integer.
*/
public Integer getIntegerPropertyValueAsSimple(String simpleName)
{
return this.getIntegerPropertyValue(simpleName);
}
/**
* Get the value of the property as an integer.
*
* @param qualifiedName
* The fully qualified property name for the integer.
*
* @return The value of the property as an integer.
*/
public Integer getIntegerPropertyValue(String qualifiedName)
{
AbstractField prop = getAbstractProperty(qualifiedName);
if (prop != null)
{
if (prop instanceof IntegerType)
{
return ((IntegerType) prop).getValue();
}
else
{
throw new IllegalArgumentException("Property asked is not an Integer Property");
}
}
return null;
}
/**
* Add an integerProperty
*
* @param prop
* The Integer Type property
*/
public void setIntegerProperty(IntegerType prop)
{
setSpecifiedSimpleTypeProperty(prop);
}
/**
* Set a simple Integer property on the schema, using the current prefix.
*
* @param simpleName
* the name of the property without prefix
* @param intValue
* The value for the property, can be any string. Passing null will remove the property.
*/
public void setIntegerPropertyValueAsSimple(String simpleName, Integer intValue)
{
this.setIntegerPropertyValue(simpleName, intValue);
}
/**
* Set the value of the property as an integer.
*
* @param qualifiedName
* The fully qualified property name for the integer.
* @param intValue
* The int to set, or null to clear.
*/
public void setIntegerPropertyValue(String qualifiedName, Integer intValue)
{
setSpecifiedSimpleTypeProperty(Types.Integer, qualifiedName, intValue);
}
/**
* Generic array property removing
*
* @param fieldValue
* the field value
*/
private void removeUnqualifiedArrayValue(String arrayName, String fieldValue)
{
ArrayProperty array = (ArrayProperty) getAbstractProperty(arrayName);
if (array != null)
{
ArrayList<AbstractField> toDelete = new ArrayList<AbstractField>();
Iterator<AbstractField> it = array.getContainer().getAllProperties().iterator();
while (it.hasNext())
{
AbstractSimpleProperty tmp = (AbstractSimpleProperty) it.next();
if (tmp.getStringValue().equals(fieldValue))
{
toDelete.add(tmp);
}
}
Iterator<AbstractField> eraseProperties = toDelete.iterator();
while (eraseProperties.hasNext())
{
array.getContainer().removeProperty(eraseProperties.next());
}
}
}
/**
* Remove all matching entries with the given value from the bag.
*
* @param bagName The bag name.
* @param bagValue
* The value to remove from the bagList.
*/
public void removeUnqualifiedBagValue(String bagName, String bagValue)
{
removeUnqualifiedArrayValue(bagName, bagValue);
}
/**
* add a bag value property on the schema, using the current prefix.
*
* @param simpleName
* the local name of property
* @param bagValue
* the string value to add
*/
public void addBagValueAsSimple(String simpleName, String bagValue)
{
this.internalAddBagValue(simpleName, bagValue);
}
private void internalAddBagValue(String qualifiedBagName, String bagValue)
{
ArrayProperty bag = (ArrayProperty) getAbstractProperty(qualifiedBagName);
TextType li = createTextType(XmpConstants.LIST_NAME, bagValue);
if (bag != null)
{
bag.getContainer().addProperty(li);
}
else
{
ArrayProperty newBag = createArrayProperty(qualifiedBagName, Cardinality.Bag);
newBag.getContainer().addProperty(li);
addProperty(newBag);
}
}
/**
* Add an entry to a bag property.
*
* @param simpleName
* The name of the bag without the namespace prefix
* @param bagValue
* The value to add to the bagList.
*/
public void addQualifiedBagValue(String simpleName, String bagValue)
{
internalAddBagValue(simpleName, bagValue);
}
/**
* Get all the values of the bag property. This will return a list of java.lang.String objects, this is a read-only
* list.
*
* @param bagName The bag name.
* @return All values of the bag property in a list.
*/
public List<String> getUnqualifiedBagValueList(String bagName)
{
ArrayProperty array = (ArrayProperty) getAbstractProperty(bagName);
if (array != null)
{
return array.getElementsAsString();
}
else
{
return null;
}
}
/**
* Remove all matching values from a sequence property.
*
* @param qualifiedSeqName
* The name of the sequence property. It must include the namespace prefix, e.g. "pdf:Keywords".
* @param seqValue
* The value to remove from the list.
*/
public void removeUnqualifiedSequenceValue(String qualifiedSeqName, String seqValue)
{
removeUnqualifiedArrayValue(qualifiedSeqName, seqValue);
}
/**
* Generic method to remove a field from an array with an Elementable Object
*
* @param arrayName
* the name of the property concerned
* @param fieldValue
* the elementable field value
*/
public void removeUnqualifiedArrayValue(String arrayName, AbstractField fieldValue)
{
String qualifiedArrayName = arrayName;
ArrayProperty array = (ArrayProperty) getAbstractProperty(qualifiedArrayName);
if (array != null)
{
ArrayList<AbstractField> toDelete = new ArrayList<AbstractField>();
Iterator<AbstractField> it = array.getContainer().getAllProperties().iterator();
while (it.hasNext())
{
AbstractSimpleProperty tmp = (AbstractSimpleProperty) it.next();
if (tmp.equals(fieldValue))
{
toDelete.add(tmp);
}
}
Iterator<AbstractField> eraseProperties = toDelete.iterator();
while (eraseProperties.hasNext())
{
array.getContainer().removeProperty(eraseProperties.next());
}
}
}
/**
* Remove a value from a sequence property. This will remove all entries from the list.
*
* @param qualifiedSeqName
* The name of the sequence property. It must include the namespace prefix, e.g. "pdf:Keywords".
* @param seqValue
* The value to remove from the list.
*/
public void removeUnqualifiedSequenceValue(String qualifiedSeqName, AbstractField seqValue)
{
removeUnqualifiedArrayValue(qualifiedSeqName, seqValue);
}
/**
* Add a new value to a sequence property.
*
* @param simpleSeqName
* The name of the sequence property without the namespace prefix
* @param seqValue
* The value to add to the sequence.
*/
public void addUnqualifiedSequenceValue(String simpleSeqName, String seqValue)
{
String qualifiedSeqName = simpleSeqName;
ArrayProperty seq = (ArrayProperty) getAbstractProperty(qualifiedSeqName);
TextType li = createTextType(XmpConstants.LIST_NAME, seqValue);
if (seq != null)
{
seq.getContainer().addProperty(li);
}
else
{
ArrayProperty newSeq = createArrayProperty(simpleSeqName, Cardinality.Seq);
newSeq.getContainer().addProperty(li);
addProperty(newSeq);
}
}
/**
* Add a new value to a bag property.
*
* @param qualifiedSeqName
* The name of the sequence property, it must include the namespace prefix, e.g. "pdf:Keywords"
* @param seqValue
* The value to add to the bag.
*/
public void addBagValue(String qualifiedSeqName, AbstractField seqValue)
{
ArrayProperty bag = (ArrayProperty) getAbstractProperty(qualifiedSeqName);
if (bag != null)
{
bag.getContainer().addProperty(seqValue);
}
else
{
ArrayProperty newBag = createArrayProperty(qualifiedSeqName, Cardinality.Bag);
newBag.getContainer().addProperty(seqValue);
addProperty(newBag);
}
}
/**
* Add a new value to a sequence property.
*
* @param seqName
* The name of the sequence property, it must include the namespace prefix, e.g. "pdf:Keywords"
* @param seqValue
* The value to add to the sequence.
*/
public void addUnqualifiedSequenceValue(String seqName, AbstractField seqValue)
{
String qualifiedSeqName = seqName;
ArrayProperty seq = (ArrayProperty) getAbstractProperty(qualifiedSeqName);
if (seq != null)
{
seq.getContainer().addProperty(seqValue);
}
else
{
ArrayProperty newSeq = createArrayProperty(seqName, Cardinality.Seq);
newSeq.getContainer().addProperty(seqValue);
addProperty(newSeq);
}
}
/**
* Get all the values in a sequence property.
*
* @param seqName
* The name of the sequence property without namespace prefix.
*
* @return A read-only list of java.lang.String objects or null if the property does not exist.
*/
public List<String> getUnqualifiedSequenceValueList(String seqName)
{
ArrayProperty array = (ArrayProperty) getAbstractProperty(seqName);
if (array != null)
{
return array.getElementsAsString();
}
else
{
return null;
}
}
/**
* Remove a date sequence value from the list.
*
* @param seqName
* The name of the sequence property, it must include the namespace prefix, e.g. "pdf:Keywords"
* @param date
* The date to remove from the sequence property.
*/
public void removeUnqualifiedSequenceDateValue(String seqName, Calendar date)
{
String qualifiedSeqName = seqName;
ArrayProperty seq = (ArrayProperty) getAbstractProperty(qualifiedSeqName);
if (seq != null)
{
ArrayList<AbstractField> toDelete = new ArrayList<AbstractField>();
Iterator<AbstractField> it = seq.getContainer().getAllProperties().iterator();
while (it.hasNext())
{
AbstractField tmp = it.next();
if (tmp instanceof DateType && ((DateType) tmp).getValue().equals(date))
{
toDelete.add(tmp);
}
}
Iterator<AbstractField> eraseProperties = toDelete.iterator();
while (eraseProperties.hasNext())
{
seq.getContainer().removeProperty(eraseProperties.next());
}
}
}
/**
* Add a date sequence value to the list using the current prefix
*
* @param simpleName
* the local name of the property
* @param date
* the value to add
*/
public void addSequenceDateValueAsSimple(String simpleName, Calendar date)
{
addUnqualifiedSequenceDateValue(simpleName, date);
}
/**
* Add a date sequence value to the list.
*
* @param seqName
* The name of the sequence property, it must include the namespace prefix, e.g. "pdf:Keywords"
* @param date
* The date to add to the sequence property.
*/
public void addUnqualifiedSequenceDateValue(String seqName, Calendar date)
{
addUnqualifiedSequenceValue(
seqName,
getMetadata().getTypeMapping().createDate(null, XmpConstants.DEFAULT_RDF_LOCAL_NAME,
XmpConstants.LIST_NAME, date));
}
/**
* Get all the date values in a sequence property.
*
* @param seqName
* The name of the sequence property, it must include the namespace prefix, e.g. "pdf:Keywords".
*
* @return A read-only list of java.util.Calendar objects or null if the property does not exist.
*
*/
public List<Calendar> getUnqualifiedSequenceDateValueList(String seqName)
{
String qualifiedSeqName = seqName;
List<Calendar> retval = null;
ArrayProperty seq = (ArrayProperty) getAbstractProperty(qualifiedSeqName);
if (seq != null)
{
retval = new ArrayList<Calendar>();
for (AbstractField child : seq.getContainer().getAllProperties())
{
if (child instanceof DateType)
{
retval.add(((DateType) child).getValue());
}
}
}
return retval;
}
    /**
     * Method used to place the 'x-default' value first in a language-alternative
     * array, as required by the XMP specification.
     *
     * <p>If 'x-default' is already first, nothing is changed. Otherwise the
     * 'x-default' entry is removed, and all entries are re-added with
     * 'x-default' in front.</p>
     *
     * @param alt
     *            The property container to reorganize
     */
    public void reorganizeAltOrder(ComplexPropertyContainer alt)
    {
        Iterator<AbstractField> it = alt.getAllProperties().iterator();
        AbstractField xdefault = null;
        boolean xdefaultFound = false;
        // If alternatives contains x-default in first value
        // NOTE(review): getAttribute(LANG_NAME) may return null for entries without an
        // xml:lang attribute, which would NPE here — confirm entries always carry it
        if (it.hasNext() && it.next().getAttribute(XmpConstants.LANG_NAME).getValue().equals(XmpConstants.X_DEFAULT))
        {
            return;
        }
        // Find the xdefault definition and detach it from the container
        while (it.hasNext() && !xdefaultFound)
        {
            xdefault = it.next();
            if (xdefault.getAttribute(XmpConstants.LANG_NAME).getValue().equals(XmpConstants.X_DEFAULT))
            {
                alt.removeProperty(xdefault);
                xdefaultFound = true;
            }
        }
        if (xdefaultFound)
        {
            // rebuild the container: x-default first, then all remaining entries
            // in their original order
            it = alt.getAllProperties().iterator();
            ArrayList<AbstractField> reordered = new ArrayList<AbstractField>();
            ArrayList<AbstractField> toDelete = new ArrayList<AbstractField>();
            reordered.add(xdefault);
            while (it.hasNext())
            {
                AbstractField tmp = it.next();
                reordered.add(tmp);
                toDelete.add(tmp);
            }
            // empty the container, then re-add in the new order
            Iterator<AbstractField> eraseProperties = toDelete.iterator();
            while (eraseProperties.hasNext())
            {
                alt.removeProperty(eraseProperties.next());
            }
            it = reordered.iterator();
            while (it.hasNext())
            {
                alt.addProperty(it.next());
            }
        }
    }
/**
* Set the value of a multi-lingual property.
*
* @param name
* The name of the property, it must include the namespace prefix, e.g. "pdf:Keywords"
* @param language
* The language code of the value. If null then "x-default" is assumed.
* @param value
* The value of the property in the specified language.
*/
public void setUnqualifiedLanguagePropertyValue(String name, String language, String value)
{
String qualifiedName = name;
AbstractField property = getAbstractProperty(qualifiedName);
ArrayProperty arrayProp;
if (property != null)
{
// Analyzing content of property
if (property instanceof ArrayProperty)
{
arrayProp = (ArrayProperty) property;
// Try to find a definition
for (AbstractField child : arrayProp.getContainer().getAllProperties())
{
// try to find the same lang definition
if (child.getAttribute(XmpConstants.LANG_NAME).getValue().equals(language))
{
// the same language has been found
arrayProp.getContainer().removeProperty(child);
if (value != null)
{
TextType langValue = createTextType(XmpConstants.LIST_NAME, value);
langValue.setAttribute(new Attribute(XMLConstants.XML_NS_URI, XmpConstants.LANG_NAME,
language));
arrayProp.getContainer().addProperty(langValue);
}
reorganizeAltOrder(arrayProp.getContainer());
return;
}
}
// if no definition found, we add a new one
TextType langValue = createTextType(XmpConstants.LIST_NAME, value);
langValue.setAttribute(new Attribute(XMLConstants.XML_NS_URI, XmpConstants.LANG_NAME, language));
arrayProp.getContainer().addProperty(langValue);
reorganizeAltOrder(arrayProp.getContainer());
}
}
else
{
arrayProp = createArrayProperty(name, Cardinality.Alt);
TextType langValue = createTextType(XmpConstants.LIST_NAME, value);
langValue.setAttribute(new Attribute(XMLConstants.XML_NS_URI, XmpConstants.LANG_NAME, language));
arrayProp.getContainer().addProperty(langValue);
addProperty(arrayProp);
}
}
/**
* Get the value of a multi-lingual property.
*
* @param name
* The name of the property, without the namespace prefix.
* @param expectedLanguage
* The language code of the value. If null then "x-default" is assumed.
*
* @return The value of the language property.
*/
public String getUnqualifiedLanguagePropertyValue(String name, String expectedLanguage)
{
String language = (expectedLanguage != null) ? expectedLanguage : XmpConstants.X_DEFAULT;
AbstractField property = getAbstractProperty(name);
if (property != null)
{
if (property instanceof ArrayProperty)
{
ArrayProperty arrayProp = (ArrayProperty) property;
for (AbstractField child : arrayProp.getContainer().getAllProperties())
{
Attribute text = child.getAttribute(XmpConstants.LANG_NAME);
if (text != null && text.getValue().equals(language))
{
return ((TextType) child).getStringValue();
}
}
return null;
}
else
{
throw new IllegalArgumentException("The property '" + name + "' is not of Lang Alt type");
}
}
return null;
}
/**
* Get a list of all languages that are currently defined for a specific
* property.
*
* @param name The name of the property, it must include the namespace
* prefix, e.g. "pdf:Keywords".
*
* @return A list of all languages, this will return an non-null empty list
* if none have been defined, and null if the property doesn't exist.
*/
public List<String> getUnqualifiedLanguagePropertyLanguagesValue(String name)
{
AbstractField property = getAbstractProperty(name);
if (property != null)
{
if (property instanceof ArrayProperty)
{
List<String> retval = new ArrayList<String>();
ArrayProperty arrayProp = (ArrayProperty) property;
for (AbstractField child : arrayProp.getContainer().getAllProperties())
{
Attribute text = child.getAttribute(XmpConstants.LANG_NAME);
if (text != null)
{
retval.add(text.getValue());
}
else
{
retval.add(XmpConstants.X_DEFAULT);
}
}
return retval;
}
else
{
throw new IllegalArgumentException("The property '" + name + "' is not of Lang Alt type");
}
}
// no property with that name
return null;
}
    /**
     * A basic schema merge: array properties (bags and sequences) are merged
     * element by element; every other property and attribute of the same
     * namespace/prefix is copied over, replacing existing values.
     *
     * @param xmpSchema
     *            The schema to merge. Must be of exactly the same class as this schema.
     * @throws IOException
     *             If the schemas are not of the same type.
     */
    public void merge(XMPSchema xmpSchema) throws IOException
    {
        if (!xmpSchema.getClass().equals(this.getClass()))
        {
            throw new IOException("Can only merge schemas of the same type.");
        }
        // copy attributes that belong to this schema's namespace
        for (Attribute att : xmpSchema.getAllAttributes())
        {
            if (att.getNamespace().equals(getNamespace()))
            {
                setAttribute(att);
            }
        }
        String analyzedPropQualifiedName;
        for (AbstractField child : xmpSchema.getContainer().getAllProperties())
        {
            if (child.getPrefix().equals(getPrefix()))
            {
                if (child instanceof ArrayProperty)
                {
                    // merge array properties value by value into the matching local array
                    analyzedPropQualifiedName = child.getPropertyName();
                    for (AbstractField tmpEmbeddedProperty : getAllProperties())
                    {
                        if (tmpEmbeddedProperty instanceof ArrayProperty &&
                            tmpEmbeddedProperty.getPropertyName().equals(analyzedPropQualifiedName))
                        {
                            Iterator<AbstractField> itNewValues = ((ArrayProperty) child).getContainer().getAllProperties().iterator();
                            // NOTE(review): a true result (duplicate value encountered) aborts the
                            // entire merge, skipping any remaining properties of xmpSchema —
                            // looks suspicious; confirm this early return is intentional
                            if (mergeComplexProperty(itNewValues, (ArrayProperty) tmpEmbeddedProperty))
                            {
                                return;
                            }
                        }
                    }
                }
                else
                {
                    // non-array property: simply add it to this schema
                    addProperty(child);
                }
            }
        }
    }
    /**
     * Append the values supplied by itNewValues into the given array property,
     * skipping none until a duplicate is found.
     *
     * @param itNewValues iterator over the candidate values to merge (entries are cast to TextType)
     * @param arrayProperty the existing array property receiving the values
     * @return true as soon as a candidate value is already present in the array
     *         (merging stops at that point); false when every candidate was appended
     */
    private boolean mergeComplexProperty(Iterator<AbstractField> itNewValues, ArrayProperty arrayProperty)
    {
        while (itNewValues.hasNext())
        {
            TextType tmpNewValue = (TextType) itNewValues.next();
            // linear scan of the existing values for an equal string
            Iterator<AbstractField> itOldValues = arrayProperty.getContainer().getAllProperties().iterator();
            while (itOldValues.hasNext())
            {
                TextType tmpOldValue = (TextType) itOldValues.next();
                if (tmpOldValue.getStringValue().equals(tmpNewValue.getStringValue()))
                {
                    // duplicate found: stop merging and signal the caller
                    return true;
                }
            }
            // value not present yet: append it
            arrayProperty.getContainer().addProperty(tmpNewValue);
        }
        return false;
    }
/**
* Get an AbstractField list corresponding to the content of an array
* property.
*
* @param name The property name whitout namespace.
* @return List of properties contained in the array property.
* @throws BadFieldValueException If the property with the requested name isn't an array.
*/
public List<AbstractField> getUnqualifiedArrayList(String name) throws BadFieldValueException
{
ArrayProperty array = null;
for (AbstractField child : getAllProperties())
{
if (child.getPropertyName().equals(name))
{
if (child instanceof ArrayProperty)
{
array = (ArrayProperty) child;
break;
}
throw new BadFieldValueException("Property asked is not an array");
}
}
if (array != null)
{
return new ArrayList<AbstractField>(array.getContainer().getAllProperties());
}
return null;
}
    /**
     * Create a simple property for this schema via the metadata's type mapping.
     *
     * @param param the local (unqualified) property name
     * @param value the property value
     * @return the newly created simple property
     */
    protected AbstractSimpleProperty instanciateSimple(String param, Object value)
    {
        // the concrete property type is resolved by the type mapping from this
        // schema's class and the property name — presumably from schema field
        // declarations; confirm in TypeMapping.instanciateSimpleField
        TypeMapping tm = getMetadata().getTypeMapping();
        return tm.instanciateSimpleField(getClass(), null, getPrefix(), param, value);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.lang3.event;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import org.apache.commons.lang3.Validate;
/**
* An EventListenerSupport object can be used to manage a list of event
* listeners of a particular type. The class provides
* {@link #addListener(Object)} and {@link #removeListener(Object)} methods
* for registering listeners, as well as a {@link #fire()} method for firing
* events to the listeners.
*
* <p/>
* To use this class, suppose you want to support ActionEvents. You would do:
* <code><pre>
* public class MyActionEventSource
* {
* private EventListenerSupport<ActionListener> actionListeners =
* EventListenerSupport.create(ActionListener.class);
*
* public void someMethodThatFiresAction()
* {
* ActionEvent e = new ActionEvent(this, ActionEvent.ACTION_PERFORMED, "somethingCool");
* actionListeners.fire().actionPerformed(e);
* }
* }
* </pre></code>
*
* Serializing an {@link EventListenerSupport} instance will result in any
* non-{@link Serializable} listeners being silently dropped.
*
* @param <L> the type of event listener that is supported by this proxy.
*
* @since 3.0
* @version $Id$
*/
public class EventListenerSupport<L> implements Serializable {
/** Serialization version */
private static final long serialVersionUID = 3593265990380473632L;
/**
* The list used to hold the registered listeners. This list is
* intentionally a thread-safe copy-on-write-array so that traversals over
* the list of listeners will be atomic.
*/
private List<L> listeners = new CopyOnWriteArrayList<L>();
/**
* The proxy representing the collection of listeners. Calls to this proxy
 * object will be sent to all registered listeners.
*/
private transient L proxy;
/**
* Empty typed array for #getListeners().
*/
private transient L[] prototypeArray;
/**
* Creates an EventListenerSupport object which supports the specified
* listener type.
*
* @param <T> the type of the listener interface
* @param listenerInterface the type of listener interface that will receive
* events posted using this class.
*
* @return an EventListenerSupport object which supports the specified
* listener type.
*
* @throws NullPointerException if <code>listenerInterface</code> is
* <code>null</code>.
* @throws IllegalArgumentException if <code>listenerInterface</code> is
* not an interface.
*/
public static <T> EventListenerSupport<T> create(final Class<T> listenerInterface) {
return new EventListenerSupport<T>(listenerInterface);
}
/**
* Creates an EventListenerSupport object which supports the provided
* listener interface.
*
* @param listenerInterface the type of listener interface that will receive
* events posted using this class.
*
* @throws NullPointerException if <code>listenerInterface</code> is
* <code>null</code>.
* @throws IllegalArgumentException if <code>listenerInterface</code> is
* not an interface.
*/
public EventListenerSupport(final Class<L> listenerInterface) {
this(listenerInterface, Thread.currentThread().getContextClassLoader());
}
/**
* Creates an EventListenerSupport object which supports the provided
* listener interface using the specified class loader to create the JDK
* dynamic proxy.
*
* @param listenerInterface the listener interface.
* @param classLoader the class loader.
*
* @throws NullPointerException if <code>listenerInterface</code> or
* <code>classLoader</code> is <code>null</code>.
* @throws IllegalArgumentException if <code>listenerInterface</code> is
* not an interface.
*/
public EventListenerSupport(final Class<L> listenerInterface, final ClassLoader classLoader) {
this();
Validate.notNull(listenerInterface, "Listener interface cannot be null.");
Validate.notNull(classLoader, "ClassLoader cannot be null.");
Validate.isTrue(listenerInterface.isInterface(), "Class {0} is not an interface",
listenerInterface.getName());
initializeTransientFields(listenerInterface, classLoader);
}
    /**
     * Create a new EventListenerSupport instance.
     * Serialization-friendly constructor: leaves the transient proxy and
     * prototype array uninitialized; the public constructor and readObject()
     * both follow it with initializeTransientFields().
     */
    private EventListenerSupport() {
    }
/**
* Returns a proxy object which can be used to call listener methods on all
* of the registered event listeners. All calls made to this proxy will be
* forwarded to all registered listeners.
*
* @return a proxy object which can be used to call listener methods on all
* of the registered event listeners
*/
public L fire() {
return proxy;
}
//**********************************************************************************************************************
// Other Methods
//**********************************************************************************************************************
/**
* Registers an event listener.
*
* @param listener the event listener (may not be <code>null</code>).
*
* @throws NullPointerException if <code>listener</code> is
* <code>null</code>.
*/
public void addListener(final L listener) {
Validate.notNull(listener, "Listener object cannot be null.");
listeners.add(listener);
}
/**
* Returns the number of registered listeners.
*
* @return the number of registered listeners.
*/
int getListenerCount() {
return listeners.size();
}
/**
* Unregisters an event listener.
*
* @param listener the event listener (may not be <code>null</code>).
*
* @throws NullPointerException if <code>listener</code> is
* <code>null</code>.
*/
public void removeListener(final L listener) {
Validate.notNull(listener, "Listener object cannot be null.");
listeners.remove(listener);
}
/**
* Get an array containing the currently registered listeners.
* Modification to this array's elements will have no effect on the
* {@link EventListenerSupport} instance.
* @return L[]
*/
public L[] getListeners() {
return listeners.toArray(prototypeArray);
}
    /**
     * Serialize this instance.
     *
     * <p>Only listeners that can actually be serialized are written out; any
     * listener that fails a trial serialization is silently dropped, per the
     * class contract.</p>
     *
     * @param objectOutputStream the output stream
     * @throws IOException if an IO error occurs
     */
    private void writeObject(final ObjectOutputStream objectOutputStream) throws IOException {
        final ArrayList<L> serializableListeners = new ArrayList<L>();
        // don't just rely on instanceof Serializable: perform a trial write of each
        // listener to a throwaway stream to prove it really serializes
        ObjectOutputStream testObjectOutputStream = new ObjectOutputStream(new ByteArrayOutputStream());
        for (final L listener : listeners) {
            try {
                testObjectOutputStream.writeObject(listener);
                serializableListeners.add(listener);
            } catch (final IOException exception) {
                //recreate test stream in case of indeterminate state
                testObjectOutputStream = new ObjectOutputStream(new ByteArrayOutputStream());
            }
        }
        /*
         * we can reconstitute everything we need from an array of our listeners,
         * which has the additional advantage of typically requiring less storage than a list:
         */
        objectOutputStream.writeObject(serializableListeners.toArray(prototypeArray));
    }
/**
* Deserialize.
* @param objectInputStream the input stream
* @throws IOException if an IO error occurs
* @throws ClassNotFoundException if the class cannot be resolved
*/
private void readObject(final ObjectInputStream objectInputStream) throws IOException, ClassNotFoundException {
@SuppressWarnings("unchecked") // Will throw CCE here if not correct
final
L[] listeners = (L[]) objectInputStream.readObject();
this.listeners = new CopyOnWriteArrayList<L>(listeners);
@SuppressWarnings("unchecked") // Will throw CCE here if not correct
final
Class<L> listenerInterface = (Class<L>) listeners.getClass().getComponentType();
initializeTransientFields(listenerInterface, Thread.currentThread().getContextClassLoader());
}
/**
 * Initialize transient fields: the zero-length prototype array (used by
 * {@link #getListeners()} and writeObject) and the dispatching proxy.
 *
 * @param listenerInterface the class of the listener interface
 * @param classLoader the class loader to be used
 */
private void initializeTransientFields(final Class<L> listenerInterface, final ClassLoader classLoader) {
    @SuppressWarnings("unchecked") // Will throw CCE here if not correct
    final
    L[] array = (L[]) Array.newInstance(listenerInterface, 0);
    this.prototypeArray = array;
    createProxy(listenerInterface, classLoader);
}

/**
 * Create the proxy object returned by {@link #fire()}; it implements only
 * the listener interface and delegates to the invocation handler.
 *
 * @param listenerInterface the class of the listener interface
 * @param classLoader the class loader to be used
 */
private void createProxy(final Class<L> listenerInterface, final ClassLoader classLoader) {
    proxy = listenerInterface.cast(Proxy.newProxyInstance(classLoader,
            new Class[] { listenerInterface }, createInvocationHandler()));
}

/**
 * Create the {@link InvocationHandler} responsible for broadcasting calls
 * to the managed listeners. Subclasses can override to provide custom behavior.
 *
 * @return ProxyInvocationHandler
 */
protected InvocationHandler createInvocationHandler() {
    return new ProxyInvocationHandler();
}
/**
 * An invocation handler used to dispatch the event(s) to all the listeners.
 */
protected class ProxyInvocationHandler implements InvocationHandler {

    /**
     * Propagates the method call to all registered listeners in place of
     * the proxy listener object. Note that the result is always
     * <code>null</code>, regardless of the listeners' return values.
     *
     * @param proxy the proxy object representing a listener on which the
     *        invocation was called.
     * @param method the listener method that will be called on all of the
     *        listeners.
     * @param args event arguments to propagate to the listeners.
     * @return the result of the method call
     * @throws Throwable if an error occurs
     */
    @Override
    public Object invoke(final Object proxy, final Method method, final Object[] args) throws Throwable {
        // Iteration over the copy-on-write list sees a stable snapshot even
        // if a listener (un)registers itself during dispatch.
        for (final L listener : listeners) {
            method.invoke(listener, args);
        }
        return null;
    }
}
}
| |
/**
* Copyright 2010 - 2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jetbrains.exodus.tree.patricia;
import org.jetbrains.annotations.NotNull;
import java.util.Iterator;
/**
 * A compact set of {@link ChildReference}s for a Patricia-tree node, kept
 * sorted by the unsigned value of each reference's {@code firstByte} so that
 * lookups can use binary search.
 *
 * <p>NOTE(review): not thread-safe; assumes mutation is confined to
 * tree-writing code — confirm against callers.
 */
final class ChildReferenceSet implements Iterable<ChildReference> {

    // Lower bound for the backing array length, to avoid churning tiny arrays.
    private static final int CAPACITY_THRESHOLD = 2;

    // Sorted by unsigned firstByte; null when no array is allocated at all.
    private ChildReference[] refs;
    // Number of live references at the start of refs.
    private int size;

    ChildReferenceSet() {
        clear(0);
    }

    /**
     * Empties the set and (re)allocates the backing array.
     *
     * @param capacity desired capacity; 0 drops the array entirely
     */
    void clear(final int capacity) {
        refs = capacity == 0 ? null : new ChildReference[Math.max(capacity, CAPACITY_THRESHOLD)];
        size = 0;
    }

    int size() {
        return size;
    }

    // Trusts the caller: size must match the number of populated slots.
    void setSize(int size) {
        this.size = size;
    }

    boolean isEmpty() {
        return size == 0;
    }

    /** Returns the reference whose first byte equals {@code b}, or null if absent. */
    ChildReference get(final byte b) {
        final int index = searchFor(b);
        return index < 0 ? null : refs[index];
    }

    /** Returns the reference with the greatest first byte, or null when empty. */
    ChildReference getRight() {
        final int size = this.size;
        return size > 0 ? refs[size - 1] : null;
    }

    /**
     * Binary-searches for the reference with first byte {@code b}, comparing
     * bytes as unsigned values. Follows the {@code Arrays.binarySearch}
     * convention: the matching index, or {@code -(insertionPoint) - 1} when
     * absent. A null slot is treated as greater than any key.
     */
    int searchFor(final byte b) {
        final ChildReference[] refs = this.refs;
        final int key = b & 0xff;
        int low = 0;
        int high = size - 1;
        while (low <= high) {
            // Unsigned shift keeps the midpoint correct even near overflow.
            int mid = (low + high + 1) >>> 1;
            final ChildReference midRef = refs[mid];
            final int cmp = midRef == null ? 1 : (midRef.firstByte & 0xff) - key;
            if (cmp < 0) {
                low = mid + 1;
            } else if (cmp > 0) {
                high = mid - 1;
            } else {
                return mid;
            }
        }
        return -low - 1;
    }

    ChildReference referenceAt(final int index) {
        return refs[index];
    }

    /** Appends {@code ref} at the right end; caller must keep the order sorted. */
    void putRight(@NotNull final ChildReference ref) {
        final int size = this.size;
        ensureCapacity(size + 1, size);
        refs[size] = ref;
        this.size = size + 1;
    }

    /** Inserts {@code ref} at {@code index}, shifting later references right. */
    void insertAt(final int index, @NotNull final ChildReferenceMutable ref) {
        final int size = this.size + 1;
        ensureCapacity(size, index);
        refs[index] = ref;
        this.size = size;
    }

    /** Overwrites the slot at {@code index}; size and ordering are unchanged. */
    void setAt(final int index, @NotNull final ChildReference ref) {
        refs[index] = ref;
    }

    /**
     * Removes the reference with first byte {@code b}.
     *
     * @return true if a reference was removed
     */
    boolean remove(final byte b) {
        final int index = searchFor(b);
        if (index < 0) {
            return false;
        }
        final int size = this.size;
        if (size == 1) {
            // Last element: drop the whole array instead of compacting it.
            refs = null;
        } else {
            final ChildReference[] refs = this.refs;
            final int refsToCopy = size - index - 1;
            if (refsToCopy > 0) {
                System.arraycopy(refs, index + 1, refs, index, refsToCopy);
            }
            // Clear the vacated tail slot so stale references are not retained.
            refs[index + refsToCopy] = null;
        }
        this.size = size - 1;
        return true;
    }

    @Override
    public ChildReferenceIterator iterator() {
        return iterator(0);
    }

    /** Returns an iterator positioned at {@code index}. */
    ChildReferenceIterator iterator(final int index) {
        return new ChildReferenceIterator(this, index);
    }

    /**
     * Ensures room for {@code capacity} references, opening a one-slot gap at
     * {@code insertPos}; the caller overwrites the gap slot immediately.
     */
    private void ensureCapacity(final int capacity, final int insertPos) {
        final ChildReference[] refs = this.refs;
        if (refs == null) {
            this.refs = new ChildReference[Math.max(capacity, CAPACITY_THRESHOLD)];
        } else {
            final int length = refs.length;
            if (length >= capacity) {
                // Enough room: shift the tail right by one, in place.
                if (insertPos < length - 1) {
                    System.arraycopy(refs, insertPos, refs, insertPos + 1, length - insertPos - 1);
                }
            } else {
                // Grow, copying both halves around the gap.
                this.refs = new ChildReference[Math.max(length + CAPACITY_THRESHOLD, capacity)];
                System.arraycopy(refs, 0, this.refs, 0, insertPos);
                System.arraycopy(refs, insertPos, this.refs, insertPos + 1, length - insertPos);
                // refs[insertPos] == null
            }
        }
    }

    /**
     * Iterator over the backing array that skips null slots. Unlike the
     * {@link Iterator} contract, {@link #next()} (and {@link #prev()}) return
     * null when exhausted instead of throwing; callers rely on that.
     */
    static final class ChildReferenceIterator implements Iterator<ChildReference> {

        // Snapshot of the set's state at construction time.
        private final ChildReference[] refs;
        private final int size;
        private int index;

        ChildReferenceIterator(ChildReferenceSet set, int index) {
            refs = set.refs;
            size = set.size;
            this.index = index;
        }

        @Override
        public boolean hasNext() {
            return index < size;
        }

        @Override
        public ChildReference next() {
            ChildReference ref;
            do {
                if (index >= size) {
                    return null;
                }
                ref = refs[index];
                ++index;
            } while (ref == null);
            return ref;
        }

        public boolean hasPrev() {
            return index > 0;
        }

        public ChildReference prev() {
            ChildReference ref;
            do {
                if (index <= 0) {
                    return null;
                }
                --index;
                ref = refs[index];
            } while (ref == null);
            return ref;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }

        int getIndex() {
            return index;
        }
    }
}
| |
/*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.tracker.importer;
import org.hamcrest.Matcher;
import org.hisp.dhis.Constants;
import org.hisp.dhis.dto.ApiResponse;
import org.hisp.dhis.dto.TrackerApiResponse;
import org.hisp.dhis.helpers.QueryParamsBuilder;
import org.hisp.dhis.tracker.TrackerNtiApiTest;
import org.json.JSONException;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.skyscreamer.jsonassert.JSONAssert;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Stream;
import static org.hamcrest.Matchers.*;
/**
 * Tracker export (GET) API tests: verifies field filtering, consistency
 * between single-item and collection endpoints, and attribute-based
 * filtering of tracked entities.
 *
 * @author Gintare Vilkelyte &lt;vilkelyte.gintare@gmail.com&gt;
 */
public class TrackerExportTests
    extends TrackerNtiApiTest
{
    // Identifiers of the entities imported once in beforeAll() and queried by all tests.
    private static String teiId;

    private static String enrollmentId;

    private static String eventId;

    private static String relationshipId;

    @BeforeAll
    public void beforeAll()
        throws Exception
    {
        loginActions.loginAsSuperUser();

        TrackerApiResponse response = importTeiWithEnrollmentAndEvent();

        teiId = response.validateSuccessfulImport().extractImportedTeis().get( 0 );
        enrollmentId = response.extractImportedEnrollments().get( 0 );
        relationshipId = response.extractImportedRelationships().get( 0 );
        eventId = response.extractImportedEvents().get( 0 );
    }

    /**
     * Arguments: endpoint, value of the {@code fields} parameter, and the
     * fields to validate in the response (null means the requested fields
     * themselves are validated).
     */
    private Stream<Arguments> shouldReturnRequestedFields()
    {
        return Stream.of( new Arguments[] {
            Arguments.of( "/trackedEntities/" + teiId,
                "enrollments.createdAt,relationships[from.trackedEntity,to.trackedEntity]",
                null ),
            Arguments.of( "/trackedEntities/" + teiId, "trackedEntity,enrollments", null ),
            Arguments.of( "/enrollments/" + enrollmentId, "program,status,enrolledAt", null ),
            // FIX: this entry previously supplied a spurious fourth argument
            // (null) although the test method declares only three parameters.
            Arguments.of( "/enrollments/" + enrollmentId, "**",
                "enrollment,updatedAt,createdAt,occurredAt,enrolledAt" ),
            Arguments.of( "/trackedEntities/" + teiId, "*",
                "attributes,enrollments[createdAt,events],trackedEntity,orgUnit" ),
            Arguments.of( "/trackedEntities/" + teiId, "**", "attributes,enrollments[createdAt,events]" ),
            Arguments.of( "/events/" + eventId, "enrollment,createdAt", null ),
            Arguments.of( "/relationships/" + relationshipId, "from,to.trackedEntity[*]", null )
        } );
    }

    @MethodSource()
    @ParameterizedTest
    public void shouldReturnRequestedFields( String endpoint, String fields, String fieldsToValidate )
    {
        ApiResponse response = trackerActions.get( endpoint + "?fields=" + fields );

        response.validate()
            .statusCode( 200 );

        List<String> fieldList = fieldsToValidate == null ? splitFields( fields ) : splitFields( fieldsToValidate );

        // Every requested field must be present, non-null and non-empty.
        fieldList.forEach(
            p -> {
                response.validate()
                    .body( p, allOf( not( nullValue() ), not( contains( nullValue() ) ), not( emptyIterable() ) ) );
            } );
    }

    @Test
    public void singleTeiAndCollectionTeiShouldReturnSameResult()
        throws JSONException
    {
        TrackerApiResponse trackedEntity = trackerActions.getTrackedEntity( "Kj6vYde4LHh",
            new QueryParamsBuilder()
                .add( "fields", "*" )
                .add( "includeAllAttributes", "true" ) );

        TrackerApiResponse trackedEntities = trackerActions.getTrackedEntities( new QueryParamsBuilder()
            .add( "fields", "*" )
            .add( "includeAllAttributes", "true" )
            .add( "trackedEntity", "Kj6vYde4LHh" )
            .add( "orgUnit", "O6uvpzGd5pu" ) );

        // Lenient comparison: extra fields in either representation are tolerated.
        JSONAssert.assertEquals( trackedEntity.getBody().toString(), trackedEntities.extractJsonObject( "instances[0]" ).toString(),
            false );
    }

    /**
     * Splits a fields expression into individual JSON paths, expanding
     * bracketed sub-selections (e.g. {@code a[b,c]} becomes {@code a.b, a.c})
     * and collapsing {@code a[*]} to just {@code a}.
     */
    private List<String> splitFields( String fields )
    {
        List<String> split = new ArrayList<>();

        // separate fields using comma delimiter, skipping commas within []
        Arrays.stream( fields.split( "(?![^)(]*\\([^)(]*?\\)\\)),(?![^\\[]*\\])" ) ).forEach( field -> {
            if ( field.contains( "[" ) )
            {
                for ( String s : field.substring( field.indexOf( "[" ) + 1, field.indexOf( "]" ) ).split( "," ) )
                {
                    if ( s.equalsIgnoreCase( "*" ) )
                    {
                        split.add( field.substring( 0, field.indexOf( "[" ) ) );
                        return;
                    }

                    split.add( field.substring( 0, field.indexOf( "[" ) ) + "." + s );
                }

                return;
            }

            split.add( field );
        } );

        return split;
    }

    @Test
    public void shouldReturnSingleTeiGivenFilter()
    {
        trackerActions.get( "trackedEntities?orgUnit=O6uvpzGd5pu&program=f1AyMswryyQ&filter=kZeSYCgaHTk:in:Bravo" )
            .validate()
            .statusCode( 200 )
            .body( "instances.findAll { it.trackedEntity == 'Kj6vYde4LHh' }.size()", is( 1 ) )
            .body( "instances.attributes.flatten().findAll { it.attribute == 'kZeSYCgaHTk' }.value", everyItem( is( "Bravo" ) ) );
    }

    /** Arguments: filter operator, search criteria, matcher for every returned value. */
    Stream<Arguments> shouldReturnTeisMatchingAttributeCriteria()
    {
        return Stream.of(
            Arguments.of( "like", "av", containsString( "av" ) ),
            Arguments.of( "sw", "Te", startsWith( "Te" ) ),
            Arguments.of( "ew", "AVO", endsWith( "avo" ) ),
            Arguments.of( "ew", "Bravo", endsWith( "Bravo" ) ),
            Arguments.of( "in", "Bravo", equalTo( "Bravo" ) ) );
    }

    @MethodSource()
    @ParameterizedTest
    public void shouldReturnTeisMatchingAttributeCriteria( String operator, String searchCriteria, Matcher everyItemMatcher )
    {
        QueryParamsBuilder queryParamsBuilder = new QueryParamsBuilder()
            .add( "orgUnit", "O6uvpzGd5pu" )
            .add( "program", Constants.TRACKER_PROGRAM_ID )
            .add( "attribute", String.format( "kZeSYCgaHTk:%s:%s", operator, searchCriteria ) );

        trackerActions.getTrackedEntities( queryParamsBuilder )
            .validate().statusCode( 200 )
            .body( "instances", hasSize( greaterThanOrEqualTo( 1 ) ) )
            .body( "instances.attributes.flatten().findAll { it.attribute == 'kZeSYCgaHTk' }.value", everyItem( everyItemMatcher ) );
    }

    @Test
    public void shouldReturnSingleTeiGivenFilterWhileSkippingPaging()
    {
        trackerActions.get( "trackedEntities?skipPaging=true&orgUnit=O6uvpzGd5pu&program=f1AyMswryyQ&filter=kZeSYCgaHTk:in:Bravo" )
            .validate()
            .statusCode( 200 )
            .body( "instances.findAll { it.trackedEntity == 'Kj6vYde4LHh' }.size()", is( 1 ) )
            .body( "instances.attributes.flatten().findAll { it.attribute == 'kZeSYCgaHTk' }.value", everyItem( is( "Bravo" ) ) );
    }
}
| |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.math.matrix;
import org.apache.commons.lang.Validate;
import com.opengamma.util.ArgumentChecker;
/**
 * A minimal implementation of a 2D matrix of doubles, stored in row-major
 * form. Dimensions are fixed at construction; note that {@link #getData()}
 * exposes the internal array (use {@link #toArray()} for an independent copy).
 */
public class DoubleMatrix2D implements Matrix<Double> {
  private final double[][] _data;
  private final int _rows;
  private final int _columns;
  private final int _elements;
  /** * Empty 2D matrix */
  public static final DoubleMatrix2D EMPTY_MATRIX = new DoubleMatrix2D(new double[0][0]);

  /**
   * @deprecated
   * Does not copy data on constructions. Do not use.
   * @param data The data
   * @return A matrix
   */
  @Deprecated
  public static DoubleMatrix2D noCopy(final double[][] data) {
    return new DoubleMatrix2D(data, false);
  }

  /**
   * Sets up an empty (all-zero) matrix.
   * @param rows Number of rows, must be positive
   * @param columns Number of columns, must be positive
   */
  public DoubleMatrix2D(final int rows, final int columns) {
    Validate.isTrue(rows > 0, "row number cannot be negative or zero");
    Validate.isTrue(columns > 0, "column number cannot be negative or zero");
    _rows = rows;
    _columns = columns;
    _data = new double[_rows][_columns];
    _elements = _rows * _columns;
  }

  // REVIEW could do with a constructor that does NOT copy the data
  /**
   * Copies the supplied data into a new matrix.
   * @param data The data, not null. The data is expected in row-column form.
   * @throws IllegalArgumentException If the matrix is not rectangular
   */
  public DoubleMatrix2D(final double[][] data) {
    Validate.notNull(data);
    if (data.length == 0) {
      _data = new double[0][0];
      _elements = 0;
      _rows = 0;
      _columns = 0;
    } else {
      _rows = data.length;
      _columns = data[0].length;
      _data = new double[_rows][_columns];
      for (int i = 0; i < _rows; i++) {
        System.arraycopy(data[i], 0, _data[i], 0, data[i].length);
      }
      _elements = _rows * _columns;
    }
  }

  /**
   * Unboxes and copies the supplied data into a new matrix.
   * @param data The data, not null. The data is expected in row-column form.
   * @throws IllegalArgumentException If the matrix is not rectangular
   */
  public DoubleMatrix2D(final Double[][] data) {
    Validate.notNull(data);
    if (data.length == 0) {
      _data = new double[0][0];
      _elements = 0;
      _rows = 0;
      _columns = 0;
    } else {
      _rows = data.length;
      _columns = data[0].length;
      _data = new double[_rows][_columns];
      for (int i = 0; i < _rows; i++) {
        for (int j = 0; j < _columns; j++) {
          _data[i][j] = data[i][j];
        }
      }
      _elements = _rows * _columns;
    }
  }

  // No-copy constructor backing the deprecated noCopy(...) factory; the unused
  // boolean only disambiguates the overload.
  private DoubleMatrix2D(final double[][] data, @SuppressWarnings("unused") final boolean copy) {
    _rows = data.length;
    _columns = data[0].length;
    _elements = _rows * _columns;
    _data = data;
  }

  /**
   * Returns the row for a particular index. The returned vector wraps the
   * internal row array (DoubleMatrix1D construction semantics permitting).
   * @param index The index
   * @return The row
   */
  public DoubleMatrix1D getRowVector(final int index) {
    return new DoubleMatrix1D(_data[index]);
  }

  /**
   * Returns the column for a particular index, copied into a new vector.
   * @param index The index
   * @return The column
   */
  public DoubleMatrix1D getColumnVector(final int index) {
    final double[] res = new double[_rows];
    for (int i = 0; i < _rows; i++) {
      res[i] = _data[i][index];
    }
    return new DoubleMatrix1D(res);
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public Double getEntry(final int... index) {
    ArgumentChecker.notNull(index, "indices");
    ArgumentChecker.isTrue(index[0] < _data.length, "x index {} is greater than length of array {}", index[0], _data.length);
    ArgumentChecker.isTrue(index[1] < _data[0].length, "y index {} is greater than length of array {}", index[1], _data[0].length);
    return _data[index[0]][index[1]];
  }

  /**
   * Returns the underlying matrix data. If this is changed so is the matrix.
   * @see #toArray to get a copy of data
   * @return An array of arrays containing the matrix elements
   */
  public double[][] getData() {
    return _data;
  }

  /**
   * Convert the matrix to an array of double arrays.
   * As its elements are copied, the array is independent from the matrix data.
   * @return An array of arrays containing a copy of matrix elements
   */
  public double[][] toArray() {
    // Deep copy row by row (previously routed through a temporary matrix).
    final double[][] copy = new double[_rows][];
    for (int i = 0; i < _rows; i++) {
      copy[i] = _data[i].clone();
    }
    return copy;
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public int getNumberOfElements() {
    return _elements;
  }

  /**
   * @return The number of rows in this matrix
   */
  public int getNumberOfRows() {
    return _rows;
  }

  /**
   * @return The number of columns in this matrix
   */
  public int getNumberOfColumns() {
    return _columns;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + _columns;
    result = prime * result + _rows;
    // Only the first few elements contribute, keeping hashing cheap for
    // large matrices; the labeled break replaces a duplicated flag check.
    int count = 0;
    outer:
    for (int i = 0; i < _rows; i++) {
      for (int j = 0; j < _columns; j++) {
        // Double.hashCode(d) yields the same value as Double.valueOf(d).hashCode()
        // without boxing.
        result = prime * result + Double.hashCode(_data[i][j]);
        if (count == 10) {
          break outer;
        }
        count++;
      }
    }
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final DoubleMatrix2D other = (DoubleMatrix2D) obj;
    if (_columns != other._columns) {
      return false;
    }
    if (_rows != other._rows) {
      return false;
    }
    // Bit-wise comparison: distinguishes +0.0/-0.0 and treats NaN as equal
    // to itself, consistent with Double.equals.
    for (int i = 0; i < _rows; i++) {
      for (int j = 0; j < _columns; j++) {
        if (Double.doubleToLongBits(_data[i][j]) != Double.doubleToLongBits(other._data[i][j])) {
          return false;
        }
      }
    }
    return true;
  }

  @Override
  public String toString() {
    // StringBuilder suffices for this unsynchronized, local use.
    final StringBuilder sb = new StringBuilder();
    for (final double[] d : _data) {
      for (int i = 0; i < d.length; i++) {
        sb.append(d[i]);
        sb.append(i == d.length - 1 ? "\n" : "\t");
      }
    }
    return sb.toString();
  }
}
| |
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.StringWriter;
import java.lang.System.Logger.Level;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.time.Duration;
import java.time.Instant;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Deque;
import java.util.EnumSet;
import java.util.List;
import java.util.Optional;
import java.util.Properties;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.StringJoiner;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Supplier;
import java.util.spi.ToolProvider;
import java.util.stream.Stream;
import jdk.jfr.Category;
import jdk.jfr.Event;
import jdk.jfr.Label;
import jdk.jfr.Name;
import jdk.jfr.Recording;
import jdk.jfr.StackTrace;
public record Bach(Options options, Logbook logbook, Paths paths, Tools tools) {
/**
 * JVM entry point: obtains the process-wide {@link Bach} instance configured
 * from the command-line arguments, runs it, and exits with its status code.
 */
public static void main(String... args) {
  var bach = Bach.instance(args);
  var code = bach.main();
  System.exit(code);
}
// Holds the lazily created, process-wide singleton instance.
private static final AtomicReference<Bach> INSTANCE = new AtomicReference<>();

/** Returns the singleton, creating it from {@code args} on first call. */
public static Bach instance(String... args) {
  return instance(() -> Bach.of(args));
}

/** Returns the singleton, creating it with custom output consumers on first call. */
public static Bach instance(Consumer<String> out, Consumer<String> err, String... args) {
  return instance(() -> Bach.of(out, err, args));
}

/**
 * Returns the existing singleton or atomically installs the instance produced
 * by {@code supplier}. Note: the supplier may run and its result be discarded
 * when another thread wins the compare-and-set race.
 */
public static Bach instance(Supplier<Bach> supplier) {
  var oldInstance = INSTANCE.get();
  if (oldInstance != null) return oldInstance;
  var newInstance = supplier.get();
  if (INSTANCE.compareAndSet(null, newInstance)) return newInstance;
  return INSTANCE.get();
}
/** Creates a new instance printing to System.out/System.err. */
public static Bach of(String... args) {
  return Bach.of(System.out::println, System.err::println, args);
}

/** Creates a new instance, parsing options and wiring all components. */
public static Bach of(Consumer<String> out, Consumer<String> err, String... args) {
  var options = Options.of(args);
  var logbook = Logbook.of(out, err, options);
  var paths = Paths.of(options);
  var tools = Tools.of(options);
  return new Bach(options, logbook, paths, tools);
}
/** Returns true when the given flag was set on the command line. */
boolean is(Flag flag) {
  return options.flags.contains(flag);
}

/** Prints {@code text} framed above and below by '=' lines of matching length. */
public void banner(String text) {
  var line = "=".repeat(text.length());
  logbook.out.accept("""
      %s
      %s
      %s""".formatted(line, text, line));
}
/** Runs the standard build sequence: banner, info, compile. */
public void build() {
  run(ToolCall.of("banner").with("BUILD"));
  run(ToolCall.of("info"));
  run(ToolCall.of("compile"));
}

/**
 * Prints the configured paths and, in verbose mode, the tool-finder tree
 * plus the versions of the foundation JDK tools.
 */
public void info() {
  var out = logbook.out();
  out.accept("bach.paths = %s".formatted(paths));
  if (!is(Flag.VERBOSE)) return;
  // Walk the (possibly composite) finder, indenting by nesting depth.
  tools
      .finder()
      .visit(
          0,
          (depth, finder) -> {
            var indent = " ".repeat(depth);
            out.accept(indent + finder.title());
            if (depth == 0) return;
            finder.findAll().stream()
                .sorted(Comparator.comparing(ToolProvider::name))
                .forEach(tool -> out.accept(indent + "  - " + tool.name()));
          });
  // First batch runs sequentially, second in parallel — NOTE(review):
  // presumably exercising both execution modes deliberately; confirm intent.
  Stream.of(
          ToolCall.of("jar").with("--version"),
          ToolCall.of("javac").with("--version"),
          ToolCall.of("javadoc").with("--version"))
      .sequential()
      .forEach(call -> run(call, true));
  Stream.of(
          ToolCall.of("jdeps").with("--version"),
          ToolCall.of("jlink").with("--version"),
          ToolCall.of("jmod").with("--version"),
          ToolCall.of("jpackage").with("--version"))
      .parallel()
      .forEach(call -> run(call, true));
}

/** Placeholder: compilation is not implemented yet. */
public void compile() {
  logbook.log(Level.WARNING, "TODO compile()");
}
/**
 * Runs all parsed tool calls while recording a JFR flight, then writes the
 * recording and a Markdown logbook into the output directory.
 *
 * @return 0 on success, -1 when any exception escapes
 */
int main() {
  try (var recording = new Recording()) {
    recording.start();
    logbook.log(Level.DEBUG, "BEGIN");
    options.calls().forEach(this::run);
    logbook.log(Level.DEBUG, "END.");
    recording.stop();
    var jfr = Files.createDirectories(paths.out()).resolve("bach-logbook.jfr");
    recording.dump(jfr);
    var logfile = paths.out().resolve("bach-logbook.md");
    logbook.out.accept("-> %s".formatted(jfr.toUri()));
    logbook.out.accept("-> %s".formatted(logfile.toUri()));
    var duration = Duration.between(recording.getStartTime(), recording.getStopTime());
    // FIX: use the millisecond *part* (0-999, zero-padded to three digits) as
    // the fraction; Duration.toMillis() returns the TOTAL milliseconds and
    // produced output like "1.1234 seconds" for 1.234s.
    logbook.out.accept(
        "Run took %d.%03d seconds".formatted(duration.toSeconds(), duration.toMillisPart()));
    Files.write(logfile, logbook.toMarkdownLines());
    return 0;
  } catch (Exception exception) {
    logbook.log(Level.ERROR, exception.toString());
    return -1;
  }
}
/** Runs the given tool call, echoing captured output when verbose mode is on. */
public void run(ToolCall call) {
  run(call, is(Flag.VERBOSE));
}

/**
 * Runs the given tool call, recording a JFR {@code RunEvent} with its
 * arguments, exit code and captured output.
 *
 * @param call tool name and arguments; the named tool must be findable
 * @param verbose whether to echo captured output and a timing summary
 */
public void run(ToolCall call, boolean verbose) {
  var event = new RunEvent();
  event.name = call.name();
  event.args = String.join(" ", call.arguments());
  /* Log tool call as a single line */ {
    var line = new StringJoiner(" ");
    line.add(event.name);
    if (!event.args.isEmpty()) {
      // Truncate long argument lists unless verbose.
      var arguments =
          verbose || event.args.length() <= 50
              ? event.args
              : event.args.substring(0, 45) + "[...]";
      line.add(arguments);
    }
    logbook.log(Level.INFO, line.toString());
  }
  var start = Instant.now();
  var tool = tools.finder().find(call.name()).orElseThrow();
  var out = new StringWriter();
  var err = new StringWriter();
  var args = call.arguments().toArray(String[]::new);
  event.begin();
  event.code = tool.run(new PrintWriter(out), new PrintWriter(err), args);
  event.end();
  event.out = out.toString().strip();
  event.err = err.toString().strip();
  event.commit();
  if (verbose) {
    if (!event.out.isEmpty()) logbook.out().accept(event.out.indent(2).stripTrailing());
    if (!event.err.isEmpty()) logbook.err().accept(event.err.indent(2).stripTrailing());
    var duration = Duration.between(start, Instant.now());
    // FIX: toMillisPart() (0-999, "%03d") instead of total toMillis() for the
    // fractional part of the duration.
    var line =
        "%s ran %d.%03d seconds and returned code %d"
            .formatted(call.name(), duration.toSeconds(), duration.toMillisPart(), event.code);
    // Route the summary to err when the tool failed.
    var printer = event.code == 0 ? logbook.out() : logbook.err();
    printer.accept(line);
  }
}
/**
 * Well-known directories.
 *
 * @param root project root directory (from {@code --chroot})
 * @param out output directory (from {@code --destination}, already resolved against root)
 */
public record Paths(Path root, Path out) {
  public static Paths of(Options options) {
    return new Paths(options.__chroot(), options.__destination());
  }
}
/**
 * Tool discovery configuration: a composed finder that checks, in order,
 * properties-defined tools, Bach's built-in operations, and the JDK's own
 * tools (first match wins).
 */
public record Tools(ToolFinder finder) {
  public static Tools of(Options options) {
    return new Tools(
        ToolFinder.compose(
            ToolFinder.ofProperties(options.__chroot.resolve(".bach/tool-provider")),
            ToolFinder.of(
                new ToolFinder.Provider("banner", Tools::banner),
                new ToolFinder.Provider("build", Tools::build),
                new ToolFinder.Provider("compile", Tools::compile),
                new ToolFinder.Provider("info", Tools::info)),
            ToolFinder.ofSystem() //
            ));
  }

  // ToolProvider adapter: `banner TEXT...` prints TEXT framed by '=' lines.
  static int banner(PrintWriter out, PrintWriter err, String... args) {
    if (args.length == 0) {
      err.println("Usage: banner TEXT");
      return 1;
    }
    Bach.instance(out::println, err::println).banner(String.join(" ", args));
    return 0;
  }

  // ToolProvider adapter delegating to Bach.build().
  static int build(PrintWriter out, PrintWriter err, String... args) {
    Bach.instance(out::println, err::println).build();
    return 0;
  }

  // ToolProvider adapter delegating to Bach.compile().
  static int compile(PrintWriter out, PrintWriter err, String... args) {
    Bach.instance(out::println, err::println).compile();
    return 0;
  }

  // ToolProvider adapter delegating to Bach.info().
  static int info(PrintWriter out, PrintWriter err, String... args) {
    Bach.instance(out::println, err::println).info();
    return 0;
  }
}
/** Boolean command-line switches (see {@code Options.of}). */
enum Flag {
  VERBOSE
}
/**
 * Immutable command-line configuration.
 *
 * @param flags boolean switches such as {@code --verbose}
 * @param __logbook_threshold minimum level a message needs to be printed
 * @param __chroot root directory other paths are resolved against
 * @param __destination output directory, already resolved against the root
 * @param calls tool calls to run; at most one is parsed from the command line
 */
public record Options(
    Set<Flag> flags,
    Level __logbook_threshold,
    Path __chroot,
    Path __destination,
    List<ToolCall> calls) {

  /**
   * Parses arguments of the form {@code --flag}, {@code --key=value} or
   * {@code --key value}; the first non-option argument starts a tool call
   * that consumes all remaining arguments.
   */
  static Options of(String... args) {
    var flags = EnumSet.noneOf(Flag.class);
    var level = Level.INFO;
    var root = Path.of("");
    var destination = Path.of(".bach", "out");
    var arguments = new ArrayDeque<>(List.of(args));
    var calls = new ArrayList<ToolCall>();
    while (!arguments.isEmpty()) {
      var argument = arguments.removeFirst();
      if (argument.startsWith("--")) {
        if (argument.equals("--verbose")) {
          flags.add(Flag.VERBOSE);
          continue;
        }
        // Accept both "--key=value" and "--key value" spellings.
        var delimiter = argument.indexOf('=', 2);
        var key = delimiter == -1 ? argument : argument.substring(0, delimiter);
        var value = delimiter == -1 ? arguments.removeFirst() : argument.substring(delimiter + 1);
        if (key.equals("--logbook-threshold")) {
          level = Level.valueOf(value);
          continue;
        }
        if (key.equals("--chroot")) {
          root = Path.of(value).normalize();
          continue;
        }
        if (key.equals("--destination")) {
          destination = Path.of(value).normalize();
          continue;
        }
        throw new IllegalArgumentException("Unsupported option `%s`".formatted(key));
      }
      // First non-option argument: it and all remaining arguments form one call.
      calls.add(new ToolCall(argument, arguments.stream().toList()));
      break;
    }
    return new Options(
        Set.copyOf(flags), level, root, root.resolve(destination), List.copyOf(calls));
  }
}
/**
 * Records log events and routes printable messages to the out/err consumers.
 *
 * @param out consumer for messages of INFO severity and below
 * @param err consumer for messages above INFO severity
 * @param threshold minimum severity that is printed (all events are recorded regardless)
 * @param logs all recorded events, in arrival order
 */
public record Logbook(
    Consumer<String> out, Consumer<String> err, Level threshold, Deque<LogEvent> logs) {
  public static Logbook of(Consumer<String> out, Consumer<String> err, Options options) {
    return new Logbook(out, err, options.__logbook_threshold(), new ConcurrentLinkedDeque<>());
  }

  /** Records the message as a JFR event, printing it when it meets the threshold. */
  public void log(Level level, String message) {
    var event = new LogEvent();
    event.level = level.name();
    event.message = message;
    event.commit();
    logs.add(event);
    if (level.getSeverity() < threshold.getSeverity()) return;
    // INFO and below go to out; warnings and errors go to err.
    var consumer = level.getSeverity() <= Level.INFO.getSeverity() ? out : err;
    consumer.accept(message);
  }

  /** Renders all recorded events as a Markdown document, one line per event. */
  public List<String> toMarkdownLines() {
    try {
      var lines = new ArrayList<>(List.of("# Logbook"));
      lines.add("");
      lines.add("## Log Events");
      lines.add("");
      lines.add("```text");
      logs.forEach(log -> lines.add("[%c] %s".formatted(log.level.charAt(0), log.message)));
      lines.add("```");
      return List.copyOf(lines);
    } catch (Exception exception) {
      throw new RuntimeException("Failed to read recorded events?", exception);
    }
  }
}
/**
 * An immutable tool invocation: a tool name plus its argument list.
 * Every {@code with...} method returns a new instance; the receiver is
 * never modified.
 */
public record ToolCall(String name, List<String> arguments) {

  /** Creates a call, converting each argument via {@link Object#toString()}. */
  public static ToolCall of(String name, Object... arguments) {
    return switch (arguments.length) {
      case 0 -> new ToolCall(name, List.of());
      case 1 -> new ToolCall(name, List.of(arguments[0].toString()));
      default -> new ToolCall(name, List.of()).with(Stream.of(arguments));
    };
  }

  /** Returns a copy with every element of {@code objects} appended as a string. */
  public ToolCall with(Stream<?> objects) {
    var appended = objects.map(Object::toString);
    var combined = Stream.concat(arguments.stream(), appended).toList();
    return new ToolCall(name, combined);
  }

  /** Returns a copy with one argument appended. */
  public ToolCall with(Object argument) {
    return with(Stream.of(argument));
  }

  /** Returns a copy with a key, a value, and any trailing values appended. */
  public ToolCall with(String key, Object value, Object... values) {
    var extended = with(Stream.of(key, value));
    return values.length == 0 ? extended : extended.with(Stream.of(values));
  }

  /** Appends all files below the working directory matching {@code glob}. */
  public ToolCall withFindFiles(String glob) {
    return withFindFiles(Path.of(""), glob);
  }

  /** Appends all files below {@code start} matching the given glob pattern. */
  public ToolCall withFindFiles(Path start, String glob) {
    return withFindFiles(start, "glob", glob);
  }

  /** Appends all files below {@code start} matching {@code syntax:pattern}. */
  public ToolCall withFindFiles(Path start, String syntax, String pattern) {
    var matcher = start.getFileSystem().getPathMatcher(syntax + ':' + pattern);
    return withFindFiles(start, Integer.MAX_VALUE, matcher);
  }

  /** Appends every path accepted by {@code matcher}, walking at most {@code maxDepth} levels. */
  public ToolCall withFindFiles(Path start, int maxDepth, PathMatcher matcher) {
    try (var files = Files.find(start, maxDepth, (path, attributes) -> matcher.matches(path))) {
      return with(files);
    } catch (Exception exception) {
      throw new RuntimeException("Find files failed in: " + start, exception);
    }
  }
}
/**
* A finder of tool providers.
*
* <p>What {@link java.lang.module.ModuleFinder ModuleFinder} is to {@link
* java.lang.module.ModuleReference ModuleReference}, is {@link ToolFinder} to {@link
* ToolProvider}.
*/
@FunctionalInterface
public interface ToolFinder {
/** Returns all tool providers this finder knows about. */
List<ToolProvider> findAll();

/** Finds the first provider whose name matches {@code name} exactly, if any. */
default Optional<ToolProvider> find(String name) {
  return findAll().stream().filter(tool -> tool.name().equals(name)).findFirst();
}

/** Human-readable name of this finder, shown by {@code Bach.info()}. */
default String title() {
  return getClass().getSimpleName();
}

/** Visits this finder; composite finders override to descend into children. */
default void visit(int depth, BiConsumer<Integer, ToolFinder> visitor) {
  visitor.accept(depth, this);
}
/** Composes finders; lookups follow the argument order, first match wins. */
static ToolFinder compose(ToolFinder... finders) {
  record CompositeToolFinder(List<ToolFinder> finders) implements ToolFinder {
    @Override
    public List<ToolProvider> findAll() {
      return finders.stream().flatMap(finder -> finder.findAll().stream()).toList();
    }

    @Override
    public Optional<ToolProvider> find(String name) {
      // Short-circuit on the first child that knows the tool.
      for (var finder : finders) {
        var tool = finder.find(name);
        if (tool.isPresent()) return tool;
      }
      return Optional.empty();
    }

    @Override
    public void visit(int depth, BiConsumer<Integer, ToolFinder> visitor) {
      // Depth-first: self first, then all children one level deeper.
      visitor.accept(depth, this);
      depth++;
      for (var finder : finders) finder.visit(depth, visitor);
    }
  }
  return new CompositeToolFinder(List.of(finders));
}
static ToolFinder of(ToolProvider... providers) {
record DirectToolFinder(List<ToolProvider> findAll) implements ToolFinder {}
return new DirectToolFinder(List.of(providers));
}
static ToolFinder of(ClassLoader loader) {
return ToolFinder.of(ServiceLoader.load(ToolProvider.class, loader));
}
static ToolFinder of(ServiceLoader<ToolProvider> loader) {
record ServiceLoaderToolFinder(ServiceLoader<ToolProvider> loader) implements ToolFinder {
@Override
public List<ToolProvider> findAll() {
synchronized (loader) {
return loader.stream().map(ServiceLoader.Provider::get).toList();
}
}
}
return new ServiceLoaderToolFinder(loader);
}
static ToolFinder ofSystem() {
return ToolFinder.of(ClassLoader.getSystemClassLoader());
}
static ToolFinder ofProperties(Path directory) {
record PropertiesToolProvider(String name, Properties properties) implements ToolProvider {
@Override
public int run(PrintWriter out, PrintWriter err, String... args) {
var numbers = properties.stringPropertyNames().stream().map(Integer::valueOf).sorted();
for (var number : numbers.map(Object::toString).map(properties::getProperty).toList()) {
var lines = number.lines().map(String::trim).toList();
var call = ToolCall.of(lines.get(0)).with(lines.stream().skip(1));
Bach.instance().run(call);
}
return 0;
}
}
record PropertiesToolFinder(Path directory) implements ToolFinder {
@Override
public String title() {
return "PropertiesToolFinder(%s)".formatted(directory);
}
@Override
public List<ToolProvider> findAll() {
if (!Files.isDirectory(directory)) return List.of();
var list = new ArrayList<ToolProvider>();
try (var paths = Files.newDirectoryStream(directory, "*.properties")) {
for (var path : paths) {
if (Files.isDirectory(path)) continue;
var filename = path.getFileName().toString();
var name = filename.substring(0, filename.length() - ".properties".length());
var properties = new Properties();
properties.load(new StringReader(Files.readString(path)));
list.add(new PropertiesToolProvider(name, properties));
}
} catch (Exception exception) {
throw new RuntimeException(exception);
}
return List.copyOf(list);
}
}
return new PropertiesToolFinder(directory);
}
record Provider(String name, ToolFunction function) implements ToolProvider {
@FunctionalInterface
public interface ToolFunction {
int run(PrintWriter out, PrintWriter err, String... args);
}
@Override
public int run(PrintWriter out, PrintWriter err, String... args) {
return function.run(out, err, args);
}
}
}
@Category("Bach")
@Name("Bach.LogEvent")
@Label("Log")
@StackTrace(false)
// JDK Flight Recorder event emitted for each log message.
private static final class LogEvent extends Event {
  String level; // log level name
  String message; // log message text
}
@Category("Bach")
@Name("Bach.RunEvent")
@Label("Run")
@StackTrace(false)
// JDK Flight Recorder event describing a single tool run.
private static final class RunEvent extends Event {
  String name; // name of the tool that was run
  String args; // arguments passed to the tool
  int code; // exit code returned by the tool
  String out; // presumably the tool's captured standard output — confirm at recording site
  String err; // presumably the tool's captured standard error — confirm at recording site
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*/
package com.microsoft.azure.management.trafficmanager;
import com.microsoft.azure.management.apigeneration.Fluent;
import com.microsoft.azure.management.resources.fluentcore.arm.models.GroupableResource;
import com.microsoft.azure.management.resources.fluentcore.arm.models.Resource;
import com.microsoft.azure.management.resources.fluentcore.model.Appliable;
import com.microsoft.azure.management.resources.fluentcore.model.Creatable;
import com.microsoft.azure.management.resources.fluentcore.model.Refreshable;
import com.microsoft.azure.management.resources.fluentcore.model.Updatable;
import com.microsoft.azure.management.trafficmanager.implementation.ProfileInner;
import com.microsoft.azure.management.trafficmanager.implementation.TrafficManager;
import java.util.Map;
/**
 * An immutable client-side representation of an Azure traffic manager profile.
 */
@Fluent
public interface TrafficManagerProfile extends
    GroupableResource<TrafficManager, ProfileInner>,
    Refreshable<TrafficManagerProfile>,
    Updatable<TrafficManagerProfile.Update> {
    /**
     * @return the relative DNS name of the traffic manager profile
     */
    String dnsLabel();
    /**
     * @return the fully qualified domain name (FQDN) of the traffic manager profile
     */
    String fqdn();
    /**
     * @return the DNS Time-To-Live (TTL), in seconds
     */
    long timeToLive();
    /**
     * @return true if the traffic manager profile is enabled, false if disabled
     */
    boolean isEnabled();
    /**
     * @return the routing method used to route traffic to traffic manager profile endpoints
     */
    TrafficRoutingMethod trafficRoutingMethod();
    /**
     * @return profile monitor status, which is a combination of the endpoint monitor status values for all endpoints in
     * the profile, and the configured profile status
     */
    ProfileMonitorStatus monitorStatus();
    /**
     * @return the port that is monitored to check the health of traffic manager profile endpoints
     */
    long monitoringPort();
    /**
     * @return the path that is monitored to check the health of traffic manager profile endpoints
     */
    String monitoringPath();
    /**
     * @return external endpoints in the traffic manager profile, indexed by the name
     */
    Map<String, TrafficManagerExternalEndpoint> externalEndpoints();
    /**
     * @return Azure endpoints in the traffic manager profile, indexed by the name
     */
    Map<String, TrafficManagerAzureEndpoint> azureEndpoints();
    /**
     * @return nested traffic manager profile endpoints in this traffic manager profile, indexed by the name
     */
    Map<String, TrafficManagerNestedProfileEndpoint> nestedProfileEndpoints();
/**
 * The entirety of the traffic manager profile definition, grouping all the
 * required definition stages from the initial blank stage through creation.
 */
interface Definition extends
    DefinitionStages.Blank,
    DefinitionStages.WithLeafDomainLabel,
    DefinitionStages.WithTrafficRoutingMethod,
    DefinitionStages.WithCreate {
}
/**
 * Grouping of traffic manager profile definition stages.
 */
interface DefinitionStages {
    /**
     * The stage of the traffic manager profile definition allowing to specify the resource group.
     */
    interface Blank extends GroupableResource.DefinitionStages.WithGroupAndRegion<WithLeafDomainLabel> {
    }
    /**
     * The stage of the traffic manager profile definition allowing to specify the relative DNS name.
     */
    interface WithLeafDomainLabel {
        /**
         * Specify the relative DNS name of the profile.
         * <p>
         * The fully qualified domain name (FQDN)
         * will be constructed automatically by appending the rest of the domain to this label.
         *
         * @param dnsLabel the relative DNS name of the profile
         * @return the next stage of the definition
         */
        WithTrafficRoutingMethod withLeafDomainLabel(String dnsLabel);
    }
    /**
     * The stage of the traffic manager profile definition allowing to specify the traffic routing method
     * for the profile.
     */
    interface WithTrafficRoutingMethod {
        /**
         * Specifies that end user traffic should be routed to the endpoint based on its priority,
         * i.e. use the endpoint with the highest priority and if it is not available fall back to the next
         * highest priority endpoint.
         *
         * @return the next stage of the definition
         */
        WithEndpoint withPriorityBasedRouting();
        /**
         * Specifies that end user traffic should be distributed to the endpoints based on the weight assigned
         * to the endpoint.
         *
         * @return the next stage of the definition
         */
        WithEndpoint withWeightBasedRouting();
        /**
         * Specifies that end user traffic should be routed based on the geographic location of the endpoint
         * close to the user.
         *
         * @return the next stage of the definition
         */
        WithEndpoint withPerformanceBasedRouting();
        /**
         * Specify the traffic routing method for the profile.
         *
         * @param routingMethod the traffic routing method for the profile
         * @return the next stage of the definition
         */
        WithEndpoint withTrafficRoutingMethod(TrafficRoutingMethod routingMethod);
    }
    /**
     * The stage of the traffic manager profile definition allowing to specify endpoint.
     */
    interface WithEndpoint {
        /**
         * Specifies definition of an Azure endpoint to be attached to the traffic manager profile.
         *
         * @param name the name for the endpoint
         * @return the stage representing configuration for the endpoint
         */
        TrafficManagerEndpoint.DefinitionStages.AzureTargetEndpointBlank<WithCreate> defineAzureTargetEndpoint(String name);
        /**
         * Specifies definition of an external endpoint to be attached to the traffic manager profile.
         *
         * @param name the name for the endpoint
         * @return the stage representing configuration for the endpoint
         */
        TrafficManagerEndpoint.DefinitionStages.ExternalTargetEndpointBlank<WithCreate> defineExternalTargetEndpoint(String name);
        /**
         * Specifies definition of a nested profile endpoint to be attached to the traffic manager profile.
         *
         * @param name the name for the endpoint
         * @return the stage representing configuration for the endpoint
         */
        TrafficManagerEndpoint.DefinitionStages.NestedProfileTargetEndpointBlank<WithCreate> defineNestedTargetEndpoint(String name);
    }
    /**
     * The stage of the traffic manager profile definition allowing to specify the endpoint monitoring configuration.
     */
    interface WithMonitoringConfiguration {
        /**
         * Specify to use HTTP monitoring for the endpoints that checks for HTTP 200 response from the path '/'
         * at regular intervals, using port 80.
         *
         * @return the next stage of the definition
         */
        WithCreate withHttpMonitoring();
        /**
         * Specify to use HTTPS monitoring for the endpoints that checks for HTTPS 200 response from the path '/'
         * at regular intervals, using port 443.
         *
         * @return the next stage of the definition
         */
        WithCreate withHttpsMonitoring();
        /**
         * Specify the HTTP monitoring for the endpoints that checks for HTTP 200 response from the specified
         * path at regular intervals, using the specified port.
         *
         * @param port the monitoring port
         * @param path the monitoring path
         * @return the next stage of the definition
         */
        WithCreate withHttpMonitoring(int port, String path);
        /**
         * Specify the HTTPS monitoring for the endpoints that checks for HTTPS 200 response from the specified
         * path at regular intervals, using the specified port.
         *
         * @param port the monitoring port
         * @param path the monitoring path
         * @return the next stage of the definition
         */
        WithCreate withHttpsMonitoring(int port, String path);
    }
    /**
     * The stage of the traffic manager profile definition allowing to specify the DNS TTL.
     */
    interface WithTtl {
        /**
         * Specify the DNS TTL in seconds.
         *
         * @param ttlInSeconds DNS TTL in seconds
         * @return the next stage of the definition
         */
        WithCreate withTimeToLive(int ttlInSeconds);
    }
    /**
     * The stage of the traffic manager profile definition allowing to disable the profile.
     */
    interface WithProfileStatus {
        /**
         * Specify that the profile needs to be disabled.
         * <p>
         * Disabling the profile will disable traffic to all endpoints in the profile
         *
         * @return the next stage of the definition
         */
        WithCreate withProfileStatusDisabled();
    }
    /**
     * The stage of the definition which contains all the minimum required inputs for the resource to be created
     * (via {@link WithCreate#create()}), but also allows for any other optional settings to be specified.
     */
    interface WithCreate extends
        Creatable<TrafficManagerProfile>,
        Resource.DefinitionWithTags<WithCreate>,
        DefinitionStages.WithMonitoringConfiguration,
        DefinitionStages.WithTtl,
        DefinitionStages.WithProfileStatus,
        DefinitionStages.WithEndpoint {
    }
}
/**
 * Grouping of traffic manager update stages.
 */
interface UpdateStages {
    /**
     * The stage of the traffic manager profile update allowing to specify the traffic routing method
     * for the profile.
     */
    interface WithTrafficRoutingMethod {
        /**
         * Specifies that end user traffic should be routed to the endpoint based on its priority,
         * i.e. use the endpoint with the highest priority and if it is not available fall back to the next
         * highest priority endpoint.
         *
         * @return the next stage of the traffic manager profile update
         */
        Update withPriorityBasedRouting();
        /**
         * Specifies that end user traffic should be distributed to the endpoints based on the weight assigned
         * to the endpoint.
         *
         * @return the next stage of the traffic manager profile update
         */
        Update withWeightBasedRouting();
        /**
         * Specifies that end user traffic should be routed based on the geographic location of the endpoint
         * close to the user.
         *
         * @return the next stage of the traffic manager profile update
         */
        Update withPerformanceBasedRouting();
        /**
         * Specifies the traffic routing method for the profile.
         *
         * @param routingMethod the traffic routing method for the profile
         * @return the next stage of the traffic manager profile update
         */
        Update withTrafficRoutingMethod(TrafficRoutingMethod routingMethod);
    }
    /**
     * The stage of the traffic manager profile update allowing to specify the endpoint monitoring configuration.
     */
    interface WithMonitoringConfiguration {
        /**
         * Specify to use HTTP monitoring for the endpoints that checks for HTTP 200 response from the path '/'
         * at regular intervals, using port 80.
         *
         * @return the next stage of the traffic manager profile update
         */
        Update withHttpMonitoring();
        /**
         * Specify to use HTTPS monitoring for the endpoints that checks for HTTPS 200 response from the path '/'
         * at regular intervals, using port 443.
         *
         * @return the next stage of the traffic manager profile update
         */
        Update withHttpsMonitoring();
        /**
         * Specify the HTTP monitoring for the endpoints that checks for HTTP 200 response from the specified
         * path at regular intervals, using the specified port.
         *
         * @param port the monitoring port
         * @param path the monitoring path
         * @return the next stage of the traffic manager profile update
         */
        Update withHttpMonitoring(int port, String path);
        /**
         * Specify the HTTPS monitoring for the endpoints that checks for HTTPS 200 response from the specified
         * path at regular intervals, using the specified port.
         *
         * @param port the monitoring port
         * @param path the monitoring path
         * @return the next stage of the traffic manager profile update
         */
        Update withHttpsMonitoring(int port, String path);
    }
    /**
     * The stage of the traffic manager profile update allowing to specify endpoints.
     */
    interface WithEndpoint {
        /**
         * Begins the definition of an Azure endpoint to be attached to the traffic manager profile.
         *
         * @param name the name for the endpoint
         * @return the stage representing configuration for the endpoint
         */
        TrafficManagerEndpoint.UpdateDefinitionStages.AzureTargetEndpointBlank<Update> defineAzureTargetEndpoint(String name);
        /**
         * Begins the definition of an external endpoint to be attached to the traffic manager profile.
         *
         * @param name the name for the endpoint
         * @return the stage representing configuration for the endpoint
         */
        TrafficManagerEndpoint.UpdateDefinitionStages.ExternalTargetEndpointBlank<Update> defineExternalTargetEndpoint(String name);
        /**
         * Begins the definition of a nested profile endpoint to be attached to the traffic manager profile.
         *
         * @param name the name for the endpoint
         * @return the stage representing configuration for the endpoint
         */
        TrafficManagerEndpoint.UpdateDefinitionStages.NestedProfileTargetEndpointBlank<Update> defineNestedTargetEndpoint(String name);
        /**
         * Begins the description of an update of an existing Azure endpoint in this profile.
         *
         * @param name the name of the Azure endpoint
         * @return the stage representing updating configuration for the Azure endpoint
         */
        TrafficManagerEndpoint.UpdateAzureEndpoint updateAzureTargetEndpoint(String name);
        /**
         * Begins the description of an update of an existing external endpoint in this profile.
         *
         * @param name the name of the external endpoint
         * @return the stage representing updating configuration for the external endpoint
         */
        TrafficManagerEndpoint.UpdateExternalEndpoint updateExternalTargetEndpoint(String name);
        /**
         * Begins the description of an update of an existing nested traffic manager profile endpoint
         * in this profile.
         *
         * @param name the name of the nested profile endpoint
         * @return the stage representing updating configuration for the nested traffic manager profile endpoint
         */
        TrafficManagerEndpoint.UpdateNestedProfileEndpoint updateNestedProfileTargetEndpoint(String name);
        /**
         * Removes an endpoint in the profile.
         *
         * @param name the name of the endpoint
         * @return the next stage of the traffic manager profile update
         */
        Update withoutEndpoint(String name);
    }
    /**
     * The stage of the traffic manager profile update allowing to specify the DNS TTL.
     */
    interface WithTtl {
        /**
         * Specify the DNS TTL in seconds.
         *
         * @param ttlInSeconds DNS TTL in seconds
         * @return the next stage of the traffic manager profile update
         */
        Update withTimeToLive(int ttlInSeconds);
    }
    /**
     * The stage of the traffic manager profile update allowing to disable or enable the profile.
     */
    interface WithProfileStatus {
        /**
         * Specify that the profile needs to be disabled.
         * <p>
         * Disabling the profile will disable traffic to all endpoints in the profile
         *
         * @return the next stage of the traffic manager profile update
         */
        Update withProfileStatusDisabled();
        /**
         * Specify that the profile needs to be enabled.
         * <p>
         * Enabling the profile will enable traffic to all endpoints in the profile
         *
         * @return the next stage of the traffic manager profile update
         */
        Update withProfileStatusEnabled();
    }
}
/**
 * The template for an update operation, containing all the settings that can be modified.
 * <p>
 * Call {@link Update#apply()} to apply the changes to the resource in Azure.
 */
interface Update extends
    Appliable<TrafficManagerProfile>,
    UpdateStages.WithTrafficRoutingMethod,
    UpdateStages.WithMonitoringConfiguration,
    UpdateStages.WithEndpoint,
    UpdateStages.WithTtl,
    UpdateStages.WithProfileStatus,
    Resource.UpdateWithTags<Update> {
}
}
| |
/*******************************************************************************
* Copyright (c) 2005 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.bpel.common.extension.model.impl;
import java.util.Iterator;
import org.eclipse.bpel.common.extension.model.Extension;
import org.eclipse.bpel.common.extension.model.ExtensionMap;
import org.eclipse.bpel.common.extension.model.ExtensionmodelFactory;
import org.eclipse.bpel.common.extension.model.ExtensionmodelPackage;
import org.eclipse.bpel.common.extension.model.adapters.ExtendedObjectUserAdapter;
import org.eclipse.bpel.common.extension.model.adapters.impl.ExtensionMapAdapterImpl;
import org.eclipse.bpel.common.extension.model.util.ExtensionmodelAdapterFactory;
import org.eclipse.emf.common.notify.Adapter;
import org.eclipse.emf.common.util.BasicEList;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.impl.EFactoryImpl;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model <b>Factory</b>.
 * <!-- end-user-doc -->
 * @generated
 */
public class ExtensionmodelFactoryImpl extends EFactoryImpl implements ExtensionmodelFactory {
    /**
     * Creates an instance of the factory.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ExtensionmodelFactoryImpl() {
        super();
    }
    /**
     * Creates a model object for the given classifier.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EObject create(EClass eClass) {
        switch (eClass.getClassifierID()) {
            case ExtensionmodelPackage.EXTENSION_MAP: return createExtensionMap();
            case ExtensionmodelPackage.EXTENSION: return createExtension();
            default:
                throw new IllegalArgumentException("The class '" + eClass.getName() + "' is not a valid classifier");
        }
    }
    /**
     * Creates a plain {@link ExtensionMap}, without attaching any adapter
     * (see {@link #createExtensionMap()} for the adapted variant).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ExtensionMap createExtensionMapGen() {
        ExtensionMapImpl extensionMap = new ExtensionMapImpl();
        return extensionMap;
    }
    /**
     * Creates an {@link Extension}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public Extension createExtension() {
        ExtensionImpl extension = new ExtensionImpl();
        return extension;
    }
    /**
     * Returns the package this factory belongs to.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ExtensionmodelPackage getExtensionmodelPackage() {
        return (ExtensionmodelPackage)getEPackage();
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @deprecated use {@link ExtensionmodelPackage#eINSTANCE} instead
     * @generated
     */
    @Deprecated
    public static ExtensionmodelPackage getPackage() {
        return ExtensionmodelPackage.eINSTANCE;
    }
    /**
     * Method findExtensionSet.
     * Given a namespace and the contents of a Resource (as an EList), this
     * method will find the ExtensionMap object for the given namespace.
     * <code>null</code> is returned if one doesn't exist.
     * Note: the whole list is scanned, so if several maps share the same
     * namespace the LAST one in the list wins.
     * @param namespace - ExtensionMap namespace
     * @param contents - Resource contents
     * @return ExtensionMap
     * @customized
     */
    public ExtensionMap findExtensionMap(String namespace, EList contents) {
        ExtensionMap extensionSet = null;
        for (Iterator iter = contents.iterator(); iter.hasNext();) {
            EObject element = (EObject) iter.next();
            if(element instanceof ExtensionMap && ((ExtensionMap)element).getNamespace().equals(namespace)){
                extensionSet = (ExtensionMap)element;
            }
        }
        // Ensure the found map carries the extension-model adapter before handing it out.
        if(extensionSet != null){
            ExtensionmodelAdapterFactory adapterFactory = new ExtensionmodelAdapterFactory();
            adapterFactory.adapt(extensionSet,ExtensionMapAdapterImpl.class);
        }
        return extensionSet;
    }
    /**
     * Creates an ExtensionMap and attaches the extension-model adapter to it.
     * @customized
     */
    public ExtensionMap createExtensionMap() {
        ExtensionMap extensionMap = createExtensionMapGen();
        ExtensionmodelAdapterFactory adapterFactory = new ExtensionmodelAdapterFactory();
        adapterFactory.adapt(extensionMap,ExtensionMapAdapterImpl.class);
        return extensionMap;
    }
    /**
     * Method createExtensionSet.
     * Creates an ExtensionMap object for the given namespace.
     * @param namespace the namespace to assign to the new map
     * @return ExtensionMap
     * @customized
     */
    public ExtensionMap createExtensionMap(String namespace) {
        ExtensionMap extensionSet = ExtensionmodelFactory.eINSTANCE.createExtensionMap();
        extensionSet.setNamespace(namespace);
        return extensionSet;
    }
    /**
     * Method adaptEObject.
     * This method associates an adapter to a model object.
     * The adapter is only added when an equal one is not already attached,
     * so calling this method repeatedly is safe (no duplicate adapters).
     * @param target - object to which the adapter is to be associated
     * @param adapter - adapter to associate to target
     * @customized
     */
    public void adaptEObject(EObject target, Adapter adapter) {
        for (Iterator adapters = target.eAdapters().iterator(); adapters.hasNext(); )
        {
            Adapter currAdapter = (Adapter)adapters.next();
            if (currAdapter.equals(adapter))
            {
                return;
            }
        }
        target.eAdapters().add(adapter);
    }
    /**
     * Returns ALL extension maps with the given namespace, adapting each one —
     * unlike {@link #findExtensionMap(String, EList)}, which returns a single map.
     * @see org.eclipse.bpel.common.extension.model.ExtensionmodelFactory#findExtensionSets(java.lang.String, org.eclipse.emf.common.util.EList)
     */
    public EList findExtensionMaps(String namespace, EList contents) {
        EList extensionSetList = new BasicEList();
        for (Iterator iter = contents.iterator(); iter.hasNext();) {
            EObject element = (EObject) iter.next();
            if(element instanceof ExtensionMap && ((ExtensionMap)element).getNamespace().equals(namespace)){
                extensionSetList.add(element);
                ExtensionmodelAdapterFactory adapterFactory = new ExtensionmodelAdapterFactory();
                adapterFactory.adapt(element,ExtensionMapAdapterImpl.class);
            }
        }
        return extensionSetList;
    }
    /**
     * Returns the first {@link ExtendedObjectUserAdapter} attached to the given object
     * whose namespace matches, or <code>null</code> if none is attached.
     */
    public Adapter getExtensionAdapter(EObject extendedObject, String namespace) {
        EList adapterList = extendedObject.eAdapters();
        for (Iterator iter = adapterList.iterator(); iter.hasNext();) {
            Adapter element = (Adapter) iter.next();
            if(element instanceof ExtendedObjectUserAdapter && ((ExtendedObjectUserAdapter)element).getNamespace().equals(namespace)){
                return element;
            }
        }
        return null;
    }
} //ExtensionmodelFactoryImpl
| |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cli;
import com.facebook.buck.json.BuildFileParseException;
import com.facebook.buck.model.BuildTargetException;
import com.facebook.buck.parser.PartialGraph;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleType;
import com.facebook.buck.rules.DependencyGraph;
import com.facebook.buck.rules.InputRule;
import com.facebook.buck.util.Ansi;
import com.facebook.buck.util.BuckConstant;
import com.facebook.buck.util.ProjectFilesystem;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.SetMultimap;
import com.google.common.collect.Sets;
import com.google.common.collect.TreeMultimap;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Comparator;
import java.util.Set;
/**
 * Outputs targets that own a specified list of files.
 */
public class AuditOwnerCommand extends AbstractCommandRunner<AuditOwnerOptions> {
  // Prefix used when printing a file underneath its owning target.
  private static final String FILE_INDENT = " ";

  public AuditOwnerCommand(CommandRunnerParams params) {
    super(params);
  }

  /** Immutable result of the ownership analysis. */
  @VisibleForTesting
  static final class OwnersReport {
    // Build rules mapped to the input files they own.
    final ImmutableSetMultimap<BuildRule, InputRule> owners;
    // Existing files that no rule claims as an input.
    final ImmutableSet<InputRule> inputsWithNoOwners;
    // Arguments that do not exist on disk.
    final ImmutableSet<String> nonExistentInputs;
    // Arguments that exist but are not regular files (e.g. directories).
    final ImmutableSet<String> nonFileInputs;

    public OwnersReport(SetMultimap<BuildRule, InputRule> owners,
        Set<InputRule> inputsWithNoOwners,
        Set<String> nonExistentInputs,
        Set<String> nonFileInputs) {
      // Take defensive immutable copies of all collections.
      this.owners = ImmutableSetMultimap.copyOf(owners);
      this.inputsWithNoOwners = ImmutableSet.copyOf(inputsWithNoOwners);
      this.nonExistentInputs = ImmutableSet.copyOf(nonExistentInputs);
      this.nonFileInputs = ImmutableSet.copyOf(nonFileInputs);
    }
  }

  @Override
  AuditOwnerOptions createOptions(BuckConfig buckConfig) {
    return new AuditOwnerOptions(buckConfig);
  }

  @Override
  int runCommandWithOptionsInternal(AuditOwnerOptions options) throws IOException {
    // Build full graph.
    PartialGraph graph;
    try {
      graph = PartialGraph.createFullGraph(
          getProjectFilesystem(),
          options.getDefaultIncludes(),
          getParser(),
          getBuckEventBus());
    } catch (BuildTargetException | BuildFileParseException e) {
      console.printBuildFailureWithoutStacktrace(e);
      return 1; // non-zero exit code signals failure to the caller
    }
    OwnersReport report = generateOwnersReport(graph.getDependencyGraph(), options);
    printReport(options, report);
    return 0;
  }

  /**
   * Classifies each command-line argument (missing / non-file / valid input) and
   * maps every valid input file to the build rules that list it as an input.
   */
  @VisibleForTesting
  OwnersReport generateOwnersReport(DependencyGraph graph, AuditOwnerOptions options) {
    // Process arguments assuming they are all relative file paths.
    Set<InputRule> inputs = Sets.newHashSet();
    Set<String> nonExistentInputs = Sets.newHashSet();
    Set<String> nonFileInputs = Sets.newHashSet();
    ProjectFilesystem projectFilesystem = getProjectFilesystem();
    Function<String, String> pathRelativizer = projectFilesystem.getPathRelativizer();
    for (String filePath : options.getArguments()) {
      File file = projectFilesystem.getFileForRelativePath(filePath);
      if (!file.exists()) {
        nonExistentInputs.add(filePath);
      } else if (!file.isFile()) {
        nonFileInputs.add(filePath);
      } else {
        inputs.add(InputRule.inputPathAsInputRule(filePath, pathRelativizer));
      }
    }
    // Try to find owners for each valid and existing file.
    Set<InputRule> inputsWithNoOwners = Sets.newHashSet(inputs);
    SetMultimap<BuildRule, InputRule> owners = createOwnersMap();
    for (BuildRule rule : graph.getNodes()) {
      for (InputRule ruleInput : rule.getInputs()) {
        if (inputs.contains(ruleInput)) {
          inputsWithNoOwners.remove(ruleInput);
          owners.put(rule, ruleInput);
        }
      }
    }
    // Try to guess owners for nonexistent files.
    if (options.isGuessForDeletedEnabled()) {
      guessOwnersForNonExistentFiles(graph, owners, nonExistentInputs);
    }
    return new OwnersReport(owners, inputsWithNoOwners, nonExistentInputs, nonFileInputs);
  }

  /**
   * Guess target owners for deleted/missing files by finding first
   * BUCK file and assuming that all targets in this file used
   * missing file as input.
   */
  private void guessOwnersForNonExistentFiles(DependencyGraph graph,
      SetMultimap<BuildRule, InputRule> owners, Set<String> nonExistentFiles) {
    ProjectFilesystem projectFilesystem = getProjectFilesystem();
    Function<String, String> pathRelativizer = projectFilesystem.getPathRelativizer();
    for (String nonExistentFile : nonExistentFiles) {
      File file = projectFilesystem.getFileForRelativePath(nonExistentFile);
      File buck = findBuckFileFor(file);
      for (BuildRule rule : graph.getNodes()) {
        // Project-config rules do not own source files; skip them.
        if (rule.getType() == BuildRuleType.PROJECT_CONFIG) {
          continue;
        }
        try {
          // Compare canonical paths so symlinks/relative segments do not cause misses.
          File ruleBuck = rule.getBuildTarget().getBuildFile(projectFilesystem);
          if (buck.getCanonicalFile().equals(ruleBuck.getCanonicalFile())) {
            owners.put(rule, InputRule.inputPathAsInputRule(nonExistentFile, pathRelativizer));
          }
        } catch (IOException | BuildTargetException e) {
          throw Throwables.propagate(e);
        }
      }
    }
  }

  /**
   * Walks up from the given file's directory towards the project root and
   * returns the first BUCK file found; throws if none exists on the way up.
   */
  private File findBuckFileFor(File file) {
    File dir = file;
    if (!dir.isDirectory()) {
      dir = dir.getParentFile();
    }
    File projectRoot = getProjectFilesystem().getProjectRoot();
    while (dir != null && !dir.equals(projectRoot)) {
      File buck = new File(dir, BuckConstant.BUILD_RULES_FILE_NAME);
      if (buck.exists()) {
        return buck;
      }
      dir = dir.getParentFile();
    }
    throw new RuntimeException("Failed to find BUCK file for " + file.getPath());
  }

  private void printReport(AuditOwnerOptions options, OwnersReport report) {
    if (options.isFullReportEnabled()) {
      printFullReport(report);
    } else {
      printOwnersOnlyReport(report);
    }
  }

  /**
   * Print only targets which were identified as owners.
   */
  private void printOwnersOnlyReport(OwnersReport report) {
    // Key set is sorted because the multimap is built with a name comparator.
    Set<BuildRule> sortedRules = report.owners.keySet();
    for (BuildRule rule : sortedRules) {
      console.getStdOut().println(rule.getFullyQualifiedName());
    }
  }

  /**
   * Print detailed report on all owners.
   */
  private void printFullReport(OwnersReport report) {
    PrintStream out = console.getStdOut();
    Ansi ansi = console.getAnsi();
    if (report.owners.isEmpty()) {
      out.println(ansi.asErrorText("No owners found"));
    } else {
      out.println(ansi.asSuccessText("Owners:"));
      for (BuildRule rule : report.owners.keySet()) {
        out.println(rule.getFullyQualifiedName());
        Set<InputRule> files = report.owners.get(rule);
        for (InputRule input : files) {
          out.println(FILE_INDENT + input);
        }
      }
    }
    if (!report.inputsWithNoOwners.isEmpty()) {
      out.println();
      out.println(ansi.asErrorText("Files without owners:"));
      for (InputRule input : report.inputsWithNoOwners) {
        out.println(FILE_INDENT + input);
      }
    }
    if (!report.nonExistentInputs.isEmpty()) {
      out.println();
      out.println(ansi.asErrorText("Non existent files:"));
      for (String input : report.nonExistentInputs) {
        out.println(FILE_INDENT + input);
      }
    }
    if (!report.nonFileInputs.isEmpty()) {
      out.println();
      out.println(ansi.asErrorText("Non-file inputs:"));
      for (String input : report.nonFileInputs) {
        out.println(FILE_INDENT + input);
      }
    }
  }

  /**
   * Returns a multimap whose keys (rules) are ordered by fully qualified name
   * and whose values (inputs) use their natural ordering, so reports print
   * deterministically.
   */
  private SetMultimap<BuildRule, InputRule> createOwnersMap() {
    Comparator<BuildRule> keyComparator = new Comparator<BuildRule>() {
      @Override
      public int compare(BuildRule o1, BuildRule o2) {
        return o1.getFullyQualifiedName().compareTo(o2.getFullyQualifiedName());
      }
    };
    Comparator<InputRule> valueComparator = new Comparator<InputRule>() {
      @Override
      public int compare(InputRule o1, InputRule o2) {
        return o1.compareTo(o2);
      }
    };
    return TreeMultimap.create(keyComparator, valueComparator);
  }

  @Override
  String getUsageIntro() {
    return "prints targets that own specified files";
  }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.resources.implementation;
import com.microsoft.azure.AzureClient;
import com.microsoft.azure.AzureServiceClient;
import com.microsoft.rest.credentials.ServiceClientCredentials;
import com.microsoft.rest.RestClient;
/**
* Initializes a new instance of the ResourceManagementClientImpl class.
*/
public final class ResourceManagementClientImpl extends AzureServiceClient {
    /** The {@link AzureClient} used for long running operations. */
    private AzureClient azureClient;

    /**
     * Gets the {@link AzureClient} used for long running operations.
     *
     * @return the azure client.
     */
    public AzureClient getAzureClient() {
        return this.azureClient;
    }

    /** The ID of the target subscription. */
    private String subscriptionId;

    /**
     * Gets the ID of the target subscription.
     *
     * @return the subscriptionId value.
     */
    public String subscriptionId() {
        return this.subscriptionId;
    }

    /**
     * Sets the ID of the target subscription.
     *
     * @param subscriptionId the subscriptionId value.
     * @return the service client itself
     */
    public ResourceManagementClientImpl withSubscriptionId(String subscriptionId) {
        this.subscriptionId = subscriptionId;
        return this;
    }

    /** The API version to use for this operation. */
    private String apiVersion;

    /**
     * Gets the API version used on every operation. Fixed at "2016-09-01" by
     * {@link #initialize()}; there is intentionally no setter.
     *
     * @return the apiVersion value.
     */
    public String apiVersion() {
        return this.apiVersion;
    }

    /** The preferred language for service responses. */
    private String acceptLanguage;

    /**
     * Gets the preferred language for service responses.
     *
     * @return the acceptLanguage value.
     */
    public String acceptLanguage() {
        return this.acceptLanguage;
    }

    /**
     * Sets the preferred language for service responses.
     *
     * @param acceptLanguage the acceptLanguage value.
     * @return the service client itself
     */
    public ResourceManagementClientImpl withAcceptLanguage(String acceptLanguage) {
        this.acceptLanguage = acceptLanguage;
        return this;
    }

    /** The retry timeout in seconds for Long Running Operations. Default value is 30. */
    private int longRunningOperationRetryTimeout;

    /**
     * Gets the retry timeout in seconds for Long Running Operations. Default value is 30.
     *
     * @return the longRunningOperationRetryTimeout value.
     */
    public int longRunningOperationRetryTimeout() {
        return this.longRunningOperationRetryTimeout;
    }

    /**
     * Sets the retry timeout in seconds for Long Running Operations. Default value is 30.
     *
     * @param longRunningOperationRetryTimeout the longRunningOperationRetryTimeout value.
     * @return the service client itself
     */
    public ResourceManagementClientImpl withLongRunningOperationRetryTimeout(int longRunningOperationRetryTimeout) {
        this.longRunningOperationRetryTimeout = longRunningOperationRetryTimeout;
        return this;
    }

    /** When set to true a unique x-ms-client-request-id value is generated and included in each request. Default is true. */
    private boolean generateClientRequestId;

    /**
     * Gets whether a unique x-ms-client-request-id value is generated and included
     * in each request. Default is true.
     *
     * @return the generateClientRequestId value.
     */
    public boolean generateClientRequestId() {
        return this.generateClientRequestId;
    }

    /**
     * Sets whether a unique x-ms-client-request-id value is generated and included
     * in each request. Default is true.
     *
     * @param generateClientRequestId the generateClientRequestId value.
     * @return the service client itself
     */
    public ResourceManagementClientImpl withGenerateClientRequestId(boolean generateClientRequestId) {
        this.generateClientRequestId = generateClientRequestId;
        return this;
    }

    /** The DeploymentsInner object to access its operations. */
    private DeploymentsInner deployments;

    /**
     * Gets the DeploymentsInner object to access its operations.
     *
     * @return the DeploymentsInner object.
     */
    public DeploymentsInner deployments() {
        return this.deployments;
    }

    /** The ProvidersInner object to access its operations. */
    private ProvidersInner providers;

    /**
     * Gets the ProvidersInner object to access its operations.
     *
     * @return the ProvidersInner object.
     */
    public ProvidersInner providers() {
        return this.providers;
    }

    /** The ResourceGroupsInner object to access its operations. */
    private ResourceGroupsInner resourceGroups;

    /**
     * Gets the ResourceGroupsInner object to access its operations.
     *
     * @return the ResourceGroupsInner object.
     */
    public ResourceGroupsInner resourceGroups() {
        return this.resourceGroups;
    }

    /** The ResourcesInner object to access its operations. */
    private ResourcesInner resources;

    /**
     * Gets the ResourcesInner object to access its operations.
     *
     * @return the ResourcesInner object.
     */
    public ResourcesInner resources() {
        return this.resources;
    }

    /** The TagsInner object to access its operations. */
    private TagsInner tags;

    /**
     * Gets the TagsInner object to access its operations.
     *
     * @return the TagsInner object.
     */
    public TagsInner tags() {
        return this.tags;
    }

    /** The DeploymentOperationsInner object to access its operations. */
    private DeploymentOperationsInner deploymentOperations;

    /**
     * Gets the DeploymentOperationsInner object to access its operations.
     *
     * @return the DeploymentOperationsInner object.
     */
    public DeploymentOperationsInner deploymentOperations() {
        return this.deploymentOperations;
    }

    /**
     * Initializes an instance of ResourceManagementClient client against the
     * public Azure management endpoint.
     *
     * @param credentials the management credentials for Azure
     */
    public ResourceManagementClientImpl(ServiceClientCredentials credentials) {
        this("https://management.azure.com", credentials);
    }

    /**
     * Initializes an instance of ResourceManagementClient client.
     *
     * @param baseUrl the base URL of the host
     * @param credentials the management credentials for Azure
     */
    public ResourceManagementClientImpl(String baseUrl, ServiceClientCredentials credentials) {
        super(baseUrl, credentials);
        initialize();
    }

    /**
     * Initializes an instance of ResourceManagementClient client.
     *
     * @param restClient the REST client to connect to Azure.
     */
    public ResourceManagementClientImpl(RestClient restClient) {
        super(restClient);
        initialize();
    }

    // Applies default client settings and wires every operation group (and the
    // long-running-operation client) to this client's Retrofit instance.
    // Called by every constructor.
    protected void initialize() {
        this.apiVersion = "2016-09-01";
        this.acceptLanguage = "en-US";
        this.longRunningOperationRetryTimeout = 30;
        this.generateClientRequestId = true;
        this.deployments = new DeploymentsInner(restClient().retrofit(), this);
        this.providers = new ProvidersInner(restClient().retrofit(), this);
        this.resourceGroups = new ResourceGroupsInner(restClient().retrofit(), this);
        this.resources = new ResourcesInner(restClient().retrofit(), this);
        this.tags = new TagsInner(restClient().retrofit(), this);
        this.deploymentOperations = new DeploymentOperationsInner(restClient().retrofit(), this);
        this.azureClient = new AzureClient(this);
    }

    /**
     * Gets the User-Agent header for the client.
     *
     * @return the user agent string.
     */
    @Override
    public String userAgent() {
        return String.format("Azure-SDK-For-Java/%s (%s)",
                getClass().getPackage().getImplementationVersion(),
                "ResourceManagementClient, 2016-09-01");
    }
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2015-2016 saybur
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.tarvon.fractala.util;
import java.awt.EventQueue;
import java.awt.Graphics;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JColorChooser;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JPanel;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.GridLayout;
import java.awt.image.BufferedImage;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import javax.swing.JSpinner;
import javax.swing.JTextField;
import javax.swing.SpinnerNumberModel;
import com.google.common.base.Strings;
import com.tarvon.fractala.Fractals;
/**
* Graphical utility to help select {@link ColorChooser} values.
*
* @author saybur
*
*/
public class ColorHelper
{
	/**
	 * Preview panel that renders a simplex-fractal image through the currently
	 * configured {@link ColorChooser}. A {@code null} chooser means "no valid
	 * configuration yet" and the panel paints solid black.
	 */
	@SuppressWarnings("serial")
	private final class ColorPanel extends JPanel
	{
		// written from update(); read on each repaint
		private ColorChooser chooser;
		public ColorPanel()
		{
			setOpaque(true);
			setPreferredSize(new Dimension(1024, 512));
		}
		// NOTE(review): overrides paint() rather than paintComponent(), and
		// regenerates the whole fractal on every repaint — confirm intended.
		@Override
		public void paint(Graphics g)
		{
			// get draw size
			final Dimension size = this.getSize();
			if(size == null)
				return;
			final int width = size.width;
			final int height = size.height;
			if(width == 0 || height == 0)
				return;
			// alias chooser (single read of the mutable field)
			final ColorChooser chooser = this.chooser;
			if(chooser == null)
			{
				// no usable chooser yet: paint solid black
				g.setColor(Color.BLACK);
				g.fillRect(0, 0, width, height);
				return;
			}
			// get seed
			final int seed = ((Integer) seedSpinner.getValue())
					.intValue();
			// draw; values are normalized into [0, MAX] so the spinner values
			// (also bounded by MAX) line up with fractal intensities
			final BufferedImage image = Fractals
					.createSimplexFractal(seed, 10)
					.call()
					.normalize(0, MAX)
					.toImage(chooser);
			g.drawImage(image, 0, 0, null);
		}
	}
	// number of color entry rows in the editor table
	private static final int ROWS = 16;
	// upper bound of the normalized fractal range and of the value spinners
	private static final double MAX = 100.0;
	// field backgrounds signaling parse failure vs. success
	private static final Color ERROR_COLOR = new Color(255, 200, 200);
	private static final Color GOOD_COLOR = new Color(255, 255, 255);
	/** Launches the helper window on the AWT event dispatch thread. */
	public static void main(String[] args)
	{
		EventQueue.invokeLater(new Runnable()
		{
			public void run()
			{
				try
				{
					ColorHelper window = new ColorHelper();
					window.frame.setVisible(true);
				}
				catch(Exception e)
				{
					e.printStackTrace();
				}
			}
		});
	}
	private final JFrame frame;
	private final JSpinner seedSpinner;
	// per-row widgets; all four lists have ROWS elements and are index-aligned
	private final List<JSpinner> spinners;
	private final List<JTextField> fields;
	private final List<JCheckBox> checkboxes;
	private final List<JButton> buttons;
	private final ColorPanel colorPanel;
	private final JDialog colorChooserDialog;
	private final JColorChooser colorChooser;
	// set true by the dialog's OK callback; checked after the modal dialog returns
	private boolean colorChooserOk;
	/**
	 * Builds the window: a 16-row table of (value spinner, hex color field,
	 * color-picker button + enable checkbox), the seed spinner, a redraw
	 * button, and the fractal preview panel.
	 */
	public ColorHelper()
	{
		frame = new JFrame("Color Helper");
		frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
		frame.pack();
		// make the color panel, and the chooser
		colorPanel = new ColorPanel();
		frame.getContentPane().add(colorPanel, BorderLayout.EAST);
		colorChooser = new JColorChooser();
		colorChooserDialog = JColorChooser.createDialog(frame,
				"Choose Color",
				true,
				colorChooser,
				e -> colorChooserOk = true,
				null);
		colorChooserOk = false;
		// create GUI elements
		spinners = IntStream.range(0, ROWS).boxed()
				.map(i ->
				{
					final JSpinner spinner = new JSpinner(new SpinnerNumberModel(
							0.0, 0.0, MAX, 0.1));
					spinner.setBackground(ERROR_COLOR);
					spinner.addChangeListener(e -> update());
					spinner.setPreferredSize(new Dimension(
							80,
							spinner.getPreferredSize().height));
					return spinner;
				})
				.collect(Collectors.toList());
		fields = IntStream.range(0, ROWS).boxed()
				.map(i ->
				{
					final JTextField field = new JTextField();
					field.setBackground(ERROR_COLOR);
					field.addActionListener(e -> update());
					field.setPreferredSize(new Dimension(
							80,
							field.getPreferredSize().height));
					return field;
				})
				.collect(Collectors.toList());
		checkboxes = IntStream.range(0, ROWS).boxed()
				.map(i ->
				{
					final JCheckBox checkbox = new JCheckBox("?");
					checkbox.setSelected(true);
					checkbox.addActionListener(e -> update());
					return checkbox;
				})
				.collect(Collectors.toList());
		// one picker button per hex field; the button opens the dialog for
		// that row's field
		buttons = fields.stream()
				.map(f ->
				{
					final JButton button = new JButton("...");
					button.setBackground(Color.BLACK);
					button.addActionListener(e -> chooseColor(f));
					return button;
				})
				.collect(Collectors.toList());
		final List<JPanel> panels = IntStream.range(0, ROWS).boxed()
				.map(i ->
				{
					final JButton b = buttons.get(i);
					final JCheckBox c = checkboxes.get(i);
					final JPanel panel = new JPanel();
					panel.setLayout(new GridLayout(1, 2));
					panel.add(b);
					panel.add(c);
					return panel;
				})
				.collect(Collectors.toList());
		// put a few defaults in (black at 0, white at MAX)
		spinners.get(0).setValue(0.0);
		fields.get(0).setText("#000000");
		spinners.get(1).setValue(MAX);
		fields.get(1).setText("#FFFFFF");
		// create table of entry objects
		final JPanel entryPanel = new JPanel();
		entryPanel.setLayout(new GridLayout(ROWS, 3, 2, 2));
		for(int i = 0; i < ROWS; i++)
		{
			entryPanel.add(spinners.get(i));
			entryPanel.add(fields.get(i));
			entryPanel.add(panels.get(i));
		}
		frame.getContentPane().add(entryPanel);
		// the seed spinner
		seedSpinner = new JSpinner(new SpinnerNumberModel(
				1000, 1, Integer.MAX_VALUE, 1));
		seedSpinner.addChangeListener(e -> update());
		frame.getContentPane().add(seedSpinner, BorderLayout.NORTH);
		// redrawing button
		final JButton redrawButton = new JButton("Redraw");
		redrawButton.addActionListener(e -> update());
		frame.getContentPane().add(redrawButton, BorderLayout.SOUTH);
		update();
		frame.pack();
	}
	/**
	 * Opens the modal color-chooser dialog seeded with the field's current
	 * color (black if unparseable) and, on OK, writes the picked color back to
	 * the field as an {@code #RRGGBB} hex string.
	 */
	private void chooseColor(JTextField f)
	{
		// pick existing field color
		Color prev;
		try
		{
			prev = Color.decode(f.getText());
		}
		catch(Exception ex)
		{
			prev = Color.BLACK;
		}
		// let the user pick a color; the dialog is modal, so colorChooserOk is
		// set by the OK callback before setVisible() returns
		colorChooser.setColor(prev);
		colorChooserOk = false;
		colorChooserDialog.setVisible(true);
		Color picked = colorChooser.getColor();
		// then assign
		if(colorChooserOk)
		{
			// getRGB() is 0xAARRGGBB; toHexString drops leading zeros, so the
			// length checks strip the alpha digits (or pad) down to 6 hex chars.
			// NOTE(review): the length==7 branch assumes a single leading alpha
			// digit — verify against colors with small alpha values.
			String c = Integer.toHexString(picked.getRGB());
			if(c.length() == 7)
			{
				c = c.substring(1, 7);
			}
			else if(c.length() > 7)
			{
				c = c.substring(2, 8);
			}
			else if(c.length() < 6)
			{
				c = Strings.padStart(c, 6, '0');
			}
			f.setText("#" + c);
			// update gui post return
			update();
		}
	}
	/**
	 * Re-reads every row, rebuilds the {@link ColorChooser} from the valid,
	 * enabled rows, installs it on the preview panel (or null if building
	 * fails), and repaints. Also flags invalid fields via background color.
	 */
	private void update()
	{
		final ColorChooser.Builder b = ColorChooser.builder();
		for(int i = 0; i < ROWS; i++)
		{
			final JCheckBox chbx = checkboxes.get(i);
			final JSpinner spinner = spinners.get(i);
			final JTextField field = fields.get(i);
			final JButton button = buttons.get(i);
			// fetch color or error
			final Color color;
			try
			{
				color = Color.decode(field.getText());
				field.setBackground(GOOD_COLOR);
				button.setBackground(color);
			}
			catch(Exception e)
			{
				// unparseable hex: mark the row and skip it
				field.setBackground(ERROR_COLOR);
				button.setBackground(Color.BLACK);
				continue;
			}
			// now skip if disabled (color validation above still ran, so the
			// row's visual feedback stays current)
			if(! chbx.isSelected())
				continue;
			// fetch value or error
			final double value;
			try
			{
				value = ((Double) spinner.getValue()).doubleValue();
				spinner.setBackground(GOOD_COLOR);
			}
			catch(Exception e)
			{
				spinner.setBackground(ERROR_COLOR);
				continue;
			}
			// insert into chooser
			b.add(value, color);
		}
		// assign; a failed build (e.g. no valid rows) clears the preview
		try
		{
			ColorChooser chooser = b.create();
			colorPanel.chooser = chooser;
		}
		catch(Exception e)
		{
			colorPanel.chooser = null;
		}
		colorPanel.repaint();
	}
}
| |
// ***************************************************************************
// * Copyright 2014 Joseph Molnar
// *
// * Licensed under the Apache License, Version 2.0 (the "License");
// * you may not use this file except in compliance with the License.
// * You may obtain a copy of the License at
// *
// * http://www.apache.org/licenses/LICENSE-2.0
// *
// * Unless required by applicable law or agreed to in writing, software
// * distributed under the License is distributed on an "AS IS" BASIS,
// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// * See the License for the specific language governing permissions and
// * limitations under the License.
// ***************************************************************************
package com.talvish.tales.samples.complexservice;
import java.lang.reflect.Array;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.talvish.tales.contracts.services.http.PathParam;
import com.talvish.tales.contracts.services.http.RequestParam;
import com.talvish.tales.contracts.services.http.ResourceContract;
import com.talvish.tales.contracts.services.http.ResourceOperation;
/***
* This is a contract demonstrating both simple structure (contain just primitive types)
* and complex types (contains other structures).
* @author Joseph Molnar
*
*/
@ResourceContract( name="com.tales.data_structure_contract", versions={ "20140124" } )
public class DataStructureResource {
	/**
	 * Returns a structure that contains just primitive types.
	 */
	@ResourceOperation( name="get_simple_structure", path="GET : get_simple_structure" )
	public SimpleStructure getSimpleStructure( ) {
		return new SimpleStructure( "a value", 121231230090l, 3.14f );
	}
	/**
	 * Receives a structure that contains just primitive types and echoes it back.
	 */
	@ResourceOperation( name="set_simple_structure", path="GET | POST : set_simple_structure")
	public SimpleStructure setSimpleStructure( @RequestParam( name="value" )SimpleStructure theValue ) {
		return theValue;
	}
	/**
	 * Returns a structure that contains just another structure.
	 */
	@ResourceOperation( name="get_complex_structure", path="GET : get_complex_structure" )
	public ComplexStructure getComplexStructure( ) {
		return new ComplexStructure( new SimpleStructure( "a value", 121231230090l, 3.14f ), 10 );
	}
	/**
	 * Receives a structure that contains another structure and echoes it back.
	 */
	@ResourceOperation( name="set_complex_structure", path="GET | POST : set_complex_structure")
	public ComplexStructure setComplexStructure( @RequestParam( name="value" )ComplexStructure theValue ) {
		return theValue;
	}
	/**
	 * Returns a polymorphic structure.
	 */
	@ResourceOperation( name="get_polymorphic_structure", path="GET : get_polymorphic_structure" )
	public PolymorphicStructure getPolymorphicStructure( ) {
		return new PolymorphicStructure(
				new SimpleStructure( "a value", 121231230090l, 3.14f ),
				new ComplexStructure( new SimpleStructure( "old", 32l, 14.4f ), 16 ) );
	}
	/**
	 * Receives a polymorphic structure and echoes it back.
	 */
	@ResourceOperation( name="set_polymorphic_structure", path="GET | POST : set_polymorphic_structure")
	public PolymorphicStructure setPolymorphicStructure( @RequestParam( name="value" )PolymorphicStructure theValue ) {
		return theValue;
	}
	/**
	 * Returns a structure that is polymorphic, where the entities are super and subclasses.
	 */
	@ResourceOperation( name="get_inheritance_container", path="GET : get_inheritance_container" )
	public InheritanceContainer getInheritanceContainer( ) {
		InheritanceContainer returnValue = new InheritanceContainer( new InheritanceSuperclass( "hello", false ) );
		return returnValue;
	}
	/**
	 * Returns a structure whose type references itself (self-looping).
	 */
	@ResourceOperation( name="get_self_looping_class", path="GET : get_self_looping_class" )
	public SelfLoopingClass getSelfLoopingClass( ) {
		SelfLoopingClass returnValue = new SelfLoopingClass( "a" );
		return returnValue;
	}
	/**
	 * Returns a simple array.
	 */
	@ResourceOperation( name="get_array", path="GET : get_array" )
	public Long[] getArray( ) {
		Long[] longs = { 1l, 98765432190l };
		return longs;
	}
	/**
	 * Returns a complex generic array.
	 */
	@ResourceOperation( name="get_generic_array", path="GET : get_generic_array" )
	public GenericStructure<LocalDate,String>[] getGenericArray( ) {
		// generic arrays cannot be created directly; reflective creation plus
		// an unchecked cast is the standard workaround
		@SuppressWarnings("unchecked")
		GenericStructure<LocalDate,String>[] array = ( GenericStructure<LocalDate,String>[] )Array.newInstance( GenericStructure.class, 2);
		array[ 0 ] = new GenericStructure<LocalDate,String>( LocalDate.MAX, "max", 1 );
		array[ 1 ] = new GenericStructure<LocalDate,String>( LocalDate.MIN, "min", 2 );
		return array;
	}
	/**
	 * Returns a generic structure.
	 */
	@ResourceOperation( name="get_generic_structure", path="GET : get_generic_structure" )
	public GenericStructure<String,Long> getGenericStructure( ) {
		return new GenericStructure<String,Long>(
				"string",
				1l,
				2 );
	}
	/**
	 * Returns the path passed in. The regex in the path template restricts the
	 * parameter to lowercase segments separated by slashes.
	 */
	@ResourceOperation( name="path", path="GET : path/{path : [a-z]+(/[a-z]+)*}" )
	public String getPath( @PathParam( name="path" )String thePath ) {
		return thePath;
	}
	// NOTE: an example of something that doesn't work since it uses generics and inheritance
	// /**
	//  * Returns a polymorphic structure.
	//  */
	// @ResourceOperation( name="get_generic_structure_two", path="GET : get_generic_structure_two" )
	// public GenericStructureTwo<String,Long> getGenericStructureTwo( ) {
	// 	return new GenericStructureTwo<String,Long>(
	// 			"not a string",
	// 			2l );
	// }
	/**
	 * Returns a list of strings. Demonstrates both Java generics and list responses.
	 */
	@ResourceOperation( name="get_list", path="GET : get_list" )
	public List<String> getList( ) {
		List<String> list = new ArrayList<String>( );
		list.add( "entry one" );
		list.add( "entry two" );
		return list;
	}
	/**
	 * Receives a list of strings and echoes it back. Demonstrates both Java
	 * generics and list requests.
	 * NOTE(review): this setter-style operation overloads the name getList;
	 * a name like setList would match the other set_* operations — confirm no
	 * caller depends on the current name before renaming.
	 */
	@ResourceOperation( name="set_list", path="GET | POST : set_list" )
	public List<String> getList( @RequestParam( name="list" )List<String> theList) {
		return theList;
	}
	/**
	 * Returns a map keyed by strings and holding a structure.
	 */
	@ResourceOperation( name="get_map", path="GET : get_map" )
	public Map<String,SimpleStructure> getMap( ) {
		Map<String,SimpleStructure> map = new HashMap<String,SimpleStructure>( );
		map.put( "key_one", new SimpleStructure( "string one", 1l, 1.0f) );
		map.put( "key_two", new SimpleStructure( "string two", 2l, 2.0f) );
		return map;
	}
	/**
	 * Receives a map keyed by strings and holding a structure, and echoes it back.
	 */
	@ResourceOperation( name="set_map", path="GET | POST : set_map" )
	public Map<String,SimpleStructure> setMap( @RequestParam( name="map" )Map<String,SimpleStructure> theMap) {
		return theMap;
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.transforms;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.beam.sdk.TestUtils;
import org.apache.beam.sdk.testing.NeedsRunner;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.RunnableOnService;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionView;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.junit.runners.Parameterized;
import org.junit.runners.Suite;
/**
* Tests for the ApproximateUnique aggregator transform.
*/
@RunWith(Suite.class)
@Suite.SuiteClasses({
ApproximateUniqueTest.ApproximateUniqueWithDuplicatesTest.class,
ApproximateUniqueTest.ApproximateUniqueVariationsTest.class,
ApproximateUniqueTest.ApproximateUniqueMiscTest.class
})
public class ApproximateUniqueTest implements Serializable {
// implements Serializable just to make it easy to use anonymous inner DoFn subclasses
@Rule
public final transient TestPipeline p = TestPipeline.create();
private static class VerifyEstimateFn implements SerializableFunction<Long, Void> {
private final long uniqueCount;
private final int sampleSize;
private VerifyEstimateFn(final long uniqueCount, final int sampleSize) {
this.uniqueCount = uniqueCount;
this.sampleSize = sampleSize;
}
@Override
public Void apply(final Long estimate) {
verifyEstimate(uniqueCount, sampleSize, estimate);
return null;
}
}
/**
* Checks that the estimation error, i.e., the difference between
* {@code uniqueCount} and {@code estimate} is less than
* {@code 2 / sqrt(sampleSize}).
*/
private static void verifyEstimate(final long uniqueCount,
final int sampleSize,
final long estimate) {
if (uniqueCount < sampleSize) {
assertEquals("Number of hashes is less than the sample size. "
+ "Estimate should be exact", uniqueCount, estimate);
}
final double error = 100.0 * Math.abs(estimate - uniqueCount) / uniqueCount;
final double maxError = 100.0 * 2 / Math.sqrt(sampleSize);
assertTrue("Estimate=" + estimate + " Actual=" + uniqueCount + " Error="
+ error + "%, MaxError=" + maxError + "%.", error < maxError);
assertTrue("Estimate=" + estimate + " Actual=" + uniqueCount + " Error="
+ error + "%, MaxError=" + maxError + "%.", error < maxError);
}
private static class VerifyEstimatePerKeyFn
implements SerializableFunction<Iterable<KV<Long, Long>>, Void> {
private final int sampleSize;
private VerifyEstimatePerKeyFn(final int sampleSize) {
this.sampleSize = sampleSize;
}
@Override
public Void apply(final Iterable<KV<Long, Long>> estimatePerKey) {
for (final KV<Long, Long> result : estimatePerKey) {
verifyEstimate(result.getKey(), sampleSize, result.getValue());
}
return null;
}
}
/**
* Tests for ApproximateUnique with duplicates.
*/
@RunWith(Parameterized.class)
public static class ApproximateUniqueWithDuplicatesTest extends
ApproximateUniqueTest {
@Parameterized.Parameter
public int elementCount;
@Parameterized.Parameter(1)
public int uniqueCount;
@Parameterized.Parameter(2)
public int sampleSize;
@Parameterized.Parameters(name = "total_{0}_unique_{1}_sample_{2}")
public static Iterable<Object[]> data() throws IOException {
return ImmutableList.<Object[]>builder()
.add(
new Object[] {
100, 100, 100
},
new Object[] {
1000, 1000, 100
},
new Object[] {
1500, 1000, 100
},
new Object[] {
10000, 1000, 100
})
.build();
}
private void runApproximateUniqueWithDuplicates(final int elementCount,
final int uniqueCount, final int sampleSize) {
assert elementCount >= uniqueCount;
final List<Double> elements = Lists.newArrayList();
for (int i = 0; i < elementCount; i++) {
elements.add(1.0 / (i % uniqueCount + 1));
}
Collections.shuffle(elements);
final PCollection<Double> input = p.apply(Create.of(elements));
final PCollection<Long> estimate =
input.apply(ApproximateUnique.<Double>globally(sampleSize));
PAssert.thatSingleton(estimate).satisfies(new VerifyEstimateFn(uniqueCount, sampleSize));
p.run();
}
@Test
@Category(NeedsRunner.class)
public void testApproximateUniqueWithDuplicates() {
runApproximateUniqueWithDuplicates(elementCount, uniqueCount, sampleSize);
}
}
/**
* Tests for ApproximateUnique with different sample sizes.
*/
@RunWith(Parameterized.class)
public static class ApproximateUniqueVariationsTest extends ApproximateUniqueTest {
private static final int TEST_PAGES = 100;
private static final List<String> TEST_LINES =
new ArrayList<>(TEST_PAGES * TestUtils.LINES.size());
static {
for (int i = 0; i < TEST_PAGES; i++) {
TEST_LINES.addAll(TestUtils.LINES);
}
}
@Parameterized.Parameter
public int sampleSize;
@Parameterized.Parameters(name = "sampleSize_{0}")
public static Iterable<Object[]> data() throws IOException {
return ImmutableList.<Object[]>builder()
.add(new Object[] {
16
},
new Object[] {
64
},
new Object[] {
128
},
new Object[] {
256
},
new Object[] {
512
},
new Object[] {
1000
},
new Object[] {
2014
},
new Object[] {
15
})
.build();
}
/**
* Applies {@code ApproximateUnique(sampleSize)} verifying that the estimation
* error falls within the maximum allowed error of {@code 2/sqrt(sampleSize)}.
*/
private void runApproximateUniquePipeline(final int sampleSize) {
final PCollection<String> input = p.apply(Create.of(TEST_LINES));
final PCollection<Long> approximate =
input.apply(ApproximateUnique.<String>globally(sampleSize));
final PCollectionView<Long> exact =
input
.apply(Distinct.<String>create())
.apply(Count.<String>globally())
.apply(View.<Long>asSingleton());
final PCollection<KV<Long, Long>> approximateAndExact = approximate
.apply(ParDo.of(new DoFn<Long, KV<Long, Long>>() {
@ProcessElement
public void processElement(final ProcessContext c) {
c.output(KV.of(c.element(), c.sideInput(exact)));
}
}).withSideInputs(exact));
PAssert.that(approximateAndExact).satisfies(new VerifyEstimatePerKeyFn(sampleSize));
p.run();
}
/**
* Applies {@link ApproximateUnique} for different sample sizes and verifies
* that the estimation error falls within the maximum allowed error of
* {@code 2 / sqrt(sampleSize)}.
*/
@Test
@Category(NeedsRunner.class)
public void testApproximateUniqueWithDifferentSampleSizes() {
if (sampleSize > 16) {
runApproximateUniquePipeline(sampleSize);
} else {
try {
p.enableAbandonedNodeEnforcement(false);
runApproximateUniquePipeline(15);
fail("Accepted sampleSize < 16");
} catch (final IllegalArgumentException e) {
assertTrue("Expected an exception due to sampleSize < 16",
e.getMessage().startsWith("ApproximateUnique needs a sampleSize >= 16"));
}
}
}
}
/**
* Further tests for ApproximateUnique.
*/
@RunWith(JUnit4.class)
public static class ApproximateUniqueMiscTest extends ApproximateUniqueTest {
@Test
public void testEstimationErrorToSampleSize() {
assertEquals(40000, ApproximateUnique.sampleSizeFromEstimationError(0.01));
assertEquals(10000, ApproximateUnique.sampleSizeFromEstimationError(0.02));
assertEquals(2500, ApproximateUnique.sampleSizeFromEstimationError(0.04));
assertEquals(1600, ApproximateUnique.sampleSizeFromEstimationError(0.05));
assertEquals(400, ApproximateUnique.sampleSizeFromEstimationError(0.1));
assertEquals(100, ApproximateUnique.sampleSizeFromEstimationError(0.2));
assertEquals(25, ApproximateUnique.sampleSizeFromEstimationError(0.4));
assertEquals(16, ApproximateUnique.sampleSizeFromEstimationError(0.5));
}
@Test
@Category(RunnableOnService.class)
public void testApproximateUniqueWithSmallInput() {
final PCollection<Integer> input = p.apply(
Create.of(Arrays.asList(1, 2, 3, 3)));
final PCollection<Long> estimate = input
.apply(ApproximateUnique.<Integer>globally(1000));
PAssert.thatSingleton(estimate).isEqualTo(3L);
p.run();
}
@Test
@Category(NeedsRunner.class)
public void testApproximateUniqueWithSkewedDistributionsAndLargeSampleSize() {
runApproximateUniqueWithSkewedDistributions(10000, 2000, 1000);
}
private void runApproximateUniqueWithSkewedDistributions(final int elementCount,
final int uniqueCount,
final int sampleSize) {
final List<Integer> elements = Lists.newArrayList();
// Zipf distribution with approximately elementCount items.
final double s = 1 - 1.0 * uniqueCount / elementCount;
final double maxCount = Math.pow(uniqueCount, s);
for (int k = 0; k < uniqueCount; k++) {
final int count = Math.max(1, (int) Math.round(maxCount * Math.pow(k, -s)));
// Element k occurs count times.
for (int c = 0; c < count; c++) {
elements.add(k);
}
}
final PCollection<Integer> input = p.apply(Create.of(elements));
final PCollection<Long> estimate =
input.apply(ApproximateUnique.<Integer>globally(sampleSize));
PAssert.thatSingleton(estimate).satisfies(new VerifyEstimateFn(uniqueCount, sampleSize));
p.run();
}
@Test
@Category(NeedsRunner.class)
public void testApproximateUniquePerKey() {
final List<KV<Long, Long>> elements = Lists.newArrayList();
final List<Long> keys = ImmutableList.of(20L, 50L, 100L);
final int elementCount = 1000;
final int sampleSize = 100;
// Use the key as the number of unique values.
for (final long uniqueCount : keys) {
for (long value = 0; value < elementCount; value++) {
elements.add(KV.of(uniqueCount, value % uniqueCount));
}
}
final PCollection<KV<Long, Long>> input = p.apply(Create.of(elements));
final PCollection<KV<Long, Long>> counts =
input.apply(ApproximateUnique.<Long, Long>perKey(sampleSize));
PAssert.that(counts).satisfies(new VerifyEstimatePerKeyFn(sampleSize));
p.run();
}
@Test
public void testApproximateUniqueGetName() {
    // Transform names must not leak type or size parameters.
    final String perKeyName = ApproximateUnique.<Long, Long>perKey(16).getName();
    final String globallyName = ApproximateUnique.<Integer>globally(16).getName();
    assertEquals("ApproximateUnique.PerKey", perKeyName);
    assertEquals("ApproximateUnique.Globally", globallyName);
}
@Test
public void testDisplayData() {
    // Globally configured with an explicit sample size.
    final ApproximateUnique.Globally<Integer> bySampleSize =
        ApproximateUnique.globally(1234);
    assertThat(DisplayData.from(bySampleSize), hasDisplayItem("sampleSize", 1234));
    // PerKey configured via a maximum estimation error; the sample size is
    // derived from the error and should still appear in the display data.
    final ApproximateUnique.PerKey<String, Integer> byMaxError =
        ApproximateUnique.perKey(0.1234);
    final DisplayData displayData = DisplayData.from(byMaxError);
    assertThat(displayData, hasDisplayItem("maximumEstimationError", 0.1234));
    assertThat("calculated sampleSize should be included", displayData,
        hasDisplayItem("sampleSize"));
}
}
}
| |
/*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.integration;
import java.time.Duration;
import javax.management.MBeanServer;
import javax.sql.DataSource;
import io.rsocket.transport.netty.server.TcpServerTransport;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.AutoConfigureBefore;
import org.springframework.boot.autoconfigure.condition.AnyNestedCondition;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.condition.ConditionalOnSingleCandidate;
import org.springframework.boot.autoconfigure.condition.SearchStrategy;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.jmx.JmxAutoConfiguration;
import org.springframework.boot.autoconfigure.rsocket.RSocketMessagingAutoConfiguration;
import org.springframework.boot.autoconfigure.task.TaskSchedulingAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.context.properties.PropertyMapper;
import org.springframework.boot.context.properties.source.MutuallyExclusiveConfigurationPropertiesException;
import org.springframework.boot.task.TaskSchedulerBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.core.env.Environment;
import org.springframework.integration.config.EnableIntegration;
import org.springframework.integration.config.EnableIntegrationManagement;
import org.springframework.integration.config.IntegrationManagementConfigurer;
import org.springframework.integration.context.IntegrationContextUtils;
import org.springframework.integration.gateway.GatewayProxyFactoryBean;
import org.springframework.integration.jdbc.store.JdbcMessageStore;
import org.springframework.integration.jmx.config.EnableIntegrationMBeanExport;
import org.springframework.integration.monitor.IntegrationMBeanExporter;
import org.springframework.integration.rsocket.ClientRSocketConnector;
import org.springframework.integration.rsocket.IntegrationRSocketEndpoint;
import org.springframework.integration.rsocket.ServerRSocketConnector;
import org.springframework.integration.rsocket.ServerRSocketMessageHandler;
import org.springframework.integration.rsocket.outbound.RSocketOutboundGateway;
import org.springframework.integration.scheduling.PollerMetadata;
import org.springframework.messaging.rsocket.RSocketRequester;
import org.springframework.messaging.rsocket.RSocketStrategies;
import org.springframework.messaging.rsocket.annotation.support.RSocketMessageHandler;
import org.springframework.scheduling.Trigger;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import org.springframework.scheduling.support.CronTrigger;
import org.springframework.scheduling.support.PeriodicTrigger;
import org.springframework.util.StringUtils;
/**
* {@link org.springframework.boot.autoconfigure.EnableAutoConfiguration
* Auto-configuration} for Spring Integration.
*
* @author Artem Bilan
* @author Dave Syer
* @author Stephane Nicoll
* @author Vedran Pavic
* @author Madhura Bhave
* @since 1.1.0
*/
@Configuration(proxyBeanMethods = false)
@ConditionalOnClass(EnableIntegration.class)
@EnableConfigurationProperties(IntegrationProperties.class)
@AutoConfigureAfter({ DataSourceAutoConfiguration.class, JmxAutoConfiguration.class,
TaskSchedulingAutoConfiguration.class })
public class IntegrationAutoConfiguration {
// Bridges Boot's `spring.integration.*` properties onto Spring Integration's
// own global-properties bean. Registered under the framework's well-known
// bean name and backs off if the user defined that bean themselves.
@Bean(name = IntegrationContextUtils.INTEGRATION_GLOBAL_PROPERTIES_BEAN_NAME)
@ConditionalOnMissingBean(name = IntegrationContextUtils.INTEGRATION_GLOBAL_PROPERTIES_BEAN_NAME)
public static org.springframework.integration.context.IntegrationProperties integrationGlobalProperties(
IntegrationProperties properties) {
org.springframework.integration.context.IntegrationProperties integrationProperties = new org.springframework.integration.context.IntegrationProperties();
// alwaysApplyingWhenNonNull: only values the user actually set are copied,
// leaving the framework defaults intact otherwise.
PropertyMapper map = PropertyMapper.get().alwaysApplyingWhenNonNull();
map.from(properties.getChannel().isAutoCreate()).to(integrationProperties::setChannelsAutoCreate);
map.from(properties.getChannel().getMaxUnicastSubscribers())
.to(integrationProperties::setChannelsMaxUnicastSubscribers);
map.from(properties.getChannel().getMaxBroadcastSubscribers())
.to(integrationProperties::setChannelsMaxBroadcastSubscribers);
map.from(properties.getError().isRequireSubscribers())
.to(integrationProperties::setErrorChannelRequireSubscribers);
map.from(properties.getError().isIgnoreFailures()).to(integrationProperties::setErrorChannelIgnoreFailures);
map.from(properties.getEndpoint().isThrowExceptionOnLateReply())
.to(integrationProperties::setMessagingTemplateThrowExceptionOnLateReply);
map.from(properties.getEndpoint().getReadOnlyHeaders()).as(StringUtils::toStringArray)
.to(integrationProperties::setReadOnlyHeaders);
map.from(properties.getEndpoint().getNoAutoStartup()).as(StringUtils::toStringArray)
.to(integrationProperties::setNoAutoStartupEndpoints);
return integrationProperties;
}
/**
* Basic Spring Integration configuration.
*/
@Configuration(proxyBeanMethods = false)
@EnableIntegration
protected static class IntegrationConfiguration {
// Default poller used by endpoints that do not declare their own, built
// from the `spring.integration.poller.*` properties.
@Bean(PollerMetadata.DEFAULT_POLLER)
@ConditionalOnMissingBean(name = PollerMetadata.DEFAULT_POLLER)
public PollerMetadata defaultPollerMetadata(IntegrationProperties integrationProperties) {
IntegrationProperties.Poller poller = integrationProperties.getPoller();
// cron, fixed-delay and fixed-rate are mutually exclusive trigger styles;
// fail fast if the user set more than one.
MutuallyExclusiveConfigurationPropertiesException.throwIfMultipleNonNullValuesIn((entries) -> {
entries.put("spring.integration.poller.cron",
StringUtils.hasText(poller.getCron()) ? poller.getCron() : null);
entries.put("spring.integration.poller.fixed-delay", poller.getFixedDelay());
entries.put("spring.integration.poller.fixed-rate", poller.getFixedRate());
});
PollerMetadata pollerMetadata = new PollerMetadata();
PropertyMapper map = PropertyMapper.get().alwaysApplyingWhenNonNull();
map.from(poller::getMaxMessagesPerPoll).to(pollerMetadata::setMaxMessagesPerPoll);
map.from(poller::getReceiveTimeout).as(Duration::toMillis).to(pollerMetadata::setReceiveTimeout);
map.from(poller).as(this::asTrigger).to(pollerMetadata::setTrigger);
return pollerMetadata;
}
// Maps the poller properties onto a concrete Trigger, or null when no
// trigger-related property was set (PropertyMapper then skips it).
private Trigger asTrigger(IntegrationProperties.Poller poller) {
if (StringUtils.hasText(poller.getCron())) {
return new CronTrigger(poller.getCron());
}
if (poller.getFixedDelay() != null) {
return createPeriodicTrigger(poller.getFixedDelay(), poller.getInitialDelay(), false);
}
if (poller.getFixedRate() != null) {
return createPeriodicTrigger(poller.getFixedRate(), poller.getInitialDelay(), true);
}
return null;
}
// Builds a periodic trigger; fixedRate=true fires at a fixed rate rather
// than with a fixed delay between task completions.
private Trigger createPeriodicTrigger(Duration period, Duration initialDelay, boolean fixedRate) {
PeriodicTrigger trigger = new PeriodicTrigger(period.toMillis());
if (initialDelay != null) {
trigger.setInitialDelay(initialDelay.toMillis());
}
trigger.setFixedRate(fixedRate);
return trigger;
}
}
/**
* Expose a standard {@link ThreadPoolTaskScheduler} if the user has not enabled task
* scheduling explicitly.
*/
@Configuration(proxyBeanMethods = false)
@ConditionalOnBean(TaskSchedulerBuilder.class)
@ConditionalOnMissingBean(name = IntegrationContextUtils.TASK_SCHEDULER_BEAN_NAME)
protected static class IntegrationTaskSchedulerConfiguration {
// Registered under Spring Integration's well-known task-scheduler bean name
// so the framework picks it up automatically.
@Bean(name = IntegrationContextUtils.TASK_SCHEDULER_BEAN_NAME)
public ThreadPoolTaskScheduler taskScheduler(TaskSchedulerBuilder builder) {
return builder.build();
}
}
/**
* Spring Integration JMX configuration.
*/
@Configuration(proxyBeanMethods = false)
@ConditionalOnClass(EnableIntegrationMBeanExport.class)
@ConditionalOnMissingBean(value = IntegrationMBeanExporter.class, search = SearchStrategy.CURRENT)
@ConditionalOnBean(MBeanServer.class)
@ConditionalOnProperty(prefix = "spring.jmx", name = "enabled", havingValue = "true", matchIfMissing = true)
protected static class IntegrationJmxConfiguration {
// Exports Spring Integration components over JMX, honouring the standard
// `spring.jmx.default-domain` and `spring.jmx.server` properties.
@Bean
public IntegrationMBeanExporter integrationMbeanExporter(BeanFactory beanFactory, Environment environment) {
IntegrationMBeanExporter exporter = new IntegrationMBeanExporter();
String defaultDomain = environment.getProperty("spring.jmx.default-domain");
if (StringUtils.hasLength(defaultDomain)) {
exporter.setDefaultDomain(defaultDomain);
}
// "mbeanServer" is the conventional bean name when none is configured.
String serverBean = environment.getProperty("spring.jmx.server", "mbeanServer");
exporter.setServer(beanFactory.getBean(serverBean, MBeanServer.class));
return exporter;
}
}
/**
* Integration management configuration.
*/
@Configuration(proxyBeanMethods = false)
@ConditionalOnClass(EnableIntegrationManagement.class)
@ConditionalOnMissingBean(value = IntegrationManagementConfigurer.class,
name = IntegrationManagementConfigurer.MANAGEMENT_CONFIGURER_NAME, search = SearchStrategy.CURRENT)
protected static class IntegrationManagementConfiguration {
// Default logging is toggled via
// `spring.integration.management.default-logging-enabled` (defaults to true).
@Configuration(proxyBeanMethods = false)
@EnableIntegrationManagement(
defaultLoggingEnabled = "${spring.integration.management.default-logging-enabled:true}")
protected static class EnableIntegrationManagementConfiguration {
}
}
/**
* Integration component scan configuration.
*/
@Configuration(proxyBeanMethods = false)
@ConditionalOnMissingBean(GatewayProxyFactoryBean.class)
@Import(IntegrationAutoConfigurationScanRegistrar.class)
protected static class IntegrationComponentScanConfiguration {
}
/**
* Integration JDBC configuration.
*/
@Configuration(proxyBeanMethods = false)
@ConditionalOnClass(JdbcMessageStore.class)
@ConditionalOnSingleCandidate(DataSource.class)
protected static class IntegrationJdbcConfiguration {
// Initialises the Spring Integration JDBC schema; backs off when the user
// has defined either the current or the deprecated initializer themselves.
@Bean
@SuppressWarnings("deprecation")
@ConditionalOnMissingBean({ IntegrationDataSourceScriptDatabaseInitializer.class,
IntegrationDataSourceInitializer.class })
public IntegrationDataSourceScriptDatabaseInitializer integrationDataSourceInitializer(DataSource dataSource,
IntegrationProperties properties) {
return new IntegrationDataSourceScriptDatabaseInitializer(dataSource, properties.getJdbc());
}
}
/**
* Integration RSocket configuration.
*/
@Configuration(proxyBeanMethods = false)
@ConditionalOnClass({ IntegrationRSocketEndpoint.class, RSocketRequester.class, io.rsocket.RSocket.class })
@Conditional(IntegrationRSocketConfiguration.AnyRSocketChannelAdapterAvailable.class)
protected static class IntegrationRSocketConfiguration {
/**
* Check if either an {@link IntegrationRSocketEndpoint} or
* {@link RSocketOutboundGateway} bean is available.
*/
static class AnyRSocketChannelAdapterAvailable extends AnyNestedCondition {
AnyRSocketChannelAdapterAvailable() {
super(ConfigurationPhase.REGISTER_BEAN);
}
@ConditionalOnBean(IntegrationRSocketEndpoint.class)
static class IntegrationRSocketEndpointAvailable {
}
@ConditionalOnBean(RSocketOutboundGateway.class)
static class RSocketOutboundGatewayAvailable {
}
}
@Configuration(proxyBeanMethods = false)
@ConditionalOnClass(TcpServerTransport.class)
@AutoConfigureBefore(RSocketMessagingAutoConfiguration.class)
protected static class IntegrationRSocketServerConfiguration {
// Server-side handler; annotation-based message mapping is toggled by
// `spring.integration.rsocket.server.message-mapping-enabled`.
@Bean
@ConditionalOnMissingBean(ServerRSocketMessageHandler.class)
public RSocketMessageHandler serverRSocketMessageHandler(RSocketStrategies rSocketStrategies,
IntegrationProperties integrationProperties) {
RSocketMessageHandler messageHandler = new ServerRSocketMessageHandler(
integrationProperties.getRsocket().getServer().isMessageMappingEnabled());
messageHandler.setRSocketStrategies(rSocketStrategies);
return messageHandler;
}
@Bean
@ConditionalOnMissingBean
public ServerRSocketConnector serverRSocketConnector(ServerRSocketMessageHandler messageHandler) {
return new ServerRSocketConnector(messageHandler);
}
}
@Configuration(proxyBeanMethods = false)
protected static class IntegrationRSocketClientConfiguration {
// Connects to a remote RSocket server: by URI when one is configured,
// otherwise by host/port over TCP.
@Bean
@ConditionalOnMissingBean
@Conditional(RemoteRSocketServerAddressConfigured.class)
public ClientRSocketConnector clientRSocketConnector(IntegrationProperties integrationProperties,
RSocketStrategies rSocketStrategies) {
IntegrationProperties.RSocket.Client client = integrationProperties.getRsocket().getClient();
ClientRSocketConnector clientRSocketConnector = (client.getUri() != null)
? new ClientRSocketConnector(client.getUri())
: new ClientRSocketConnector(client.getHost(), client.getPort());
clientRSocketConnector.setRSocketStrategies(rSocketStrategies);
return clientRSocketConnector;
}
/**
* Check if a remote address is configured for the RSocket Integration client.
*/
static class RemoteRSocketServerAddressConfigured extends AnyNestedCondition {
RemoteRSocketServerAddressConfigured() {
super(ConfigurationPhase.REGISTER_BEAN);
}
@ConditionalOnProperty(prefix = "spring.integration.rsocket.client", name = "uri")
static class WebSocketAddressConfigured {
}
@ConditionalOnProperty(prefix = "spring.integration.rsocket.client", name = { "host", "port" })
static class TcpAddressConfigured {
}
}
}
}
}
| |
package org.jgroups.protocols.jzookeeper.zabNRWFindd;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.Timer;
import java.util.TimerTask;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicLong;
import org.jgroups.Address;
import org.jgroups.Event;
import org.jgroups.Message;
import org.jgroups.View;
import org.jgroups.Message.Flag;
import org.jgroups.annotations.ManagedAttribute;
import org.jgroups.annotations.Property;
import org.jgroups.stack.Protocol;
import com.google.common.util.concurrent.AtomicDouble;
/*
 * This is the original protocol of Apache ZooKeeper. It also has features for
 * testing throughput, latency (in nanoseconds), etc. When testing, it runs a
 * warm-up phase before starting the real test.
 * @author Ibrahim EL-Sanosi
 */
public class Zab extends Protocol {
private final static String ProtocolName = "Zab";
private static int numberOfSenderInEachClient = 25;
private final AtomicLong zxid = new AtomicLong(0);
private ExecutorService executorProposal;
private ExecutorService executorDelivery;
private ExecutorService MeasuringNumCommit;
//private ExecutorService delayTimeout; For Measuring d
private Address local_addr;
private volatile Address leader;
private volatile View view;
private volatile boolean is_leader = false;
private List<Address> zabMembers = Collections
.synchronizedList(new ArrayList<Address>());
private long lastZxidProposed = 0, lastZxidCommitted = 0;
//private final List<Double> delays = new ArrayList<Double>(); For Measuring d
//Map<Address, SortedSet<Timeout>> mapTimeouts = new HashMap<Address, SortedSet<Timeout>>(); //For Measuring d
private final LinkedBlockingQueue<Timeout> processTimeout = new LinkedBlockingQueue<Timeout>();
private final Set<MessageId> requestQueue = Collections
.synchronizedSet(new HashSet<MessageId>());
private final Map<Long, ZabHeader> queuedCommitMessage = Collections
.synchronizedMap(new HashMap<Long, ZabHeader>());
private final Map<Long, ZabHeader> queuedProposalMessage = Collections
.synchronizedMap(new HashMap<Long, ZabHeader>());
private final LinkedBlockingQueue<ZabHeader> queuedMessages = new LinkedBlockingQueue<ZabHeader>();
private final LinkedBlockingQueue<Long> delivery= new LinkedBlockingQueue<Long>();
private final LinkedBlockingQueue<Long> notDeliverable= new LinkedBlockingQueue<Long>();
private final Set<Long> waitingToDeliver= Collections.synchronizedSet(new HashSet<Long>());
private ConcurrentMap<Long, Proposal> outstandingProposals = new ConcurrentHashMap<Long, Proposal>();
private final Map<MessageId, Message> messageStore = Collections
.synchronizedMap(new HashMap<MessageId, Message>());
private volatile boolean running = true;
private volatile boolean startThroughput = false;
private final static String outDir = "/work/Zab/";
private static String info = null;
private ProtocolStats stats = new ProtocolStats();
@Property(name = "Zab_size", description = "It is Zab cluster size")
private Timer timer = new Timer();
private int clusterSize = 5;
private static int warmUp = 0;
//private Timer checkFinished = new Timer();
private static int numReadCoundRecieved=0;
private ArrayList<Integer> commitArrivalRate = new ArrayList<Integer>();// Next arrival proposal time
private Timer measureNumOfCommit = new Timer();
/*
* Empty constructor
*/
public Zab() {
}
// Managed attribute: whether this node is currently the Zab leader.
@ManagedAttribute
public boolean isleaderinator() {
return is_leader;
}
// Address of the current leader (null until the first view is installed).
public Address getleaderinator() {
return leader;
}
// This node's own address, as assigned by the protocol stack.
public Address getLocalAddress() {
return local_addr;
}
@Override
public void start() throws Exception {
// Starts the protocol: one thread turns queued requests into proposals
// (used when this node is leader), another delivers committed messages.
super.start();
running = true;
executorProposal = Executors.newSingleThreadExecutor();
executorProposal.execute(new FollowerMessageHandler(this.id));
executorDelivery = Executors.newSingleThreadExecutor();
executorDelivery.execute(new MessageHandler());
// Stats for the warm-up run; replaced when STARTWORKLOAD arrives (see up()).
this.stats = new ProtocolStats(ProtocolName, 10,
numberOfSenderInEachClient, outDir, false, "");
//checkFinished.schedule(new CheckFinished(), 5, 10000);//For tail proposal timeout
log.setLevel("trace");
}
/*
* Reset all protocol fields, reset invokes after warm up has finished, then callback the clients to start
* main test
*/
public void reset() {
// Clears per-warm-up state. Note that zxid counters and the committed-message
// map are deliberately NOT cleared (see the commented-out lines): committed
// warm-up messages are kept and subtracted later via `warmUp`.
//zxid.set(0);
//lastZxidProposed = 0;
//lastZxidCommitted = 0;
requestQueue.clear();
//queuedCommitMessage.clear();
queuedProposalMessage.clear();
queuedMessages.clear();
outstandingProposals.clear();
messageStore.clear();
//startThroughput = false;
// Remember how many messages were committed during warm-up so the real
// test's write count can be computed as (total committed - warmUp).
warmUp = queuedCommitMessage.size();
if(!is_leader){
measureNumOfCommit.schedule(new MeasueCommitCommitRate(), 5, 1000);//For tail proposal timeout
}
//executorDelivery.shutdown();
log.info("Reset done"+" Time="+System.currentTimeMillis());
}
@Override
public void stop() {
    // Signal worker loops to exit, then shut down the executors created in
    // start(). MeasuringNumCommit is never initialised anywhere in this class
    // (only declared), so the original unconditional shutdown() threw a
    // NullPointerException; the null guards also make stop() safe if it is
    // invoked before start().
    running = false;
    if (executorProposal != null) {
        executorProposal.shutdown();
    }
    if (executorDelivery != null) {
        executorDelivery.shutdown();
    }
    if (MeasuringNumCommit != null) {
        MeasuringNumCommit.shutdown();
    }
    //delayTimeout.shutdown();
    super.stop();
}
public Object down(Event evt) {
    // Consume MSG events (this protocol emits its own messages directly),
    // record our local address when the stack announces it, and pass every
    // other event down unchanged.
    final int type = evt.getType();
    if (type == Event.MSG) {
        return null; // don't pass down
    }
    if (type == Event.SET_LOCAL_ADDRESS) {
        local_addr = (Address) evt.getArg();
    }
    return down_prot.down(evt);
}
/*
 * Up-call handler: dispatches incoming messages by Zab header type. Messages
 * carrying a Zab header are consumed here (returns null); everything else is
 * passed further up the stack.
 */
public Object up(Event evt) {
Message msg = null;
ZabHeader hdr;
switch (evt.getType()) {
case Event.MSG:
msg = (Message) evt.getArg();
hdr = (ZabHeader) msg.getHeader(this.id);
if (hdr == null) {
break; // pass up
}
switch (hdr.getType()) {
case ZabHeader.REQUESTW:
// Client write request: must be ordered through the leader.
forwardToLeader(msg);
break;
case ZabHeader.REQUESTR:
//log.info(">>>>>>>>>>>>>>>>>Receive read<<<<<<<<<<<<<<<<<<<");
// Client read request: served locally without ordering.
hdr.getMessageOrderInfo().getId().setStartTime(System.nanoTime());
readData(hdr.getMessageOrderInfo());
break;
case ZabHeader.FORWARD:
// A follower forwarded a write to us (the leader): queue for proposal.
queuedMessages.add(hdr);
break;
case ZabHeader.PROPOSAL:
// Leader proposed a value: store it and acknowledge.
sendACK(msg, hdr);
break;
case ZabHeader.ACK:
//For Measuring d
// Timeout tReceived= new Timeout();
// tReceived = hdr.getTimeout();
// tReceived.setArriveTime(System.currentTimeMillis());
// Address folAddress = msg.getSrc();
// tReceived.setZxid((folAddress.toString()+hdr.getZxid()));
// SortedSet<Timeout> timeouts = mapTimeouts.get(folAddress);
// timeouts.add(tReceived);
// mapTimeouts.put(folAddress, timeouts); //End
// Follower acknowledged a proposal (leader-side quorum counting).
processACK(msg);
break;
case ZabHeader.COMMIT:
stats.numCommit.incrementAndGet();
//For Measuring d
// long roundTrip = System.currentTimeMillis();
// Timeout fullInfo=hdr.getTimeout();
// if(fullInfo !=null){
// fullInfo.setArriveTimeForRoundTrip(roundTrip);
// processTimeout.add(fullInfo);
// }
// else{
// log.info("**********fullInfo=null*************"+fullInfo);
// }//End
// A COMMIT may overtake its PROPOSAL; remember such zxids so the
// delivery thread can wait until the proposal arrives (see sendACK).
synchronized (notDeliverable) {
if(!queuedProposalMessage.containsKey(hdr.getZxid())){
notDeliverable.add(hdr.getZxid());
log.info("Store Zxid----> "+hdr.getZxid()+" Time="+System.currentTimeMillis());
}
}
delivery.add(hdr.getZxid());
break;
case ZabHeader.STARTWORKLOAD:
// Client signals the measured workload is starting: rebuild stats,
// start the throughput timer and reset warm-up state.
info = (String) msg.getObject();
numberOfSenderInEachClient = Integer.parseInt(info.split(":")[1]);
this.stats = new ProtocolStats(ProtocolName, 10,
numberOfSenderInEachClient, outDir, false, info);
startThroughput = true;
stats.setStartThroughputTime(System.currentTimeMillis());
stats.setLastNumReqDeliveredBefore(0);
stats.setLastThroughputTime(System.currentTimeMillis());
timer.schedule(new Throughput(), 1000, 1000);
reset();
break;
case ZabHeader.COUNTMESSAGE:
// Another member reported its locally-served read count.
addCountReadToTotal(hdr);
break;
case ZabHeader.FINISHED:
log.info("I Have notfied from Client----> "+msg.getSrc());
//if (clientFinished.incrementAndGet() == 10) {
running = false;
timer.cancel();
sendCountRead();
log.info("Printing stats");
//}
break;
}
//s
return null;
case Event.VIEW_CHANGE:
handleViewChange((View) evt.getArg());
break;
}
return up_prot.up(evt);
}
/*
* --------------------------------- Private Methods --------------------------------
*/
private void handleViewChange(View v) {
    // The first two view members are assumed to be clients; Zab servers start
    // at index 2 (TODO confirm this convention against the test harness).
    List<Address> mbrs = v.getMembers();
    // Exactly clusterSize servers have joined: record them as the ensemble and
    // elect the first server (index 2) as leader.
    if (mbrs.size() == (clusterSize + 2)) {
        for (int i = 2; i < mbrs.size(); i++) {
            zabMembers.add(mbrs.get(i));
        }
        leader = mbrs.get(2);
        if (leader.equals(local_addr)) {
            is_leader = true;
        }
    }
    // Late view (more members than expected) and the ensemble was never
    // initialised: take the first clusterSize servers. BUGFIX: the original
    // loop broke only AFTER adding index clusterSize+2, admitting
    // clusterSize+1 members — one more than the branch above; the size cap
    // makes both branches consistent.
    if (mbrs.size() > (clusterSize + 2) && zabMembers.isEmpty()) {
        for (int i = 2; i < mbrs.size() && zabMembers.size() < clusterSize; i++) {
            zabMembers.add(mbrs.get(i));
        }
        leader = mbrs.get(2);
        if (leader.equals(local_addr)) {
            is_leader = true;
        }
    }
    log.info("zabMembers size = " + zabMembers);
    if (mbrs.isEmpty())
        return;
    // Only adopt strictly newer views.
    if (view == null || view.compareTo(v) < 0)
        view = v;
}
private long getNewZxid() {
    // Atomically allocates the next zxid (leader-side sequence number).
    final long next = zxid.incrementAndGet();
    return next;
}
/*
* If this server is a leader put the request in queue for processing it.
* otherwise forwards request to the leader
*/
private synchronized void forwardToLeader(Message msg) {
    // Remember that this node originated the request (so it can reply to the
    // client after delivery) and stamp the start time for latency tracking,
    // then either queue the request locally (leader) or forward it.
    ZabHeader header = (ZabHeader) msg.getHeader(this.id);
    requestQueue.add(header.getMessageOrderInfo().getId());
    header.getMessageOrderInfo().getId().setStartTime(System.nanoTime());
    if (is_leader) {
        queuedMessages.add(header);
    } else {
        forward(msg);
    }
}
/*
* Forward request to the leader
*/
private synchronized void forward(Message msg) {
    // Wrap the request's order info in a FORWARD header addressed to the leader.
    final Address dest = leader;
    final ZabHeader original = (ZabHeader) msg.getHeader(this.id);
    final ZabHeader forwardHeader = new ZabHeader(ZabHeader.FORWARD, original.getMessageOrderInfo());
    final Message forward_msg = new Message(dest).putHeader(this.id, forwardHeader);
    forward_msg.setBuffer(new byte[1000]); // simulated payload
    try {
        //forward_msg.setFlag(Message.Flag.DONT_BUNDLE);
        down_prot.down(new Event(Event.MSG, forward_msg));
    } catch (Exception ex) {
        log.error("failed forwarding message to " + msg, ex);
    }
}
/*
* This method is invoked by follower.
* Follower receives a proposal. This method generates ACK message and send it to the leader.
*/
private synchronized void sendACK(Message msg, ZabHeader hdrAck) {
MessageOrderInfo messageOrderInfo = hdrAck.getMessageOrderInfo();
long proZxid = messageOrderInfo.getOrdering();
//log.info("Recieved Proposal Zxid= "+proZxid);
// Store the proposal so the matching COMMIT can later be delivered.
queuedProposalMessage.put(proZxid, hdrAck);
// if(waitingToDeliver.contains(messageOrderInfo.getOrdering())){
// deliver(messageOrderInfo.getOrdering());
// waitingToDeliver.remove(messageOrderInfo.getOrdering());
// synchronized (delivery) {
// delivery.notify();
// }
// }
// If the COMMIT for this zxid arrived before the proposal, the delivery
// thread is blocked on `delivery`; wake it now that the proposal is stored.
synchronized (delivery) {
//log.info("Before Notifying for Zxid= "+proZxid+" Time="+System.currentTimeMillis());
synchronized (notDeliverable) {
if(notDeliverable.contains(proZxid)){
notDeliverable.remove(proZxid);
delivery.notify();
log.info("Notifying for Zxid= "+proZxid);
}
}
}
// The Timeout carries the send timestamp (used for round-trip measurement).
Timeout timeout = new Timeout(System.currentTimeMillis());
ZabHeader hdrACK = new ZabHeader(ZabHeader.ACK, proZxid, timeout);
Message ACKMessage = new Message(leader).putHeader(this.id, hdrACK);
//ACKMessage.setFlag(Message.Flag.DONT_BUNDLE);
try {
down_prot.down(new Event(Event.MSG, ACKMessage));
} catch (Exception ex) {
// NOTE(review): a failure to send the ACK is silently swallowed here.
//log.error("failed sending ACK message to Leader");
}
}
/*
* This method is invoked by leader. It receives ACK message from a follower
* and check if a majority is reached for particular proposal.
*/
private synchronized void processACK(Message msgACK) {
    // Leader-side: count the follower's ACK; once a quorum has acknowledged
    // the proposal, commit it and stop tracking it.
    final ZabHeader hdr = (ZabHeader) msgACK.getHeader(this.id);
    final long ackZxid = hdr.getZxid();
    if (lastZxidCommitted >= ackZxid) {
        return; // already committed
    }
    final Proposal proposal = outstandingProposals.get(ackZxid);
    if (proposal == null) {
        return; // unknown or already resolved
    }
    proposal.AckCount++;
    if (!isQuorum(proposal.getAckCount())) {
        return;
    }
    outstandingProposals.remove(ackZxid);
    commit(ackZxid);
}
/*
* This method is invoked by leader. It sends COMMIT message to all follower and itself.
*/
private synchronized void commit(long zxidd) {
// Leader-side: broadcast a COMMIT for zxidd to all followers; the leader
// itself skips the network round trip and enqueues the zxid for local
// delivery directly.
//if (zxidd != lastZxidCommitted + 1) {
//if (log.isDebugEnabled()){
//log.debug("delivering Zxid out of order "+zxidd + " should be "
//+ lastZxidCommitted + 1);
//}
//}
//Timeout timeout = new Timeout();
//if(!timeouts.isEmpty()){
//timeout = timeouts.first();
//timeouts.remove(timeout);
//timeout.setSendBackTimeout(System.currentTimeMillis());
//log.info("timeouts!=isEmpty()");
//log.info("Zxid="+ackedzxid+"/Send Time="+timeout.getSendTime()+"SB="+currentTime);
//}
ZabHeader hdrCommit = new ZabHeader(ZabHeader.COMMIT, zxidd);
Message commitMessage = new Message().putHeader(this.id, hdrCommit);
//.setFlag(Message.Flag.DONT_BUNDLE);;
for (Address address : zabMembers) {
if(address.equals(leader)){
// Leader delivers locally instead of sending itself a COMMIT.
delivery.add(zxidd);
continue;
}
//Timeout infoToFollower;
//For Measuring d
// if (!(mapTimeouts.get(address).isEmpty())){
// infoToFollower=mapTimeouts.get(address).first();
// mapTimeouts.get(address).remove(infoToFollower);
// infoToFollower.setSendBackTimeout(System.currentTimeMillis());
// hdrCommit.setTimeout(infoToFollower);
// }
// else{
// log.info("In else $$$$$$$$$$$");
// }// End
//Message commitMessage = new Message().putHeader(this.id, hdrCommit);// End
//commitMessage.setFlag(Message.Flag.DONT_BUNDLE);
// Copy per destination so each follower gets its own message instance.
Message cpy = commitMessage.copy();
cpy.setDest(address);
//log.info("Send to ----> "+address);
down_prot.down(new Event(Event.MSG, cpy));
}
}
/*
* Deliver the proposal locally and if the current server is the receiver of the request,
* replay to the client.
*/
private void deliver(long dZxid) {
// Moves the proposal for dZxid into the committed store, updates latency /
// throughput statistics, and — if this node originated the request —
// replies to the client.
MessageOrderInfo messageOrderInfo = null;
ZabHeader hdrOrginal = queuedProposalMessage.remove(dZxid);
//log.info("hdrOrginal zxid = " + hdrOrginal.getZxid());
if(hdrOrginal==null){
// Proposal not stored locally (e.g. commit overtook it); nothing to deliver.
//log.info("delivering zxid=" + dZxid+" Lastdelivered="+lastZxidCommitted);
// waitingToDeliver.add(dZxid);
// try {
// synchronized (delivery) {
// delivery.wait();
// }
// } catch (InterruptedException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
// hdrOrginal = queuedProposalMessage.remove(dZxid);
return;
}
messageOrderInfo = hdrOrginal.getMessageOrderInfo();
queuedCommitMessage.put(dZxid, hdrOrginal);
stats.incnumReqDelivered();
stats.setEndThroughputTime(System.currentTimeMillis());
//log.info("Zxid=:"+dZxid+" Time="+System.currentTimeMillis());
// Only the originating node records end-to-end latency and replies.
if (requestQueue.contains(messageOrderInfo.getId())) {
long startTime = hdrOrginal.getMessageOrderInfo().getId().getStartTime();
long endTime = System.nanoTime();
stats.addLatency((endTime - startTime));
sendOrderResponse(messageOrderInfo);
requestQueue.remove((messageOrderInfo.getId()));
}
//synchronized (this) {
lastZxidCommitted = dZxid;
//}
//return true;
}
private synchronized void readData(MessageOrderInfo messageInfo){
    // Serves a read request from the committed-message store and replies
    // directly to the originator. Hit and miss paths send an identical
    // RESPONSER reply (a miss simulates Zab returning null); the original
    // code duplicated the reply construction in both branches, so the
    // duplication is collapsed here — behavior is unchanged.
    ZabHeader hdrOrginal = queuedCommitMessage.get(messageInfo.getOrdering());
    if (hdrOrginal == null) {
        // Requested zxid not committed here; reply anyway (simulated null read).
        log.info(" Read null%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%");
    }
    CSInteractionHeader hdrResponse =
        new CSInteractionHeader(CSInteractionHeader.RESPONSER, messageInfo);
    Message readReplay =
        new Message(messageInfo.getId().getOriginator()).putHeader((short) 79, hdrResponse);
    // Record read latency against the start time stamped in up().
    long startTime = messageInfo.getId().getStartTime();
    long endTime = System.nanoTime();
    stats.addReadLatency((endTime - startTime));
    messageInfo.getId().setStartTime(0);
    readReplay.setBuffer(new byte[1000]); // simulated payload
    stats.incnumReqDelivered();
    stats.setEndThroughputTime(System.currentTimeMillis());
    //readReplay.setFlag(Message.Flag.DONT_BUNDLE);
    down_prot.down(new Event(Event.MSG, readReplay));
}
private void sendOrderResponse(MessageOrderInfo messageOrderInfo){
    // Reply to the client that originated this (now delivered) write request.
    final Address originator = messageOrderInfo.getId().getOriginator();
    final CSInteractionHeader responseHeader =
        new CSInteractionHeader(CSInteractionHeader.RESPONSEW, messageOrderInfo);
    final Message response = new Message(originator).putHeader((short) 79, responseHeader);
    //response.setFlag(Message.Flag.DONT_BUNDLE);
    down_prot.down(new Event(Event.MSG, response));
}
/*
* Check a majority
*/
/*
 * Check a majority: true iff the ACK count reaches floor(clusterSize/2) + 1.
 */
private boolean isQuorum(int majority) {
    // `cond ? true : false` is redundant; return the comparison directly.
    return majority >= ((clusterSize / 2) + 1);
    //return majority >= (clusterSize) ? true : false;
}
private void sendCountRead(){
// Reads are served locally and never ordered, so each node reports its own
// read count to the other Zab members for inclusion in the final statistics.
// Writes are counted as committed messages minus those from the warm-up run.
int writeOnly= queuedCommitMessage.size()-warmUp;
int readOnly = stats.getnumReqDelivered() - writeOnly;
ZabHeader readCount = new ZabHeader(
ZabHeader.COUNTMESSAGE, readOnly);
// NOTE(review): the message is created addressed to the leader, but the
// destination is overwritten per member below — the initial target is moot.
Message countRead = new Message(leader).putHeader(this.id,
readCount);
countRead.setFlag(Flag.DONT_BUNDLE);
for (Address address : zabMembers) {
if (address.equals(local_addr))
continue;
Message cpy = countRead.copy();
cpy.setDest(address);
down_prot.down(new Event(Event.MSG, cpy));
}
}
/**
 * Folds a remote member's read count into the local statistics. Once a
 * count has arrived from every other Zab member, the final protocol
 * statistics are printed.
 */
private synchronized void addCountReadToTotal(ZabHeader countReadHeader) {
    long remoteReadCount = countReadHeader.getZxid();
    stats.addToNumReqDelivered((int) remoteReadCount);
    numReadCoundRecieved++;
    if (numReadCoundRecieved == zabMembers.size() - 1) {
        // `info` encodes the write percentage before the first ':'.
        int writePercentage = (int) (Double.parseDouble(info.split(":")[0]) * 100);
        stats.printProtocolStats(queuedCommitMessage.size(), clusterSize,
                writePercentage, commitArrivalRate);
    }
}
/*
* ----------------------------- End of Private Methods --------------------------------
*/
/**
 * Leader-side worker: drains queued client requests, assigns each the next
 * zxid, records the proposal locally and multicasts it to all Zab members
 * except the leader itself.
 */
final class FollowerMessageHandler implements Runnable {

    private short id;

    public FollowerMessageHandler(short id) {
        this.id = id;
    }

    @Override
    public void run() {
        handleRequests();
    }

    /**
     * Creates a proposal for every queued request and sends it out to all
     * the members (except the leader).
     */
    public void handleRequests() {
        ZabHeader hdrReq = null;
        while (running) {
            try {
                hdrReq = queuedMessages.take();
            } catch (InterruptedException e) {
                // Fix: the original fell through here with a null (or stale)
                // hdrReq and died with an NPE. Restore the interrupt flag and
                // stop the handler cleanly instead.
                Thread.currentThread().interrupt();
                return;
            }
            long new_zxid = getNewZxid();
            MessageOrderInfo messageOrderInfo = hdrReq.getMessageOrderInfo();
            messageOrderInfo.setOrdering(new_zxid);
            ZabHeader hdrProposal = new ZabHeader(ZabHeader.PROPOSAL,
                    messageOrderInfo);
            Message proposalMessage = new Message().putHeader(this.id, hdrProposal);
            proposalMessage.setBuffer(new byte[1000]);
            proposalMessage.setSrc(local_addr);
            // Track the proposal locally; the leader's own ack is counted up front.
            Proposal p = new Proposal();
            p.setMessageOrderInfo(hdrReq.getMessageOrderInfo());
            p.AckCount++;
            outstandingProposals.put(new_zxid, p);
            queuedProposalMessage.put(new_zxid, hdrProposal);
            try {
                // Multicast the proposal to every member except the leader.
                for (Address address : zabMembers) {
                    if (address.equals(leader))
                        continue;
                    Message cpy = proposalMessage.copy();
                    cpy.setDest(address);
                    down_prot.down(new Event(Event.MSG, cpy));
                }
            } catch (Exception ex) {
                log.error("failed proposing message to members");
            }
        }
    }
}
// Delivery worker: takes committed zxids off the `delivery` queue and
// delivers them in order. If the proposal for a zxid has not arrived yet,
// the zxid is pushed back to the front of the queue and the thread waits
// on `delivery` until notified (presumably by the proposal-receiving path
// — TODO confirm the notifier).
final class MessageHandler implements Runnable {
    @Override
    public void run() {
        log.info("call deliverMessages()");
        deliverMessages();
    }
    // Main delivery loop; never returns. `tempZxid` is a scratch buffer used
    // to re-queue the not-yet-deliverable zxid at the head of `delivery`.
    private void deliverMessages() {
        Long zxidDeliver = null;
        ArrayList<Long> tempZxid = new ArrayList<Long>();
        boolean isDelivered= false;
        //log.info("Before While");
        while(true){
            //log.info("Insider While");
            // //try {
            try {
                zxidDeliver= delivery.take();
                //} catch (InterruptedException e) {
                // TODO Auto-generated catch block
                // e.printStackTrace();
                //}
                // //log.info("delivery.peek()="+zxidDeliver);
                // //if (queuedProposalMessage.containsKey(zxidDeliver)) {
                // //delivery.poll();
                //isDelivered=deliver(zxidDeliver);
                synchronized(delivery){
                    if(!queuedProposalMessage.containsKey(zxidDeliver)){
                        //if (!isDelivered){
                        // Proposal not seen yet: put this zxid back at the head
                        // of the queue (preserving the order of the rest) and
                        // block until new proposals arrive.
                        tempZxid.addAll(delivery);
                        delivery.clear();
                        delivery.add(zxidDeliver);
                        delivery.addAll(tempZxid);
                        tempZxid.clear();
                        //try {
                        log.info("Before waiting for Zxid= "+zxidDeliver);
                        delivery.wait();
                        log.info("Afther waiting for Zxid= "+zxidDeliver);
                    }
                    else
                        deliver(zxidDeliver);
                }
            } catch (InterruptedException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    }
}
/**
 * Worker that consumes {@code Timeout} records and estimates the network
 * delay of an ACK round trip: (round-trip time) minus the time the remote
 * side held the message before replying. The computed value is currently
 * only used for measurement experiments (recording is commented out).
 */
final class ProcessACKDelay implements Runnable {
    // Raw timestamps taken from each Timeout record.
    private long sendTime = 0;
    private long arriveTime = 0;
    private long sendBackTime = 0;
    // Remote processing time (sendBack - arrive), subtracted from the round trip.
    private long idleTime = 0;
    private long delay = 0;
    private long arriveTimeForRoundTrip = 0;

    @Override
    public void run() {
        startProcess();
    }

    private void startProcess() {
        Timeout delayInfo = null;
        while (true) {
            try {
                delayInfo = processTimeout.take();
                String id = delayInfo.getZxid();
                sendBackTime = delayInfo.getSendBackTimeout();
                arriveTime = delayInfo.getArriveTime();
                sendTime = delayInfo.getSendTime();
                arriveTimeForRoundTrip = delayInfo.getArriveTimeForRoundTrip();
                idleTime = sendBackTime - arriveTime;
                // Network delay = round trip minus remote processing time.
                delay = (arriveTimeForRoundTrip - sendTime) - idleTime;
                //For Measuring d
                //delays.add(((double)delay/2));//divide by 2 to get one round;
            } catch (InterruptedException e) {
                // Fix: the original swallowed the interrupt (printStackTrace)
                // and kept looping. Restore the flag and stop the worker.
                Thread.currentThread().interrupt();
                return;
            }
        }
    }
}
// Periodic timer task that samples delivery throughput (requests/second)
// since the previous sample and records it in the protocol statistics.
class Throughput extends TimerTask {

    private long startTime = 0;
    private long currentTime = 0;
    private double currentThroughput = 0;
    private int finishedThroughput = 0;

    public Throughput() {
    }

    @Override
    public void run() {
        startTime = stats.getLastThroughputTime();
        currentTime = System.currentTimeMillis();
        finishedThroughput = stats.getnumReqDelivered();
        double deliveredSinceLast = (double) finishedThroughput - stats.getLastNumReqDeliveredBefore();
        double elapsedSeconds = (double) (currentTime - startTime) / 1000.0;
        currentThroughput = deliveredSinceLast / elapsedSeconds;
        // Remember this sample as the baseline for the next tick.
        stats.setLastNumReqDeliveredBefore(finishedThroughput);
        stats.setLastThroughputTime(currentTime);
        stats.addThroughput(currentThroughput);
    }

    /** Formats an epoch-millis timestamp as "HH:mm:ss.SSSZ" in the default zone. */
    public String convertLongToTimeFormat(long time) {
        SimpleDateFormat formatter = new SimpleDateFormat("HH:mm:ss.SSSZ");
        return formatter.format(new Date(time));
    }
}
// class CheckFinished extends TimerTask {
// private int workload=1000000;
// public CheckFinished() {
// }
//
// public void run() {
// if(lastZxidCommitted>=workload){
// stats.printProtocolStats(queuedCommitMessage.size(), clusterSize);
// log.info("Printing stats");
// checkFinished.cancel();
// timer.cancel();
// }
//
// }
//
// }
// Periodic timer task that records how many commits arrived since the
// previous tick (the commit arrival rate per sampling interval).
class MeasueCommitCommitRate extends TimerTask {

    private int lastNumCommit = 0;
    long sum = 0;
    long avgd = 0;

    public MeasueCommitCommitRate() {
    }

    @Override
    public void run() {
        int commitsSinceLastTick = stats.numCommit.get() - stats.lastNumCommit.get();
        lastNumCommit = commitsSinceLastTick;
        // Only record non-idle intervals, and advance the baseline.
        if (commitsSinceLastTick != 0) {
            commitArrivalRate.add(commitsSinceLastTick);
            stats.lastNumCommit.set(stats.numCommit.get());
        }
    }
}
}
| |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tapestry5.jpa.test;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertTrue;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.persistence.EntityManager;
import javax.persistence.EntityTransaction;
import javax.persistence.RollbackException;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import org.apache.tapestry5.http.services.ApplicationGlobals;
import org.apache.tapestry5.internal.jpa.JpaInternalUtils;
import org.apache.tapestry5.internal.jpa.PersistedEntity;
import org.apache.tapestry5.internal.test.PageTesterContext;
import org.apache.tapestry5.ioc.Registry;
import org.apache.tapestry5.ioc.RegistryBuilder;
import org.apache.tapestry5.jpa.EntityManagerManager;
import org.apache.tapestry5.jpa.modules.JpaModule;
import org.apache.tapestry5.jpa.test.entities.ThingOne;
import org.apache.tapestry5.jpa.test.entities.ThingTwo;
import org.apache.tapestry5.jpa.test.entities.VersionedThing;
import org.apache.tapestry5.modules.TapestryModule;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
public class JpaTest
{
private static Registry registry;
private EntityManagerManager entityManagerManager;
private TopLevelService topLevelService;
// @BeforeSuite
public final void setupRegistry()
{
RegistryBuilder builder = new RegistryBuilder();
builder.add(TapestryModule.class);
builder.add(JpaModule.class);
builder.add(JpaTestModule.class);
registry = builder.build();
// set PageTesterContext, otherwise T5 tries to load classpath assets
ApplicationGlobals globals = registry.getObject(ApplicationGlobals.class, null);
globals.storeContext(new PageTesterContext(""));
registry.performRegistryStartup();
entityManagerManager = registry.getService(EntityManagerManager.class);
topLevelService = registry.getService(TopLevelService.class);
}
private EntityManager getEntityManager()
{
return entityManagerManager.getEntityManagers().values().iterator().next();
}
// @AfterSuite
public final void shutdownRegistry()
{
registry.cleanupThread();
registry.shutdown();
registry = null;
}
@BeforeMethod
public final void beginTransaction()
{
setupRegistry();
EntityTransaction tx = getEntityManager().getTransaction();
if (!tx.isActive())
tx.begin();
}
@AfterMethod
public void rollbackLastTransactionAndClean() throws SQLException
{
EntityTransaction transaction = getEntityManager().getTransaction();
if (transaction.isActive())
transaction.rollback();
clearDatabase();
getEntityManager().clear();
shutdownRegistry();
}
// based on http://www.objectpartners.com/2010/11/09/unit-testing-your-persistence-tier-code/
private void clearDatabase() throws SQLException
{
EntityManager em = getEntityManager();
em.clear();
EntityTransaction transaction = em.getTransaction();
if (!transaction.isActive())
transaction.begin();
Connection c = em.unwrap(Connection.class);
Statement s = c.createStatement();
s.execute("SET REFERENTIAL_INTEGRITY FALSE");
Set<String> tables = new HashSet<String>();
ResultSet rs = s.executeQuery("select table_name " + "from INFORMATION_SCHEMA.tables "
+ "where table_type='TABLE' and table_schema='PUBLIC'");
while (rs.next())
{
// if we don't skip over the sequence table, we'll start getting
// "The sequence table information is not complete"
// exceptions
if (!rs.getString(1).startsWith("DUAL_") && !rs.getString(1).equals("SEQUENCE"))
{
tables.add(rs.getString(1));
}
}
rs.close();
for (String table : tables)
{
s.executeUpdate("DELETE FROM " + table);
}
transaction.commit();
s.execute("SET REFERENTIAL_INTEGRITY TRUE");
s.close();
}
private <T> List<T> getInstances(final Class<T> type)
{
EntityManager em = getEntityManager();
CriteriaBuilder qb = em.getCriteriaBuilder();
CriteriaQuery<T> query = qb.createQuery(type);
query.select(query.from(type));
return em.createQuery(query).getResultList();
}
@Test
public void commitBothInNestedTransaction()
{
topLevelService.createThingOneAndTwo("one", "two");
assertEquals(1, getInstances(ThingOne.class).size());
assertEquals(1, getInstances(ThingTwo.class).size());
assertTrue(getEntityManager().find(VersionedThing.class, 1).getVersion() > 0);
}
@Test(expectedExceptions = RollbackException.class)
public void rollbackNestedFails()
{
topLevelService.createThingOneAndTwo("one", null);
}
@Test(expectedExceptions = RollbackException.class)
public void rollbackTopFails()
{
topLevelService.createThingOneAndTwo(null, "two");
}
@Test
public void sequentialCommitUsingInvokeAfterCommit()
{
topLevelService.createThingOneThenTwo("one", "two");
assertEquals(1, getInstances(ThingOne.class).size());
assertEquals(1, getInstances(ThingTwo.class).size());
assertTrue(getEntityManager().find(VersionedThing.class, 1).getVersion() > 1);
}
@Test
public void sequentialCommitUsingInvokeAfterCommitAndCommitAfterAnnotation()
{
topLevelService.createThingOneThenTwoWithNestedCommitAfter("one", "two");
assertEquals(1, getInstances(ThingOne.class).size());
assertEquals(1, getInstances(ThingTwo.class).size());
assertTrue(getEntityManager().find(VersionedThing.class, 1).getVersion() > 1);
}
@Test
public void sequentialRollbackAndAbortUsingInvokeAfterCommit()
{
try
{
topLevelService.createThingOneThenTwo(null, "two");
Assert.fail();
}
catch (RollbackException e)
{
}
assertEquals(0, getInstances(ThingOne.class).size());
assertEquals(0, getInstances(ThingTwo.class).size());
}
@Test
public void trySomething()
{
ThingOne thingOne = new ThingOne();
thingOne.setId(1);
PersistedEntity entity = JpaInternalUtils.convertApplicationValueToPersisted(
entityManagerManager, thingOne);
}
}
| |
/*
* Licensed to Mandrel under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Mandrel licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.mandrel.transport.thrift.nifty;
import static com.facebook.nifty.core.ShutdownUtil.shutdownChannelFactory;
import static com.facebook.nifty.core.ShutdownUtil.shutdownExecutor;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static java.util.concurrent.Executors.newCachedThreadPool;
import java.io.Closeable;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import lombok.extern.slf4j.Slf4j;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TCompactProtocol;
import org.apache.thrift.transport.TTransport;
import org.jboss.netty.channel.ServerChannelFactory;
import org.jboss.netty.channel.group.DefaultChannelGroup;
import org.jboss.netty.channel.local.DefaultLocalServerChannelFactory;
import org.jboss.netty.channel.socket.nio.NioServerBossPool;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.channel.socket.nio.NioWorkerPool;
import org.jboss.netty.util.ThreadNameDeterminer;
import org.jboss.netty.util.Timer;
import org.weakref.jmx.Managed;
import com.facebook.nifty.codec.DefaultThriftFrameCodecFactory;
import com.facebook.nifty.codec.ThriftFrameCodecFactory;
import com.facebook.nifty.core.NettyServerConfig;
import com.facebook.nifty.core.NettyServerConfigBuilder;
import com.facebook.nifty.core.NiftyNoOpSecurityFactory;
import com.facebook.nifty.core.NiftySecurityFactory;
import com.facebook.nifty.core.NiftyTimer;
import com.facebook.nifty.core.ThriftServerDef;
import com.facebook.nifty.duplex.TDuplexProtocolFactory;
import com.facebook.nifty.processor.NiftyProcessor;
import com.facebook.nifty.processor.NiftyProcessorFactory;
import com.facebook.swift.service.ThriftServerConfig;
import com.facebook.swift.service.ThriftServerTimer;
import com.facebook.swift.service.ThriftServerWorkerExecutor;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.inject.Inject;
// Nifty/Netty-backed Thrift server wrapper with a simple lifecycle:
// NOT_STARTED -> RUNNING (start()) -> CLOSED (close()). Thread pools are
// created in the constructor and shut down in close(), even if start()
// was never called.
@Slf4j
public class ThriftServer implements Closeable {
    // Protocol/transport registries selectable by name via ThriftServerConfig.
    public static final ImmutableMap<String, TDuplexProtocolFactory> DEFAULT_PROTOCOL_FACTORIES = ImmutableMap.of("binary",
            TDuplexProtocolFactory.fromSingleFactory(new TBinaryProtocol.Factory()), "compact",
            TDuplexProtocolFactory.fromSingleFactory(new TCompactProtocol.Factory()));
    public static final ImmutableMap<String, ThriftFrameCodecFactory> DEFAULT_FRAME_CODEC_FACTORIES = ImmutableMap.of("buffered",
            (ThriftFrameCodecFactory) new DefaultThriftFrameCodecFactory(), "framed", (ThriftFrameCodecFactory) new DefaultThriftFrameCodecFactory());
    public static final ImmutableMap<String, ExecutorService> DEFAULT_WORKER_EXECUTORS = ImmutableMap.of();
    public static final NiftySecurityFactoryHolder DEFAULT_SECURITY_FACTORY = new NiftySecurityFactoryHolder();
    // Lifecycle states; transitions are guarded by `synchronized` methods.
    private enum State {
        NOT_STARTED, RUNNING, CLOSED,
    }
    private final NettyServerTransport transport;
    // Port from config; 0 means "local transport or pick a random port".
    private final int configuredPort;
    private final DefaultChannelGroup allChannels = new DefaultChannelGroup();
    private final Executor workerExecutor;
    private final ExecutorService acceptorExecutor;
    private final ExecutorService ioExecutor;
    private final int acceptorThreads;
    private final int ioThreads;
    private final ServerChannelFactory serverChannelFactory;
    private State state = State.NOT_STARTED;
    public ThriftServer(NiftyProcessor processor) {
        this(processor, new ThriftServerConfig());
    }
    public ThriftServer(NiftyProcessor processor, ThriftServerConfig config) {
        this(processor, config, new NiftyTimer("thrift"));
    }
    public ThriftServer(NiftyProcessor processor, ThriftServerConfig config, Timer timer) {
        this(processor, config, timer, DEFAULT_FRAME_CODEC_FACTORIES, DEFAULT_PROTOCOL_FACTORIES, DEFAULT_WORKER_EXECUTORS, DEFAULT_SECURITY_FACTORY, false);
    }
    public ThriftServer(final NiftyProcessor processor, ThriftServerConfig config, @ThriftServerTimer Timer timer,
            Map<String, ThriftFrameCodecFactory> availableFrameCodecFactories, Map<String, TDuplexProtocolFactory> availableProtocolFactories,
            Map<String, ExecutorService> availableWorkerExecutors, NiftySecurityFactory securityFactory, boolean local) {
        this(processor, config, timer, availableFrameCodecFactories, availableProtocolFactories, availableWorkerExecutors, new NiftySecurityFactoryHolder(
                securityFactory), local);
    }
    /**
     * Primary constructor: resolves the transport/protocol by name from the
     * supplied registries, builds the Netty channel factory (in-JVM "local"
     * or NIO socket based) and assembles the server definition. The server
     * is not started until {@link #start()} is called.
     */
    @Inject
    public ThriftServer(final NiftyProcessor processor, ThriftServerConfig config, @ThriftServerTimer Timer timer,
            Map<String, ThriftFrameCodecFactory> availableFrameCodecFactories, Map<String, TDuplexProtocolFactory> availableProtocolFactories,
            @ThriftServerWorkerExecutor Map<String, ExecutorService> availableWorkerExecutors, NiftySecurityFactoryHolder securityFactoryHolder, boolean local) {
        checkNotNull(availableFrameCodecFactories, "availableFrameCodecFactories cannot be null");
        checkNotNull(availableProtocolFactories, "availableProtocolFactories cannot be null");
        // Every connection is served by the same (captured) processor.
        NiftyProcessorFactory processorFactory = new NiftyProcessorFactory() {
            @Override
            public NiftyProcessor getProcessor(TTransport transport) {
                return processor;
            }
        };
        String transportName = config.getTransportName();
        String protocolName = config.getProtocolName();
        checkState(availableFrameCodecFactories.containsKey(transportName), "No available server transport named " + transportName);
        checkState(availableProtocolFactories.containsKey(protocolName), "No available server protocol named " + protocolName);
        workerExecutor = config.getOrBuildWorkerExecutor(availableWorkerExecutors);
        if (local) {
            // In-JVM transport: no sockets, no acceptor/IO thread pools.
            log.warn("Using local server");
            configuredPort = 0;
            ioThreads = 0;
            ioExecutor = null;
            acceptorThreads = 0;
            acceptorExecutor = null;
            serverChannelFactory = new DefaultLocalServerChannelFactory();
        } else {
            configuredPort = config.getPort();
            acceptorExecutor = newCachedThreadPool(new ThreadFactoryBuilder().setNameFormat("thrift-acceptor-%s").build());
            acceptorThreads = config.getAcceptorThreadCount();
            ioExecutor = newCachedThreadPool(new ThreadFactoryBuilder().setNameFormat("thrift-io-%s").build());
            ioThreads = config.getIoThreadCount();
            serverChannelFactory = new NioServerSocketChannelFactory(new NioServerBossPool(acceptorExecutor, acceptorThreads, ThreadNameDeterminer.CURRENT),
                    new NioWorkerPool(ioExecutor, ioThreads, ThreadNameDeterminer.CURRENT));
        }
        ThriftServerDef thriftServerDef = ThriftServerDef.newBuilder().name("thrift").listen(configuredPort)
                .limitFrameSizeTo((int) config.getMaxFrameSize().toBytes()).clientIdleTimeout(config.getIdleConnectionTimeout())
                .withProcessorFactory(processorFactory).limitConnectionsTo(config.getConnectionLimit())
                .limitQueuedResponsesPerConnection(config.getMaxQueuedResponsesPerConnection())
                .thriftFrameCodecFactory(availableFrameCodecFactories.get(transportName)).protocol(availableProtocolFactories.get(protocolName))
                .withSecurityFactory(securityFactoryHolder.niftySecurityFactory).using(workerExecutor).taskTimeout(config.getTaskExpirationTimeout()).build();
        NettyServerConfigBuilder nettyServerConfigBuilder = NettyServerConfig.newBuilder();
        nettyServerConfigBuilder.getServerSocketChannelConfig().setBacklog(config.getAcceptBacklog());
        nettyServerConfigBuilder.setBossThreadCount(config.getAcceptorThreadCount());
        nettyServerConfigBuilder.setWorkerThreadCount(config.getIoThreadCount());
        nettyServerConfigBuilder.setTimer(timer);
        NettyServerConfig nettyServerConfig = nettyServerConfigBuilder.build();
        transport = new NettyServerTransport(thriftServerDef, nettyServerConfig, allChannels, local);
    }
    // Returns the configured port, or the actually-bound port when the
    // server was configured with port 0.
    @Managed
    public Integer getPort() {
        if (configuredPort != 0) {
            return configuredPort;
        } else {
            return getBoundPort();
        }
    }
    @Managed
    public int getWorkerThreads() {
        if (workerExecutor instanceof ThreadPoolExecutor) {
            return ((ThreadPoolExecutor) workerExecutor).getPoolSize();
        }
        // Not a ThreadPoolExecutor. It may still be an ExecutorService that
        // uses threads to do
        // it's work, but we have no way to ask a generic Executor for the
        // number of threads it is
        // running.
        return 0;
    }
    public Executor getWorkerExecutor() {
        return workerExecutor;
    }
    private int getBoundPort() {
        // If the server was configured to bind to port 0, a random port will
        // actually be bound instead
        SocketAddress socketAddress = transport.getServerChannel().getLocalAddress();
        if (socketAddress instanceof InetSocketAddress) {
            InetSocketAddress inetSocketAddress = (InetSocketAddress) socketAddress;
            return inetSocketAddress.getPort();
        }
        // Cannot determine the randomly assigned port until the server is
        // started
        return 0;
    }
    @Managed
    public int getAcceptorThreads() {
        return acceptorThreads;
    }
    @Managed
    public int getIoThreads() {
        return ioThreads;
    }
    public synchronized boolean isRunning() {
        return state == State.RUNNING;
    }
    // Idempotent start; illegal after close().
    @PostConstruct
    public synchronized ThriftServer start() {
        checkState(state != State.CLOSED, "Thrift server is closed");
        if (state == State.NOT_STARTED) {
            transport.start(serverChannelFactory);
            state = State.RUNNING;
        }
        return this;
    }
    // Idempotent shutdown: stops the transport (if running), then tears down
    // the executors and channel factory created in the constructor.
    @PreDestroy
    @Override
    public synchronized void close() {
        if (state == State.CLOSED) {
            return;
        }
        if (state == State.RUNNING) {
            try {
                transport.stop();
            } catch (Exception e) {
                Thread.currentThread().interrupt();
            }
        }
        // Executors are created in the constructor, so we should shut them down
        // here even if the
        // server was never actually started
        try {
            if (workerExecutor instanceof ExecutorService) {
                shutdownExecutor((ExecutorService) workerExecutor, "workerExecutor");
            }
            shutdownChannelFactory(serverChannelFactory, acceptorExecutor, ioExecutor, allChannels);
        } catch (Exception e) {
            Thread.currentThread().interrupt();
        }
        state = State.CLOSED;
    }
    /**
     * Do not use this class. It is only used to workaround Guice not having
     *
     * @Inject(optional=true) for constructor arguments. The class is public
     *                        because it's used in ThriftServerModule, which is
     *                        in a different package.
     */
    public static class NiftySecurityFactoryHolder {
        @Inject(optional = true)
        public NiftySecurityFactory niftySecurityFactory = new NiftyNoOpSecurityFactory();
        @Inject
        public NiftySecurityFactoryHolder() {
        }
        public NiftySecurityFactoryHolder(NiftySecurityFactory niftySecurityFactory) {
            this.niftySecurityFactory = niftySecurityFactory;
        }
    }
}
| |
/*
* Copyright (C) 2014 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.cassandra.lucene.schema.mapping;
import com.google.common.base.MoreObjects;
import com.spatial4j.core.shape.Point;
import com.stratio.cassandra.lucene.IndexException;
import com.stratio.cassandra.lucene.column.Columns;
import com.stratio.cassandra.lucene.common.GeospatialUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.SortField;
import org.apache.lucene.spatial.composite.CompositeSpatialStrategy;
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
import org.apache.lucene.spatial.serialized.SerializedDVStrategy;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static com.stratio.cassandra.lucene.common.GeospatialUtils.CONTEXT;
/**
* A {@link Mapper} to map geographical points.
*
* @author Andres de la Pena {@literal <adelapena@stratio.com>}
*/
public class GeoPointMapper extends MultipleColumnMapper {

    /** The default max number of levels for geohash search trees. */
    public static final int DEFAULT_MAX_LEVELS = 11;

    /** The name of the latitude column. */
    public final String latitude;

    /** The name of the longitude column. */
    public final String longitude;

    /** The max number of levels in the tree. */
    public final int maxLevels;

    /** The spatial strategy. */
    public final CompositeSpatialStrategy strategy;

    /**
     * Builds a new {@link GeoPointMapper}.
     *
     * @param field the name of the field
     * @param validated if the field must be validated
     * @param latitude the name of the column containing the latitude
     * @param longitude the name of the column containing the longitude
     * @param maxLevels the maximum number of levels in the geohash search tree. False positives will be discarded using
     * stored doc values, so a low value doesn't mean precision lost. High values will produce few false positives to be
     * post-filtered, at the expense of creating more terms in the search index.
     */
    public GeoPointMapper(String field, Boolean validated, String latitude, String longitude, Integer maxLevels) {
        super(field, validated, Arrays.asList(latitude, longitude), NUMERIC_TYPES, Collections.singletonList(Byte.class));
        if (StringUtils.isBlank(latitude)) {
            throw new IndexException("latitude column name is required");
        }
        if (StringUtils.isBlank(longitude)) {
            throw new IndexException("longitude column name is required");
        }
        this.latitude = latitude;
        this.longitude = longitude;
        this.maxLevels = GeospatialUtils.validateGeohashMaxLevels(maxLevels, DEFAULT_MAX_LEVELS);
        // Index through a geohash prefix tree, keeping the exact geometry in
        // doc values so tree false positives can be post-filtered.
        SpatialPrefixTree grid = new GeohashPrefixTree(CONTEXT, this.maxLevels);
        RecursivePrefixTreeStrategy indexStrategy = new RecursivePrefixTreeStrategy(grid, field);
        SerializedDVStrategy geometryStrategy = new SerializedDVStrategy(CONTEXT, field);
        strategy = new CompositeSpatialStrategy(field, indexStrategy, geometryStrategy);
    }

    /** {@inheritDoc} */
    @Override
    public List<IndexableField> indexableFields(Columns columns) {
        Double lon = readLongitude(columns);
        Double lat = readLatitude(columns);
        // Both absent: nothing to index. Exactly one absent: invalid point.
        if (lon == null && lat == null) {
            return Collections.emptyList();
        } else if (lat == null) {
            throw new IndexException("Latitude column required if there is a longitude");
        } else if (lon == null) {
            throw new IndexException("Longitude column required if there is a latitude");
        }
        Point point = CONTEXT.makePoint(lon, lat);
        return Arrays.asList(strategy.createIndexableFields(point));
    }

    /** {@inheritDoc} */
    @Override
    public SortField sortField(String name, boolean reverse) {
        throw new IndexException("GeoPoint mapper '{}' does not support simple sorting", name);
    }

    /**
     * Returns the latitude contained in the specified {@link Columns}. A valid latitude must in the range [-90, 90].
     *
     * @param columns the columns containing the latitude
     * @return the validated latitude
     */
    Double readLatitude(Columns columns) {
        Object value = columns.valueForField(latitude);
        return value == null ? null : readLatitude(value);
    }

    /**
     * Returns the longitude contained in the specified {@link Columns}. A valid longitude must in the range [-180,
     * 180].
     *
     * @param columns the columns containing the longitude
     * @return the validated longitude
     */
    Double readLongitude(Columns columns) {
        Object value = columns.valueForField(longitude);
        return value == null ? null : readLongitude(value);
    }

    /**
     * Parses a coordinate value out of a {@link Number} or its string form.
     * Shared by latitude and longitude reading (the original duplicated this
     * parsing logic in both methods).
     *
     * @param o the raw column value, possibly {@code null}
     * @param name the coordinate name used in error messages
     * @return the parsed value, or {@code null} if {@code o} is {@code null}
     */
    private static Double parseCoordinate(Object o, String name) {
        if (o == null) {
            return null;
        }
        if (o instanceof Number) {
            return ((Number) o).doubleValue();
        }
        try {
            return Double.valueOf(o.toString());
        } catch (NumberFormatException e) {
            throw new IndexException("Unparseable {}: {}", name, o);
        }
    }

    /**
     * Returns the latitude contained in the specified {@link Object}.
     *
     * A valid latitude must in the range [-90, 90].
     *
     * @param o the {@link Object} containing the latitude
     * @return the latitude
     */
    private static Double readLatitude(Object o) {
        Double value = parseCoordinate(o, "latitude");
        return value == null ? null : GeospatialUtils.checkLatitude("latitude", value);
    }

    /**
     * Returns the longitude contained in the specified {@link Object}.
     *
     * A valid longitude must in the range [-180, 180].
     *
     * @param o the {@link Object} containing the longitude
     * @return the longitude
     */
    private static Double readLongitude(Object o) {
        Double value = parseCoordinate(o, "longitude");
        return value == null ? null : GeospatialUtils.checkLongitude("longitude", value);
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                .add("field", field)
                .add("validated", validated)
                .add("latitude", latitude)
                .add("longitude", longitude)
                .add("maxLevels", maxLevels)
                .toString();
    }
}
| |
/*******************************************************************************
* Copyright 2017 Capital One Services, LLC and Bitwise, Inc.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*******************************************************************************/
package hydrograph.engine.core.component.entity.utils;
import hydrograph.engine.core.component.entity.elements.*;
import hydrograph.engine.core.component.utils.OperationOutputField;
import hydrograph.engine.jaxb.commontypes.*;
import hydrograph.engine.jaxb.commontypes.TypeProperties.Property;
import hydrograph.engine.jaxb.join.TypeKeyFields;
import javax.xml.bind.JAXBElement;
import java.io.Serializable;
import java.util.*;
/**
* The Class OperationEntityUtils.
*
* @author Bitwise
*
*/
public class OperationEntityUtils implements Serializable {
/**
*
*/
private static final long serialVersionUID = 7977314904706287826L;
private OperationEntityUtils() {
}
/**
 * Builds {@link Operation} entities from the JAXB transform elements passed
 * as a parameter. Each element is expected to wrap either a
 * {@link TypeTransformOperation} (operation-class based transform) or a
 * {@link TypeTransformExpression} (expression based transform).
 *
 * @param jaxbOperationList the list of {@link JAXBElement} objects which
 *                          contain information of operations for the component
 * @return a list of {@link Operation} objects. Each object in the list
 *         contains information of operation class, input fields, output
 *         fields and properties for one operation
 */
public static List<Operation> extractOperations(List<JAXBElement<?>> jaxbOperationList) {
    List<Operation> operationList = new ArrayList<Operation>();
    for (JAXBElement<?> jaxbElement : jaxbOperationList) {
        Operation operation = new Operation();
        // Unwrap once instead of repeating the cast on every accessor call.
        Object value = jaxbElement.getValue();
        if (value instanceof TypeTransformOperation) {
            TypeTransformOperation transform = (TypeTransformOperation) value;
            operation.setOperationId(transform.getId());
            operation.setOperationInputFields(extractOperationInputFields(transform.getInputFields()));
            operation.setOperationOutputFields(extractOperationOutputFields(transform.getOutputFields()));
            operation.setOperationFields(extractOperationFields(transform.getOutputFields()));
            operation.setOperationClass(transform.getClazz());
            operation.setOperationProperties(extractOperationProperties(transform.getProperties()));
            operation.setExpressionPresent(false);
        } else {
            // Anything else must be an expression transform; the cast fails fast otherwise.
            TypeTransformExpression expression = (TypeTransformExpression) value;
            operation.setOperationId(expression.getId());
            operation.setOperationInputFields(extractOperationInputFields(expression.getInputFields()));
            operation.setOperationOutputFields(extractExpressionOutputFields(expression.getOutputFields()));
            operation.setOperationFields(extractOperationFields(expression.getOutputFields()));
            operation.setExpression(addSemiColonIfNotPresent(expression.getExpr()));
            if (expression.getMergeExpr() != null) {
                operation.setMergeExpression(addSemiColonIfNotPresent(expression.getMergeExpr()));
            }
            operation.setAccumulatorInitialValue(addQuotes(expression.getAccumulatorInitalValue()));
            operation.setOperationClass(null);
            operation.setExpressionPresent(true);
            operation.setOperationProperties(extractOperationProperties(expression.getProperties()));
        }
        operationList.add(operation);
    }
    return operationList;
}
/**
 * Wraps the accumulator initial value in double quotes when it looks like a
 * textual value (starts with a letter or underscore) and is not already
 * quoted. Numeric literals and {@code null} are returned unchanged.
 *
 * @param accumulatorInitalValue the raw accumulator initial value; may be null
 * @return the quoted value, or the input unchanged when no quoting is needed
 */
private static String addQuotes(String accumulatorInitalValue) {
    // Fix: ".*" instead of ".+" — the old pattern required at least two
    // characters, so single-character textual values (e.g. "a") escaped
    // quoting while longer ones were quoted.
    if (accumulatorInitalValue != null
            && accumulatorInitalValue.matches("^[_a-zA-Z].*")
            && !accumulatorInitalValue.trim().startsWith("\"")
            && !accumulatorInitalValue.endsWith("\"")) {
        accumulatorInitalValue = "\"" + accumulatorInitalValue + "\"";
    }
    return accumulatorInitalValue;
}
/**
 * Strips the final character of the expression when the trimmed expression
 * ends with a semicolon; otherwise returns the expression unchanged.
 * NOTE(review): despite its name, this method removes rather than adds the
 * trailing semicolon; the name is kept for compatibility with callers.
 *
 * @param expr the expression text; must not be null
 * @return the expression without its trailing semicolon
 */
private static String addSemiColonIfNotPresent(String expr) {
    String trimmed = expr.trim();
    return trimmed.endsWith(";") ? expr.substring(0, expr.length() - 1) : expr;
}
/**
* Extracts the operation list from the {@link Operation} object of type
* {@link TypeTransformOperation}, passed as a parameter
*
* @param jaxbOperationList
* the list of {@link TypeTransformOperation} objects which
* contain information of operations for the component
* @return a list of {@link Operation} objects. Each object in the list
* contains information of operation class, input fields, output
* fields and properties for one operation
*/
// public static List<Expression> extractExpression(List<Object> jaxbOperationList) {
//
// List<Expression> operationList = new ArrayList<Expression>();
//
// for (Object typeTransformOperation : jaxbOperationList) {
// if (typeTransformOperation instanceof TypeTransformExpression) {
// Expression operation = new Expression();
// operation.setOperationId(((TypeTransformExpression) typeTransformOperation).getId());
// operation.setOperationInputFields(extractOperationInputFields(
// ((TypeTransformExpression) typeTransformOperation).getInputFields()));
// operation.setOperationOutputFields(extractExpressionOutputFields(
// ((TypeTransformExpression) typeTransformOperation).getOutputFields()));
// operation.setExpression(((TypeTransformExpression) typeTransformOperation).getExpr());
// operation.setOperationProperties(
// extractOperationProperties(((TypeTransformExpression) typeTransformOperation).getProperties()));
// operationList.add(operation);
// }
// }
// return operationList;
// }
/**
 * Extracts a {@link Properties} object from the {@link TypeProperties}
 * object passed as a parameter.
 * <p>
 * The method returns {@code null} if the {@code typeProperties} parameter
 * or its property list is null.
 *
 * @param typeProperties the {@link TypeProperties} object which contains the
 *                       operation properties for the component
 * @return a {@link Properties} object, or {@code null}
 */
private static Properties extractOperationProperties(TypeProperties typeProperties) {
    if (typeProperties == null || typeProperties.getProperty() == null) {
        return null;
    }
    // Allocate only after the guards — the original created the object
    // before checking, wasting an allocation on every null path.
    Properties properties = new Properties();
    // Copy all the properties passed to the operation.
    for (Property eachProperty : typeProperties.getProperty()) {
        properties.setProperty(eachProperty.getName(), eachProperty.getValue());
    }
    return properties;
}
/**
 * Extracts the operation input field names from the
 * {@link TypeOperationInputFields} object passed as a parameter.
 * <p>
 * Returns {@code null} when the parameter or its field list is null.
 *
 * @param typeOperationInputFields the operation input fields for the component
 * @return a string array containing the input field names, or {@code null}
 */
private static String[] extractOperationInputFields(TypeOperationInputFields typeOperationInputFields) {
    if (typeOperationInputFields == null || typeOperationInputFields.getField() == null) {
        return null;
    }
    List<TypeInputField> fields = typeOperationInputFields.getField();
    String[] inputFields = new String[fields.size()];
    for (int index = 0; index < inputFields.length; index++) {
        inputFields[index] = fields.get(index).getName();
    }
    return inputFields;
}
/**
 * Extracts the operation output field names from the
 * {@link TypeOperationOutputFields} object passed as a parameter.
 * <p>
 * Returns {@code null} when the parameter or its field list is null.
 *
 * @param typeOperationOutputFields the operation output fields for the component
 * @return a string array containing the output field names, or {@code null}
 */
private static String[] extractOperationOutputFields(TypeOperationOutputFields typeOperationOutputFields) {
    if (typeOperationOutputFields == null || typeOperationOutputFields.getField() == null) {
        return null;
    }
    List<TypeBaseField> fields = typeOperationOutputFields.getField();
    String[] outputFields = new String[fields.size()];
    for (int index = 0; index < outputFields.length; index++) {
        outputFields[index] = fields.get(index).getName();
    }
    return outputFields;
}
/**
 * Extracts the operation output fields from the
 * {@link TypeOperationOutputFields} object passed as a parameter.
 * <p>
 * Returns {@code null} when the parameter or its field list is null.
 *
 * @param typeOperationOutputFields the operation output fields for the component
 * @return an array of {@link OperationOutputField} for the operation, or
 *         {@code null}
 */
private static OperationOutputField[] extractOperationFields(TypeOperationOutputFields typeOperationOutputFields) {
    if (typeOperationOutputFields == null || typeOperationOutputFields.getField() == null) {
        return null;
    }
    List<TypeBaseField> fields = typeOperationOutputFields.getField();
    OperationOutputField[] outputFields = new OperationOutputField[fields.size()];
    int index = 0;
    for (TypeBaseField field : fields) {
        // Defaults when unspecified: format "yyyy-MM-dd", scale 38, precision 38.
        outputFields[index++] = new OperationOutputField(
                field.getName(),
                field.getType().value(),
                field.getFormat() != null ? field.getFormat() : "yyyy-MM-dd",
                field.getScale() != null ? field.getScale() : 38,
                field.getPrecision() != null ? field.getPrecision() : 38);
    }
    return outputFields;
}
/**
 * Extracts the single operation output field from the
 * {@link TypeExpressionOutputFields} object passed as a parameter.
 * <p>
 * Returns {@code null} when the parameter or its field is null.
 *
 * @param typeOperationOutputFields the expression output field for the component
 * @return a one-element array of {@link OperationOutputField}, or {@code null}
 */
private static OperationOutputField[] extractOperationFields(TypeExpressionOutputFields typeOperationOutputFields) {
    if (typeOperationOutputFields == null || typeOperationOutputFields.getField() == null) {
        return null;
    }
    // The expression variant carries exactly one field; the original built a
    // single-element list only to loop over it. Construct the result directly.
    // Defaults when unspecified: format "yyyy-MM-dd", scale 38, precision 38.
    TypeBaseField field = typeOperationOutputFields.getField();
    return new OperationOutputField[]{new OperationOutputField(
            field.getName(),
            field.getType().value(),
            field.getFormat() != null ? field.getFormat() : "yyyy-MM-dd",
            field.getScale() != null ? field.getScale() : 38,
            field.getPrecision() != null ? field.getPrecision() : 38)};
}
/**
 * Extracts the single output field name from the
 * {@link TypeExpressionOutputFields} object passed as a parameter.
 * <p>
 * Returns {@code null} when the parameter or its field is null.
 *
 * @param typeOperationOutputFields the expression output field for the component
 * @return a one-element string array with the output field name, or {@code null}
 */
private static String[] extractExpressionOutputFields(TypeExpressionOutputFields typeOperationOutputFields) {
    if (typeOperationOutputFields == null || typeOperationOutputFields.getField() == null) {
        return null;
    }
    String fieldName = typeOperationOutputFields.getField().getName();
    return new String[]{fieldName};
}
/**
 * Extracts the operation field names of type {@link TypeOperationField} from
 * each output socket in the given list.
 * <p>
 * <b>Note</b>: this returns the operation fields declared on the out socket.
 * For the output fields declared on the operation itself, see
 * {@link #extractOperations(List)}.
 *
 * @param outSocket the {@link TypeOperationsOutSocket} objects which contain
 *                  the operation field information
 * @return a list with one string array per out socket, each holding that
 *         socket's operation field names
 * @throws NullPointerException when {@code outSocket} is null
 */
public static List<String[]> extractOperationFieldsListFromSocket(List<TypeOperationsOutSocket> outSocket)
        throws NullPointerException {
    if (outSocket == null) {
        throw new NullPointerException("Out socket cannot be null");
    }
    List<String[]> operationFieldsList = new ArrayList<String[]>();
    for (TypeOperationsOutSocket socket : outSocket) {
        List<String> names = new ArrayList<String>();
        for (Object field : socket.getPassThroughFieldOrOperationFieldOrExpressionField()) {
            if (field instanceof TypeOperationField) {
                names.add(((TypeOperationField) field).getName());
            }
        }
        operationFieldsList.add(names.toArray(new String[names.size()]));
    }
    return operationFieldsList;
}
/**
 * Extracts the pass-through field names of type {@link TypeInputField} from
 * each output socket in the given list.
 *
 * @param outSocket the {@link TypeOperationsOutSocket} objects which contain
 *                  the pass-through field information
 * @return a list with one string array per out socket, each holding that
 *         socket's pass-through field names
 * @throws NullPointerException when {@code outSocket} is null
 */
public static List<String[]> extractPassThroughFieldsListFromOutSocket(List<TypeOperationsOutSocket> outSocket)
        throws NullPointerException {
    if (outSocket == null) {
        throw new NullPointerException("Out socket cannot be null");
    }
    List<String[]> passThroughFieldsList = new ArrayList<String[]>();
    for (TypeOperationsOutSocket socket : outSocket) {
        List<String> names = new ArrayList<String>();
        for (Object field : socket.getPassThroughFieldOrOperationFieldOrExpressionField()) {
            if (field instanceof TypeInputField) {
                names.add(((TypeInputField) field).getName());
            }
        }
        passThroughFieldsList.add(names.toArray(new String[names.size()]));
    }
    return passThroughFieldsList;
}
/**
 * Extracts the map fields of type {@link TypeMapField} from each output
 * socket in the given list.
 *
 * @param outSocket the {@link TypeOperationsOutSocket} objects which contain
 *                  the map field information
 * @return a list with one {@code Map<sourceFieldName, targetFieldName>} per
 *         out socket
 * @throws NullPointerException when {@code outSocket} is null
 */
public static List<Map<String, String>> extractMapFieldsListFromOutSocket(List<TypeOperationsOutSocket> outSocket)
        throws NullPointerException {
    if (outSocket == null) {
        throw new NullPointerException("Out socket cannot be null");
    }
    List<Map<String, String>> mapFieldsList = new ArrayList<Map<String, String>>();
    for (TypeOperationsOutSocket socket : outSocket) {
        Map<String, String> sourceToTarget = new HashMap<String, String>();
        for (Object field : socket.getPassThroughFieldOrOperationFieldOrExpressionField()) {
            if (field instanceof TypeMapField) {
                TypeMapField mapField = (TypeMapField) field;
                sourceToTarget.put(mapField.getSourceName(), mapField.getName());
            }
        }
        mapFieldsList.add(sourceToTarget);
    }
    return mapFieldsList;
}
/**
 * Extracts the socket ids of all the sockets in the given list of
 * {@link TypeOperationsOutSocket} objects.
 *
 * @param outSocket the {@link TypeOperationsOutSocket} objects which contain
 *                  the socket id information
 * @return a string array containing the socket id of every socket, in order
 * @throws NullPointerException when {@code outSocket} is null
 */
public static String[] extractSocketIdFromOutSocket(List<TypeOperationsOutSocket> outSocket)
        throws NullPointerException {
    if (outSocket == null) {
        throw new NullPointerException("Out socket cannot be null");
    }
    String[] socketIds = new String[outSocket.size()];
    for (int index = 0; index < socketIds.length; index++) {
        socketIds[index] = outSocket.get(index).getId();
    }
    return socketIds;
}
/**
 * Extracts the socket types of all the sockets in the given list of
 * {@link TypeOperationsOutSocket} objects.
 *
 * @param outSocket the {@link TypeOperationsOutSocket} objects which contain
 *                  the socket type information
 * @return a string array containing the socket type of every socket, in order
 * @throws NullPointerException when {@code outSocket} is null
 */
public static String[] extractSocketTypeFromOutSocket(List<TypeOperationsOutSocket> outSocket)
        throws NullPointerException {
    if (outSocket == null) {
        throw new NullPointerException("Out socket cannot be null");
    }
    String[] socketTypes = new String[outSocket.size()];
    for (int index = 0; index < socketTypes.length; index++) {
        socketTypes[index] = outSocket.get(index).getType();
    }
    return socketTypes;
}
/**
 * Extracts a {@link Properties} object holding the component's runtime
 * properties from the {@link TypeProperties} object passed as a parameter.
 * <p>
 * Returns {@code null} when the parameter or its property list is null.
 *
 * @param typeProperties the {@link TypeProperties} object which contains the
 *                       runtime properties for the component
 * @return a {@link Properties} object, or {@code null}
 */
public static Properties extractRuntimeProperties(TypeProperties typeProperties) {
    if (typeProperties == null || typeProperties.getProperty() == null) {
        return null;
    }
    Properties runtimeProperties = new Properties();
    // Copy every declared name/value pair into the result.
    for (Property property : typeProperties.getProperty()) {
        runtimeProperties.setProperty(property.getName(), property.getValue());
    }
    return runtimeProperties;
}
/**
 * Extracts the {@link PassThroughField} entries (name plus in-socket id)
 * from a single {@link TypeOperationsOutSocket}.
 *
 * @param typeOperationsOutSocket the out socket carrying pass-through, map
 *                                and operation field declarations
 * @return the list of {@link PassThroughField} objects for the socket
 * @throws NullPointerException when {@code typeOperationsOutSocket} is null
 */
private static List<PassThroughField> extractPassThroughFieldsListFromOutSockets(
        TypeOperationsOutSocket typeOperationsOutSocket) throws NullPointerException {
    if (typeOperationsOutSocket == null) {
        throw new NullPointerException("Out socket cannot be null");
    }
    List<PassThroughField> passThroughFieldsList = new ArrayList<PassThroughField>();
    for (Object field : typeOperationsOutSocket.getPassThroughFieldOrOperationFieldOrExpressionField()) {
        if (field instanceof TypeInputField) {
            TypeInputField inputField = (TypeInputField) field;
            passThroughFieldsList.add(
                    new PassThroughField(inputField.getName(), inputField.getInSocketId()));
        }
    }
    return passThroughFieldsList;
}
/**
 * Extracts the {@link OperationField} entries from a single
 * {@link TypeOperationsOutSocket}. Both operation fields (paired with an
 * operation id) and expression fields (paired with an expression id) are
 * mapped onto {@link OperationField}.
 *
 * @param typeOperationsOutSocket the out socket carrying pass-through, map
 *                                and operation field declarations
 * @return the list of {@link OperationField} objects for the socket
 * @throws NullPointerException when {@code typeOperationsOutSocket} is null
 */
private static List<OperationField> extractOperationFieldFromOutSockets(
        TypeOperationsOutSocket typeOperationsOutSocket) throws NullPointerException {
    if (typeOperationsOutSocket == null) {
        throw new NullPointerException("Out socket cannot be null");
    }
    List<OperationField> operationFieldList = new ArrayList<OperationField>();
    for (Object field : typeOperationsOutSocket.getPassThroughFieldOrOperationFieldOrExpressionField()) {
        if (field instanceof TypeOperationField) {
            TypeOperationField operationField = (TypeOperationField) field;
            operationFieldList.add(
                    new OperationField(operationField.getName(), operationField.getOperationId()));
        } else if (field instanceof TypeExpressionField) {
            TypeExpressionField expressionField = (TypeExpressionField) field;
            operationFieldList.add(
                    new OperationField(expressionField.getName(), expressionField.getExpressionId()));
        }
    }
    return operationFieldList;
}
/**
 * Extracts the {@link MapField} entries (source name, target name and
 * in-socket id) from a single {@link TypeOperationsOutSocket}.
 *
 * @param typeOperationsOutSocket the out socket carrying pass-through, map
 *                                and operation field declarations
 * @return the list of {@link MapField} objects for the socket
 * @throws NullPointerException when {@code typeOperationsOutSocket} is null
 */
private static List<MapField> extractMapFieldsListFromOutSockets(TypeOperationsOutSocket typeOperationsOutSocket)
        throws NullPointerException {
    if (typeOperationsOutSocket == null) {
        throw new NullPointerException("Out socket cannot be null");
    }
    List<MapField> mapFieldsList = new ArrayList<MapField>();
    for (Object field : typeOperationsOutSocket.getPassThroughFieldOrOperationFieldOrExpressionField()) {
        if (field instanceof TypeMapField) {
            TypeMapField typeMapField = (TypeMapField) field;
            mapFieldsList.add(new MapField(
                    typeMapField.getSourceName(),
                    typeMapField.getName(),
                    typeMapField.getInSocketId()));
        }
    }
    return mapFieldsList;
}
/**
 * Builds {@link JoinKeyFields} objects (in-socket id, record-required flag
 * and key field names) from the given list of join {@link TypeKeyFields}.
 *
 * @param list the join key field declarations, one per in socket
 * @return the list of {@link JoinKeyFields} objects
 * @throws NullPointerException when {@code list} is null
 */
public static List<JoinKeyFields> extractKeyFieldsListFromOutSockets(List<TypeKeyFields> list)
        throws NullPointerException {
    if (list == null) {
        // Fix: the original message said "Out socket cannot be null",
        // copy-pasted from the socket extractors; this parameter is the
        // key fields list.
        throw new NullPointerException("Key fields list cannot be null");
    }
    List<JoinKeyFields> keyFieldsList = new ArrayList<JoinKeyFields>();
    for (TypeKeyFields keyField : list) {
        String[] fieldNames = new String[keyField.getField().size()];
        int i = 0;
        for (TypeFieldName fieldName : keyField.getField()) {
            fieldNames[i++] = fieldName.getName();
        }
        keyFieldsList.add(new JoinKeyFields(keyField.getInSocketId(), keyField.isRecordRequired(), fieldNames));
    }
    return keyFieldsList;
}
/**
 * Builds {@link OutSocket} entities from the given list of
 * {@link TypeOperationsOutSocket} objects, populating socket type, map
 * fields, pass-through fields, operation fields and (when present) the
 * copy-of-in-socket id.
 *
 * @param outSocket the out socket declarations for the component
 * @return the list of populated {@link OutSocket} objects
 * @throws NullPointerException when {@code outSocket} is null
 */
public static List<OutSocket> extractOutSocketList(List<TypeOperationsOutSocket> outSocket) {
    if (outSocket == null) {
        throw new NullPointerException("Out socket cannot be null");
    }
    List<OutSocket> outSocketList = new ArrayList<OutSocket>();
    for (TypeOperationsOutSocket socket : outSocket) {
        OutSocket result = new OutSocket(socket.getId());
        if (socket.getType() != null) {
            result.setSocketType(socket.getType());
        }
        result.setMapFieldsList(extractMapFieldsListFromOutSockets(socket));
        result.setPassThroughFieldsList(extractPassThroughFieldsListFromOutSockets(socket));
        result.setOperationFieldList(extractOperationFieldFromOutSockets(socket));
        if (socket.getCopyOfInsocket() != null) {
            result.setCopyOfInSocketId(socket.getCopyOfInsocket().getInSocketId());
        }
        outSocketList.add(result);
    }
    return outSocketList;
}
/**
 * Builds {@link InSocket} entities from the given list of
 * {@link TypeBaseInSocket} objects. Missing socket types are normalized to
 * the empty string.
 *
 * @param inSocket the in socket declarations for the component
 * @return the list of populated {@link InSocket} objects
 * @throws NullPointerException when {@code inSocket} is null
 */
public static List<InSocket> extractInSocketList(List<TypeBaseInSocket> inSocket) {
    if (inSocket == null) {
        // Fix: the original message said "Out socket cannot be null" — a
        // copy-paste error; this method validates the *in* socket list.
        throw new NullPointerException("In socket cannot be null");
    }
    List<InSocket> inSocketList = new ArrayList<InSocket>();
    for (TypeBaseInSocket socket : inSocket) {
        InSocket inSock = new InSocket(socket.getFromComponentId(), socket.getFromSocketId(), socket.getId());
        inSock.setInSocketType(socket.getType() != null ? socket.getType() : "");
        inSock.setFromSocketType(socket.getFromSocketType() != null ? socket.getFromSocketType() : "");
        inSocketList.add(inSock);
    }
    return inSocketList;
}
/*
* public static Map<String, InSocket>
* extractInSocketMap(List<TypeBaseInSocket> inSocketList) {
*
* if (inSocketList == null) { throw new NullPointerException(
* "In socket cannot be null"); } Map<String, InSocket> inSocketMap = new
* HashMap<String, InSocket>(); for (TypeBaseInSocket inSocket :
* inSocketList) { InSocket socket = new
* InSocket(inSocket.getFromComponentId(), inSocket.getFromSocketId(),
* inSocket.getId());
*
* if (inSocket.getFromSocketType() != null)
* socket.setFromSocketType(inSocket.getFromSocketType()); if
* (inSocket.getType() != null) socket.setInSocketType(inSocket.getType());
* inSocketMap.put(inSocket.getId(), socket); }
*
* return inSocketMap; }
*/
/**
 * Builds {@link JoinKeyFields} objects from the given list of lookup
 * {@link hydrograph.engine.jaxb.lookup.TypeKeyFields}. For lookup the
 * record-required flag is always {@code true}.
 *
 * @param list the lookup key field declarations, one per in socket
 * @return the list of {@link JoinKeyFields} objects
 * @throws NullPointerException when {@code list} is null
 */
public static List<JoinKeyFields> extractKeyFieldsListFromOutSocketsForLookup(
        List<hydrograph.engine.jaxb.lookup.TypeKeyFields> list) {
    if (list == null) {
        // Fix: the original message said "Out socket cannot be null",
        // copy-pasted from the socket extractors; this parameter is the
        // key fields list.
        throw new NullPointerException("Key fields list cannot be null");
    }
    List<JoinKeyFields> keyFieldsList = new ArrayList<JoinKeyFields>();
    for (hydrograph.engine.jaxb.lookup.TypeKeyFields keyField : list) {
        String[] fieldNames = new String[keyField.getField().size()];
        int i = 0;
        for (TypeFieldName fieldName : keyField.getField()) {
            fieldNames[i++] = fieldName.getName();
        }
        keyFieldsList.add(new JoinKeyFields(keyField.getInSocketId(), true, fieldNames));
    }
    return keyFieldsList;
}
/**
 * Extracts the secondary key fields (name plus sort order) from the
 * {@link hydrograph.engine.jaxb.aggregate.TypeSecondaryKeyFields} object
 * passed as a parameter.
 * <p>
 * Returns {@code null} when the parameter or its field list is null.
 *
 * @param typeSecondaryKeyFields the secondary key fields for the component
 * @return an array of {@link KeyField}, or {@code null}
 */
public static KeyField[] extractSecondaryKeyFields(
        hydrograph.engine.jaxb.aggregate.TypeSecondaryKeyFields typeSecondaryKeyFields) {
    if (typeSecondaryKeyFields == null || typeSecondaryKeyFields.getField() == null) {
        return null;
    }
    KeyField[] keyFields = new KeyField[typeSecondaryKeyFields.getField().size()];
    int index = 0;
    for (hydrograph.engine.jaxb.aggregate.TypeSecondayKeyFieldsAttributes attributes
            : typeSecondaryKeyFields.getField()) {
        KeyField keyField = new KeyField();
        keyField.setName(attributes.getName());
        keyField.setSortOrder(attributes.getOrder().value());
        keyFields[index++] = keyField;
    }
    return keyFields;
}
/**
 * Extracts the primary key fields from the
 * {@link hydrograph.engine.jaxb.aggregate.TypePrimaryKeyFields} object
 * passed as a parameter.
 * <p>
 * Returns {@code null} when the parameter is null, when the explicit
 * "none" element is present, or when the field list is null.
 *
 * @param typePrimaryKeyFields the primary key fields for the component
 * @return an array of {@link KeyField}, or {@code null}
 */
public static KeyField[] extractKeyFields(
        hydrograph.engine.jaxb.aggregate.TypePrimaryKeyFields typePrimaryKeyFields) {
    if (typePrimaryKeyFields == null
            || typePrimaryKeyFields.getNone() != null
            || typePrimaryKeyFields.getField() == null) {
        return null;
    }
    List<TypeFieldName> fieldNames = typePrimaryKeyFields.getField();
    KeyField[] keyFields = new KeyField[fieldNames.size()];
    for (int index = 0; index < keyFields.length; index++) {
        KeyField keyField = new KeyField();
        keyField.setName(fieldNames.get(index).getName());
        // Sort order is intentionally left unset for primary key fields.
        keyFields[index] = keyField;
    }
    return keyFields;
}
/**
 * Extracts the primary key fields from the
 * {@link hydrograph.engine.jaxb.groupcombine.TypePrimaryKeyFields} object
 * passed as a parameter.
 * <p>
 * Returns {@code null} when the parameter is null, when the explicit
 * "none" element is present, or when the field list is null.
 *
 * @param typePrimaryKeyFields the primary key fields for the component
 * @return an array of {@link KeyField}, or {@code null}
 */
public static KeyField[] extractKeyFields(
        hydrograph.engine.jaxb.groupcombine.TypePrimaryKeyFields typePrimaryKeyFields) {
    if (typePrimaryKeyFields == null
            || typePrimaryKeyFields.getNone() != null
            || typePrimaryKeyFields.getField() == null) {
        return null;
    }
    List<TypeFieldName> fieldNames = typePrimaryKeyFields.getField();
    KeyField[] keyFields = new KeyField[fieldNames.size()];
    for (int index = 0; index < keyFields.length; index++) {
        KeyField keyField = new KeyField();
        keyField.setName(fieldNames.get(index).getName());
        // Sort order is intentionally left unset for primary key fields.
        keyFields[index] = keyField;
    }
    return keyFields;
}
/**
 * Extracts the primary key fields from the
 * {@link hydrograph.engine.jaxb.cumulate.TypePrimaryKeyFields
 * TypePrimaryKeyFields} object passed as a parameter.
 * <p>
 * Returns {@code null} when the parameter is null, when the explicit
 * "none" element is present, or when the field list is null.
 *
 * @param typePrimaryKeyFields the primary key fields for the component
 * @return an array of {@link KeyField}, or {@code null}
 */
public static KeyField[] extractKeyFields(
        hydrograph.engine.jaxb.cumulate.TypePrimaryKeyFields typePrimaryKeyFields) {
    if (typePrimaryKeyFields == null
            || typePrimaryKeyFields.getNone() != null
            || typePrimaryKeyFields.getField() == null) {
        return null;
    }
    List<TypeFieldName> fieldNames = typePrimaryKeyFields.getField();
    KeyField[] keyFields = new KeyField[fieldNames.size()];
    for (int index = 0; index < keyFields.length; index++) {
        KeyField keyField = new KeyField();
        keyField.setName(fieldNames.get(index).getName());
        // Sort order is intentionally left unset for primary key fields.
        keyFields[index] = keyField;
    }
    return keyFields;
}
/**
 * Extracts the secondary key fields (name plus sort order) from the
 * {@link hydrograph.engine.jaxb.cumulate.TypeSecondaryKeyFields
 * TypeSecondaryKeyFields} object passed as a parameter.
 * <p>
 * Returns {@code null} when the parameter or its field list is null.
 *
 * @param typeSecondaryKeyFields the secondary key fields for the component
 * @return an array of {@link KeyField}, or {@code null}
 */
public static KeyField[] extractSecondaryKeyFields(
        hydrograph.engine.jaxb.cumulate.TypeSecondaryKeyFields typeSecondaryKeyFields) {
    if (typeSecondaryKeyFields == null || typeSecondaryKeyFields.getField() == null) {
        return null;
    }
    KeyField[] keyFields = new KeyField[typeSecondaryKeyFields.getField().size()];
    int index = 0;
    for (hydrograph.engine.jaxb.cumulate.TypeSecondayKeyFieldsAttributes attributes
            : typeSecondaryKeyFields.getField()) {
        KeyField keyField = new KeyField();
        keyField.setName(attributes.getName());
        keyField.setSortOrder(attributes.getOrder().value());
        keyFields[index++] = keyField;
    }
    return keyFields;
}
/**
 * Returns the configured output record count value, or an empty string when
 * no record count element is present.
 *
 * @param outputRecordCount the record count element; may be null
 * @return the record count value, or {@code ""}
 */
public static String extractOutputRecordCount(
        TypeOutputRecordCount outputRecordCount) {
    return outputRecordCount == null ? "" : outputRecordCount.getValue();
}
/**
 * Validates that an output record count is configured when the first
 * operation uses expression processing.
 *
 * @param operationsList    the component's operations; may be null or empty
 * @param outputRecordCount the configured record count element, or null
 * @throws RuntimeException when expression processing is used without an
 *                          output record count
 */
public static void checkIfOutputRecordCountIsPresentInCaseOfExpressionProcessing(
        List<Operation> operationsList,
        TypeOutputRecordCount outputRecordCount) {
    // Robustness fix: the original dereferenced operationsList.get(0)
    // unconditionally, throwing IndexOutOfBoundsException/NullPointerException
    // for components with no operations. No operations means no expression
    // processing, so there is nothing to validate.
    if (operationsList == null || operationsList.isEmpty()) {
        return;
    }
    String expression = operationsList.get(0).getExpression();
    if (expression != null && !expression.isEmpty() && outputRecordCount == null) {
        throw new RuntimeException(
                "Output Record Count is a mandatory parameter in case of Expression Processing.");
    }
}
}
| |
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.io.findfile;
import java.io.File;
import java.io.Serializable;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import jodd.io.FileNameUtil;
import jodd.io.FileUtil;
import jodd.util.InExRules;
import jodd.util.MultiComparator;
import jodd.util.NaturalOrderComparator;
import jodd.util.StringUtil;
import jodd.util.collection.JoddArrayList;
/**
* Generic iterative file finder. Searches all files on specified search path.
* By default, it starts in whitelist mode, where everything is excluded.
* To search, you need to explicitly set include patterns. If no pattern is
* set, then the search starts in blacklist mode, where everything is included (search all).
*
* @see WildcardFindFile
* @see RegExpFindFile
* @see jodd.util.InExRules
*/
public class FindFile implements Iterable<File> {

	/**
	 * Match type.
	 * @see FindFile#getMatchingFilePath(java.io.File)
	 * @see FindFile#acceptFile(java.io.File)
	 */
	public enum Match {
		/**
		 * Full, absolute path.
		 */
		FULL_PATH,
		/**
		 * Relative path from current root.
		 */
		RELATIVE_PATH,
		/**
		 * Just file name.
		 */
		NAME
	}

	// ---------------------------------------------------------------- flags

	protected boolean recursive;
	protected boolean includeDirs = true;
	protected boolean includeFiles = true;
	protected boolean walking = true;
	protected Match matchType = Match.FULL_PATH;

	/**
	 * Returns <code>true</code> if recursive search is activated.
	 */
	public boolean recursive() {
		return recursive;
	}

	/**
	 * Activates recursive search.
	 */
	public FindFile recursive(boolean recursive) {
		this.recursive = recursive;
		return this;
	}

	/**
	 * Returns <code>true</code> if directories are included in the search.
	 */
	public boolean includeDirs() {
		return includeDirs;
	}

	/**
	 * Include directories in search.
	 */
	public FindFile includeDirs(boolean includeDirs) {
		this.includeDirs = includeDirs;
		return this;
	}

	/**
	 * Returns <code>true</code> if files are included in the search.
	 */
	public boolean includeFiles() {
		return includeFiles;
	}

	/**
	 * Include files in search.
	 */
	public FindFile includeFiles(boolean includeFiles) {
		this.includeFiles = includeFiles;
		return this;
	}

	/**
	 * Returns <code>true</code> if walking mode is enabled.
	 * @see #walking(boolean)
	 */
	public boolean walking() {
		return walking;
	}

	/**
	 * Sets the walking recursive mode. When walking mode is on (by default),
	 * folders are walked immediately. Although natural, for large
	 * set of files, this is not memory-optimal approach, since many
	 * files are held in memory, when going deeper.
	 * <p>
	 * When walking mode is turned off, folders are processed once
	 * all files are processed, one after the other. The order is
	 * not natural, but memory consumption is optimal.
	 * @see #recursive(boolean)
	 */
	public FindFile walking(boolean walking) {
		this.walking = walking;
		return this;
	}

	/**
	 * Returns the current {@link Match matching type}.
	 */
	public Match matchType() {
		return matchType;
	}

	/**
	 * Set {@link Match matching type}.
	 */
	public FindFile matchType(Match match) {
		this.matchType = match;
		return this;
	}

	// ---------------------------------------------------------------- search path

	/**
	 * Specifies single search path.
	 */
	public FindFile searchPath(File searchPath) {
		addPath(searchPath);
		return this;
	}

	/**
	 * Specifies a set of search paths.
	 */
	public FindFile searchPath(File... searchPath) {
		for (File file : searchPath) {
			addPath(file);
		}
		return this;
	}

	/**
	 * Specifies the search path. If provided path contains
	 * {@link File#pathSeparator} than string will be tokenized
	 * and each part will be added separately as a search path.
	 */
	public FindFile searchPath(String searchPath) {
		if (searchPath.indexOf(File.pathSeparatorChar) != -1) {
			String[] paths = StringUtil.split(searchPath, File.pathSeparator);
			for (String path : paths) {
				addPath(new File(path));
			}
		} else {
			addPath(new File(searchPath));
		}
		return this;
	}

	/**
	 * Specifies search paths.
	 * @see #searchPath(String)
	 */
	public FindFile searchPath(String... searchPaths) {
		for (String searchPath : searchPaths) {
			searchPath(searchPath);
		}
		return this;
	}

	/**
	 * Specifies the search path. Throws an exception if URI is invalid.
	 */
	public FindFile searchPath(URI searchPath) {
		File file;
		try {
			file = new File(searchPath);
		} catch (Exception ex) {
			throw new FindFileException("URI error: " + searchPath, ex);
		}
		addPath(file);
		return this;
	}

	/**
	 * Specifies the search path.
	 */
	public FindFile searchPath(URI... searchPath) {
		for (URI uri : searchPath) {
			searchPath(uri);
		}
		return this;
	}

	/**
	 * Specifies the search path. Throws an exception if URL is invalid.
	 */
	public FindFile searchPath(URL searchPath) {
		File file = FileUtil.toFile(searchPath);
		if (file == null) {
			throw new FindFileException("URL error: " + searchPath);
		}
		addPath(file);
		return this;
	}

	/**
	 * Specifies the search path.
	 */
	public FindFile searchPath(URL... searchPath) {
		for (URL url : searchPath) {
			searchPath(url);
		}
		return this;
	}

	// ---------------------------------------------------------------- files iterator

	/**
	 * Files iterator simply walks over files array.
	 * Ignores null items. Consumed files are immediately
	 * removed from the array.
	 */
	protected class FilesIterator {
		protected final File folder;
		protected final String[] fileNames;
		protected final File[] files;

		public FilesIterator(File folder) {
			this.folder = folder;
			if (sortComparators != null) {
				this.files = folder.listFiles();
				if (this.files != null) {
					Arrays.sort(this.files, new MultiComparator<>(sortComparators));
				}
				this.fileNames = null;
			} else {
				this.files = null;
				this.fileNames = folder.list();
			}
		}

		public FilesIterator(String[] fileNames) {
			this.folder = null;
			if (sortComparators != null) {
				int fileNamesLength = fileNames.length;
				this.files = new File[fileNamesLength];
				for (int i = 0; i < fileNamesLength; i++) {
					String fileName = fileNames[i];
					if (fileName != null) {
						this.files[i] = new File(fileName);
					}
				}
				this.fileNames = null;
			} else {
				this.files = null;
				this.fileNames = fileNames;
			}
		}

		protected int index;

		/**
		 * Returns next file or <code>null</code>
		 * when no next file is available.
		 */
		public File next() {
			if (files != null) {
				return nextFile();
			} else if (fileNames != null) {
				return nextFileName();
			} else {
				return null;
			}
		}

		protected File nextFileName() {
			while (index < fileNames.length) {
				String fileName = fileNames[index];
				if (fileName == null) {
					index++;
					continue;
				}
				fileNames[index] = null;
				index++;
				File file;
				if (folder == null) {
					file = new File(fileName);
				} else {
					file = new File(folder, fileName);
				}
				if (file.isFile()) {
					if (!includeFiles) {
						continue;
					}
					if (!acceptFile(file)) {
						continue;
					}
				}
				return file;
			}
			return null;
		}

		protected File nextFile() {
			while (index < files.length) {
				File file = files[index];
				if (file == null) {
					index++;
					continue;
				}
				files[index] = null;
				index++;
				if (file.isFile()) {
					if (!includeFiles) {
						continue;
					}
					if (!acceptFile(file)) {
						continue;
					}
				}
				return file;
			}
			return null;
		}
	}

	// ---------------------------------------------------------------- matching

	protected final InExRules<String, String> rules = createRulesEngine();

	/**
	 * Creates rule engine.
	 */
	protected InExRules<String, String> createRulesEngine() {
		return new InExRules<>();
	}

	/**
	 * Defines include pattern.
	 */
	public FindFile include(String pattern) {
		rules.include(pattern);
		return this;
	}

	/**
	 * Defines include patterns.
	 */
	public FindFile include(String... patterns) {
		for (String pattern : patterns) {
			rules.include(pattern);
		}
		return this;
	}

	/**
	 * Enables whitelist mode.
	 */
	public FindFile excludeAll() {
		rules.whitelist();
		return this;
	}

	/**
	 * Enables blacklist mode.
	 */
	public FindFile includeAll() {
		rules.blacklist();
		return this;
	}

	/**
	 * Defines exclude pattern.
	 */
	public FindFile exclude(String pattern) {
		rules.exclude(pattern);
		return this;
	}

	/**
	 * Defines exclude patterns.
	 */
	public FindFile exclude(String... patterns) {
		for (String pattern : patterns) {
			rules.exclude(pattern);
		}
		return this;
	}

	/**
	 * Determine if file is accepted, based on include and exclude
	 * rules. Called on each file entry (file or directory) and
	 * returns <code>true</code> if file passes search criteria.
	 * File is matched using {@link #getMatchingFilePath(java.io.File) matching file path}.
	 * @see jodd.util.InExRules
	 */
	protected boolean acceptFile(File file) {
		String matchingFilePath = getMatchingFilePath(file);
		return rules.match(matchingFilePath);
	}

	/**
	 * Resolves file path depending on {@link Match matching type}
	 * Returned path is formatted in unix style.
	 */
	protected String getMatchingFilePath(File file) {
		String path = null;
		switch (matchType) {
			case FULL_PATH:
				path = file.getAbsolutePath();
				break;
			case RELATIVE_PATH:
				path = file.getAbsolutePath();
				path = path.substring(rootPath.length());
				break;
			case NAME:
				path = file.getName();
		}
		path = FileNameUtil.separatorsToUnix(path);
		return path;
	}

	// ---------------------------------------------------------------- next file

	protected JoddArrayList<File> pathList;
	protected JoddArrayList<File> pathListOriginal;
	protected JoddArrayList<File> todoFolders;
	protected JoddArrayList<FilesIterator> todoFiles;
	protected File lastFile;
	protected File rootFile;
	protected String rootPath;

	/**
	 * Returns last founded file.
	 * Returns <code>null</code> at the very beginning.
	 */
	public File lastFile() {
		return lastFile;
	}

	/**
	 * Adds existing search path to the file list.
	 * Non existing files are ignored.
	 * If path is a folder, it will be scanned for all files.
	 */
	protected void addPath(File path) {
		if (!path.exists()) {
			return;
		}
		if (pathList == null) {
			pathList = new JoddArrayList<>();
		}
		pathList.add(path);
	}

	/**
	 * Reset the search so it can be run again with very
	 * same parameters (and sorting options).
	 */
	public void reset() {
		pathList = pathListOriginal;
		pathListOriginal = null;
		todoFiles = null;
		lastFile = null;
		rules.reset();
	}

	/**
	 * Finds the next file. Returns founded file that matches search configuration
	 * or <code>null</code> if no more files can be found.
	 */
	public File nextFile() {
		if (todoFiles == null) {
			init();
		}
		while (true) {
			// iterate files
			if (!todoFiles.isEmpty()) {
				FilesIterator filesIterator = todoFiles.getLast();
				File nextFile = filesIterator.next();
				if (nextFile == null) {
					todoFiles.removeLast();
					continue;
				}
				if (nextFile.isDirectory()) {
					if (!walking) {
						todoFolders.add(nextFile);
						continue;
					}
					// walking
					if (recursive) {
						todoFiles.add(new FilesIterator(nextFile));
					}
					if (includeDirs) {
						if (acceptFile(nextFile)) {
							lastFile = nextFile;
							return nextFile;
						}
					}
					continue;
				}
				lastFile = nextFile;
				return nextFile;
			}
			// process folders
			File folder;
			boolean initialDir = false;
			if (todoFolders.isEmpty()) {
				if (pathList.isEmpty()) {
					// the end
					return null;
				}
				folder = pathList.removeFirst();
				rootFile = folder;
				rootPath = rootFile.getAbsolutePath();
				initialDir = true;
			} else {
				folder = todoFolders.removeFirst();
			}
			if ((initialDir) || (recursive)) {
				todoFiles.add(new FilesIterator(folder));
			}
			if ((!initialDir) && (includeDirs)) {
				if (acceptFile(folder)) {
					lastFile = folder;
					return folder;
				}
			}
		}
	}

	/**
	 * Finds all files and returns list of founded files.
	 */
	public List<File> findAll() {
		List<File> allFiles = new ArrayList<>();
		File file;
		while ((file = nextFile()) != null) {
			allFiles.add(file);
		}
		return allFiles;
	}

	/**
	 * Initializes file walking.
	 * Separates input files and folders.
	 */
	protected void init() {
		rules.smartMode();
		todoFiles = new JoddArrayList<>();
		todoFolders = new JoddArrayList<>();
		if (pathList == null) {
			pathList = new JoddArrayList<>();
			return;
		}
		if (pathListOriginal == null) {
			pathListOriginal = (JoddArrayList<File>) pathList.clone();
		}
		String[] files = new String[pathList.size()];
		int index = 0;
		Iterator<File> iterator = pathList.iterator();
		while (iterator.hasNext()) {
			File file = iterator.next();
			if (file.isFile()) {
				files[index++] = file.getAbsolutePath();
				iterator.remove();
			}
		}
		if (index != 0) {
			FilesIterator filesIterator = new FilesIterator(files);
			todoFiles.add(filesIterator);
		}
	}

	/**
	 * Returns file walking iterator.
	 * <p>
	 * FIX: the previous implementation advanced the underlying search on
	 * every {@link Iterator#hasNext()} call, so invoking <code>hasNext()</code>
	 * twice in a row silently skipped a file, and calling <code>next()</code>
	 * without a preceding <code>hasNext()</code> returned a stale element.
	 * This version uses a look-ahead flag so <code>hasNext()</code> is
	 * idempotent and <code>next()</code> advances on its own, honoring the
	 * {@link Iterator} contract.
	 */
	public Iterator<File> iterator() {
		return new Iterator<File>() {
			private File nextFile;
			private boolean advanced;		// true when nextFile holds a fresh look-ahead value

			public boolean hasNext() {
				if (!advanced) {
					nextFile = nextFile();
					advanced = true;
				}
				return nextFile != null;
			}

			public File next() {
				if (!hasNext()) {
					throw new NoSuchElementException();
				}
				advanced = false;			// consume the look-ahead element
				return nextFile;
			}

			public void remove() {
				throw new UnsupportedOperationException();
			}
		};
	}

	// ---------------------------------------------------------------- sort

	protected List<Comparator<File>> sortComparators;

	protected void addComparator(Comparator<File> comparator) {
		if (sortComparators == null) {
			sortComparators = new ArrayList<>(4);
		}
		sortComparators.add(comparator);
	}

	/**
	 * Removes ALL sorting options.
	 */
	public FindFile sortNone() {
		sortComparators = null;
		return this;
	}

	/**
	 * Adds generic sorting.
	 */
	public FindFile sortWith(Comparator<File> fileComparator) {
		addComparator(fileComparator);
		return this;
	}

	/**
	 * Puts folders before files.
	 */
	public FindFile sortFoldersFirst() {
		addComparator(new FolderFirstComparator(true));
		return this;
	}

	/**
	 * Puts files before folders.
	 */
	public FindFile sortFoldersLast() {
		addComparator(new FolderFirstComparator(false));
		return this;
	}

	/**
	 * Sorts files by file name, using <b>natural</b> sort.
	 */
	public FindFile sortByName() {
		addComparator(new FileNameComparator(true));
		return this;
	}

	/**
	 * Sorts files by file names descending, using <b>natural</b> sort.
	 */
	public FindFile sortByNameDesc() {
		addComparator(new FileNameComparator(false));
		return this;
	}

	/**
	 * Sorts files by file extension.
	 */
	public FindFile sortByExtension() {
		addComparator(new FileExtensionComparator(true));
		return this;
	}

	/**
	 * Sorts files by file extension descending.
	 */
	public FindFile sortByExtensionDesc() {
		addComparator(new FileExtensionComparator(false));
		return this;
	}

	/**
	 * Sorts files by last modified time.
	 */
	public FindFile sortByTime() {
		addComparator(new FileLastModifiedTimeComparator(true));
		return this;
	}

	/**
	 * Sorts files by last modified time descending.
	 */
	public FindFile sortByTimeDesc() {
		addComparator(new FileLastModifiedTimeComparator(false));
		return this;
	}

	// ---------------------------------------------------------------- comparators

	public static class FolderFirstComparator implements Comparator<File>, Serializable {
		protected final int order;

		public FolderFirstComparator(boolean foldersFirst) {
			if (foldersFirst) {
				order = 1;
			} else {
				order = -1;
			}
		}

		public int compare(File file1, File file2) {
			if (file1.isFile() && file2.isDirectory()) {
				return order;
			}
			if (file1.isDirectory() && file2.isFile()) {
				return -order;
			}
			return 0;
		}
	}

	public static class FileNameComparator implements Comparator<File>, Serializable {
		protected final int order;
		protected NaturalOrderComparator<String> naturalOrderComparator = new NaturalOrderComparator<>(true, true);

		public FileNameComparator(boolean ascending) {
			if (ascending) {
				order = 1;
			} else {
				order = -1;
			}
		}

		public int compare(File file1, File file2) {
			int result = naturalOrderComparator.compare(file1.getName(), file2.getName());
			if (result == 0) {
				return result;
			}
			if (result > 0) {
				return order;
			}
			return -order;
		}
	}

	public static class FileExtensionComparator implements Comparator<File>, Serializable {
		protected final int order;

		public FileExtensionComparator(boolean ascending) {
			if (ascending) {
				order = 1;
			} else {
				order = -1;
			}
		}

		public int compare(File file1, File file2) {
			String ext1 = FileNameUtil.getExtension(file1.getName());
			String ext2 = FileNameUtil.getExtension(file2.getName());
			// compareToIgnoreCase returns int; no need to widen to long
			int diff = ext1.compareToIgnoreCase(ext2);
			if (diff == 0) {
				return 0;
			}
			if (diff > 0) {
				return order;
			}
			return -order;
		}
	}

	public static class FileLastModifiedTimeComparator implements Comparator<File>, Serializable {
		protected final int order;

		public FileLastModifiedTimeComparator(boolean ascending) {
			if (ascending) {
				order = 1;
			} else {
				order = -1;
			}
		}

		public int compare(File file1, File file2) {
			// keep the subtraction in long to avoid int overflow on timestamps
			long diff = file1.lastModified() - file2.lastModified();
			if (diff == 0) {
				return 0;
			}
			if (diff > 0) {
				return order;
			}
			return -order;
		}
	}
}
| |
// Generated from org/apache/metron/profiler/client/window/generated/Window.g4 by ANTLR 4.5
package org.apache.metron.profiler.client.window.generated;
//CHECKSTYLE:OFF
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class WindowLexer extends Lexer {
static { RuntimeMetaData.checkVersion("4.5", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
COMMA=1, COLON=2, WINDOW=3, INCLUDE=4, EXCLUDE=5, FROM=6, EVERY=7, TO=8,
AGO=9, NUMBER=10, IDENTIFIER=11, DAY_SPECIFIER=12, TIME_UNIT=13, WS=14;
public static String[] modeNames = {
"DEFAULT_MODE"
};
public static final String[] ruleNames = {
"COMMA", "COLON", "WINDOW", "INCLUDE", "EXCLUDE", "FROM", "EVERY", "TO",
"AGO", "NUMBER", "IDENTIFIER", "DAY_SPECIFIER", "TIME_UNIT", "WS", "SECOND_UNIT",
"MINUTE_UNIT", "HOUR_UNIT", "DAY_UNIT", "MONDAY", "TUESDAY", "WEDNESDAY",
"THURSDAY", "FRIDAY", "SATURDAY", "SUNDAY", "CURRENT_DAY_OF_WEEK", "WEEKEND",
"WEEKDAY", "HOLIDAYS", "DATE", "DIGIT", "FIRST_DIGIT"
};
private static final String[] _LITERAL_NAMES = {
null, "','", "':'"
};
private static final String[] _SYMBOLIC_NAMES = {
null, "COMMA", "COLON", "WINDOW", "INCLUDE", "EXCLUDE", "FROM", "EVERY",
"TO", "AGO", "NUMBER", "IDENTIFIER", "DAY_SPECIFIER", "TIME_UNIT", "WS"
};
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
public WindowLexer(CharStream input) {
super(input);
_interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
@Override
public String getGrammarFileName() { return "Window.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public String[] getModeNames() { return modeNames; }
@Override
public ATN getATN() { return _ATN; }
public static final String _serializedATN =
"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\20\u03c8\b\1\4\2"+
"\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4"+
"\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+
"\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+
"\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t"+
" \4!\t!\3\2\3\2\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3"+
"\4\3\4\5\4U\n\4\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3"+
"\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5"+
"\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\5"+
"\5\u0087\n\5\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3"+
"\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6"+
"\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\5\6\u00b9"+
"\n\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3"+
"\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7"+
"\5\7\u00dd\n\7\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b"+
"\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\5\b\u00fb\n\b"+
"\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\5\t\u010b\n\t"+
"\3\n\3\n\3\n\3\n\3\n\3\n\5\n\u0113\n\n\3\13\3\13\7\13\u0117\n\13\f\13"+
"\16\13\u011a\13\13\3\f\3\f\3\f\7\f\u011f\n\f\f\f\16\f\u0122\13\f\3\r\3"+
"\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\5\r\u0130\n\r\3\16\3\16\3\16"+
"\3\16\5\16\u0136\n\16\3\17\6\17\u0139\n\17\r\17\16\17\u013a\3\17\3\17"+
"\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20"+
"\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20"+
"\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20"+
"\3\20\3\20\5\20\u016b\n\20\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21"+
"\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21"+
"\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21"+
"\3\21\3\21\3\21\3\21\3\21\3\21\3\21\5\21\u0199\n\21\3\22\3\22\3\22\3\22"+
"\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22"+
"\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22"+
"\5\22\u01bb\n\22\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23"+
"\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23"+
"\3\23\5\23\u01d7\n\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24"+
"\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24"+
"\3\24\3\24\5\24\u01f3\n\24\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25"+
"\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25"+
"\3\25\3\25\3\25\3\25\3\25\3\25\3\25\5\25\u0213\n\25\3\26\3\26\3\26\3\26"+
"\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26"+
"\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26"+
"\3\26\3\26\3\26\3\26\3\26\3\26\5\26\u023b\n\26\3\27\3\27\3\27\3\27\3\27"+
"\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27"+
"\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27"+
"\3\27\5\27\u025f\n\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30"+
"\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30"+
"\3\30\3\30\5\30\u027b\n\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31"+
"\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31"+
"\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\5\31\u029f\n\31"+
"\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32"+
"\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\5\32\u02bb"+
"\n\32\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33"+
"\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33"+
"\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33"+
"\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33"+
"\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33"+
"\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33"+
"\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33"+
"\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33"+
"\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33"+
"\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33"+
"\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33"+
"\3\33\3\33\3\33\5\33\u0359\n\33\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34"+
"\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34"+
"\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\5\34\u0379\n\34\3\35\3\35\3\35"+
"\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35"+
"\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\5\35"+
"\u0399\n\35\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36"+
"\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36"+
"\3\36\3\36\3\36\3\36\5\36\u03b9\n\36\3\37\3\37\3\37\3\37\3\37\3\37\3\37"+
"\3\37\5\37\u03c3\n\37\3 \3 \3!\3!\2\2\"\3\3\5\4\7\5\t\6\13\7\r\b\17\t"+
"\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\2!\2#\2%\2\'\2)\2+\2-\2/\2"+
"\61\2\63\2\65\2\67\29\2;\2=\2?\2A\2\3\2\6\3\2<<\5\2\62;C\\c|\7\2/;C\\"+
"^^aac|\5\2\13\f\16\17\"\"\u0415\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2"+
"\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2"+
"\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2"+
"\3C\3\2\2\2\5E\3\2\2\2\7T\3\2\2\2\t\u0086\3\2\2\2\13\u00b8\3\2\2\2\r\u00dc"+
"\3\2\2\2\17\u00fa\3\2\2\2\21\u010a\3\2\2\2\23\u0112\3\2\2\2\25\u0114\3"+
"\2\2\2\27\u011b\3\2\2\2\31\u012f\3\2\2\2\33\u0135\3\2\2\2\35\u0138\3\2"+
"\2\2\37\u016a\3\2\2\2!\u0198\3\2\2\2#\u01ba\3\2\2\2%\u01d6\3\2\2\2\'\u01f2"+
"\3\2\2\2)\u0212\3\2\2\2+\u023a\3\2\2\2-\u025e\3\2\2\2/\u027a\3\2\2\2\61"+
"\u029e\3\2\2\2\63\u02ba\3\2\2\2\65\u0358\3\2\2\2\67\u0378\3\2\2\29\u0398"+
"\3\2\2\2;\u03b8\3\2\2\2=\u03c2\3\2\2\2?\u03c4\3\2\2\2A\u03c6\3\2\2\2C"+
"D\7.\2\2D\4\3\2\2\2EF\7<\2\2F\6\3\2\2\2GH\7y\2\2HI\7k\2\2IJ\7p\2\2JK\7"+
"f\2\2KL\7q\2\2LU\7y\2\2MN\7y\2\2NO\7k\2\2OP\7p\2\2PQ\7f\2\2QR\7q\2\2R"+
"S\7y\2\2SU\7u\2\2TG\3\2\2\2TM\3\2\2\2U\b\3\2\2\2VW\7k\2\2WX\7p\2\2XY\7"+
"e\2\2YZ\7n\2\2Z[\7w\2\2[\\\7f\2\2\\\u0087\7g\2\2]^\7K\2\2^_\7P\2\2_`\7"+
"E\2\2`a\7N\2\2ab\7W\2\2bc\7F\2\2c\u0087\7G\2\2de\7k\2\2ef\7p\2\2fg\7e"+
"\2\2gh\7n\2\2hi\7w\2\2ij\7f\2\2jk\7g\2\2k\u0087\7u\2\2lm\7K\2\2mn\7P\2"+
"\2no\7E\2\2op\7N\2\2pq\7W\2\2qr\7F\2\2rs\7G\2\2s\u0087\7U\2\2tu\7k\2\2"+
"uv\7p\2\2vw\7e\2\2wx\7n\2\2xy\7w\2\2yz\7f\2\2z{\7k\2\2{|\7p\2\2|\u0087"+
"\7i\2\2}~\7K\2\2~\177\7P\2\2\177\u0080\7E\2\2\u0080\u0081\7N\2\2\u0081"+
"\u0082\7W\2\2\u0082\u0083\7F\2\2\u0083\u0084\7K\2\2\u0084\u0085\7P\2\2"+
"\u0085\u0087\7I\2\2\u0086V\3\2\2\2\u0086]\3\2\2\2\u0086d\3\2\2\2\u0086"+
"l\3\2\2\2\u0086t\3\2\2\2\u0086}\3\2\2\2\u0087\n\3\2\2\2\u0088\u0089\7"+
"g\2\2\u0089\u008a\7z\2\2\u008a\u008b\7e\2\2\u008b\u008c\7n\2\2\u008c\u008d"+
"\7w\2\2\u008d\u008e\7f\2\2\u008e\u00b9\7g\2\2\u008f\u0090\7G\2\2\u0090"+
"\u0091\7Z\2\2\u0091\u0092\7E\2\2\u0092\u0093\7N\2\2\u0093\u0094\7W\2\2"+
"\u0094\u0095\7F\2\2\u0095\u00b9\7G\2\2\u0096\u0097\7g\2\2\u0097\u0098"+
"\7z\2\2\u0098\u0099\7e\2\2\u0099\u009a\7n\2\2\u009a\u009b\7w\2\2\u009b"+
"\u009c\7f\2\2\u009c\u009d\7g\2\2\u009d\u00b9\7u\2\2\u009e\u009f\7G\2\2"+
"\u009f\u00a0\7Z\2\2\u00a0\u00a1\7E\2\2\u00a1\u00a2\7N\2\2\u00a2\u00a3"+
"\7W\2\2\u00a3\u00a4\7F\2\2\u00a4\u00a5\7G\2\2\u00a5\u00b9\7U\2\2\u00a6"+
"\u00a7\7g\2\2\u00a7\u00a8\7z\2\2\u00a8\u00a9\7e\2\2\u00a9\u00aa\7n\2\2"+
"\u00aa\u00ab\7w\2\2\u00ab\u00ac\7f\2\2\u00ac\u00ad\7k\2\2\u00ad\u00ae"+
"\7p\2\2\u00ae\u00b9\7i\2\2\u00af\u00b0\7G\2\2\u00b0\u00b1\7Z\2\2\u00b1"+
"\u00b2\7E\2\2\u00b2\u00b3\7N\2\2\u00b3\u00b4\7W\2\2\u00b4\u00b5\7F\2\2"+
"\u00b5\u00b6\7K\2\2\u00b6\u00b7\7P\2\2\u00b7\u00b9\7I\2\2\u00b8\u0088"+
"\3\2\2\2\u00b8\u008f\3\2\2\2\u00b8\u0096\3\2\2\2\u00b8\u009e\3\2\2\2\u00b8"+
"\u00a6\3\2\2\2\u00b8\u00af\3\2\2\2\u00b9\f\3\2\2\2\u00ba\u00bb\7H\2\2"+
"\u00bb\u00bc\7T\2\2\u00bc\u00bd\7Q\2\2\u00bd\u00dd\7O\2\2\u00be\u00bf"+
"\7h\2\2\u00bf\u00c0\7t\2\2\u00c0\u00c1\7q\2\2\u00c1\u00dd\7o\2\2\u00c2"+
"\u00c3\7U\2\2\u00c3\u00c4\7V\2\2\u00c4\u00c5\7C\2\2\u00c5\u00c6\7T\2\2"+
"\u00c6\u00c7\7V\2\2\u00c7\u00c8\7K\2\2\u00c8\u00c9\7P\2\2\u00c9\u00ca"+
"\7I\2\2\u00ca\u00cb\7\"\2\2\u00cb\u00cc\7H\2\2\u00cc\u00cd\7T\2\2\u00cd"+
"\u00ce\7Q\2\2\u00ce\u00dd\7O\2\2\u00cf\u00d0\7u\2\2\u00d0\u00d1\7v\2\2"+
"\u00d1\u00d2\7c\2\2\u00d2\u00d3\7t\2\2\u00d3\u00d4\7v\2\2\u00d4\u00d5"+
"\7k\2\2\u00d5\u00d6\7p\2\2\u00d6\u00d7\7i\2\2\u00d7\u00d8\7\"\2\2\u00d8"+
"\u00d9\7h\2\2\u00d9\u00da\7t\2\2\u00da\u00db\7q\2\2\u00db\u00dd\7o\2\2"+
"\u00dc\u00ba\3\2\2\2\u00dc\u00be\3\2\2\2\u00dc\u00c2\3\2\2\2\u00dc\u00cf"+
"\3\2\2\2\u00dd\16\3\2\2\2\u00de\u00df\7G\2\2\u00df\u00e0\7X\2\2\u00e0"+
"\u00e1\7G\2\2\u00e1\u00e2\7T\2\2\u00e2\u00fb\7[\2\2\u00e3\u00e4\7g\2\2"+
"\u00e4\u00e5\7x\2\2\u00e5\u00e6\7g\2\2\u00e6\u00e7\7t\2\2\u00e7\u00fb"+
"\7{\2\2\u00e8\u00e9\7H\2\2\u00e9\u00ea\7Q\2\2\u00ea\u00eb\7T\2\2\u00eb"+
"\u00ec\7\"\2\2\u00ec\u00ed\7G\2\2\u00ed\u00ee\7X\2\2\u00ee\u00ef\7G\2"+
"\2\u00ef\u00f0\7T\2\2\u00f0\u00fb\7[\2\2\u00f1\u00f2\7h\2\2\u00f2\u00f3"+
"\7q\2\2\u00f3\u00f4\7t\2\2\u00f4\u00f5\7\"\2\2\u00f5\u00f6\7g\2\2\u00f6"+
"\u00f7\7x\2\2\u00f7\u00f8\7g\2\2\u00f8\u00f9\7t\2\2\u00f9\u00fb\7{\2\2"+
"\u00fa\u00de\3\2\2\2\u00fa\u00e3\3\2\2\2\u00fa\u00e8\3\2\2\2\u00fa\u00f1"+
"\3\2\2\2\u00fb\20\3\2\2\2\u00fc\u00fd\7V\2\2\u00fd\u010b\7Q\2\2\u00fe"+
"\u00ff\7v\2\2\u00ff\u010b\7q\2\2\u0100\u0101\7w\2\2\u0101\u0102\7p\2\2"+
"\u0102\u0103\7v\2\2\u0103\u0104\7k\2\2\u0104\u010b\7n\2\2\u0105\u0106"+
"\7W\2\2\u0106\u0107\7P\2\2\u0107\u0108\7V\2\2\u0108\u0109\7K\2\2\u0109"+
"\u010b\7N\2\2\u010a\u00fc\3\2\2\2\u010a\u00fe\3\2\2\2\u010a\u0100\3\2"+
"\2\2\u010a\u0105\3\2\2\2\u010b\22\3\2\2\2\u010c\u010d\7C\2\2\u010d\u010e"+
"\7I\2\2\u010e\u0113\7Q\2\2\u010f\u0110\7c\2\2\u0110\u0111\7i\2\2\u0111"+
"\u0113\7q\2\2\u0112\u010c\3\2\2\2\u0112\u010f\3\2\2\2\u0113\24\3\2\2\2"+
"\u0114\u0118\5A!\2\u0115\u0117\5? \2\u0116\u0115\3\2\2\2\u0117\u011a\3"+
"\2\2\2\u0118\u0116\3\2\2\2\u0118\u0119\3\2\2\2\u0119\26\3\2\2\2\u011a"+
"\u0118\3\2\2\2\u011b\u011c\t\2\2\2\u011c\u0120\t\3\2\2\u011d\u011f\t\4"+
"\2\2\u011e\u011d\3\2\2\2\u011f\u0122\3\2\2\2\u0120\u011e\3\2\2\2\u0120"+
"\u0121\3\2\2\2\u0121\30\3\2\2\2\u0122\u0120\3\2\2\2\u0123\u0130\5\'\24"+
"\2\u0124\u0130\5)\25\2\u0125\u0130\5+\26\2\u0126\u0130\5-\27\2\u0127\u0130"+
"\5/\30\2\u0128\u0130\5\61\31\2\u0129\u0130\5\63\32\2\u012a\u0130\5\65"+
"\33\2\u012b\u0130\5\67\34\2\u012c\u0130\59\35\2\u012d\u0130\5;\36\2\u012e"+
"\u0130\5=\37\2\u012f\u0123\3\2\2\2\u012f\u0124\3\2\2\2\u012f\u0125\3\2"+
"\2\2\u012f\u0126\3\2\2\2\u012f\u0127\3\2\2\2\u012f\u0128\3\2\2\2\u012f"+
"\u0129\3\2\2\2\u012f\u012a\3\2\2\2\u012f\u012b\3\2\2\2\u012f\u012c\3\2"+
"\2\2\u012f\u012d\3\2\2\2\u012f\u012e\3\2\2\2\u0130\32\3\2\2\2\u0131\u0136"+
"\5\37\20\2\u0132\u0136\5!\21\2\u0133\u0136\5#\22\2\u0134\u0136\5%\23\2"+
"\u0135\u0131\3\2\2\2\u0135\u0132\3\2\2\2\u0135\u0133\3\2\2\2\u0135\u0134"+
"\3\2\2\2\u0136\34\3\2\2\2\u0137\u0139\t\5\2\2\u0138\u0137\3\2\2\2\u0139"+
"\u013a\3\2\2\2\u013a\u0138\3\2\2\2\u013a\u013b\3\2\2\2\u013b\u013c\3\2"+
"\2\2\u013c\u013d\b\17\2\2\u013d\36\3\2\2\2\u013e\u013f\7U\2\2\u013f\u0140"+
"\7G\2\2\u0140\u0141\7E\2\2\u0141\u0142\7Q\2\2\u0142\u0143\7P\2\2\u0143"+
"\u016b\7F\2\2\u0144\u0145\7u\2\2\u0145\u0146\7g\2\2\u0146\u0147\7e\2\2"+
"\u0147\u0148\7q\2\2\u0148\u0149\7p\2\2\u0149\u016b\7f\2\2\u014a\u014b"+
"\7u\2\2\u014b\u014c\7g\2\2\u014c\u014d\7e\2\2\u014d\u014e\7q\2\2\u014e"+
"\u014f\7p\2\2\u014f\u0150\7f\2\2\u0150\u016b\7u\2\2\u0151\u0152\7U\2\2"+
"\u0152\u0153\7G\2\2\u0153\u0154\7E\2\2\u0154\u0155\7Q\2\2\u0155\u0156"+
"\7P\2\2\u0156\u0157\7F\2\2\u0157\u016b\7U\2\2\u0158\u0159\7u\2\2\u0159"+
"\u015a\7g\2\2\u015a\u015b\7e\2\2\u015b\u015c\7q\2\2\u015c\u015d\7p\2\2"+
"\u015d\u015e\7f\2\2\u015e\u015f\7*\2\2\u015f\u0160\7u\2\2\u0160\u016b"+
"\7+\2\2\u0161\u0162\7U\2\2\u0162\u0163\7G\2\2\u0163\u0164\7E\2\2\u0164"+
"\u0165\7Q\2\2\u0165\u0166\7P\2\2\u0166\u0167\7F\2\2\u0167\u0168\7*\2\2"+
"\u0168\u0169\7U\2\2\u0169\u016b\7+\2\2\u016a\u013e\3\2\2\2\u016a\u0144"+
"\3\2\2\2\u016a\u014a\3\2\2\2\u016a\u0151\3\2\2\2\u016a\u0158\3\2\2\2\u016a"+
"\u0161\3\2\2\2\u016b \3\2\2\2\u016c\u016d\7O\2\2\u016d\u016e\7K\2\2\u016e"+
"\u016f\7P\2\2\u016f\u0170\7W\2\2\u0170\u0171\7V\2\2\u0171\u0199\7G\2\2"+
"\u0172\u0173\7o\2\2\u0173\u0174\7k\2\2\u0174\u0175\7p\2\2\u0175\u0176"+
"\7w\2\2\u0176\u0177\7v\2\2\u0177\u0199\7g\2\2\u0178\u0179\7o\2\2\u0179"+
"\u017a\7k\2\2\u017a\u017b\7p\2\2\u017b\u017c\7w\2\2\u017c\u017d\7v\2\2"+
"\u017d\u017e\7g\2\2\u017e\u0199\7u\2\2\u017f\u0180\7O\2\2\u0180\u0181"+
"\7K\2\2\u0181\u0182\7P\2\2\u0182\u0183\7W\2\2\u0183\u0184\7V\2\2\u0184"+
"\u0185\7G\2\2\u0185\u0199\7U\2\2\u0186\u0187\7o\2\2\u0187\u0188\7k\2\2"+
"\u0188\u0189\7p\2\2\u0189\u018a\7w\2\2\u018a\u018b\7v\2\2\u018b\u018c"+
"\7g\2\2\u018c\u018d\7*\2\2\u018d\u018e\7u\2\2\u018e\u0199\7+\2\2\u018f"+
"\u0190\7O\2\2\u0190\u0191\7K\2\2\u0191\u0192\7P\2\2\u0192\u0193\7W\2\2"+
"\u0193\u0194\7V\2\2\u0194\u0195\7G\2\2\u0195\u0196\7*\2\2\u0196\u0197"+
"\7U\2\2\u0197\u0199\7+\2\2\u0198\u016c\3\2\2\2\u0198\u0172\3\2\2\2\u0198"+
"\u0178\3\2\2\2\u0198\u017f\3\2\2\2\u0198\u0186\3\2\2\2\u0198\u018f\3\2"+
"\2\2\u0199\"\3\2\2\2\u019a\u019b\7J\2\2\u019b\u019c\7Q\2\2\u019c\u019d"+
"\7W\2\2\u019d\u01bb\7T\2\2\u019e\u019f\7j\2\2\u019f\u01a0\7q\2\2\u01a0"+
"\u01a1\7w\2\2\u01a1\u01bb\7t\2\2\u01a2\u01a3\7j\2\2\u01a3\u01a4\7q\2\2"+
"\u01a4\u01a5\7w\2\2\u01a5\u01a6\7t\2\2\u01a6\u01bb\7u\2\2\u01a7\u01a8"+
"\7J\2\2\u01a8\u01a9\7Q\2\2\u01a9\u01aa\7W\2\2\u01aa\u01ab\7T\2\2\u01ab"+
"\u01bb\7U\2\2\u01ac\u01ad\7j\2\2\u01ad\u01ae\7q\2\2\u01ae\u01af\7w\2\2"+
"\u01af\u01b0\7t\2\2\u01b0\u01b1\7*\2\2\u01b1\u01b2\7u\2\2\u01b2\u01bb"+
"\7+\2\2\u01b3\u01b4\7J\2\2\u01b4\u01b5\7Q\2\2\u01b5\u01b6\7W\2\2\u01b6"+
"\u01b7\7T\2\2\u01b7\u01b8\7*\2\2\u01b8\u01b9\7U\2\2\u01b9\u01bb\7+\2\2"+
"\u01ba\u019a\3\2\2\2\u01ba\u019e\3\2\2\2\u01ba\u01a2\3\2\2\2\u01ba\u01a7"+
"\3\2\2\2\u01ba\u01ac\3\2\2\2\u01ba\u01b3\3\2\2\2\u01bb$\3\2\2\2\u01bc"+
"\u01bd\7F\2\2\u01bd\u01be\7C\2\2\u01be\u01d7\7[\2\2\u01bf\u01c0\7f\2\2"+
"\u01c0\u01c1\7c\2\2\u01c1\u01d7\7{\2\2\u01c2\u01c3\7f\2\2\u01c3\u01c4"+
"\7c\2\2\u01c4\u01c5\7{\2\2\u01c5\u01d7\7u\2\2\u01c6\u01c7\7F\2\2\u01c7"+
"\u01c8\7C\2\2\u01c8\u01c9\7[\2\2\u01c9\u01d7\7U\2\2\u01ca\u01cb\7f\2\2"+
"\u01cb\u01cc\7c\2\2\u01cc\u01cd\7{\2\2\u01cd\u01ce\7*\2\2\u01ce\u01cf"+
"\7u\2\2\u01cf\u01d7\7+\2\2\u01d0\u01d1\7F\2\2\u01d1\u01d2\7C\2\2\u01d2"+
"\u01d3\7[\2\2\u01d3\u01d4\7*\2\2\u01d4\u01d5\7U\2\2\u01d5\u01d7\7+\2\2"+
"\u01d6\u01bc\3\2\2\2\u01d6\u01bf\3\2\2\2\u01d6\u01c2\3\2\2\2\u01d6\u01c6"+
"\3\2\2\2\u01d6\u01ca\3\2\2\2\u01d6\u01d0\3\2\2\2\u01d7&\3\2\2\2\u01d8"+
"\u01d9\7O\2\2\u01d9\u01da\7Q\2\2\u01da\u01db\7P\2\2\u01db\u01dc\7F\2\2"+
"\u01dc\u01dd\7C\2\2\u01dd\u01f3\7[\2\2\u01de\u01df\7o\2\2\u01df\u01e0"+
"\7q\2\2\u01e0\u01e1\7p\2\2\u01e1\u01e2\7f\2\2\u01e2\u01e3\7c\2\2\u01e3"+
"\u01f3\7{\2\2\u01e4\u01e5\7O\2\2\u01e5\u01e6\7Q\2\2\u01e6\u01e7\7P\2\2"+
"\u01e7\u01e8\7F\2\2\u01e8\u01e9\7C\2\2\u01e9\u01ea\7[\2\2\u01ea\u01f3"+
"\7U\2\2\u01eb\u01ec\7o\2\2\u01ec\u01ed\7q\2\2\u01ed\u01ee\7p\2\2\u01ee"+
"\u01ef\7f\2\2\u01ef\u01f0\7c\2\2\u01f0\u01f1\7{\2\2\u01f1\u01f3\7u\2\2"+
"\u01f2\u01d8\3\2\2\2\u01f2\u01de\3\2\2\2\u01f2\u01e4\3\2\2\2\u01f2\u01eb"+
"\3\2\2\2\u01f3(\3\2\2\2\u01f4\u01f5\7V\2\2\u01f5\u01f6\7W\2\2\u01f6\u01f7"+
"\7G\2\2\u01f7\u01f8\7U\2\2\u01f8\u01f9\7F\2\2\u01f9\u01fa\7C\2\2\u01fa"+
"\u0213\7[\2\2\u01fb\u01fc\7v\2\2\u01fc\u01fd\7w\2\2\u01fd\u01fe\7g\2\2"+
"\u01fe\u01ff\7u\2\2\u01ff\u0200\7f\2\2\u0200\u0201\7c\2\2\u0201\u0213"+
"\7{\2\2\u0202\u0203\7V\2\2\u0203\u0204\7W\2\2\u0204\u0205\7G\2\2\u0205"+
"\u0206\7U\2\2\u0206\u0207\7F\2\2\u0207\u0208\7C\2\2\u0208\u0209\7[\2\2"+
"\u0209\u0213\7U\2\2\u020a\u020b\7v\2\2\u020b\u020c\7w\2\2\u020c\u020d"+
"\7g\2\2\u020d\u020e\7u\2\2\u020e\u020f\7f\2\2\u020f\u0210\7c\2\2\u0210"+
"\u0211\7{\2\2\u0211\u0213\7u\2\2\u0212\u01f4\3\2\2\2\u0212\u01fb\3\2\2"+
"\2\u0212\u0202\3\2\2\2\u0212\u020a\3\2\2\2\u0213*\3\2\2\2\u0214\u0215"+
"\7Y\2\2\u0215\u0216\7G\2\2\u0216\u0217\7F\2\2\u0217\u0218\7P\2\2\u0218"+
"\u0219\7G\2\2\u0219\u021a\7U\2\2\u021a\u021b\7F\2\2\u021b\u021c\7C\2\2"+
"\u021c\u023b\7[\2\2\u021d\u021e\7y\2\2\u021e\u021f\7g\2\2\u021f\u0220"+
"\7f\2\2\u0220\u0221\7p\2\2\u0221\u0222\7g\2\2\u0222\u0223\7u\2\2\u0223"+
"\u0224\7f\2\2\u0224\u0225\7c\2\2\u0225\u023b\7{\2\2\u0226\u0227\7Y\2\2"+
"\u0227\u0228\7G\2\2\u0228\u0229\7F\2\2\u0229\u022a\7P\2\2\u022a\u022b"+
"\7G\2\2\u022b\u022c\7U\2\2\u022c\u022d\7F\2\2\u022d\u022e\7C\2\2\u022e"+
"\u022f\7[\2\2\u022f\u023b\7U\2\2\u0230\u0231\7y\2\2\u0231\u0232\7g\2\2"+
"\u0232\u0233\7f\2\2\u0233\u0234\7p\2\2\u0234\u0235\7g\2\2\u0235\u0236"+
"\7u\2\2\u0236\u0237\7f\2\2\u0237\u0238\7c\2\2\u0238\u0239\7{\2\2\u0239"+
"\u023b\7u\2\2\u023a\u0214\3\2\2\2\u023a\u021d\3\2\2\2\u023a\u0226\3\2"+
"\2\2\u023a\u0230\3\2\2\2\u023b,\3\2\2\2\u023c\u023d\7V\2\2\u023d\u023e"+
"\7J\2\2\u023e\u023f\7W\2\2\u023f\u0240\7T\2\2\u0240\u0241\7U\2\2\u0241"+
"\u0242\7F\2\2\u0242\u0243\7C\2\2\u0243\u025f\7[\2\2\u0244\u0245\7v\2\2"+
"\u0245\u0246\7j\2\2\u0246\u0247\7w\2\2\u0247\u0248\7t\2\2\u0248\u0249"+
"\7u\2\2\u0249\u024a\7f\2\2\u024a\u024b\7c\2\2\u024b\u025f\7{\2\2\u024c"+
"\u024d\7V\2\2\u024d\u024e\7J\2\2\u024e\u024f\7W\2\2\u024f\u0250\7T\2\2"+
"\u0250\u0251\7U\2\2\u0251\u0252\7F\2\2\u0252\u0253\7C\2\2\u0253\u0254"+
"\7[\2\2\u0254\u025f\7U\2\2\u0255\u0256\7v\2\2\u0256\u0257\7j\2\2\u0257"+
"\u0258\7w\2\2\u0258\u0259\7t\2\2\u0259\u025a\7u\2\2\u025a\u025b\7f\2\2"+
"\u025b\u025c\7c\2\2\u025c\u025d\7{\2\2\u025d\u025f\7u\2\2\u025e\u023c"+
"\3\2\2\2\u025e\u0244\3\2\2\2\u025e\u024c\3\2\2\2\u025e\u0255\3\2\2\2\u025f"+
".\3\2\2\2\u0260\u0261\7H\2\2\u0261\u0262\7T\2\2\u0262\u0263\7K\2\2\u0263"+
"\u0264\7F\2\2\u0264\u0265\7C\2\2\u0265\u027b\7[\2\2\u0266\u0267\7h\2\2"+
"\u0267\u0268\7t\2\2\u0268\u0269\7k\2\2\u0269\u026a\7f\2\2\u026a\u026b"+
"\7c\2\2\u026b\u027b\7{\2\2\u026c\u026d\7H\2\2\u026d\u026e\7T\2\2\u026e"+
"\u026f\7K\2\2\u026f\u0270\7F\2\2\u0270\u0271\7C\2\2\u0271\u0272\7[\2\2"+
"\u0272\u027b\7U\2\2\u0273\u0274\7h\2\2\u0274\u0275\7t\2\2\u0275\u0276"+
"\7k\2\2\u0276\u0277\7f\2\2\u0277\u0278\7c\2\2\u0278\u0279\7{\2\2\u0279"+
"\u027b\7u\2\2\u027a\u0260\3\2\2\2\u027a\u0266\3\2\2\2\u027a\u026c\3\2"+
"\2\2\u027a\u0273\3\2\2\2\u027b\60\3\2\2\2\u027c\u027d\7U\2\2\u027d\u027e"+
"\7C\2\2\u027e\u027f\7V\2\2\u027f\u0280\7W\2\2\u0280\u0281\7T\2\2\u0281"+
"\u0282\7F\2\2\u0282\u0283\7C\2\2\u0283\u029f\7[\2\2\u0284\u0285\7u\2\2"+
"\u0285\u0286\7c\2\2\u0286\u0287\7v\2\2\u0287\u0288\7w\2\2\u0288\u0289"+
"\7t\2\2\u0289\u028a\7f\2\2\u028a\u028b\7c\2\2\u028b\u029f\7{\2\2\u028c"+
"\u028d\7U\2\2\u028d\u028e\7C\2\2\u028e\u028f\7V\2\2\u028f\u0290\7W\2\2"+
"\u0290\u0291\7T\2\2\u0291\u0292\7F\2\2\u0292\u0293\7C\2\2\u0293\u0294"+
"\7[\2\2\u0294\u029f\7U\2\2\u0295\u0296\7u\2\2\u0296\u0297\7c\2\2\u0297"+
"\u0298\7v\2\2\u0298\u0299\7w\2\2\u0299\u029a\7t\2\2\u029a\u029b\7f\2\2"+
"\u029b\u029c\7c\2\2\u029c\u029d\7{\2\2\u029d\u029f\7u\2\2\u029e\u027c"+
"\3\2\2\2\u029e\u0284\3\2\2\2\u029e\u028c\3\2\2\2\u029e\u0295\3\2\2\2\u029f"+
"\62\3\2\2\2\u02a0\u02a1\7U\2\2\u02a1\u02a2\7W\2\2\u02a2\u02a3\7P\2\2\u02a3"+
"\u02a4\7F\2\2\u02a4\u02a5\7C\2\2\u02a5\u02bb\7[\2\2\u02a6\u02a7\7u\2\2"+
"\u02a7\u02a8\7w\2\2\u02a8\u02a9\7p\2\2\u02a9\u02aa\7f\2\2\u02aa\u02ab"+
"\7c\2\2\u02ab\u02bb\7{\2\2\u02ac\u02ad\7U\2\2\u02ad\u02ae\7W\2\2\u02ae"+
"\u02af\7P\2\2\u02af\u02b0\7F\2\2\u02b0\u02b1\7C\2\2\u02b1\u02b2\7[\2\2"+
"\u02b2\u02bb\7U\2\2\u02b3\u02b4\7u\2\2\u02b4\u02b5\7w\2\2\u02b5\u02b6"+
"\7p\2\2\u02b6\u02b7\7f\2\2\u02b7\u02b8\7c\2\2\u02b8\u02b9\7{\2\2\u02b9"+
"\u02bb\7u\2\2\u02ba\u02a0\3\2\2\2\u02ba\u02a6\3\2\2\2\u02ba\u02ac\3\2"+
"\2\2\u02ba\u02b3\3\2\2\2\u02bb\64\3\2\2\2\u02bc\u02bd\7v\2\2\u02bd\u02be"+
"\7j\2\2\u02be\u02bf\7k\2\2\u02bf\u02c0\7u\2\2\u02c0\u02c1\7\"\2\2\u02c1"+
"\u02c2\7f\2\2\u02c2\u02c3\7c\2\2\u02c3\u02c4\7{\2\2\u02c4\u02c5\7\"\2"+
"\2\u02c5\u02c6\7q\2\2\u02c6\u02c7\7h\2\2\u02c7\u02c8\7\"\2\2\u02c8\u02c9"+
"\7y\2\2\u02c9\u02ca\7g\2\2\u02ca\u02cb\7g\2\2\u02cb\u0359\7m\2\2\u02cc"+
"\u02cd\7V\2\2\u02cd\u02ce\7J\2\2\u02ce\u02cf\7K\2\2\u02cf\u02d0\7U\2\2"+
"\u02d0\u02d1\7\"\2\2\u02d1\u02d2\7F\2\2\u02d2\u02d3\7C\2\2\u02d3\u02d4"+
"\7[\2\2\u02d4\u02d5\7\"\2\2\u02d5\u02d6\7Q\2\2\u02d6\u02d7\7H\2\2\u02d7"+
"\u02d8\7\"\2\2\u02d8\u02d9\7Y\2\2\u02d9\u02da\7G\2\2\u02da\u02db\7G\2"+
"\2\u02db\u0359\7M\2\2\u02dc\u02dd\7v\2\2\u02dd\u02de\7j\2\2\u02de\u02df"+
"\7k\2\2\u02df\u02e0\7u\2\2\u02e0\u02e1\7\"\2\2\u02e1\u02e2\7f\2\2\u02e2"+
"\u02e3\7c\2\2\u02e3\u02e4\7{\2\2\u02e4\u02e5\7\"\2\2\u02e5\u02e6\7q\2"+
"\2\u02e6\u02e7\7h\2\2\u02e7\u02e8\7\"\2\2\u02e8\u02e9\7v\2\2\u02e9\u02ea"+
"\7j\2\2\u02ea\u02eb\7g\2\2\u02eb\u02ec\7\"\2\2\u02ec\u02ed\7y\2\2\u02ed"+
"\u02ee\7g\2\2\u02ee\u02ef\7g\2\2\u02ef\u0359\7m\2\2\u02f0\u02f1\7V\2\2"+
"\u02f1\u02f2\7J\2\2\u02f2\u02f3\7K\2\2\u02f3\u02f4\7U\2\2\u02f4\u02f5"+
"\7\"\2\2\u02f5\u02f6\7F\2\2\u02f6\u02f7\7C\2\2\u02f7\u02f8\7[\2\2\u02f8"+
"\u02f9\7\"\2\2\u02f9\u02fa\7Q\2\2\u02fa\u02fb\7H\2\2\u02fb\u02fc\7\"\2"+
"\2\u02fc\u02fd\7V\2\2\u02fd\u02fe\7J\2\2\u02fe\u02ff\7G\2\2\u02ff\u0300"+
"\7\"\2\2\u0300\u0301\7Y\2\2\u0301\u0302\7G\2\2\u0302\u0303\7G\2\2\u0303"+
"\u0359\7M\2\2\u0304\u0305\7e\2\2\u0305\u0306\7w\2\2\u0306\u0307\7t\2\2"+
"\u0307\u0308\7t\2\2\u0308\u0309\7g\2\2\u0309\u030a\7p\2\2\u030a\u030b"+
"\7v\2\2\u030b\u030c\7\"\2\2\u030c\u030d\7f\2\2\u030d\u030e\7c\2\2\u030e"+
"\u030f\7{\2\2\u030f\u0310\7\"\2\2\u0310\u0311\7q\2\2\u0311\u0312\7h\2"+
"\2\u0312\u0313\7\"\2\2\u0313\u0314\7y\2\2\u0314\u0315\7g\2\2\u0315\u0316"+
"\7g\2\2\u0316\u0359\7m\2\2\u0317\u0318\7E\2\2\u0318\u0319\7W\2\2\u0319"+
"\u031a\7T\2\2\u031a\u031b\7T\2\2\u031b\u031c\7G\2\2\u031c\u031d\7P\2\2"+
"\u031d\u031e\7V\2\2\u031e\u031f\7\"\2\2\u031f\u0320\7F\2\2\u0320\u0321"+
"\7C\2\2\u0321\u0322\7[\2\2\u0322\u0323\7\"\2\2\u0323\u0324\7Q\2\2\u0324"+
"\u0325\7H\2\2\u0325\u0326\7\"\2\2\u0326\u0327\7Y\2\2\u0327\u0328\7G\2"+
"\2\u0328\u0329\7G\2\2\u0329\u0359\7M\2\2\u032a\u032b\7e\2\2\u032b\u032c"+
"\7w\2\2\u032c\u032d\7t\2\2\u032d\u032e\7t\2\2\u032e\u032f\7g\2\2\u032f"+
"\u0330\7p\2\2\u0330\u0331\7v\2\2\u0331\u0332\7\"\2\2\u0332\u0333\7f\2"+
"\2\u0333\u0334\7c\2\2\u0334\u0335\7{\2\2\u0335\u0336\7\"\2\2\u0336\u0337"+
"\7q\2\2\u0337\u0338\7h\2\2\u0338\u0339\7\"\2\2\u0339\u033a\7v\2\2\u033a"+
"\u033b\7j\2\2\u033b\u033c\7g\2\2\u033c\u033d\7\"\2\2\u033d\u033e\7y\2"+
"\2\u033e\u033f\7g\2\2\u033f\u0340\7g\2\2\u0340\u0359\7m\2\2\u0341\u0342"+
"\7E\2\2\u0342\u0343\7W\2\2\u0343\u0344\7T\2\2\u0344\u0345\7T\2\2\u0345"+
"\u0346\7G\2\2\u0346\u0347\7P\2\2\u0347\u0348\7V\2\2\u0348\u0349\7\"\2"+
"\2\u0349\u034a\7F\2\2\u034a\u034b\7C\2\2\u034b\u034c\7[\2\2\u034c\u034d"+
"\7\"\2\2\u034d\u034e\7Q\2\2\u034e\u034f\7H\2\2\u034f\u0350\7\"\2\2\u0350"+
"\u0351\7V\2\2\u0351\u0352\7J\2\2\u0352\u0353\7G\2\2\u0353\u0354\7\"\2"+
"\2\u0354\u0355\7Y\2\2\u0355\u0356\7G\2\2\u0356\u0357\7G\2\2\u0357\u0359"+
"\7M\2\2\u0358\u02bc\3\2\2\2\u0358\u02cc\3\2\2\2\u0358\u02dc\3\2\2\2\u0358"+
"\u02f0\3\2\2\2\u0358\u0304\3\2\2\2\u0358\u0317\3\2\2\2\u0358\u032a\3\2"+
"\2\2\u0358\u0341\3\2\2\2\u0359\66\3\2\2\2\u035a\u035b\7y\2\2\u035b\u035c"+
"\7g\2\2\u035c\u035d\7g\2\2\u035d\u035e\7m\2\2\u035e\u035f\7g\2\2\u035f"+
"\u0360\7p\2\2\u0360\u0379\7f\2\2\u0361\u0362\7Y\2\2\u0362\u0363\7G\2\2"+
"\u0363\u0364\7G\2\2\u0364\u0365\7M\2\2\u0365\u0366\7G\2\2\u0366\u0367"+
"\7P\2\2\u0367\u0379\7F\2\2\u0368\u0369\7y\2\2\u0369\u036a\7g\2\2\u036a"+
"\u036b\7g\2\2\u036b\u036c\7m\2\2\u036c\u036d\7g\2\2\u036d\u036e\7p\2\2"+
"\u036e\u036f\7f\2\2\u036f\u0379\7u\2\2\u0370\u0371\7Y\2\2\u0371\u0372"+
"\7G\2\2\u0372\u0373\7G\2\2\u0373\u0374\7M\2\2\u0374\u0375\7G\2\2\u0375"+
"\u0376\7P\2\2\u0376\u0377\7F\2\2\u0377\u0379\7U\2\2\u0378\u035a\3\2\2"+
"\2\u0378\u0361\3\2\2\2\u0378\u0368\3\2\2\2\u0378\u0370\3\2\2\2\u03798"+
"\3\2\2\2\u037a\u037b\7y\2\2\u037b\u037c\7g\2\2\u037c\u037d\7g\2\2\u037d"+
"\u037e\7m\2\2\u037e\u037f\7f\2\2\u037f\u0380\7c\2\2\u0380\u0399\7{\2\2"+
"\u0381\u0382\7Y\2\2\u0382\u0383\7G\2\2\u0383\u0384\7G\2\2\u0384\u0385"+
"\7M\2\2\u0385\u0386\7F\2\2\u0386\u0387\7C\2\2\u0387\u0399\7[\2\2\u0388"+
"\u0389\7y\2\2\u0389\u038a\7g\2\2\u038a\u038b\7g\2\2\u038b\u038c\7m\2\2"+
"\u038c\u038d\7f\2\2\u038d\u038e\7c\2\2\u038e\u038f\7{\2\2\u038f\u0399"+
"\7u\2\2\u0390\u0391\7Y\2\2\u0391\u0392\7G\2\2\u0392\u0393\7G\2\2\u0393"+
"\u0394\7M\2\2\u0394\u0395\7F\2\2\u0395\u0396\7C\2\2\u0396\u0397\7[\2\2"+
"\u0397\u0399\7U\2\2\u0398\u037a\3\2\2\2\u0398\u0381\3\2\2\2\u0398\u0388"+
"\3\2\2\2\u0398\u0390\3\2\2\2\u0399:\3\2\2\2\u039a\u039b\7j\2\2\u039b\u039c"+
"\7q\2\2\u039c\u039d\7n\2\2\u039d\u039e\7k\2\2\u039e\u039f\7f\2\2\u039f"+
"\u03a0\7c\2\2\u03a0\u03b9\7{\2\2\u03a1\u03a2\7J\2\2\u03a2\u03a3\7Q\2\2"+
"\u03a3\u03a4\7N\2\2\u03a4\u03a5\7K\2\2\u03a5\u03a6\7F\2\2\u03a6\u03a7"+
"\7C\2\2\u03a7\u03b9\7[\2\2\u03a8\u03a9\7j\2\2\u03a9\u03aa\7q\2\2\u03aa"+
"\u03ab\7n\2\2\u03ab\u03ac\7k\2\2\u03ac\u03ad\7f\2\2\u03ad\u03ae\7c\2\2"+
"\u03ae\u03af\7{\2\2\u03af\u03b9\7u\2\2\u03b0\u03b1\7J\2\2\u03b1\u03b2"+
"\7Q\2\2\u03b2\u03b3\7N\2\2\u03b3\u03b4\7K\2\2\u03b4\u03b5\7F\2\2\u03b5"+
"\u03b6\7C\2\2\u03b6\u03b7\7[\2\2\u03b7\u03b9\7U\2\2\u03b8\u039a\3\2\2"+
"\2\u03b8\u03a1\3\2\2\2\u03b8\u03a8\3\2\2\2\u03b8\u03b0\3\2\2\2\u03b9<"+
"\3\2\2\2\u03ba\u03bb\7f\2\2\u03bb\u03bc\7c\2\2\u03bc\u03bd\7v\2\2\u03bd"+
"\u03c3\7g\2\2\u03be\u03bf\7F\2\2\u03bf\u03c0\7C\2\2\u03c0\u03c1\7V\2\2"+
"\u03c1\u03c3\7G\2\2\u03c2\u03ba\3\2\2\2\u03c2\u03be\3\2\2\2\u03c3>\3\2"+
"\2\2\u03c4\u03c5\4\62;\2\u03c5@\3\2\2\2\u03c6\u03c7\4\63;\2\u03c7B\3\2"+
"\2\2\37\2T\u0086\u00b8\u00dc\u00fa\u010a\u0112\u0118\u0120\u012f\u0135"+
"\u013a\u016a\u0198\u01ba\u01d6\u01f2\u0212\u023a\u025e\u027a\u029e\u02ba"+
"\u0358\u0378\u0398\u03b8\u03c2\3\b\2\2";
// The grammar's augmented transition network, deserialized once at class
// load from the generated _serializedATN string above.
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
// Pre-allocate one (initially empty) DFA cache per ATN decision point;
// the simulator fills these lazily as input is lexed.
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.emitter.graphite;
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.io.CharStreams;
import com.google.common.io.Files;
import com.metamx.common.ISE;
import com.metamx.common.logger.Logger;
import com.metamx.emitter.service.ServiceMetricEvent;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.SortedMap;
import java.util.concurrent.TimeUnit;
@JsonTypeName("whiteList")
public class WhiteListBasedConverter implements DruidToGraphiteEventConverter
{
private static final Logger LOGGER = new Logger(WhiteListBasedConverter.class);
/**
* {@code whiteListDimsMapper} is a white list of metric-to-dimensions mappings.
* The key is the metric name or the metric name's prefix.
* The value is the list of that metric's dimension names.
* The order of the dimension names is important: it is used to build the graphite metric path.
* For instance, if dimension "type" is nested under dimension "dataSource", the resulting
* path is prefix.dataSource.queryType.metricName
*/
private final ImmutableSortedMap<String, ImmutableSet<String>> whiteListDimsMapper;
@JsonProperty
private final boolean ignoreHostname;
@JsonProperty
private final boolean ignoreServiceName;
@JsonProperty
private final String namespacePrefix;
@JsonProperty
private final String mapPath;
private final ObjectMapper mapper;
@JsonCreator
public WhiteListBasedConverter(
    @JsonProperty("namespacePrefix") String namespacePrefix,
    @JsonProperty("ignoreHostname") Boolean ignoreHostname,
    @JsonProperty("ignoreServiceName") Boolean ignoreServiceName,
    @JsonProperty("mapPath") String mapPath,
    @JacksonInject ObjectMapper mapper
)
{
  // Initialization order is significant: the mapper must be set before
  // readMap() uses it to parse the white-list file (or the bundled default
  // when mapPath is null/empty).
  this.mapper = Preconditions.checkNotNull(mapper);
  this.mapPath = mapPath;
  this.whiteListDimsMapper = readMap(this.mapPath);
  // Boxed flags default to false when absent from the JSON config;
  // Boolean.TRUE.equals(null) is false, matching the original ternaries.
  this.ignoreHostname = Boolean.TRUE.equals(ignoreHostname);
  this.ignoreServiceName = Boolean.TRUE.equals(ignoreServiceName);
  this.namespacePrefix = Preconditions.checkNotNull(namespacePrefix, "namespace prefix can not be null");
}
// When true, the druid host name is omitted from the graphite metric path.
@JsonProperty
public boolean isIgnoreHostname()
{
return ignoreHostname;
}
// When true, the druid service name is omitted from the graphite metric path.
@JsonProperty
public boolean isIgnoreServiceName()
{
return ignoreServiceName;
}
// First segment of every emitted graphite metric path; never null.
@JsonProperty
public String getNamespacePrefix()
{
return namespacePrefix;
}
// The white list of metric (or metric-prefix) -> ordered dimension names.
public ImmutableSortedMap<String, ImmutableSet<String>> getWhiteListDimsMapper()
{
return whiteListDimsMapper;
}
/**
 * @param event Event subject to filtering
 *
 * @return true if and only if the event's metric name (or one of its
 *         registered prefixes) is a key of {@code whiteListDimsMapper}
 */
private boolean isInWhiteList(ServiceMetricEvent event)
{
  final String matchedPrefix = getPrefixKey(event.getMetric(), whiteListDimsMapper);
  return matchedPrefix != null;
}
/**
 * Looks up the white-list key matching a metric name, either exactly or as
 * the closest registered prefix.
 *
 * @param key the metric name to look up
 * @param whiteList sorted map whose keys are metric names or metric-name prefixes
 *
 * @return {@code key} itself if present in {@code whiteList}; otherwise the
 *         last key sorting before {@code key} if it is a prefix of
 *         {@code key}; otherwise <tt>null</tt>
 */
private String getPrefixKey(String key, SortedMap<String, ?> whiteList)
{
  if (whiteList.containsKey(key)) {
    return key;
  }
  // headMap(key) is the view of entries sorting strictly before key.
  SortedMap<String, ?> headMap = whiteList.headMap(key);
  if (headMap.isEmpty()) {
    return null;
  }
  // Hoist lastKey() into a local: the original called it twice, walking the
  // tree a second time for no benefit. NOTE(review): only the immediately
  // preceding key is tested as a prefix; a shorter prefix sorting earlier
  // would not be found (same as the original behavior).
  String candidate = headMap.lastKey();
  return key.startsWith(candidate) ? candidate : null;
}
/**
 * Returns a {@link List} of the white-listed dimension values to send.
 * The list order is the dimension order in {@code whiteListDimsMapper}.
 *
 * @param event the event whose dimensions will be filtered
 *
 * @return {@link List} of the filtered dimension values to send, or
 *         <tt>null</tt> if the event is not in the white list
 */
private List<String> getOrderedDimValues(ServiceMetricEvent event)
{
  String prefixKey = getPrefixKey(event.getMetric(), whiteListDimsMapper);
  if (prefixKey == null) {
    return null;
  }
  // Diamond operator instead of the raw Builder type (raw generic usage
  // suppresses type checking; the rest of the file already uses <>).
  ImmutableList.Builder<String> outputList = new ImmutableList.Builder<>();
  Set<String> dimensions = whiteListDimsMapper.get(prefixKey);
  if (dimensions == null) {
    // Metric is white-listed but has no dimensions configured for it.
    return Collections.emptyList();
  }
  for (String dimKey : dimensions) {
    // Dimensions configured but absent from this event are silently skipped.
    String dimValue = (String) event.getUserDims().get(dimKey);
    if (dimValue != null) {
      outputList.add(GraphiteEmitter.sanitize(dimValue));
    }
  }
  return outputList.build();
}
/**
 * Converts a druid metric event into a {@link GraphiteEvent}.
 *
 * @param serviceMetricEvent druid metric event to convert
 *
 * @return <tt>null</tt> if the event is not white listed, otherwise a
 *         {@link GraphiteEvent} whose metric path is
 *         {@code <namespacePrefix>.[<service>].[<host>].<white-listed dims>.<metric>}.
 *         The order of the dimensions is the order returned by
 *         {@code getOrderedDimValues()}. Every path segment is sanitized by
 *         {@link GraphiteEmitter#sanitize(String)} (dots and spaces become
 *         underscores).
 */
@Override
public GraphiteEvent druidEventToGraphite(ServiceMetricEvent serviceMetricEvent)
{
  if (!this.isInWhiteList(serviceMetricEvent)) {
    return null;
  }
  final ImmutableList.Builder<String> pathParts = new ImmutableList.Builder<>();
  pathParts.add(this.getNamespacePrefix());
  if (!this.isIgnoreServiceName()) {
    pathParts.add(GraphiteEmitter.sanitize(serviceMetricEvent.getService()));
  }
  if (!this.isIgnoreHostname()) {
    pathParts.add(GraphiteEmitter.sanitize(serviceMetricEvent.getHost()));
  }
  // Non-null here: isInWhiteList() above guarantees a prefix match.
  pathParts.addAll(this.getOrderedDimValues(serviceMetricEvent));
  pathParts.add(GraphiteEmitter.sanitize(serviceMetricEvent.getMetric()));
  final String metricPath = Joiner.on(".").join(pathParts.build());
  return new GraphiteEvent(
      metricPath,
      String.valueOf(serviceMetricEvent.getValue()),
      TimeUnit.MILLISECONDS.toSeconds(serviceMetricEvent.getCreatedTime().getMillis())
  );
}
/**
 * Two converters are equal when their ignore-hostname / ignore-service-name
 * flags, namespace prefix, and map path all match.
 */
@Override
public boolean equals(Object o)
{
  if (this == o) {
    return true;
  }
  if (!(o instanceof WhiteListBasedConverter)) {
    return false;
  }
  WhiteListBasedConverter that = (WhiteListBasedConverter) o;
  return isIgnoreHostname() == that.isIgnoreHostname()
         && isIgnoreServiceName() == that.isIgnoreServiceName()
         && getNamespacePrefix().equals(that.getNamespacePrefix())
         && (mapPath != null ? mapPath.equals(that.mapPath) : that.mapPath == null);
}
/**
 * Hash over the same fields, in the same order, as {@link #equals(Object)};
 * booleans contribute 0/1 to keep values identical to the original formula.
 */
@Override
public int hashCode()
{
  int hash = isIgnoreHostname() ? 1 : 0;
  hash = 31 * hash + (isIgnoreServiceName() ? 1 : 0);
  hash = 31 * hash + getNamespacePrefix().hashCode();
  hash = 31 * hash + (mapPath == null ? 0 : mapPath.hashCode());
  return hash;
}
/**
 * Loads and parses the white-list map, either from {@code mapPath} or, when
 * that is null/empty, from the bundled {@code defaultWhiteListMap.json}
 * resource.
 *
 * @param mapPath path of the JSON white-list file; may be null or empty
 *
 * @return immutable sorted map of metric name/prefix to its dimension set
 *
 * @throws ISE when the file cannot be read or parsed
 */
private ImmutableSortedMap<String, ImmutableSet<String>> readMap(final String mapPath)
{
  String fileContent;
  String actualPath = mapPath;
  try {
    if (Strings.isNullOrEmpty(mapPath)) {
      actualPath = this.getClass().getClassLoader().getResource("defaultWhiteListMap.json").getFile();
      LOGGER.info("using default whiteList map located at [%s]", actualPath);
      // Fix: the original leaked this reader (never closed) and used the
      // platform-default charset; read the bundled resource as UTF-8 and
      // close it via try-with-resources.
      try (InputStreamReader reader = new InputStreamReader(
          this.getClass().getClassLoader().getResourceAsStream("defaultWhiteListMap.json"),
          Charset.forName("UTF-8")
      )) {
        fileContent = CharStreams.toString(reader);
      }
    } else {
      fileContent = Files.asCharSource(new File(mapPath), Charset.forName("UTF-8")).read();
    }
    return mapper.reader(new TypeReference<ImmutableSortedMap<String, ImmutableSet<String>>>()
    {
    }).readValue(fileContent);
  }
  catch (IOException e) {
    throw new ISE(e, "Got an exception while parsing file [%s]", actualPath);
  }
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cognitoidp.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request to create a new resource server in a Cognito user pool.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/CreateResourceServer" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateResourceServerRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The user pool ID for the user pool. */
    private String userPoolId;

    /**
     * A unique resource server identifier. This could be an HTTPS endpoint where the resource server is located, for
     * example <code>https://my-weather-api.example.com</code>.
     */
    private String identifier;

    /** A friendly name for the resource server. */
    private String name;

    /** A list of scopes. Each scope is a map whose keys are <code>name</code> and <code>description</code>. */
    private java.util.List<ResourceServerScopeType> scopes;

    /**
     * Sets the user pool ID for the user pool.
     *
     * @param userPoolId
     *        The user pool ID for the user pool.
     */
    public void setUserPoolId(String userPoolId) {
        this.userPoolId = userPoolId;
    }

    /**
     * Returns the user pool ID for the user pool.
     *
     * @return The user pool ID for the user pool.
     */
    public String getUserPoolId() {
        return this.userPoolId;
    }

    /**
     * Fluent variant of {@link #setUserPoolId(String)}.
     *
     * @param userPoolId
     *        The user pool ID for the user pool.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateResourceServerRequest withUserPoolId(String userPoolId) {
        this.userPoolId = userPoolId;
        return this;
    }

    /**
     * Sets the unique resource server identifier, e.g. <code>https://my-weather-api.example.com</code>.
     *
     * @param identifier
     *        A unique resource server identifier for the resource server.
     */
    public void setIdentifier(String identifier) {
        this.identifier = identifier;
    }

    /**
     * Returns the unique resource server identifier.
     *
     * @return A unique resource server identifier for the resource server.
     */
    public String getIdentifier() {
        return this.identifier;
    }

    /**
     * Fluent variant of {@link #setIdentifier(String)}.
     *
     * @param identifier
     *        A unique resource server identifier for the resource server.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateResourceServerRequest withIdentifier(String identifier) {
        this.identifier = identifier;
        return this;
    }

    /**
     * Sets the friendly name for the resource server.
     *
     * @param name
     *        A friendly name for the resource server.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Returns the friendly name for the resource server.
     *
     * @return A friendly name for the resource server.
     */
    public String getName() {
        return this.name;
    }

    /**
     * Fluent variant of {@link #setName(String)}.
     *
     * @param name
     *        A friendly name for the resource server.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateResourceServerRequest withName(String name) {
        this.name = name;
        return this;
    }

    /**
     * Returns the list of scopes.
     *
     * @return A list of scopes. Each scope is a map whose keys are <code>name</code> and <code>description</code>.
     */
    public java.util.List<ResourceServerScopeType> getScopes() {
        return scopes;
    }

    /**
     * Replaces the list of scopes with a copy of the given collection.
     *
     * @param scopes
     *        A list of scopes; <code>null</code> clears the list.
     */
    public void setScopes(java.util.Collection<ResourceServerScopeType> scopes) {
        this.scopes = (scopes == null) ? null : new java.util.ArrayList<ResourceServerScopeType>(scopes);
    }

    /**
     * Appends the given scopes to the existing list (if any).
     * <p>
     * <b>NOTE:</b> use {@link #setScopes(java.util.Collection)} or {@link #withScopes(java.util.Collection)} if you
     * want to override the existing values instead.
     * </p>
     *
     * @param scopes
     *        Scopes to append. Each scope is a map whose keys are <code>name</code> and <code>description</code>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateResourceServerRequest withScopes(ResourceServerScopeType... scopes) {
        if (this.scopes == null) {
            this.scopes = new java.util.ArrayList<ResourceServerScopeType>(scopes.length);
        }
        java.util.Collections.addAll(this.scopes, scopes);
        return this;
    }

    /**
     * Fluent variant of {@link #setScopes(java.util.Collection)}: replaces the scope list.
     *
     * @param scopes
     *        A list of scopes. Each scope is a map whose keys are <code>name</code> and <code>description</code>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateResourceServerRequest withScopes(java.util.Collection<ResourceServerScopeType> scopes) {
        setScopes(scopes);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("{");
        if (getUserPoolId() != null)
            out.append("UserPoolId: ").append(getUserPoolId()).append(",");
        if (getIdentifier() != null)
            out.append("Identifier: ").append(getIdentifier()).append(",");
        if (getName() != null)
            out.append("Name: ").append(getName()).append(",");
        if (getScopes() != null)
            out.append("Scopes: ").append(getScopes());
        return out.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof CreateResourceServerRequest))
            return false;
        CreateResourceServerRequest other = (CreateResourceServerRequest) obj;
        // Objects.equals reproduces the original null-safe per-field comparison.
        return java.util.Objects.equals(other.getUserPoolId(), getUserPoolId())
                && java.util.Objects.equals(other.getIdentifier(), getIdentifier())
                && java.util.Objects.equals(other.getName(), getName())
                && java.util.Objects.equals(other.getScopes(), getScopes());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the identical 31-based accumulation (null -> 0) as
        // the original hand-rolled loop, so hash values are unchanged.
        return java.util.Objects.hash(getUserPoolId(), getIdentifier(), getName(), getScopes());
    }

    @Override
    public CreateResourceServerRequest clone() {
        return (CreateResourceServerRequest) super.clone();
    }
}
| |
package etri.sdn.controller.module.linkdiscovery;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.LinkedBlockingQueue;
import org.projectfloodlight.openflow.protocol.OFFactories;
import org.projectfloodlight.openflow.protocol.OFFactory;
import org.projectfloodlight.openflow.protocol.OFMessage;
import org.projectfloodlight.openflow.protocol.OFPacketIn;
import org.projectfloodlight.openflow.protocol.OFPacketOut;
import org.projectfloodlight.openflow.protocol.OFPortDesc;
import org.projectfloodlight.openflow.protocol.OFPortReason;
import org.projectfloodlight.openflow.protocol.OFPortState;
import org.projectfloodlight.openflow.protocol.OFPortStatus;
import org.projectfloodlight.openflow.protocol.OFType;
import org.projectfloodlight.openflow.protocol.OFVersion;
import org.projectfloodlight.openflow.protocol.action.OFAction;
import org.projectfloodlight.openflow.protocol.action.OFActionOutput;
import org.projectfloodlight.openflow.protocol.match.MatchField;
import org.projectfloodlight.openflow.types.OFBufferId;
import org.projectfloodlight.openflow.types.OFPort;
import org.projectfloodlight.openflow.util.HexString;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import etri.sdn.controller.IOFTask;
import etri.sdn.controller.IService;
import etri.sdn.controller.MessageContext;
import etri.sdn.controller.OFMFilter;
import etri.sdn.controller.OFModel;
import etri.sdn.controller.OFModule;
import etri.sdn.controller.module.linkdiscovery.ILinkDiscoveryListener.LDUpdate;
import etri.sdn.controller.module.linkdiscovery.ILinkDiscoveryListener.UpdateOperation;
import etri.sdn.controller.protocol.OFProtocol;
import etri.sdn.controller.protocol.io.Connection;
import etri.sdn.controller.protocol.io.IOFHandler.Role;
import etri.sdn.controller.protocol.io.IOFSwitch;
import etri.sdn.controller.protocol.io.IOFSwitch.SwitchType;
import etri.sdn.controller.protocol.packet.BSN;
import etri.sdn.controller.protocol.packet.Ethernet;
import etri.sdn.controller.protocol.packet.IPv4;
import etri.sdn.controller.protocol.packet.LLDP;
import etri.sdn.controller.protocol.packet.LLDPTLV;
/**
* Link Discovery Module.
* This module implements ILinkDiscoveryService.
* This service is used by {@link etri.sdn.controller.module.topologymanager.OFMTopologyManager}.
*
* @author bjlee
*
*/
public class OFMLinkDiscovery extends OFModule implements ILinkDiscoveryService {
private static final Logger logger = LoggerFactory.getLogger(OFMLinkDiscovery.class);
//
// LLDP and BDDP fields
//
/**
* LLDP Data Units (LLDPDUs) are sent to the destination MAC address 01:80:c2:00:00:0e.
* This address is defined as the "LLDP_Multicast" address.
* This address is defined within a range of addresses reserved by the IEEE for protocols
* that are to be constrained to an individual LAN.
* AN LLDPDU will not be forwarded by MAC bridges (e.g. switches) that conform to IEEE Std 802.1D-2004.
*/
private static final byte[] LLDP_STANDARD_DST_MAC_STRING =
HexString.fromHexString("01:80:c2:00:00:0e");
private static final long LINK_LOCAL_MASK = 0xfffffffffff0L;
private static final long LINK_LOCAL_VALUE = 0x0180c2000000L;
// BigSwitch OUI is 5C:16:C7, so 5D:16:C7 is the multicast version
// private static final String LLDP_BSN_DST_MAC_STRING = "5d:16:c7:00:00:01";
private static final String LLDP_BSN_DST_MAC_STRING = "ff:ff:ff:ff:ff:ff";
private static final byte TLV_DIRECTION_TYPE = 0x73;
private static final short TLV_DIRECTION_LENGTH = 1; // 1 byte
private static final byte TLV_DIRECTION_VALUE_FORWARD[] = {0x01};
private static final byte TLV_DIRECTION_VALUE_REVERSE[] = {0x02};
protected final int DISCOVERY_TASK_INTERVAL = 1; // 1 second.
protected final int LLDP_TO_ALL_INTERVAL = 7; //15 seconds.
protected long lldpClock = 0;
/**
* LLDP frequency for known links.
* This value is intentionally kept higher than LLDP_TO_ALL_INTERVAL.
* If we want to identify link failures faster, we could decrease this
* value to a small number, say 1 or 2 sec.
*/
protected final int LLDP_TO_KNOWN_INTERVAL = 20;
/**
* Quarantine Task. 100ms.
*/
protected final int BDDP_TASK_INTERVAL = 100;
/**
* Quarantine Task. # of ports per iteration.
*/
protected final int BDDP_TASK_SIZE = 5;
private static final LLDPTLV forwardTLV
= new LLDPTLV().
setType((byte)TLV_DIRECTION_TYPE).
setLength((short)TLV_DIRECTION_LENGTH).
setValue(TLV_DIRECTION_VALUE_FORWARD);
private static final LLDPTLV reverseTLV
= new LLDPTLV().
setType((byte)TLV_DIRECTION_TYPE).
setLength((short)TLV_DIRECTION_LENGTH).
setValue(TLV_DIRECTION_VALUE_REVERSE);
protected LLDPTLV controllerTLV;
/**
* A list of ports that are quarantined for discovering links through
* them. Data traffic from these ports are not allowed until the ports
* are released from quarantine.
*/
protected LinkedBlockingQueue<NodePortTuple> quarantineQueue = new LinkedBlockingQueue<NodePortTuple>();
protected LinkedBlockingQueue<NodePortTuple> maintenanceQueue = new LinkedBlockingQueue<NodePortTuple>();
protected BlockingQueue<LDUpdate> updates = new LinkedBlockingQueue<LDUpdate>();
/**
* topology aware components are called in the order they were added to the
* the array
*/
private ArrayList<ILinkDiscoveryListener> linkDiscoveryAware = new ArrayList<ILinkDiscoveryListener>();
/**
 * Snapshot of a switch that has just disconnected: its datapath id, the set of
 * ports it reported at disconnect time, and the wall-clock timestamp of the
 * disconnect. Instances are held temporarily so that links on those ports can
 * be cleaned up later.
 */
private class Disconnection implements Comparable<Disconnection> {

	private long timestamp;
	private long switchId;
	private Set<OFPort> ports;

	public Disconnection(IOFSwitch sw) {
		this.timestamp = Calendar.getInstance().getTimeInMillis();
		this.switchId = sw.getId();
		this.ports = new HashSet<OFPort>();
		// record every port the switch currently reports.
		for ( OFPortDesc desc : protocol.getPortInformations(sw) ) {
			this.ports.add(desc.getPortNo());
		}
	}

	public long getSwitchId() {
		return this.switchId;
	}

	public long getTimestamp() {
		return this.timestamp;
	}

	public Set<OFPort> getPorts() {
		return this.ports;
	}

	@Override
	public int compareTo(Disconnection o) {
		// ordered by datapath id only, matching the original three-way compare.
		return Long.compare(this.switchId, o.switchId);
	}
}
/**
* This is a set to hold information about disconnected switches and their ports temporarily.
*/
private Set<Disconnection> disconnections = new ConcurrentSkipListSet<Disconnection>();
/**
* Model of this module. initialized within initialize()
*/
private Links links = null;
private OFProtocol protocol;
/**
 * No-argument constructor required by the module framework; all real setup is
 * deferred to {@link #initialize()}.
 */
public OFMLinkDiscovery() {
	// Intentionally empty: fields that need the controller reference are
	// initialized in initialize().
}
/**
 * Extracts the ingress port from a PACKET_IN message in a version-agnostic way.
 *
 * @param pi the PACKET_IN message; must not be null
 * @return the port the packet arrived on
 * @throws AssertionError if {@code pi} is null
 */
protected OFPort getInputPort(OFPacketIn pi) {
	if ( pi == null ) {
		throw new AssertionError("pi cannot refer null");
	}
	try {
		// message variants that carry the in-port directly.
		return pi.getInPort();
	} catch ( UnsupportedOperationException e ) {
		// fallback: presumably newer protocol versions encode the ingress
		// port in the match structure instead — relies on getInPort()
		// throwing UnsupportedOperationException there.
		return pi.getMatch().get(MatchField.IN_PORT);
	}
}
/**
 * Declares the services this module provides to the controller.
 *
 * @return a collection containing only {@link ILinkDiscoveryService}
 */
@Override
protected Collection<Class<? extends IService>> services() {
	List<Class<? extends IService>> provided = new LinkedList<Class<? extends IService>>();
	provided.add(ILinkDiscoveryService.class);
	return provided;
}
/**
 * Initializes the module: creates the {@link Links} model, builds the
 * controller TLV, and registers filters so this module receives
 *
 * <ol>
 * <li> PACKET_IN messages that carry LLDP frames (Ethertype 0x88cc)
 * <li> all PORT_STATUS messages
 * </ol>
 *
 * It then starts the periodic discovery, quarantine, and topology-update
 * tasks.
 */
@Override
public void initialize() {
	// initialize Links object with proper manager reference.
	this.links = new Links(this);
	this.protocol = getController().getProtocol();
	// initialize controller TLV
	setControllerTLV();
	// Receive PACKET_IN selectively: accept only frames whose Ethertype
	// (raw bytes 12-13) is 0x88cc, i.e. LLDP. This cheap byte check avoids
	// deserializing unrelated traffic and is key to performance.
	registerFilter(
			OFType.PACKET_IN,
			new OFMFilter() {
				@Override
				public boolean filter(OFMessage m) {
					// we process all PACKET_IN regardless of its version.
					OFPacketIn pi = (OFPacketIn) m;
					byte[] packet = pi.getData();
					if ( packet == null || packet.length < 14 ) {
						// too short to even contain an Ethertype; not ours.
						return false;
					}
					// accept only LLDP (Ethertype 0x88cc).
					if ( packet[12] != (byte)0x88 || packet[13] != (byte)0xcc ) {
						// not an LLDP frame; not ours.
						return false;
					}
					return true;
				}
			}
	);
	// Receive every PORT_STATUS message, regardless of protocol version.
	registerFilter(
			OFType.PORT_STATUS,
			new OFMFilter() {
				@Override
				public boolean filter(OFMessage m) {
					return true;
				}
			}
	);
	// start the periodic background tasks.
	initiatePeriodicDiscovery();
	initiatePeriodicQuarantineWorker();
	initiatePeriodicTopologyUpdate();
}
/**
 * Queues a link update whose operation is derived from the link's current
 * source/destination port states. Called by {@link Links} from
 * {@link Links#addOrUpdateLink(Link, LinkInfo)}, {@link Links#timeoutLinks()}
 * and {@link Links#updatePortStatus(Long, int, OFPortStatus)}.
 *
 * @param lt   the link that changed
 * @param info state information for the link
 */
public void addLinkUpdate(Link lt, LinkInfo info) {
	UpdateOperation op = getUpdateOperation(info.getSrcPortState(), info.getDstPortState());
	// delegate to the three-argument overload to enqueue the update.
	addLinkUpdate(lt, info, op);
}
/**
 * Queues a link update with an explicitly chosen operation.
 *
 * @param lt        the link that changed
 * @param info      state information for the link (used to derive link type)
 * @param operation the update operation to report
 */
public void addLinkUpdate(Link lt, LinkInfo info, UpdateOperation operation) {
	LDUpdate update = new LDUpdate(
			lt.getSrc(), lt.getSrcPort(),
			lt.getDst(), lt.getDstPort(),
			getLinkType(lt, info),
			operation);
	updates.add(update);
}
/**
 * Queues a port-level update on the {@link #updates} queue.
 *
 * @param switchId identifier of the switch
 * @param portNum  number of the port
 * @param op       update operation; see {@link ILinkDiscoveryService}
 */
public void addLinkUpdate(long switchId, OFPort portNum, UpdateOperation op) {
	LDUpdate update = new LDUpdate(switchId, portNum, op);
	updates.add(update);
}
/**
 * Queues a port-level update whose operation is derived from the given port
 * state flags.
 *
 * @param switchId identifier of the switch
 * @param portNum  port number
 * @param status   port state flags from which the operation is computed
 */
public void addLinkUpdate(long switchId, OFPort portNum, Set<OFPortState> status) {
	UpdateOperation op = getUpdateOperation(status);
	addLinkUpdate(switchId, portNum, op);
}
/**
 * Asks the switch identified by {@code switchId} to emit a discovery probe on
 * {@code destinationPort}. Thin wrapper over the private
 * {@code sendDiscoveryMessage(IOFSwitch, OFPort, boolean, boolean)}.
 *
 * @param switchId        switch that should send the probe
 * @param destinationPort port to send the probe out of
 * @param isStandard      LLDP (true) or BDDP (false)
 * @param isReverse       reverse (true) or non-reverse (false)
 */
public void sendDiscoveryMessage(long switchId, OFPort destinationPort, boolean isStandard, boolean isReverse) {
	IOFSwitch sw = this.controller.getSwitch(switchId);
	sendDiscoveryMessage(sw, destinationPort, isStandard, isReverse);
}
/**
 * Registers a listener for link-discovery updates. Listeners are notified in
 * the order they were added (see {@link #initiatePeriodicTopologyUpdate()}).
 *
 * @param listener the listener to register
 */
@Override
public void addListener(ILinkDiscoveryListener listener) {
	linkDiscoveryAware.add( listener );
}
/**
 * Initializes {@link #controllerTLV}. The 8-byte TLV value is derived from the
 * current nano time, the controller's IP address, and the hash of the network
 * interface bound to that address, making it (very likely) unique per
 * controller instance. The TLV type is 0x0c (12), marking it as
 * controller-created.
 */
private void setControllerTLV() {
	final int prime = 7867;
	InetAddress localIPAddress = null;
	NetworkInterface localInterface = null;
	byte[] controllerTLVValue = new byte[] {0, 0, 0, 0, 0, 0, 0, 0}; // 8 byte value.
	ByteBuffer bb = ByteBuffer.allocate(10);
	try {
		localIPAddress = java.net.InetAddress.getLocalHost();
		localInterface = NetworkInterface.getByInetAddress(localIPAddress);
	} catch (Exception e) {
		// Fix: was e.printStackTrace(); route through the module logger so the
		// failure is visible in normal logs. Nano-time alone still seeds the TLV.
		logger.error("could not determine local address/interface for controller TLV", e);
	}
	long result = System.nanoTime();
	if (localIPAddress != null)
		result = result * prime + IPv4.toIPv4Address(localIPAddress.getHostAddress());
	if (localInterface != null)
		result = result * prime + localInterface.hashCode();
	// set the first 4 bits to 0.
	result = result & (0x0fffffffffffffffL);
	bb.putLong(result);
	bb.rewind();
	bb.get(controllerTLVValue, 0, 8);
	// type is set to 0x0c (12) which means the TLV is created by the controller.
	this.controllerTLV = new LLDPTLV().setType((byte) 0x0c).setLength((short) controllerTLVValue.length).setValue(controllerTLVValue);
}
/**
 * Schedules the main discovery task. Every {@code DISCOVERY_TASK_INTERVAL}
 * seconds it expires links of recently disconnected switches and sends out
 * discovery probes — but only when this controller's role is MASTER, EQUAL,
 * or unknown (null).
 */
private void initiatePeriodicDiscovery() {
	// registered task will be re-executed every DISCOVERY_TASK_INTERVAL seconds.
	this.controller.scheduleTask(
			new IOFTask() {
				public boolean execute() {
					Role role = controller.getRole();
					// SLAVE controllers must not probe; skip the work but keep the task.
					if ( role == null || role == Role.MASTER || role == Role.EQUAL ) {
						cleanDisconnectedSwitches();
						discoverLinks();
					}
					// NOTE(review): returning true presumably keeps the task
					// scheduled — confirm against the IOFTask contract.
					return true;
				}
			},
			0,
			DISCOVERY_TASK_INTERVAL * 1000 /* milliseconds */);
}
/**
 * Removes link information on the ports of switches that disconnected more
 * than 10 seconds ago, and drops those entries from {@link #disconnections}.
 */
private void cleanDisconnectedSwitches() {
	// Hoisted: sample the clock once instead of calling Calendar.getInstance()
	// on every loop iteration as the original did.
	final long now = Calendar.getInstance().getTimeInMillis();
	Set<Disconnection> to_remove = new HashSet<Disconnection>();
	for ( Disconnection dcn : this.disconnections ) {
		if ( now - dcn.getTimestamp() > 10000 /* 10 seconds */ ) {
			to_remove.add(dcn);
			long id = dcn.getSwitchId();
			// expire every link that touched one of the vanished switch's ports.
			for ( OFPort port : dcn.getPorts() ) {
				this.links.deleteLinksOnPort(new NodePortTuple(id, port));
			}
		}
	}
	this.disconnections.removeAll(to_remove);
}
/**
 * Schedules the quarantine worker, which runs every
 * {@code BDDP_TASK_INTERVAL} milliseconds and services the quarantine and
 * maintenance queues in bursts of at most {@code BDDP_TASK_SIZE} ports each.
 */
private void initiatePeriodicQuarantineWorker() {
	this.controller.scheduleTask(
			new IOFTask() {
				/**
				 * Processes the quarantine list in bursts; runs at most once
				 * per BDDP_TASK_INTERVAL. On each call up to BDDP_TASK_SIZE
				 * ports are drained from each queue and a BDDP probe is sent
				 * out of every one. Ports taken from the quarantine queue are
				 * then released by publishing a port-status update for them.
				 */
				public boolean execute() {
					int count = 0;
					Set<NodePortTuple> nptList = new HashSet<NodePortTuple>();
					// drain up to BDDP_TASK_SIZE quarantined ports and probe them.
					while(count < BDDP_TASK_SIZE && quarantineQueue.peek() !=null) {
						NodePortTuple npt;
						npt = quarantineQueue.remove();
						sendDiscoveryMessage(controller.getSwitch(npt.getNodeId()), npt.getPortId(), false, false);
						nptList.add(npt);
						count++;
					}
					count = 0;
					// probe up to BDDP_TASK_SIZE maintenance-queue ports as well;
					// these do NOT generate port-status updates.
					while (count < BDDP_TASK_SIZE && maintenanceQueue.peek() != null) {
						NodePortTuple npt;
						npt = maintenanceQueue.remove();
						sendDiscoveryMessage(controller.getSwitch(npt.getNodeId()), npt.getPortId(), false, false);
						count++;
					}
					// release quarantined ports by publishing their current status.
					for(NodePortTuple npt:nptList) {
						generateSwitchPortStatusUpdate(controller.getSwitch(npt.getNodeId()), npt.getPortId());
					}
					return true;
				}
			},
			0,
			BDDP_TASK_INTERVAL /* milliseconds */);
}
/**
 * Queues a PORT_UP/PORT_DOWN update reflecting the current state of the given
 * switch port. A port counts as down when its state contains STP_BLOCK,
 * LINK_DOWN, or BLOCKED. Does nothing if the switch or its port description
 * is unavailable.
 *
 * @param sw   the switch; may be null (ignored)
 * @param port the port to report on
 */
private void generateSwitchPortStatusUpdate(IOFSwitch sw, OFPort port) {
	if (sw == null) return;

	OFPortDesc desc = protocol.getPortInformation(sw, port);
	if (desc == null) return;

	Set<OFPortState> state = desc.getState();
	boolean portDown = state.contains(OFPortState.STP_BLOCK)
			|| state.contains(OFPortState.LINK_DOWN)
			|| state.contains(OFPortState.BLOCKED);
	UpdateOperation operation = portDown ? UpdateOperation.PORT_DOWN : UpdateOperation.PORT_UP;

	updates.add(new LDUpdate(sw.getId(), port, operation));
}
/**
 * Schedules a task that drains the {@link #updates} queue every 300 ms and
 * delivers each {@link LDUpdate} to the registered listeners, in listener
 * registration order. A listener failure is logged and does not stop the
 * drain loop.
 */
private void initiatePeriodicTopologyUpdate() {
	this.controller.scheduleTask(
			new IOFTask() {
				public boolean execute() {
					LDUpdate update;
					// Fix: was remove() + catch NoSuchElementException — exceptions
					// used as control flow. poll() returns null when drained.
					while ( (update = updates.poll()) != null ) {
						try {
							for (ILinkDiscoveryListener lda : linkDiscoveryAware) { // order maintained
								lda.linkDiscoveryUpdate(update);
							}
						}
						catch (Exception e) {
							// Fix: pass the throwable as the last bare argument so
							// SLF4J records the full stack trace (a "{}" placeholder
							// only logged e.toString()).
							logger.error("Error in link discovery updates loop", e);
						}
					}
					return true;
				}
			},
			0,
			300 /* milliseconds */);
}
/**
 * Adds a switch port to the quarantine queue unless it is already present.
 *
 * @param npt the switch port to quarantine
 */
@SuppressWarnings("unused")
private void addToQuarantineQueue(NodePortTuple npt) {
	if (!quarantineQueue.contains(npt)) {
		quarantineQueue.add(npt);
	}
}
/**
 * Removes every occurrence of the given switch port from the quarantine queue.
 *
 * @param npt the switch port to remove
 */
private void removeFromQuarantineQueue(NodePortTuple npt) {
	boolean removed = true;
	while (removed) {
		removed = quarantineQueue.remove(npt);
	}
}
/**
 * Adds a switch port to the maintenance queue unless it is already present.
 *
 * TODO: the linear contains() scan may become an issue for a very large
 * number of switch ports in the network.
 *
 * @param npt {@link NodePortTuple} to enqueue
 */
private void addToMaintenanceQueue(NodePortTuple npt) {
	if (!maintenanceQueue.contains(npt)) {
		maintenanceQueue.add(npt);
	}
}
/**
 * Removes every occurrence of the given switch port from the maintenance queue.
 *
 * @param npt {@link NodePortTuple} to remove
 */
private void removeFromMaintenanceQueue(NodePortTuple npt) {
	boolean removed = true;
	while (removed) {
		removed = maintenanceQueue.remove(npt);
	}
}
/**
 * Classifies a link: DIRECT when its unicast (LLDP) probe is still valid,
 * MULTIHOP when only its multicast (BDDP) probe is valid, INVALID otherwise
 * or when either argument is null.
 *
 * @param lt   the link to classify; may be null
 * @param info state information for the link; may be null
 * @return the link type
 */
@Override
public ILinkDiscovery.LinkType getLinkType(Link lt, LinkInfo info) {
	if ( lt != null && info != null ) {
		if (info.getUnicastValidTime() != null) {
			return ILinkDiscovery.LinkType.DIRECT_LINK;
		}
		if (info.getMulticastValidTime() != null) {
			return ILinkDiscovery.LinkType.MULTIHOP_LINK;
		}
	}
	return ILinkDiscovery.LinkType.INVALID_LINK;
}
/**
 * One tick of the discovery loop: expires stale links, advances the LLDP
 * clock, and probes all ports once every {@code LLDP_TO_ALL_INTERVAL} ticks.
 */
private void discoverLinks() {
	// expire known links that have not been refreshed recently.
	this.links.timeoutLinks();

	// advance the LLDP clock; every LLDP_TO_ALL_INTERVAL ticks it wraps to 0
	// and triggers a full sweep of all switch ports.
	lldpClock = (lldpClock + 1) % LLDP_TO_ALL_INTERVAL;
	if (lldpClock == 0) {
		discoverOnAllPorts();
	}
}
/**
 * Maps a port's state flags to PORT_UP or PORT_DOWN. A port is down when its
 * state contains STP_BLOCK, LINK_DOWN, or BLOCKED.
 *
 * @param srcPortState the port's state flags
 * @return PORT_UP or PORT_DOWN
 */
private UpdateOperation getUpdateOperation(Set<OFPortState> srcPortState) {
	boolean down = srcPortState.contains(OFPortState.STP_BLOCK)
			|| srcPortState.contains(OFPortState.LINK_DOWN)
			|| srcPortState.contains(OFPortState.BLOCKED);
	return down ? UpdateOperation.PORT_DOWN : UpdateOperation.PORT_UP;
}
/**
 * Maps a link's endpoint states to LINK_UPDATED (both ports up) or
 * LINK_REMOVED (either port blocked/down).
 *
 * @param srcPortState state flags of the source port
 * @param dstPortState state flags of the destination port
 * @return LINK_UPDATED or LINK_REMOVED
 */
private UpdateOperation getUpdateOperation(Set<OFPortState> srcPortState, Set<OFPortState> dstPortState) {
	// reuse the single-port classification for each endpoint.
	boolean bothUp = getUpdateOperation(srcPortState) == UpdateOperation.PORT_UP
			&& getUpdateOperation(dstPortState) == UpdateOperation.PORT_UP;
	return bothUp ? UpdateOperation.LINK_UPDATED : UpdateOperation.LINK_REMOVED;
}
/**
 * Sends a standard LLDP probe out of every enabled port of every connected
 * switch, and puts each probed port on the maintenance queue.
 */
private void discoverOnAllPorts() {
	for ( IOFSwitch sw : controller.getSwitches() ) {
		if ( sw == null || !sw.isConnected() ) {
			continue;
		}
		Collection<OFPortDesc> enabledPorts = protocol.getEnabledPorts(sw);
		if ( enabledPorts == null ) {
			continue;
		}
		for ( OFPortDesc port : enabledPorts ) {
			sendDiscoveryMessage(sw, port.getPortNo(), true, false);
			// keep the port under periodic maintenance probing as well.
			addToMaintenanceQueue(new NodePortTuple(sw.getId(), port.getPortNo()));
		}
	}
}
/**
 * Send a link discovery message out of a given switch port.
 * The discovery message may be a standard LLDP or a modified LLDP (BDDP,
 * BSN-encapsulated) frame.
 *
 * TODO: The modified LLDP will be updated in the future and may use a different eth-type.
 *
 * @param sw         switch to probe from; treated as a no-op when null
 * @param port       outgoing port (CONTROLLER and LOCAL ports are skipped)
 * @param isStandard true for standard LLDP, false for the modified (BDDP) form
 * @param isReverse  whether this LLDP is sent as a response to a received one
 * @return false only when the write to the switch's output stream fails;
 *         true in every skipped / no-op case
 */
private boolean sendDiscoveryMessage(IOFSwitch sw, OFPort port, boolean isStandard, boolean isReverse) {
    if (sw == null) {
        return true;
    }
    // Never probe the virtual CONTROLLER/LOCAL ports.
    if (port == OFPort.CONTROLLER || port == OFPort.LOCAL) {
        return true;
    }
    OFPortDesc ofpPort = protocol.getPortInformation(sw, port);
    if (ofpPort == null) {
        logger.error("sw: {}, port: {} is null", sw.getId(), port.getPortNumber());
        return true;
    }
    if (ofpPort.getHwAddr() == null) {
        // FIX: bail out here. The code below dereferences the hardware
        // address, so falling through (as before) caused a NullPointerException.
        logger.error("switch {} might be already removed", sw.getId());
        return true;
    }
    // using "nearest customer bridge" MAC address for broadest possible propagation
    // through provider and TPMR bridges (see IEEE 802.1AB-2009 and 802.1Q-2011),
    // in particular the Linux bridge which behaves mostly like a provider bridge
    byte[] chassisId = new byte[] {4, 0, 0, 0, 0, 0, 0}; // filled in later
    byte[] portId = new byte[] {2, 0, 0}; // filled in later
    byte[] ttlValue = new byte[] {0, 0x78};
    // OpenFlow OUI - 00-26-E1
    byte[] dpidTLVValue = new byte[] {0x0, 0x26, (byte) 0xe1, 0, 0, 0, 0, 0, 0, 0, 0, 0};
    LLDPTLV dpidTLV = new LLDPTLV().setType((byte) 127).setLength((short) dpidTLVValue.length).setValue(dpidTLVValue);
    byte[] dpidArray = new byte[8];
    ByteBuffer dpidBB = ByteBuffer.wrap(dpidArray);
    ByteBuffer portBB = ByteBuffer.wrap(portId, 1, 2);
    Long dpid = sw.getId();
    dpidBB.putLong(dpid);
    // NOTE(review): the previous code copied the last 6 dpid bytes into the
    // array returned by ofpPort.getHwAddr().getBytes(); getBytes() returns a
    // fresh copy, so that write had no effect and has been removed. The
    // Ethernet source MAC is set directly from the port's hardware address below.
    // set the chassis id's value to last 6 bytes of dpid
    System.arraycopy(dpidArray, 2, chassisId, 1, 6);
    // set the optional tlv to the full dpid
    System.arraycopy(dpidArray, 0, dpidTLVValue, 4, 8);
    // set the portId to the outgoing port
    portBB.putShort(port.getShortPortNumber());
    LLDP lldp = new LLDP();
    lldp.setChassisId(new LLDPTLV().setType((byte) 1).setLength((short) chassisId.length).setValue(chassisId));
    lldp.setPortId(new LLDPTLV().setType((byte) 2).setLength((short) portId.length).setValue(portId));
    lldp.setTtl(new LLDPTLV().setType((byte) 3).setLength((short) ttlValue.length).setValue(ttlValue));
    lldp.getOptionalTLVList().add(dpidTLV);
    // Add the controller identifier to the TLV value.
    lldp.getOptionalTLVList().add(controllerTLV);
    // Mark the probe direction so the receiver knows whether to answer.
    if (isReverse) {
        lldp.getOptionalTLVList().add(reverseTLV);
    } else {
        lldp.getOptionalTLVList().add(forwardTLV);
    }
    Ethernet ethernet;
    if (isStandard) {
        ethernet = new Ethernet()
                .setSourceMACAddress(ofpPort.getHwAddr().getBytes())
                .setDestinationMACAddress(LLDP_STANDARD_DST_MAC_STRING)
                .setEtherType(Ethernet.TYPE_LLDP);
        ethernet.setPayload(lldp);
    } else {
        // Non-standard (BigSwitch BDDP) probe: wrap the LLDP in a BSN header.
        BSN bsn = new BSN(BSN.BSN_TYPE_BDDP);
        bsn.setPayload(lldp);
        ethernet = new Ethernet()
                .setSourceMACAddress(ofpPort.getHwAddr().getBytes())
                .setDestinationMACAddress(LLDP_BSN_DST_MAC_STRING)
                .setEtherType(Ethernet.TYPE_BSN);
        ethernet.setPayload(bsn);
    }
    // serialize and wrap in a packet out
    byte[] data = ethernet.serialize();
    OFFactory fac = OFFactories.getFactory(sw.getVersion());
    OFPacketOut.Builder po = fac.buildPacketOut();
    po.setBufferId(OFBufferId.NO_BUFFER);
    if (sw.getVersion() == OFVersion.OF_10) {
        po.setInPort(OFPort.ANY /* OpenFlow 1.0 calls this 'None' */);
    } else {
        po.setInPort(OFPort.CONTROLLER); // packet-out is created by the controller.
    }
    List<OFAction> actions = new ArrayList<OFAction>();
    OFActionOutput.Builder actionOutput = fac.actions().buildOutput();
    actions.add(actionOutput.setPort(port).setMaxLen(0).build());
    po.setData(data).setActions(actions);
    return sw.getConnection().write(po.build());
}
/**
 * Called once the OpenFlow handshake with a switch completes: probe every
 * enabled port, clear stale disconnection records for this switch, and
 * publish a SWITCH_UPDATED event.
 */
@Override
protected boolean handleHandshakedEvent(Connection conn, MessageContext context) {
    IOFSwitch sw = conn.getSwitch();
    if (sw == null) {
        logger.error("CRITICAL: switch is null for connection");
        return false;
    }
    // Kick off discovery on each currently-enabled port.
    if (protocol.getEnabledPorts(sw) != null) {
        for (OFPort port : protocol.getEnabledPortNumbers(sw)) {
            processNewPort(sw, port);
        }
    }
    // The switch is back: drop any disconnection records kept for it.
    Set<Disconnection> reconnected = new HashSet<Disconnection>();
    for (Disconnection record : this.disconnections) {
        if (record.getSwitchId() == sw.getId()) {
            reconnected.add(record);
        }
    }
    this.disconnections.removeAll(reconnected);
    updates.add(new LDUpdate(sw.getId(), SwitchType.BASIC_SWITCH, UpdateOperation.SWITCH_UPDATED));
    return true;
}
/**
 * Record that a switch connection went away so that links can be aged out.
 */
@Override
protected boolean handleDisconnect(Connection conn) {
    try {
        Disconnection record = new Disconnection(conn.getSwitch());
        this.disconnections.add(record);
    } catch (Exception ignored) {
        // The connection dropped before FEATURES_REPLY completed, so no
        // switch object exists yet; there is nothing to record.
    }
    return true;
}
/**
 * Dispatch incoming OpenFlow messages: only PACKET_IN and PORT_STATUS are
 * interesting to link discovery; everything else passes through untouched.
 */
@Override
protected boolean handleMessage(Connection conn, MessageContext context, OFMessage msg, List<OFMessage> outgoing) {
    IOFSwitch sw = conn.getSwitch();
    switch (msg.getType()) {
    case PORT_STATUS:
        return handlePortStatus(sw, context, (OFPortStatus) msg, outgoing);
    case PACKET_IN:
        return handlePacketIn(sw, context, (OFPacketIn) msg, outgoing);
    default:
        // Not link-discovery traffic; let other modules see it.
        return true;
    }
}
/**
 * Handle a PACKET_IN message for link discovery.
 *
 * Standard LLDP frames and BSN-encapsulated LLDP (BDDP) frames are decoded
 * and handed to {@link #handleLldp(LLDP, IOFSwitch, OFPacketIn, boolean, List)}.
 * Non-discovery frames addressed to a link-local destination, or arriving on
 * a quarantined port, are consumed (return false); everything else is passed
 * on to other modules (return true).
 *
 * @param sw IOFSwitch object
 * @param context MessageContext object
 * @param pi packet-in message
 * @param outgoing messages to send to switch in result
 * @return true to let other modules process the packet, false to consume it
 */
private boolean handlePacketIn(IOFSwitch sw, MessageContext context, OFPacketIn pi, List<OFMessage> outgoing) {
    Ethernet eth = (Ethernet) context.get(MessageContext.ETHER_PAYLOAD);
    if (eth == null) {
        // Ethernet header not parsed yet: do it once and cache in the context.
        eth = new Ethernet();
        eth.deserialize(pi.getData(), 0, pi.getData().length);
        context.put(MessageContext.ETHER_PAYLOAD, eth);
    }
    if (eth.getEtherType() == Ethernet.TYPE_LLDP) {
        return handleLldp((LLDP) eth.getPayload(), sw, pi, true, outgoing);
    }
    if (eth.getEtherType() == Ethernet.TYPE_BSN) {
        BSN bsn = (BSN) eth.getPayload();
        if (bsn == null || bsn.getPayload() == null) {
            return false;
        }
        if (!(bsn.getPayload() instanceof LLDP)) {
            // A BSN packet that is not a BDDP; not for this module.
            return true;
        }
        return handleLldp((LLDP) bsn.getPayload(), sw, pi, false, outgoing);
    }
    if (eth.getEtherType() < 1500) {
        long destMac = eth.getDestinationMAC().toLong();
        if ((destMac & LINK_LOCAL_MASK) == LINK_LOCAL_VALUE) {
            // Link-local 802.2 frame: suppress further processing.
            return false;
        }
    }
    // Drop anything arriving on a quarantined port.
    if (quarantineQueue.contains(new NodePortTuple(sw.getId(), pi.getInPort()))) {
        return false;
    }
    // this packet-in is not for me
    return true;
}
/**
* From the LLDP packet received, this method first extract following information:
*
* <ol>
* <li> remote switch information
* <li> controller id (if this message is created by a controller)
* <li> direction of the message
* </ol>
*
* The business logic of this method are as follows:
*
* <ol>
* <li> if the message is a standard LLDP which is not created by this controller, then drop it.
* <li> if the message is a non-standard LLDP which is not created by this controller, then return true
* to allow the further processing. (maybe forwarding?)
* <li> if the remote switch or one of the sides of the link port is not enabled, then drop it.
* <li> or, call {@link Links#addOrUpdateLink(long, short, OFPhysicalPort, long, int, OFPhysicalPort, boolean, boolean)},
* remove the node & port pair (both side) from the Quarantine and Maintenance Queue and drop the message.
* </ol>
*
* @param lldp LLDP packet to process
* @param sw the switch that the lldp packet is received
* @param pi OFPacketIn message itself
* @param isStandard true(standard LLDP), or false
* @param outgoing list of OFMessage objects to be delivered to switches after this method ends the execution
* @return true to further process the message, or false.
*/
private boolean handleLldp(LLDP lldp, IOFSwitch sw, OFPacketIn pi, boolean isStandard, List<OFMessage> outgoing) {
// If LLDP is suppressed on this port, ignore received packet as well
if (sw == null) {
return false;
}
// If this is a malformed LLDP, or not from us, exit.
// (probes built by sendDiscoveryMessage always carry a 3-byte port-id TLV)
if (lldp.getPortId() == null || lldp.getPortId().getLength() != 3) {
return true;
}
// Identifier of this controller, read back from the TLV we attach to probes.
long myId = ByteBuffer.wrap(controllerTLV.getValue()).getLong();
long otherId = 0;
boolean myLLDP = false;
// Tri-state: stays null when the packet carries no direction TLV.
Boolean isReverse = null;
// Remote port number lives in bytes 1..2 of the port-id TLV value.
ByteBuffer portBB = ByteBuffer.wrap(lldp.getPortId().getValue());
portBB.position(1);
OFPort remotePort = OFPort.of(portBB.getShort());
IOFSwitch remoteSwitch = null;
// Verify this LLDP packet matches what we're looking for
for (LLDPTLV lldptlv : lldp.getOptionalTLVList()) {
// '127' means 'Organizationally specific TLV'.
// OpenFlow OUI 00-26-E1 with subtype 0 carries the remote datapath id.
if (lldptlv.getType() == 127 && lldptlv.getLength() == 12 &&
lldptlv.getValue()[0] == 0x0 && lldptlv.getValue()[1] == 0x26 &&
lldptlv.getValue()[2] == (byte)0xe1 && lldptlv.getValue()[3] == 0x0) {
ByteBuffer dpidBB = ByteBuffer.wrap(lldptlv.getValue());
// dpid starts at offset 4 (after the 3-byte OUI + 1-byte subtype).
remoteSwitch = this.controller.getSwitch(dpidBB.getLong(4));
}
// if type is set to 12 (0x0c), it means the TLV is created by the controller
// (controllerTLV).
else if (lldptlv.getType() == 12 && lldptlv.getLength() == 8){
otherId = ByteBuffer.wrap(lldptlv.getValue()).getLong();
if (myId == otherId)
myLLDP = true;
}
else if (lldptlv.getType() == TLV_DIRECTION_TYPE &&
lldptlv.getLength() == TLV_DIRECTION_LENGTH) {
if (lldptlv.getValue()[0] == TLV_DIRECTION_VALUE_FORWARD[0])
isReverse = false;
else if (lldptlv.getValue()[0] == TLV_DIRECTION_VALUE_REVERSE[0])
isReverse = true;
}
}
if (myLLDP == false) {
// This is not the LLDP sent by this controller.
// If the LLDP message has multicast bit set, then we need to broadcast
// the packet as a regular packet.
if (isStandard) {
// if the packet is not standard LLDP (also not created by the controller)
// then we suppress the further processing of this packet.
return false;
}
else if (myId < otherId) {
// if this LLDP is created by the other controller
// and myId value is smaller than the ID of the controller,
// we allow the further processing of this LLDP.
return true;
}
}
if (remoteSwitch == null || remoteSwitch == sw ) {
// process no further
// (unknown remote switch, or a self-probe seen on the same switch object)
return false;
}
if ( !protocol.portEnabled(remoteSwitch, remotePort) ) {
// process no further
return false;
}
OFPort inputPort = getInputPort(pi);
if ( !protocol.portEnabled(sw, inputPort) ) {
// process no further
return false;
}
// Record (or refresh) the discovered link: remote switch/port -> local switch/port.
Link lt = this.links.addOrUpdateLink(
remoteSwitch.getId(), // remote switch id
remotePort, // remote port number
protocol.getPortInformation(remoteSwitch, remotePort), // remote physical port
sw.getId(), // local switch id
inputPort, // local port number (the port this LLDP arrived on)
protocol.getPortInformation(sw, inputPort), // local physical port
isStandard,
isReverse
);
// Remove the node ports from the quarantine and maintenance queues.
NodePortTuple nptSrc = new NodePortTuple(lt.getSrc(), lt.getSrcPort());
NodePortTuple nptDst = new NodePortTuple(lt.getDst(), lt.getDstPort());
removeFromQuarantineQueue(nptSrc);
removeFromMaintenanceQueue(nptSrc);
removeFromQuarantineQueue(nptDst);
removeFromMaintenanceQueue(nptDst);
// Consume this message
return false;
}
/**
 * Handle a PORT_STATUS message.
 *
 * <ol>
 * <li> if the message reports a deleted port, or a modified port that is
 * down or configured down, every link on that port is removed via
 * {@link Links#deleteLinksOnPort(NodePortTuple)}.
 * <li> otherwise, for a MODIFY, the stored link status is refreshed via
 * {@link Links#updatePortStatus(Long, int, OFPortStatus)}.
 * <li> finally, if no link was deleted, the port is treated as (possibly)
 * new and probed right away via {@link #processNewPort(IOFSwitch, int)}
 * for faster cluster-merge.
 * </ol>
 *
 * @param sw IOFSwitch object
 * @param context MessageContext object
 * @param ps OFPortStatus object
 * @param outgoing messages to send to switch in result
 * @return true if correctly handled, false otherwise
 */
private boolean handlePortStatus(IOFSwitch sw, MessageContext context, OFPortStatus ps, List<OFMessage> outgoing) {
    if (sw == null) {
        return false;
    }
    logger.debug("OFPortStatus received by OFMLinkDiscovery ={}", ps);
    OFPort portnum = ps.getDesc().getPortNo();
    NodePortTuple npt;
    try {
        npt = new NodePortTuple(sw.getId(), portnum);
    } catch (RuntimeException e) {
        // Features reply has not yet been set.
        return false;
    }
    boolean linkDeleted = false;
    // A DELETE, or a MODIFY that leaves the port disabled, tears down
    // every link attached to this port.
    if (ps.getReason() == OFPortReason.DELETE ||
            (ps.getReason() == OFPortReason.MODIFY && !protocol.portEnabled(ps.getDesc()))) {
        this.links.deleteLinksOnPort(npt);
        linkDeleted = true;
    } else if (ps.getReason() == OFPortReason.MODIFY) {
        // Port stays enabled but its state changed; refresh stored link info.
        // (the previous 'linkInfoChanged' flag was never used and has been removed)
        links.updatePortStatus(sw.getId(), portnum, ps);
    }
    if (!linkDeleted) {
        // Send LLDP right away when port state is changed for faster
        // cluster-merge. If it is a link delete then there is no need to
        // send the LLDPs right away; they go out on the normal timer.
        processNewPort(sw, portnum);
    }
    return true;
}
/**
 * Handle a newly seen (or re-enabled) switch port: immediately send a
 * standard LLDP probe and schedule the port for BDDP maintenance probing.
 *
 * @param sw      the switch owning the port; ignored when null
 * @param portnum the port to probe
 */
private void processNewPort(IOFSwitch sw, OFPort portnum) {
    if (sw == null) {
        return;
    }
    sendDiscoveryMessage(sw, portnum, true, false);
    // Ensure BDDP packets also go out on this port later.
    addToMaintenanceQueue(new NodePortTuple(sw.getId(), portnum));
}
/**
 * @return a human-readable dump of the links this module currently tracks
 */
@Override
public String toString() {
    return this.links.getStringRepresentation();
}
/**
 * @return a display-friendly view of every link currently known
 */
@Override
public List<PrettyLink> getPrettyLinks() {
    List<PrettyLink> result = new LinkedList<PrettyLink>();
    for (Link link : this.links.getLinks().keySet()) {
        result.add(new PrettyLink(link));
    }
    return result;
}
/**
 * Collect the links whose source endpoint is the given switch.
 *
 * @param switchId datapath id of the source switch; null yields an empty list
 * @return links originating at that switch (possibly empty, never null)
 */
@Override
public List<PrettyLink> getSwitchLinks(Long switchId) {
    List<PrettyLink> list = new LinkedList<PrettyLink>();
    if (switchId == null) {
        return list;
    }
    for (Link l : this.links.getLinks().keySet()) {
        // Compare by primitive value: '==' against a boxed Long would be a
        // reference comparison for ids outside the Integer cache range.
        if (l.getSrc() == switchId.longValue()) {
            list.add(new PrettyLink(l));
        }
    }
    return list;
}
/**
 * Find the link leaving the given switch on the given output port.
 *
 * @return the matching link, or null when none exists
 */
@Override
public PrettyLink getOutLink(Long switchId, int outPort) {
    for (PrettyLink candidate : getSwitchLinks(switchId)) {
        if (candidate.getSrcPort().getPortNumber() == outPort) {
            return candidate;
        }
    }
    return null;
}
@Override
public Set<NodePortTuple> getSuppressLLDPsInfo() {
// TODO Auto-generated method stub
// NOTE(review): LLDP suppression is not implemented in this module;
// callers currently receive null rather than an empty set.
return null;
}
@Override
public void AddToSuppressLLDPs(long sw, int port) {
// TODO Auto-generated method stub
// Intentionally a no-op: suppression is not supported yet.
}
@Override
public void RemoveFromSuppressLLDPs(long sw, int port) {
// TODO Auto-generated method stub
// Intentionally a no-op: suppression is not supported yet.
}
@Override
public Set<Integer> getQuarantinedPorts(long sw) {
// TODO Auto-generated method stub
// NOTE(review): quarantine state exists internally (quarantineQueue) but
// is not exposed through this accessor; returns null.
return null;
}
@Override
public boolean isAutoPortFastFeature() {
// TODO Auto-generated method stub
// Auto-port-fast is not supported; always reports false.
return false;
}
@Override
public void setAutoPortFastFeature(boolean autoPortFastFeature) {
// TODO Auto-generated method stub
// Intentionally a no-op: auto-port-fast is not supported.
}
/**
 * Expose the link storage as this module's single REST-accessible model.
 */
@Override
public OFModel[] getModels() {
    OFModel[] models = { this.links };
    return models;
}
}
| |
package org.docksidestage.sqlite.dbflute.readonly.bsbhv;
import java.util.List;
import org.dbflute.*;
import org.dbflute.bhv.*;
import org.dbflute.bhv.readable.*;
import org.dbflute.bhv.referrer.*;
import org.dbflute.cbean.*;
import org.dbflute.cbean.chelper.HpSLSFunction;
import org.dbflute.cbean.result.*;
import org.dbflute.exception.*;
import org.dbflute.optional.OptionalEntity;
import org.dbflute.outsidesql.executor.*;
import org.docksidestage.sqlite.dbflute.readonly.exbhv.*;
import org.docksidestage.sqlite.dbflute.readonly.bsbhv.loader.*;
import org.docksidestage.sqlite.dbflute.readonly.exentity.*;
import org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.*;
import org.docksidestage.sqlite.dbflute.readonly.cbean.*;
/**
* The behavior of MEMBER as TABLE. <br>
* <pre>
* [primary key]
* MEMBER_ID
*
* [column]
* MEMBER_ID, MEMBER_NAME, MEMBER_ACCOUNT, MEMBER_STATUS_CODE, FORMALIZED_DATETIME, BIRTHDATE, MEMBER_REGISTER_DATETIME, MEMBER_REGISTER_USER, MEMBER_REGISTER_PROCESS, MEMBER_UPDATE_DATETIME, MEMBER_UPDATE_USER, MEMBER_UPDATE_PROCESS, VERSION_NO
*
* [sequence]
*
*
* [identity]
* MEMBER_ID
*
* [version-no]
* VERSION_NO
*
* [foreign table]
* MEMBER_STATUS, MEMBER_ADDRESS(AsValid), MEMBER_SECURITY(AsOne), MEMBER_WITHDRAWAL(AsOne)
*
* [referrer table]
* MEMBER_ADDRESS, MEMBER_LOGIN, MEMBER_SERVICE, PURCHASE, MEMBER_SECURITY, MEMBER_WITHDRAWAL
*
* [foreign property]
* memberStatus, memberAddressAsValid, memberSecurityAsOne, memberWithdrawalAsOne
*
* [referrer property]
* memberAddressList, memberLoginList, memberServiceList, purchaseList
* </pre>
* @author DBFlute(AutoGenerator)
*/
public abstract class RoyBsMemberBhv extends AbstractBehaviorReadable<RoyMember, RoyMemberCB> {
// ===================================================================================
// Definition
// ==========
/*df:beginQueryPath*/
/*df:endQueryPath*/
// ===================================================================================
// DB Meta
// =======
/** {@inheritDoc} */
public RoyMemberDbm asDBMeta() {
    return RoyMemberDbm.getInstance();
}

/** {@inheritDoc} */
public String asTableDbName() {
    return "MEMBER";
}

// ===================================================================================
//                                                                        New Instance
//                                                                        ============
/** {@inheritDoc} */
public RoyMemberCB newConditionBean() {
    return new RoyMemberCB();
}
// ===================================================================================
//                                                                        Count Select
//                                                                        ============
/**
 * Select the count of uniquely-selected records by the condition-bean built
 * in the lambda. {IgnorePagingCondition, IgnoreSpecifyColumn}<br>
 * SpecifyColumn is ignored but you can use it only to remove text type column for union's distinct.
 * @param cbLambda The callback for condition-bean of RoyMember. (NotNull)
 * @return The count for the condition. (NotMinus)
 */
public int selectCount(CBCall<RoyMemberCB> cbLambda) {
    RoyMemberCB cb = createCB(cbLambda);
    return facadeSelectCount(cb);
}

/**
 * Select the count of uniquely-selected records by the given condition-bean.
 * {IgnorePagingCondition, IgnoreSpecifyColumn}<br>
 * SpecifyColumn is ignored but you can use it only to remove text type column for union's distinct.
 * @param cb The condition-bean of RoyMember. (NotNull)
 * @return The count for the condition. (NotMinus)
 */
public int selectCount(RoyMemberCB cb) {
    return facadeSelectCount(cb);
}
// ===================================================================================
//                                                                       Entity Select
//                                                                       =============
/**
 * Select one entity by the condition-bean built in the lambda. <br>
 * The result is a not-null optional entity: call alwaysPresent() when the
 * row must exist by business rule, or ifPresent()/orElse() when it may not.
 * @param cbLambda The callback for condition-bean of RoyMember. (NotNull)
 * @return The optional entity selected by the condition. (NotNull: if no data, empty entity)
 * @throws EntityAlreadyDeletedException When get(), required() of return value is called and the value is null, which means entity has already been deleted (not found).
 * @throws EntityDuplicatedException When the entity has been duplicated.
 * @throws SelectEntityConditionNotFoundException When the condition for selecting an entity is not found.
 */
public OptionalEntity<RoyMember> selectEntity(CBCall<RoyMemberCB> cbLambda) {
    RoyMemberCB cb = createCB(cbLambda);
    return facadeSelectEntity(cb);
}

/**
 * Select one entity by the given condition-bean. <br>
 * The result is a not-null optional entity: call alwaysPresent() when the
 * row must exist by business rule, or ifPresent()/orElse() when it may not.
 * @param cb The condition-bean of RoyMember. (NotNull)
 * @return The optional entity selected by the condition. (NotNull: if no data, empty entity)
 * @throws EntityAlreadyDeletedException When get(), required() of return value is called and the value is null, which means entity has already been deleted (not found).
 * @throws EntityDuplicatedException When the entity has been duplicated.
 * @throws SelectEntityConditionNotFoundException When the condition for selecting an entity is not found.
 */
public OptionalEntity<RoyMember> selectEntity(RoyMemberCB cb) {
    return facadeSelectEntity(cb);
}

// Funnels every entity-select overload into the generic optional select.
protected OptionalEntity<RoyMember> facadeSelectEntity(RoyMemberCB cb) {
    return doSelectOptionalEntity(cb, typeOfSelectedEntity());
}

// Wraps the plain entity select result into an optional (empty when null).
protected <ENTITY extends RoyMember> OptionalEntity<ENTITY> doSelectOptionalEntity(RoyMemberCB cb, Class<? extends ENTITY> tp) {
    ENTITY selected = doSelectEntity(cb, tp);
    return createOptionalEntity(selected, cb);
}

// Framework entry point: reads one entity from a generic condition-bean.
protected Entity doReadEntity(ConditionBean cb) {
    return facadeSelectEntity(downcast(cb)).orElse(null);
}
/**
 * Select the entity by the condition-bean built in the lambda, with deleted
 * check: a missing row raises an exception instead of returning empty. <br>
 * <span style="color: #AD4747; font-size: 120%">If the data is always present as your business rule, this method is good.</span>
 * @param cbLambda The callback for condition-bean of RoyMember. (NotNull)
 * @return The entity selected by the condition. (NotNull: if no data, throws exception)
 * @throws EntityAlreadyDeletedException When the entity has already been deleted. (not found)
 * @throws EntityDuplicatedException When the entity has been duplicated.
 * @throws SelectEntityConditionNotFoundException When the condition for selecting an entity is not found.
 */
public RoyMember selectEntityWithDeletedCheck(CBCall<RoyMemberCB> cbLambda) {
    RoyMemberCB cb = createCB(cbLambda);
    return facadeSelectEntityWithDeletedCheck(cb);
}

/**
 * Select the entity by the given condition-bean, with deleted check: a
 * missing row raises an exception instead of returning empty. <br>
 * <span style="color: #AD4747; font-size: 120%">If the data is always present as your business rule, this method is good.</span>
 * @param cb The condition-bean of RoyMember. (NotNull)
 * @return The entity selected by the condition. (NotNull: if no data, throws exception)
 * @throws EntityAlreadyDeletedException When the entity has already been deleted. (not found)
 * @throws EntityDuplicatedException When the entity has been duplicated.
 * @throws SelectEntityConditionNotFoundException When the condition for selecting an entity is not found.
 */
public RoyMember selectEntityWithDeletedCheck(RoyMemberCB cb) {
    return facadeSelectEntityWithDeletedCheck(cb);
}
/**
 * Select the entity by the primary-key value.
 * @param memberId : PK, ID, NotNull, INTEGER(2000000000, 10), FK to MEMBER_ADDRESS. (NotNull)
 * @return The optional entity selected by the PK. (NotNull: if no data, empty entity)
 * @throws EntityAlreadyDeletedException When get(), required() of return value is called and the value is null, which means entity has already been deleted (not found).
 * @throws EntityDuplicatedException When the entity has been duplicated.
 * @throws SelectEntityConditionNotFoundException When the condition for selecting an entity is not found.
 */
public OptionalEntity<RoyMember> selectByPK(Integer memberId) {
    return facadeSelectByPK(memberId);
}

// Optional wrapper around the PK select for the facade layer.
protected OptionalEntity<RoyMember> facadeSelectByPK(Integer memberId) {
    return doSelectOptionalByPK(memberId, typeOfSelectedEntity());
}

// Plain PK select; returns null when no corresponding row exists.
protected <ENTITY extends RoyMember> ENTITY doSelectByPK(Integer memberId, Class<? extends ENTITY> tp) {
    RoyMemberCB cb = xprepareCBAsPK(memberId);
    return doSelectEntity(cb, tp);
}

// PK select wrapped into an optional; memberId is carried for error messages.
protected <ENTITY extends RoyMember> OptionalEntity<ENTITY> doSelectOptionalByPK(Integer memberId, Class<? extends ENTITY> tp) {
    ENTITY entity = doSelectByPK(memberId, tp);
    return createOptionalEntity(entity, memberId);
}

// Builds a condition-bean that accepts the given (non-null) primary key.
protected RoyMemberCB xprepareCBAsPK(Integer memberId) {
    assertObjectNotNull("memberId", memberId);
    return newConditionBean().acceptPK(memberId);
}
/**
 * Select the entity by the unique-key value.
 * @param memberAccount : UQ, NotNull, TEXT(2000000000, 10). (NotNull)
 * @return The optional entity selected by the unique key. (NotNull: if no data, empty entity)
 * @throws EntityAlreadyDeletedException When get(), required() of return value is called and the value is null, which means entity has already been deleted (not found).
 * @throws EntityDuplicatedException When the entity has been duplicated.
 * @throws SelectEntityConditionNotFoundException When the condition for selecting an entity is not found.
 */
public OptionalEntity<RoyMember> selectByUniqueOf(String memberAccount) {
    return facadeSelectByUniqueOf(memberAccount);
}

// Optional wrapper around the unique-key select for the facade layer.
protected OptionalEntity<RoyMember> facadeSelectByUniqueOf(String memberAccount) {
    return doSelectByUniqueOf(memberAccount, typeOfSelectedEntity());
}

// Unique-key select wrapped into an optional; the account is carried for error messages.
protected <ENTITY extends RoyMember> OptionalEntity<ENTITY> doSelectByUniqueOf(String memberAccount, Class<? extends ENTITY> tp) {
    RoyMemberCB cb = xprepareCBAsUniqueOf(memberAccount);
    ENTITY entity = doSelectEntity(cb, tp);
    return createOptionalEntity(entity, memberAccount);
}

// Builds a condition-bean that accepts the given (non-null) unique key.
protected RoyMemberCB xprepareCBAsUniqueOf(String memberAccount) {
    assertObjectNotNull("memberAccount", memberAccount);
    return newConditionBean().acceptUniqueOf(memberAccount);
}
// ===================================================================================
//                                                                         List Select
//                                                                         ===========
/**
 * Select the list as result bean, building the condition-bean in the lambda.
 * @param cbLambda The callback for condition-bean of RoyMember. (NotNull)
 * @return The result bean of selected list. (NotNull: if no data, returns empty list)
 * @throws DangerousResultSizeException When the result size is over the specified safety size.
 */
public ListResultBean<RoyMember> selectList(CBCall<RoyMemberCB> cbLambda) {
    RoyMemberCB cb = createCB(cbLambda);
    return facadeSelectList(cb);
}

/**
 * Select the list as result bean by the given condition-bean.
 * @param cb The condition-bean of RoyMember. (NotNull)
 * @return The result bean of selected list. (NotNull: if no data, returns empty list)
 * @throws DangerousResultSizeException When the result size is over the specified safety size.
 */
public ListResultBean<RoyMember> selectList(RoyMemberCB cb) {
    return facadeSelectList(cb);
}

/** {@inheritDoc} */
@Override
protected boolean isEntityDerivedMappable() {
    return true;
}
// ===================================================================================
//                                                                         Page Select
//                                                                         ===========
/**
 * Select the page as result bean, building the condition-bean in the lambda.
 * (both count-select and paging-select are executed; use cb.paging(size, page)
 * inside the lambda to set page size and current page number)
 * @param cbLambda The callback for condition-bean of RoyMember. (NotNull)
 * @return The result bean of selected page. (NotNull: if no data, returns bean as empty list)
 * @throws DangerousResultSizeException When the result size is over the specified safety size.
 */
public PagingResultBean<RoyMember> selectPage(CBCall<RoyMemberCB> cbLambda) {
    RoyMemberCB cb = createCB(cbLambda);
    return facadeSelectPage(cb);
}
/**
* Select the page as result bean. <br>
* (both count-select and paging-select are executed)
* <pre>
* RoyMemberCB cb = <span style="color: #70226C">new</span> RoyMemberCB();
* cb.query().setFoo...(value);
* cb.query().addOrderBy_Bar...();
* cb.<span style="color: #CC4747">paging</span>(20, 3); <span style="color: #3F7E5E">// 20 records per a page and current page number is 3</span>
* PagingResultBean<RoyMember> <span style="color: #553000">page</span> = <span style="color: #0000C0">memberBhv</span>.<span style="color: #CC4747">selectPage</span>(cb);
* <span style="color: #70226C">int</span> allRecordCount = <span style="color: #553000">page</span>.getAllRecordCount();
* <span style="color: #70226C">int</span> allPageCount = <span style="color: #553000">page</span>.getAllPageCount();
* <span style="color: #70226C">boolean</span> isExistPrePage = <span style="color: #553000">page</span>.isExistPrePage();
* <span style="color: #70226C">boolean</span> isExistNextPage = <span style="color: #553000">page</span>.isExistNextPage();
* ...
* <span style="color: #70226C">for</span> (RoyMember member : <span style="color: #553000">page</span>) {
* ... = member.get...();
* }
* </pre>
* @param cb The condition-bean of RoyMember. (NotNull)
* @return The result bean of selected page. (NotNull: if no data, returns bean as empty list)
* @throws DangerousResultSizeException When the result size is over the specified safety size.
*/
public PagingResultBean<RoyMember> selectPage(RoyMemberCB cb) {
return facadeSelectPage(cb);
}
// ===================================================================================
// Cursor Select
// =============
/**
* Select the cursor by the condition-bean.
* <pre>
* <span style="color: #0000C0">memberBhv</span>.<span style="color: #CC4747">selectCursor</span>(<span style="color: #553000">cb</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">cb</span>.query().set...
* }, <span style="color: #553000">member</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* ... = <span style="color: #553000">member</span>.getMemberName();
* });
* </pre>
* @param cbLambda The callback for condition-bean of RoyMember. (NotNull)
* @param entityLambda The handler of entity row of RoyMember. (NotNull)
*/
public void selectCursor(CBCall<RoyMemberCB> cbLambda, EntityRowHandler<RoyMember> entityLambda) {
facadeSelectCursor(createCB(cbLambda), entityLambda);
}
/**
* Select the cursor by the condition-bean.
* <pre>
* RoyMemberCB cb = <span style="color: #70226C">new</span> RoyMemberCB();
* cb.query().set...
* <span style="color: #0000C0">memberBhv</span>.<span style="color: #CC4747">selectCursor</span>(cb, <span style="color: #553000">member</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* ... = <span style="color: #553000">member</span>.getMemberName();
* });
* </pre>
* @param cb The condition-bean of RoyMember. (NotNull)
* @param entityRowHandler The handler of entity row of RoyMember. (NotNull)
*/
public void selectCursor(RoyMemberCB cb, EntityRowHandler<RoyMember> entityRowHandler) {
facadeSelectCursor(cb, entityRowHandler);
}
// ===================================================================================
// Scalar Select
// =============
/**
* Select the scalar value derived by a function from uniquely-selected records. <br>
* You should call a function method after this method called like as follows:
* <pre>
* <span style="color: #0000C0">memberBhv</span>.<span style="color: #CC4747">selectScalar</span>(Date.class).max(<span style="color: #553000">cb</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">cb</span>.specify().<span style="color: #CC4747">column...</span>; <span style="color: #3F7E5E">// required for the function</span>
* <span style="color: #553000">cb</span>.query().set...
* });
* </pre>
* @param <RESULT> The type of result.
* @param resultType The type of result. (NotNull)
* @return The scalar function object to specify function for scalar value. (NotNull)
*/
public <RESULT> HpSLSFunction<RoyMemberCB, RESULT> selectScalar(Class<RESULT> resultType) {
return facadeScalarSelect(resultType);
}
// ===================================================================================
// Sequence
// ========
@Override
protected Number doReadNextVal() {
String msg = "This table is NOT related to sequence: " + asTableDbName();
throw new UnsupportedOperationException(msg);
}
// ===================================================================================
// Load Referrer
// =============
/**
* Load referrer for the list by the referrer loader.
* <pre>
* List<Member> <span style="color: #553000">memberList</span> = <span style="color: #0000C0">memberBhv</span>.selectList(<span style="color: #553000">cb</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">cb</span>.query().set...
* });
* memberBhv.<span style="color: #CC4747">load</span>(<span style="color: #553000">memberList</span>, <span style="color: #553000">memberLoader</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">memberLoader</span>.<span style="color: #CC4747">loadPurchase</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">purchaseCB</span>.setupSelect...
* <span style="color: #553000">purchaseCB</span>.query().set...
* <span style="color: #553000">purchaseCB</span>.query().addOrderBy...
* }); <span style="color: #3F7E5E">// you can also load nested referrer from here</span>
* <span style="color: #3F7E5E">//}).withNestedReferrer(purchaseLoader -> {</span>
* <span style="color: #3F7E5E">// purchaseLoader.loadPurchasePayment(...);</span>
* <span style="color: #3F7E5E">//});</span>
*
* <span style="color: #3F7E5E">// you can also pull out foreign table and load its referrer</span>
* <span style="color: #3F7E5E">// (setupSelect of the foreign table should be called)</span>
* <span style="color: #3F7E5E">//memberLoader.pulloutMemberStatus().loadMemberLogin(...)</span>
* });
* <span style="color: #70226C">for</span> (Member member : <span style="color: #553000">memberList</span>) {
* List<Purchase> purchaseList = member.<span style="color: #CC4747">getPurchaseList()</span>;
* <span style="color: #70226C">for</span> (Purchase purchase : purchaseList) {
* ...
* }
* }
* </pre>
* About internal policy, the value of primary key (and others too) is treated as case-insensitive. <br>
* The condition-bean, which the set-upper provides, has order by FK before callback.
* @param memberList The entity list of member. (NotNull)
* @param loaderLambda The callback to handle the referrer loader for actually loading referrer. (NotNull)
*/
public void load(List<RoyMember> memberList, ReferrerLoaderHandler<RoyLoaderOfMember> loaderLambda) {
xassLRArg(memberList, loaderLambda);
loaderLambda.handle(new RoyLoaderOfMember().ready(memberList, _behaviorSelector));
}
/**
* Load referrer for the entity by the referrer loader.
* <pre>
* Member <span style="color: #553000">member</span> = <span style="color: #0000C0">memberBhv</span>.selectEntityWithDeletedCheck(<span style="color: #553000">cb</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> <span style="color: #553000">cb</span>.acceptPK(1));
* <span style="color: #0000C0">memberBhv</span>.<span style="color: #CC4747">load</span>(<span style="color: #553000">member</span>, <span style="color: #553000">memberLoader</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">memberLoader</span>.<span style="color: #CC4747">loadPurchase</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">purchaseCB</span>.setupSelect...
* <span style="color: #553000">purchaseCB</span>.query().set...
* <span style="color: #553000">purchaseCB</span>.query().addOrderBy...
* }); <span style="color: #3F7E5E">// you can also load nested referrer from here</span>
* <span style="color: #3F7E5E">//}).withNestedReferrer(purchaseLoader -> {</span>
* <span style="color: #3F7E5E">// purchaseLoader.loadPurchasePayment(...);</span>
* <span style="color: #3F7E5E">//});</span>
*
* <span style="color: #3F7E5E">// you can also pull out foreign table and load its referrer</span>
* <span style="color: #3F7E5E">// (setupSelect of the foreign table should be called)</span>
* <span style="color: #3F7E5E">//memberLoader.pulloutMemberStatus().loadMemberLogin(...)</span>
* });
* List<Purchase> purchaseList = <span style="color: #553000">member</span>.<span style="color: #CC4747">getPurchaseList()</span>;
* <span style="color: #70226C">for</span> (Purchase purchase : purchaseList) {
* ...
* }
* </pre>
* About internal policy, the value of primary key (and others too) is treated as case-insensitive. <br>
* The condition-bean, which the set-upper provides, has order by FK before callback.
* @param member The entity of member. (NotNull)
* @param loaderLambda The callback to handle the referrer loader for actually loading referrer. (NotNull)
*/
public void load(RoyMember member, ReferrerLoaderHandler<RoyLoaderOfMember> loaderLambda) {
xassLRArg(member, loaderLambda);
loaderLambda.handle(new RoyLoaderOfMember().ready(xnewLRAryLs(member), _behaviorSelector));
}
/**
* Load referrer of memberAddressList by the set-upper of referrer. <br>
* MEMBER_ADDRESS by MEMBER_ID, named 'memberAddressList'.
* <pre>
* <span style="color: #0000C0">memberBhv</span>.<span style="color: #CC4747">loadMemberAddress</span>(<span style="color: #553000">memberList</span>, <span style="color: #553000">addressCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">addressCB</span>.setupSelect...
* <span style="color: #553000">addressCB</span>.query().set...
* <span style="color: #553000">addressCB</span>.query().addOrderBy...
* }); <span style="color: #3F7E5E">// you can load nested referrer from here</span>
* <span style="color: #3F7E5E">//}).withNestedReferrer(referrerList -> {</span>
* <span style="color: #3F7E5E">// ...</span>
* <span style="color: #3F7E5E">//});</span>
* <span style="color: #70226C">for</span> (RoyMember member : <span style="color: #553000">memberList</span>) {
* ... = member.<span style="color: #CC4747">getMemberAddressList()</span>;
* }
* </pre>
* About internal policy, the value of primary key (and others too) is treated as case-insensitive. <br>
* The condition-bean, which the set-upper provides, has settings before callback as follows:
* <pre>
* cb.query().setMemberId_InScope(pkList);
* cb.query().addOrderBy_MemberId_Asc();
* </pre>
* @param memberList The entity list of member. (NotNull)
* @param refCBLambda The callback to set up referrer condition-bean for loading referrer. (NotNull)
* @return The callback interface which you can load nested referrer by calling withNestedReferrer(). (NotNull)
*/
public NestedReferrerListGateway<RoyMemberAddress> loadMemberAddress(List<RoyMember> memberList, ReferrerConditionSetupper<RoyMemberAddressCB> refCBLambda) {
xassLRArg(memberList, refCBLambda);
return doLoadMemberAddress(memberList, new LoadReferrerOption<RoyMemberAddressCB, RoyMemberAddress>().xinit(refCBLambda));
}
/**
* Load referrer of memberAddressList by the set-upper of referrer. <br>
* MEMBER_ADDRESS by MEMBER_ID, named 'memberAddressList'.
* <pre>
* <span style="color: #0000C0">memberBhv</span>.<span style="color: #CC4747">loadMemberAddress</span>(<span style="color: #553000">member</span>, <span style="color: #553000">addressCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">addressCB</span>.setupSelect...
* <span style="color: #553000">addressCB</span>.query().set...
* <span style="color: #553000">addressCB</span>.query().addOrderBy...
* }); <span style="color: #3F7E5E">// you can load nested referrer from here</span>
* <span style="color: #3F7E5E">//}).withNestedReferrer(referrerList -> {</span>
* <span style="color: #3F7E5E">// ...</span>
* <span style="color: #3F7E5E">//});</span>
* ... = <span style="color: #553000">member</span>.<span style="color: #CC4747">getMemberAddressList()</span>;
* </pre>
* About internal policy, the value of primary key (and others too) is treated as case-insensitive. <br>
* The condition-bean, which the set-upper provides, has settings before callback as follows:
* <pre>
* cb.query().setMemberId_InScope(pkList);
* cb.query().addOrderBy_MemberId_Asc();
* </pre>
* @param member The entity of member. (NotNull)
* @param refCBLambda The callback to set up referrer condition-bean for loading referrer. (NotNull)
* @return The callback interface which you can load nested referrer by calling withNestedReferrer(). (NotNull)
*/
public NestedReferrerListGateway<RoyMemberAddress> loadMemberAddress(RoyMember member, ReferrerConditionSetupper<RoyMemberAddressCB> refCBLambda) {
xassLRArg(member, refCBLambda);
return doLoadMemberAddress(xnewLRLs(member), new LoadReferrerOption<RoyMemberAddressCB, RoyMemberAddress>().xinit(refCBLambda));
}
protected NestedReferrerListGateway<RoyMemberAddress> doLoadMemberAddress(List<RoyMember> memberList, LoadReferrerOption<RoyMemberAddressCB, RoyMemberAddress> option) {
return helpLoadReferrerInternally(memberList, option, "memberAddressList");
}
/**
* Load referrer of memberLoginList by the set-upper of referrer. <br>
* MEMBER_LOGIN by MEMBER_ID, named 'memberLoginList'.
* <pre>
* <span style="color: #0000C0">memberBhv</span>.<span style="color: #CC4747">loadMemberLogin</span>(<span style="color: #553000">memberList</span>, <span style="color: #553000">loginCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">loginCB</span>.setupSelect...
* <span style="color: #553000">loginCB</span>.query().set...
* <span style="color: #553000">loginCB</span>.query().addOrderBy...
* }); <span style="color: #3F7E5E">// you can load nested referrer from here</span>
* <span style="color: #3F7E5E">//}).withNestedReferrer(referrerList -> {</span>
* <span style="color: #3F7E5E">// ...</span>
* <span style="color: #3F7E5E">//});</span>
* <span style="color: #70226C">for</span> (RoyMember member : <span style="color: #553000">memberList</span>) {
* ... = member.<span style="color: #CC4747">getMemberLoginList()</span>;
* }
* </pre>
* About internal policy, the value of primary key (and others too) is treated as case-insensitive. <br>
* The condition-bean, which the set-upper provides, has settings before callback as follows:
* <pre>
* cb.query().setMemberId_InScope(pkList);
* cb.query().addOrderBy_MemberId_Asc();
* </pre>
* @param memberList The entity list of member. (NotNull)
* @param refCBLambda The callback to set up referrer condition-bean for loading referrer. (NotNull)
* @return The callback interface which you can load nested referrer by calling withNestedReferrer(). (NotNull)
*/
public NestedReferrerListGateway<RoyMemberLogin> loadMemberLogin(List<RoyMember> memberList, ReferrerConditionSetupper<RoyMemberLoginCB> refCBLambda) {
xassLRArg(memberList, refCBLambda);
return doLoadMemberLogin(memberList, new LoadReferrerOption<RoyMemberLoginCB, RoyMemberLogin>().xinit(refCBLambda));
}
/**
* Load referrer of memberLoginList by the set-upper of referrer. <br>
* MEMBER_LOGIN by MEMBER_ID, named 'memberLoginList'.
* <pre>
* <span style="color: #0000C0">memberBhv</span>.<span style="color: #CC4747">loadMemberLogin</span>(<span style="color: #553000">member</span>, <span style="color: #553000">loginCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">loginCB</span>.setupSelect...
* <span style="color: #553000">loginCB</span>.query().set...
* <span style="color: #553000">loginCB</span>.query().addOrderBy...
* }); <span style="color: #3F7E5E">// you can load nested referrer from here</span>
* <span style="color: #3F7E5E">//}).withNestedReferrer(referrerList -> {</span>
* <span style="color: #3F7E5E">// ...</span>
* <span style="color: #3F7E5E">//});</span>
* ... = <span style="color: #553000">member</span>.<span style="color: #CC4747">getMemberLoginList()</span>;
* </pre>
* About internal policy, the value of primary key (and others too) is treated as case-insensitive. <br>
* The condition-bean, which the set-upper provides, has settings before callback as follows:
* <pre>
* cb.query().setMemberId_InScope(pkList);
* cb.query().addOrderBy_MemberId_Asc();
* </pre>
* @param member The entity of member. (NotNull)
* @param refCBLambda The callback to set up referrer condition-bean for loading referrer. (NotNull)
* @return The callback interface which you can load nested referrer by calling withNestedReferrer(). (NotNull)
*/
public NestedReferrerListGateway<RoyMemberLogin> loadMemberLogin(RoyMember member, ReferrerConditionSetupper<RoyMemberLoginCB> refCBLambda) {
xassLRArg(member, refCBLambda);
return doLoadMemberLogin(xnewLRLs(member), new LoadReferrerOption<RoyMemberLoginCB, RoyMemberLogin>().xinit(refCBLambda));
}
protected NestedReferrerListGateway<RoyMemberLogin> doLoadMemberLogin(List<RoyMember> memberList, LoadReferrerOption<RoyMemberLoginCB, RoyMemberLogin> option) {
return helpLoadReferrerInternally(memberList, option, "memberLoginList");
}
/**
* Load referrer of memberServiceList by the set-upper of referrer. <br>
* MEMBER_SERVICE by MEMBER_ID, named 'memberServiceList'.
* <pre>
* <span style="color: #0000C0">memberBhv</span>.<span style="color: #CC4747">loadMemberService</span>(<span style="color: #553000">memberList</span>, <span style="color: #553000">serviceCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">serviceCB</span>.setupSelect...
* <span style="color: #553000">serviceCB</span>.query().set...
* <span style="color: #553000">serviceCB</span>.query().addOrderBy...
* }); <span style="color: #3F7E5E">// you can load nested referrer from here</span>
* <span style="color: #3F7E5E">//}).withNestedReferrer(referrerList -> {</span>
* <span style="color: #3F7E5E">// ...</span>
* <span style="color: #3F7E5E">//});</span>
* <span style="color: #70226C">for</span> (RoyMember member : <span style="color: #553000">memberList</span>) {
* ... = member.<span style="color: #CC4747">getMemberServiceList()</span>;
* }
* </pre>
* About internal policy, the value of primary key (and others too) is treated as case-insensitive. <br>
* The condition-bean, which the set-upper provides, has settings before callback as follows:
* <pre>
* cb.query().setMemberId_InScope(pkList);
* cb.query().addOrderBy_MemberId_Asc();
* </pre>
* @param memberList The entity list of member. (NotNull)
* @param refCBLambda The callback to set up referrer condition-bean for loading referrer. (NotNull)
* @return The callback interface which you can load nested referrer by calling withNestedReferrer(). (NotNull)
*/
public NestedReferrerListGateway<RoyMemberService> loadMemberService(List<RoyMember> memberList, ReferrerConditionSetupper<RoyMemberServiceCB> refCBLambda) {
xassLRArg(memberList, refCBLambda);
return doLoadMemberService(memberList, new LoadReferrerOption<RoyMemberServiceCB, RoyMemberService>().xinit(refCBLambda));
}
/**
* Load referrer of memberServiceList by the set-upper of referrer. <br>
* MEMBER_SERVICE by MEMBER_ID, named 'memberServiceList'.
* <pre>
* <span style="color: #0000C0">memberBhv</span>.<span style="color: #CC4747">loadMemberService</span>(<span style="color: #553000">member</span>, <span style="color: #553000">serviceCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">serviceCB</span>.setupSelect...
* <span style="color: #553000">serviceCB</span>.query().set...
* <span style="color: #553000">serviceCB</span>.query().addOrderBy...
* }); <span style="color: #3F7E5E">// you can load nested referrer from here</span>
* <span style="color: #3F7E5E">//}).withNestedReferrer(referrerList -> {</span>
* <span style="color: #3F7E5E">// ...</span>
* <span style="color: #3F7E5E">//});</span>
* ... = <span style="color: #553000">member</span>.<span style="color: #CC4747">getMemberServiceList()</span>;
* </pre>
* About internal policy, the value of primary key (and others too) is treated as case-insensitive. <br>
* The condition-bean, which the set-upper provides, has settings before callback as follows:
* <pre>
* cb.query().setMemberId_InScope(pkList);
* cb.query().addOrderBy_MemberId_Asc();
* </pre>
* @param member The entity of member. (NotNull)
* @param refCBLambda The callback to set up referrer condition-bean for loading referrer. (NotNull)
* @return The callback interface which you can load nested referrer by calling withNestedReferrer(). (NotNull)
*/
public NestedReferrerListGateway<RoyMemberService> loadMemberService(RoyMember member, ReferrerConditionSetupper<RoyMemberServiceCB> refCBLambda) {
xassLRArg(member, refCBLambda);
return doLoadMemberService(xnewLRLs(member), new LoadReferrerOption<RoyMemberServiceCB, RoyMemberService>().xinit(refCBLambda));
}
protected NestedReferrerListGateway<RoyMemberService> doLoadMemberService(List<RoyMember> memberList, LoadReferrerOption<RoyMemberServiceCB, RoyMemberService> option) {
return helpLoadReferrerInternally(memberList, option, "memberServiceList");
}
/**
* Load referrer of purchaseList by the set-upper of referrer. <br>
* PURCHASE by MEMBER_ID, named 'purchaseList'.
* <pre>
* <span style="color: #0000C0">memberBhv</span>.<span style="color: #CC4747">loadPurchase</span>(<span style="color: #553000">memberList</span>, <span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">purchaseCB</span>.setupSelect...
* <span style="color: #553000">purchaseCB</span>.query().set...
* <span style="color: #553000">purchaseCB</span>.query().addOrderBy...
* }); <span style="color: #3F7E5E">// you can load nested referrer from here</span>
* <span style="color: #3F7E5E">//}).withNestedReferrer(referrerList -> {</span>
* <span style="color: #3F7E5E">// ...</span>
* <span style="color: #3F7E5E">//});</span>
* <span style="color: #70226C">for</span> (RoyMember member : <span style="color: #553000">memberList</span>) {
* ... = member.<span style="color: #CC4747">getPurchaseList()</span>;
* }
* </pre>
* About internal policy, the value of primary key (and others too) is treated as case-insensitive. <br>
* The condition-bean, which the set-upper provides, has settings before callback as follows:
* <pre>
* cb.query().setMemberId_InScope(pkList);
* cb.query().addOrderBy_MemberId_Asc();
* </pre>
* @param memberList The entity list of member. (NotNull)
* @param refCBLambda The callback to set up referrer condition-bean for loading referrer. (NotNull)
* @return The callback interface which you can load nested referrer by calling withNestedReferrer(). (NotNull)
*/
public NestedReferrerListGateway<RoyPurchase> loadPurchase(List<RoyMember> memberList, ReferrerConditionSetupper<RoyPurchaseCB> refCBLambda) {
xassLRArg(memberList, refCBLambda);
return doLoadPurchase(memberList, new LoadReferrerOption<RoyPurchaseCB, RoyPurchase>().xinit(refCBLambda));
}
/**
* Load referrer of purchaseList by the set-upper of referrer. <br>
* PURCHASE by MEMBER_ID, named 'purchaseList'.
* <pre>
* <span style="color: #0000C0">memberBhv</span>.<span style="color: #CC4747">loadPurchase</span>(<span style="color: #553000">member</span>, <span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">purchaseCB</span>.setupSelect...
* <span style="color: #553000">purchaseCB</span>.query().set...
* <span style="color: #553000">purchaseCB</span>.query().addOrderBy...
* }); <span style="color: #3F7E5E">// you can load nested referrer from here</span>
* <span style="color: #3F7E5E">//}).withNestedReferrer(referrerList -> {</span>
* <span style="color: #3F7E5E">// ...</span>
* <span style="color: #3F7E5E">//});</span>
* ... = <span style="color: #553000">member</span>.<span style="color: #CC4747">getPurchaseList()</span>;
* </pre>
* About internal policy, the value of primary key (and others too) is treated as case-insensitive. <br>
* The condition-bean, which the set-upper provides, has settings before callback as follows:
* <pre>
* cb.query().setMemberId_InScope(pkList);
* cb.query().addOrderBy_MemberId_Asc();
* </pre>
* @param member The entity of member. (NotNull)
* @param refCBLambda The callback to set up referrer condition-bean for loading referrer. (NotNull)
* @return The callback interface which you can load nested referrer by calling withNestedReferrer(). (NotNull)
*/
public NestedReferrerListGateway<RoyPurchase> loadPurchase(RoyMember member, ReferrerConditionSetupper<RoyPurchaseCB> refCBLambda) {
xassLRArg(member, refCBLambda);
return doLoadPurchase(xnewLRLs(member), new LoadReferrerOption<RoyPurchaseCB, RoyPurchase>().xinit(refCBLambda));
}
protected NestedReferrerListGateway<RoyPurchase> doLoadPurchase(List<RoyMember> memberList, LoadReferrerOption<RoyPurchaseCB, RoyPurchase> option) {
return helpLoadReferrerInternally(memberList, option, "purchaseList");
}
// ===================================================================================
// Pull out Relation
// =================
/**
* Pull out the list of foreign table 'RoyMemberStatus'.
* @param memberList The list of member. (NotNull, EmptyAllowed)
* @return The list of foreign table. (NotNull, EmptyAllowed, NotNullElement)
*/
public List<RoyMemberStatus> pulloutMemberStatus(List<RoyMember> memberList)
{ return helpPulloutInternally(memberList, "memberStatus"); }
/**
* Pull out the list of foreign table 'RoyMemberAddress'.
* @param memberList The list of member. (NotNull, EmptyAllowed)
* @return The list of foreign table. (NotNull, EmptyAllowed, NotNullElement)
*/
public List<RoyMemberAddress> pulloutMemberAddressAsValid(List<RoyMember> memberList)
{ return helpPulloutInternally(memberList, "memberAddressAsValid"); }
/**
* Pull out the list of referrer-as-one table 'RoyMemberSecurity'.
* @param memberList The list of member. (NotNull, EmptyAllowed)
* @return The list of referrer-as-one table. (NotNull, EmptyAllowed, NotNullElement)
*/
public List<RoyMemberSecurity> pulloutMemberSecurityAsOne(List<RoyMember> memberList)
{ return helpPulloutInternally(memberList, "memberSecurityAsOne"); }
/**
* Pull out the list of referrer-as-one table 'RoyMemberWithdrawal'.
* @param memberList The list of member. (NotNull, EmptyAllowed)
* @return The list of referrer-as-one table. (NotNull, EmptyAllowed, NotNullElement)
*/
public List<RoyMemberWithdrawal> pulloutMemberWithdrawalAsOne(List<RoyMember> memberList)
{ return helpPulloutInternally(memberList, "memberWithdrawalAsOne"); }
// ===================================================================================
// Extract Column
// ==============
/**
* Extract the value list of (single) primary key memberId.
* @param memberList The list of member. (NotNull, EmptyAllowed)
* @return The list of the column value. (NotNull, EmptyAllowed, NotNullElement)
*/
public List<Integer> extractMemberIdList(List<RoyMember> memberList)
{ return helpExtractListInternally(memberList, "memberId"); }
/**
* Extract the value list of (single) unique key memberAccount.
* @param memberList The list of member. (NotNull, EmptyAllowed)
* @return The list of the column value. (NotNull, EmptyAllowed, NotNullElement)
*/
public List<String> extractMemberAccountList(List<RoyMember> memberList)
{ return helpExtractListInternally(memberList, "memberAccount"); }
// ===================================================================================
// OutsideSql
// ==========
    /**
     * Prepare the all facade executor of outside-SQL to execute it.
     * <pre>
     * <span style="color: #3F7E5E">// main style</span>
     * memberBhv.outsideSql().selectEntity(pmb); <span style="color: #3F7E5E">// optional</span>
     * memberBhv.outsideSql().selectList(pmb); <span style="color: #3F7E5E">// ListResultBean</span>
     * memberBhv.outsideSql().selectPage(pmb); <span style="color: #3F7E5E">// PagingResultBean</span>
     * memberBhv.outsideSql().selectPagedListOnly(pmb); <span style="color: #3F7E5E">// ListResultBean</span>
     * memberBhv.outsideSql().selectCursor(pmb, handler); <span style="color: #3F7E5E">// (by handler)</span>
     * memberBhv.outsideSql().execute(pmb); <span style="color: #3F7E5E">// int (updated count)</span>
     * memberBhv.outsideSql().call(pmb); <span style="color: #3F7E5E">// void (pmb has OUT parameters)</span>
     *
     * <span style="color: #3F7E5E">// traditional style</span>
     * memberBhv.outsideSql().traditionalStyle().selectEntity(path, pmb, entityType);
     * memberBhv.outsideSql().traditionalStyle().selectList(path, pmb, entityType);
     * memberBhv.outsideSql().traditionalStyle().selectPage(path, pmb, entityType);
     * memberBhv.outsideSql().traditionalStyle().selectPagedListOnly(path, pmb, entityType);
     * memberBhv.outsideSql().traditionalStyle().selectCursor(path, pmb, handler);
     * memberBhv.outsideSql().traditionalStyle().execute(path, pmb);
     *
     * <span style="color: #3F7E5E">// options</span>
     * memberBhv.outsideSql().removeBlockComment().selectList()
     * memberBhv.outsideSql().removeLineComment().selectList()
     * memberBhv.outsideSql().formatSql().selectList()
     * </pre>
     * <p>The invoker of behavior command should be not null when you call this method.</p>
     * @return The new-created all facade executor of outside-SQL. (NotNull)
     */
    public OutsideSqlAllFacadeExecutor<RoyMemberBhv> outsideSql() {
        return doOutsideSql();
    }
// ===================================================================================
// Optimistic Lock Info
// ====================
@Override
protected boolean hasVersionNoValue(Entity et) { return downcast(et).getVersionNo() != null; }
@Override
protected boolean hasUpdateDateValue(Entity et) { return downcast(et).getMemberUpdateDatetime() != null; }
// ===================================================================================
// Type Helper
// ===========
protected Class<? extends RoyMember> typeOfSelectedEntity() { return RoyMember.class; }
protected Class<RoyMember> typeOfHandlingEntity() { return RoyMember.class; }
protected Class<RoyMemberCB> typeOfHandlingConditionBean() { return RoyMemberCB.class; }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Package
///////////////
package com.hp.hpl.jena.graph.compose;
// Imports
///////////////
import com.hp.hpl.jena.graph.*;
import com.hp.hpl.jena.graph.impl.WrappedBulkUpdateHandler;
import com.hp.hpl.jena.shared.*;
import com.hp.hpl.jena.util.iterator.*;
import java.util.*;
/**
* <p>
* A base class for composition graphs that are composed from zero or more
* sub-graphs (thus providing a basis for polyadic composition operators).
* A distinguished graph is the designated graph for additions to the union.
* By default, this is the first sub-graph of the composition, however any
* of the graphs in the composition can be nominated to be the distinguished
* graph.
* </p>
*
* @author Ian Dickinson, HP Labs
* (<a href="mailto:Ian.Dickinson@hp.com" >email</a>)
* @version CVS $Id: Polyadic.java,v 1.2 2009-08-08 11:25:31 andy_seaborne Exp $
*/
public abstract class Polyadic extends CompositionBase
{
    // Constants
    //////////////////////////////////

    // Static variables
    //////////////////////////////////

    // Instance variables
    //////////////////////////////////

    /** A list of the sub-graphs that this composition contains */
    protected List<Graph> m_subGraphs = new ArrayList<Graph>();

    /** The distinguished graph for adding to. If null, use the 0'th graph in the list. */
    protected Graph m_baseGraph = null;

    // Constructors
    //////////////////////////////////

    /**
     * <p>
     * Construct a composition of exactly no sub graphs.
     * </p>
     */
    public Polyadic() {
    }

    /**
     * <p>
     * Construct a composition of all of the given graphs.
     * </p>
     *
     * @param graphs An array of the sub-graphs of this composition
     */
    public Polyadic( Graph[] graphs) {
        // enhanced-for is clearer than the index loop; insertion order is preserved
        for (Graph graph : graphs) {
            m_subGraphs.add( graph );
        }
    }

    /** Lazily-created prefix mapping spanning the composition (see getPrefixMapping). */
    @SuppressWarnings("hiding")
    private PrefixMapping pm;

    @Override
    public PrefixMapping getPrefixMapping()
    {
        if (pm == null) pm = new PolyadicPrefixMappingImpl( this );
        return pm;
    }

    /**
     * <p>
     * Construct a composition of all of the given graphs.
     * </p>
     *
     * @param graphs An iterator of the sub-graphs of this composition. If graphs is
     * a closable iterator, it will be automatically closed.
     */
    public Polyadic( Iterator<Graph> graphs ) {
        while (graphs.hasNext()) {
            m_subGraphs.add( graphs.next() );
        }
        if (graphs instanceof ClosableIterator<?>) {
            ((ClosableIterator<Graph>) graphs).close();
        }
    }

    // External signature methods
    //////////////////////////////////

    /**
     * <p>
     * Close the graph by closing all of the sub-graphs.
     * </p>
     *
     * @see com.hp.hpl.jena.graph.Graph#close()
     */
    @Override
    public void close() {
        for (Graph graph : m_subGraphs) {
            graph.close();
        }
        super.close();
    }

    /**
     * <p>
     * Answer true if this graph contains the given graph as a sub-component.
     * </p>
     *
     * @param graph A graph to test
     * @return True if the graph is this graph, or is a sub-graph of this one.
     * @see com.hp.hpl.jena.graph.Graph#dependsOn(Graph)
     */
    @Override
    public boolean dependsOn( Graph graph ) {
        return (graph == this) || m_subGraphs.contains( graph );
    }

    /**
     * <p>
     * Add the given graph to this composition.
     * </p>
     *
     * @param graph A sub-graph to add to this composition
     */
    public void addGraph( Graph graph ) {
        m_subGraphs.add( graph );
    }

    /**
     * <p>
     * Remove the given graph from this composition. If the removed graph is the
     * designated updateable graph, the updateable graph goes back to the default
     * for this composition.
     * </p>
     *
     * @param graph A sub-graph to remove from this composition
     */
    public void removeGraph( Graph graph ) {
        // FIX: determine first whether the removed graph is the effective update
        // target (explicitly designated, or the implicit 0'th graph). Any cached
        // bulk-update handler wraps that graph's handler and would be stale once
        // the graph is removed, so it must be dropped (mirrors setBaseGraph).
        boolean wasUpdateTarget = (graph == getBaseGraph());
        m_subGraphs.remove( graph );
        if (m_baseGraph == graph) {
            m_baseGraph = null;
        }
        if (wasUpdateTarget) {
            bulkHandler = null; // lazily rebuilt against the new base graph
        }
    }

    /**
     * <p>
     * Answer the distinguished graph for the composition, which will be the graph
     * that receives triple adds and deletes. If no base graph is designated,
     * default to the first sub-graph in the composition.
     * </p>
     *
     * @return The distinguished updateable graph, or null if there are no graphs
     * in this composition
     */
    public Graph getBaseGraph() {
        if (m_baseGraph != null) {
            return m_baseGraph;
        }
        // no designated graph, so default to the first graph on the list
        return m_subGraphs.isEmpty() ? null : m_subGraphs.get( 0 );
    }

    /**
     * <p>
     * Answer the distinguished graph for the composition, which will be the graph
     * that receives triple adds and deletes. If no base graph is defined, throw
     * a {@link JenaException}.
     * </p>
     *
     * @return The distinguished updateable graph; never null
     * @throws JenaException if there is no base graph (i.e. the composition is empty
     * and no graph was designated)
     */
    public Graph getRequiredBaseGraph() {
        Graph base = getBaseGraph();
        if (base == null) {
            throw new JenaException( "This polyadic graph should have a base graph, but none is defined" );
        }
        return base;
    }

    /**
     * <p>
     * Set the designated updateable graph for this composition.
     * </p>
     *
     * @param graph One of the graphs currently in this composition to be the
     * designated graph to receive updates
     * @exception IllegalArgumentException if graph is not one of the members of
     * the composition
     */
    public void setBaseGraph( Graph graph ) {
        if (!m_subGraphs.contains( graph )) {
            throw new IllegalArgumentException( "The updateable graph must be one of the graphs from the composition" );
        }
        m_baseGraph = graph;
        bulkHandler = null; // any cached handler wrapped the previous base; rebuild lazily
    }

    /**
     * <p>
     * Answer a list of the graphs other than the updateable (base) graph
     * </p>
     *
     * @return A list of all of the sub-graphs, excluding the base graph.
     */
    public List<Graph> getSubGraphs() {
        List<Graph> sg = new ArrayList<Graph>( m_subGraphs );
        if (getBaseGraph() != null) {
            sg.remove( getBaseGraph() );
        }
        return sg;
    }

    /**
     * Answer the bulk-update handler for this composition, creating (and caching)
     * a wrapper around the required base graph's handler on first use.
     */
    @Override
    public BulkUpdateHandler getBulkUpdateHandler() {
        if (bulkHandler == null)
            bulkHandler = new WrappedBulkUpdateHandler( this, getRequiredBaseGraph().getBulkUpdateHandler() );
        return bulkHandler;
    }

    // the following methods all delegate handling capabilities to the base graph
    // TODO: this needs to be integrated with WrappedGraph, but we don't have time to do so before Jena 2.0 release

    /** Delegate transaction handling to the base graph, if one exists. */
    @Override
    public TransactionHandler getTransactionHandler() {
        return (getBaseGraph() == null) ? super.getTransactionHandler() : getBaseGraph().getTransactionHandler();
    }

    /** Delegate capability reporting to the base graph, if one exists. */
    @Override
    public Capabilities getCapabilities() {
        return (getBaseGraph() == null) ? super.getCapabilities() : getBaseGraph().getCapabilities();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.common.state;
import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.util.Preconditions;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.Serializable;
import java.util.EnumMap;
import static org.apache.flink.api.common.state.StateTtlConfig.CleanupStrategies.EMPTY_STRATEGY;
import static org.apache.flink.api.common.state.StateTtlConfig.IncrementalCleanupStrategy.DEFAULT_INCREMENTAL_CLEANUP_STRATEGY;
import static org.apache.flink.api.common.state.StateTtlConfig.RocksdbCompactFilterCleanupStrategy.DEFAULT_ROCKSDB_COMPACT_FILTER_CLEANUP_STRATEGY;
import static org.apache.flink.api.common.state.StateTtlConfig.StateVisibility.NeverReturnExpired;
import static org.apache.flink.api.common.state.StateTtlConfig.TtlTimeCharacteristic.ProcessingTime;
import static org.apache.flink.api.common.state.StateTtlConfig.UpdateType.OnCreateAndWrite;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
* Configuration of state TTL logic.
*
* <p>Note: The map state with TTL currently supports {@code null} user values only if the user
* value serializer can handle {@code null} values. If the serializer does not support {@code null}
* values, it can be wrapped with {@link
* org.apache.flink.api.java.typeutils.runtime.NullableSerializer} at the cost of an extra byte in
* the serialized form.
*/
@PublicEvolving
public class StateTtlConfig implements Serializable {
    private static final long serialVersionUID = -7592693245044289793L;
    /** A shared config instance with TTL disabled: state configured with it never expires. */
    public static final StateTtlConfig DISABLED =
            newBuilder(Time.milliseconds(Long.MAX_VALUE))
                    .setUpdateType(UpdateType.Disabled)
                    .build();
    /**
     * This option value configures when to update last access timestamp which prolongs state TTL.
     */
    public enum UpdateType {
        /** TTL is disabled. State does not expire. */
        Disabled,
        /**
         * Last access timestamp is initialised when state is created and updated on every write
         * operation.
         */
        OnCreateAndWrite,
        /** The same as <code>OnCreateAndWrite</code> but also updated on read. */
        OnReadAndWrite
    }
    /** This option configures whether expired user value can be returned or not. */
    public enum StateVisibility {
        /** Return expired user value if it is not cleaned up yet. */
        ReturnExpiredIfNotCleanedUp,
        /** Never return expired user value. */
        NeverReturnExpired
    }
    /** This option configures time scale to use for ttl. */
    public enum TtlTimeCharacteristic {
        /**
         * Processing time, see also <code>
         * org.apache.flink.streaming.api.TimeCharacteristic.ProcessingTime</code>.
         */
        ProcessingTime
    }
    // All settings are immutable; instances are created through the nested Builder.
    private final UpdateType updateType;
    private final StateVisibility stateVisibility;
    private final TtlTimeCharacteristic ttlTimeCharacteristic;
    private final Time ttl;
    private final CleanupStrategies cleanupStrategies;
    // Private: use newBuilder(ttl). Validates non-null settings and a strictly positive TTL.
    private StateTtlConfig(
            UpdateType updateType,
            StateVisibility stateVisibility,
            TtlTimeCharacteristic ttlTimeCharacteristic,
            Time ttl,
            CleanupStrategies cleanupStrategies) {
        this.updateType = checkNotNull(updateType);
        this.stateVisibility = checkNotNull(stateVisibility);
        this.ttlTimeCharacteristic = checkNotNull(ttlTimeCharacteristic);
        this.ttl = checkNotNull(ttl);
        this.cleanupStrategies = cleanupStrategies;
        // zero or negative TTL is rejected here rather than in the Builder
        checkArgument(ttl.toMilliseconds() > 0, "TTL is expected to be positive.");
    }
    /** @return when the last-access timestamp is updated (never null). */
    @Nonnull
    public UpdateType getUpdateType() {
        return updateType;
    }
    /** @return whether expired values may still be returned before cleanup (never null). */
    @Nonnull
    public StateVisibility getStateVisibility() {
        return stateVisibility;
    }
    /** @return the configured time-to-live (never null). */
    @Nonnull
    public Time getTtl() {
        return ttl;
    }
    /** @return the time scale used for TTL (never null). */
    @Nonnull
    public TtlTimeCharacteristic getTtlTimeCharacteristic() {
        return ttlTimeCharacteristic;
    }
    /** @return true unless the update type is {@link UpdateType#Disabled}. */
    public boolean isEnabled() {
        return updateType != UpdateType.Disabled;
    }
    /** @return the configured cleanup strategies (never null). */
    @Nonnull
    public CleanupStrategies getCleanupStrategies() {
        return cleanupStrategies;
    }
    @Override
    public String toString() {
        return "StateTtlConfig{"
                + "updateType="
                + updateType
                + ", stateVisibility="
                + stateVisibility
                + ", ttlTimeCharacteristic="
                + ttlTimeCharacteristic
                + ", ttl="
                + ttl
                + '}';
    }
    /**
     * Creates a new {@link Builder} for a config with the given time-to-live.
     *
     * @param ttl the time-to-live; must be positive (validated when the config is built)
     */
    @Nonnull
    public static Builder newBuilder(@Nonnull Time ttl) {
        return new Builder(ttl);
    }
    /** Builder for the {@link StateTtlConfig}. */
    public static class Builder {
        // Defaults: refresh TTL on create/write, hide expired values, processing time.
        private UpdateType updateType = OnCreateAndWrite;
        private StateVisibility stateVisibility = NeverReturnExpired;
        private TtlTimeCharacteristic ttlTimeCharacteristic = ProcessingTime;
        private Time ttl;
        private boolean isCleanupInBackground = true;
        // Explicitly configured cleanup strategies, keyed by strategy kind.
        private final EnumMap<CleanupStrategies.Strategies, CleanupStrategies.CleanupStrategy>
                strategies = new EnumMap<>(CleanupStrategies.Strategies.class);
        public Builder(@Nonnull Time ttl) {
            this.ttl = ttl;
        }
        /**
         * Sets the ttl update type.
         *
         * @param updateType The ttl update type configures when to update last access timestamp
         *     which prolongs state TTL.
         */
        @Nonnull
        public Builder setUpdateType(UpdateType updateType) {
            this.updateType = updateType;
            return this;
        }
        /** Shortcut for {@code setUpdateType(UpdateType.OnCreateAndWrite)}. */
        @Nonnull
        public Builder updateTtlOnCreateAndWrite() {
            return setUpdateType(UpdateType.OnCreateAndWrite);
        }
        /** Shortcut for {@code setUpdateType(UpdateType.OnReadAndWrite)}. */
        @Nonnull
        public Builder updateTtlOnReadAndWrite() {
            return setUpdateType(UpdateType.OnReadAndWrite);
        }
        /**
         * Sets the state visibility.
         *
         * @param stateVisibility The state visibility configures whether expired user value can be
         *     returned or not.
         */
        @Nonnull
        public Builder setStateVisibility(@Nonnull StateVisibility stateVisibility) {
            this.stateVisibility = stateVisibility;
            return this;
        }
        /** Shortcut for {@code setStateVisibility(StateVisibility.ReturnExpiredIfNotCleanedUp)}. */
        @Nonnull
        public Builder returnExpiredIfNotCleanedUp() {
            return setStateVisibility(StateVisibility.ReturnExpiredIfNotCleanedUp);
        }
        /** Shortcut for {@code setStateVisibility(StateVisibility.NeverReturnExpired)}. */
        @Nonnull
        public Builder neverReturnExpired() {
            return setStateVisibility(StateVisibility.NeverReturnExpired);
        }
        /**
         * Sets the time characteristic.
         *
         * @param ttlTimeCharacteristic The time characteristic configures time scale to use for
         *     ttl.
         */
        @Nonnull
        public Builder setTtlTimeCharacteristic(
                @Nonnull TtlTimeCharacteristic ttlTimeCharacteristic) {
            this.ttlTimeCharacteristic = ttlTimeCharacteristic;
            return this;
        }
        /** Shortcut for {@code setTtlTimeCharacteristic(TtlTimeCharacteristic.ProcessingTime)}. */
        @Nonnull
        public Builder useProcessingTime() {
            return setTtlTimeCharacteristic(ProcessingTime);
        }
        /** Cleanup expired state in full snapshot on checkpoint. */
        @Nonnull
        public Builder cleanupFullSnapshot() {
            strategies.put(CleanupStrategies.Strategies.FULL_STATE_SCAN_SNAPSHOT, EMPTY_STRATEGY);
            return this;
        }
        /**
         * Cleanup expired state incrementally cleanup local state.
         *
         * <p>Upon every state access this cleanup strategy checks a bunch of state keys for
         * expiration and cleans up expired ones. It keeps a lazy iterator through all keys with
         * relaxed consistency if backend supports it. This way all keys should be regularly checked
         * and cleaned eventually over time if any state is constantly being accessed.
         *
         * <p>Additionally to the incremental cleanup upon state access, it can also run per every
         * record. Caution: if there are a lot of registered states using this option, they all will
         * be iterated for every record to check if there is something to cleanup.
         *
         * <p>Note: if no access happens to this state or no records are processed in case of {@code
         * runCleanupForEveryRecord}, expired state will persist.
         *
         * <p>Note: Time spent for the incremental cleanup increases record processing latency.
         *
         * <p>Note: At the moment incremental cleanup is implemented only for Heap state backend.
         * Setting it for RocksDB will have no effect.
         *
         * <p>Note: If heap state backend is used with synchronous snapshotting, the global iterator
         * keeps a copy of all keys while iterating because of its specific implementation which
         * does not support concurrent modifications. Enabling of this feature will increase memory
         * consumption then. Asynchronous snapshotting does not have this problem.
         *
         * @param cleanupSize max number of keys pulled from queue for clean up upon state touch for
         *     any key
         * @param runCleanupForEveryRecord run incremental cleanup per each processed record
         */
        @Nonnull
        public Builder cleanupIncrementally(
                @Nonnegative int cleanupSize, boolean runCleanupForEveryRecord) {
            strategies.put(
                    CleanupStrategies.Strategies.INCREMENTAL_CLEANUP,
                    new IncrementalCleanupStrategy(cleanupSize, runCleanupForEveryRecord));
            return this;
        }
        /**
         * Cleanup expired state while Rocksdb compaction is running.
         *
         * <p>RocksDB compaction filter will query current timestamp, used to check expiration, from
         * Flink every time after processing {@code queryTimeAfterNumEntries} number of state
         * entries. Updating the timestamp more often can improve cleanup speed but it decreases
         * compaction performance because it uses JNI call from native code.
         *
         * @param queryTimeAfterNumEntries number of state entries to process by compaction filter
         *     before updating current timestamp
         */
        @Nonnull
        public Builder cleanupInRocksdbCompactFilter(long queryTimeAfterNumEntries) {
            strategies.put(
                    CleanupStrategies.Strategies.ROCKSDB_COMPACTION_FILTER,
                    new RocksdbCompactFilterCleanupStrategy(queryTimeAfterNumEntries));
            return this;
        }
        /**
         * Disable default cleanup of expired state in background (enabled by default).
         *
         * <p>If some specific cleanup is configured, e.g. {@link #cleanupIncrementally(int,
         * boolean)} or {@link #cleanupInRocksdbCompactFilter(long)}, this setting does not disable
         * it.
         */
        @Nonnull
        public Builder disableCleanupInBackground() {
            isCleanupInBackground = false;
            return this;
        }
        /**
         * Sets the ttl time.
         *
         * @param ttl The ttl time.
         */
        @Nonnull
        public Builder setTtl(@Nonnull Time ttl) {
            this.ttl = ttl;
            return this;
        }
        /**
         * Builds the immutable {@link StateTtlConfig}.
         *
         * @throws IllegalArgumentException if the configured TTL is not positive
         * @throws NullPointerException if any required setting is null
         */
        @Nonnull
        public StateTtlConfig build() {
            return new StateTtlConfig(
                    updateType,
                    stateVisibility,
                    ttlTimeCharacteristic,
                    ttl,
                    new CleanupStrategies(strategies, isCleanupInBackground));
        }
    }
    /**
     * TTL cleanup strategies.
     *
     * <p>This class configures when to cleanup expired state with TTL. By default, state is always
     * cleaned up on explicit read access if found expired. Currently cleanup of state full snapshot
     * can be additionally activated.
     */
    public static class CleanupStrategies implements Serializable {
        private static final long serialVersionUID = -1617740467277313524L;
        static final CleanupStrategy EMPTY_STRATEGY = new EmptyCleanupStrategy();
        private final boolean isCleanupInBackground;
        private final EnumMap<Strategies, CleanupStrategy> strategies;
        /** Fixed strategies ordinals in {@code strategies} config field. */
        enum Strategies {
            FULL_STATE_SCAN_SNAPSHOT,
            INCREMENTAL_CLEANUP,
            ROCKSDB_COMPACTION_FILTER
        }
        /** Base interface for cleanup strategies configurations. */
        interface CleanupStrategy extends Serializable {}
        static class EmptyCleanupStrategy implements CleanupStrategy {
            private static final long serialVersionUID = 1373998465131443873L;
        }
        private CleanupStrategies(
                EnumMap<Strategies, CleanupStrategy> strategies, boolean isCleanupInBackground) {
            this.strategies = strategies;
            this.isCleanupInBackground = isCleanupInBackground;
        }
        /** @return true if cleanup in full snapshots was explicitly enabled. */
        public boolean inFullSnapshot() {
            return strategies.containsKey(Strategies.FULL_STATE_SCAN_SNAPSHOT);
        }
        /** @return true if the default background cleanup has not been disabled. */
        public boolean isCleanupInBackground() {
            return isCleanupInBackground;
        }
        // Falls back to the default incremental strategy when background cleanup is on
        // and no explicit incremental strategy was configured.
        @Nullable
        public IncrementalCleanupStrategy getIncrementalCleanupStrategy() {
            IncrementalCleanupStrategy defaultStrategy =
                    isCleanupInBackground ? DEFAULT_INCREMENTAL_CLEANUP_STRATEGY : null;
            return (IncrementalCleanupStrategy)
                    strategies.getOrDefault(Strategies.INCREMENTAL_CLEANUP, defaultStrategy);
        }
        /** @return true if a RocksDB compaction-filter strategy applies (explicit or default). */
        public boolean inRocksdbCompactFilter() {
            return getRocksdbCompactFilterCleanupStrategy() != null;
        }
        // Falls back to the default compaction-filter strategy when background cleanup is on
        // and no explicit strategy was configured.
        @Nullable
        public RocksdbCompactFilterCleanupStrategy getRocksdbCompactFilterCleanupStrategy() {
            RocksdbCompactFilterCleanupStrategy defaultStrategy =
                    isCleanupInBackground ? DEFAULT_ROCKSDB_COMPACT_FILTER_CLEANUP_STRATEGY : null;
            return (RocksdbCompactFilterCleanupStrategy)
                    strategies.getOrDefault(Strategies.ROCKSDB_COMPACTION_FILTER, defaultStrategy);
        }
    }
    /** Configuration of the incremental cleanup strategy (cleanup upon state access/record). */
    public static class IncrementalCleanupStrategy implements CleanupStrategies.CleanupStrategy {
        private static final long serialVersionUID = 3109278696501988780L;
        static final IncrementalCleanupStrategy DEFAULT_INCREMENTAL_CLEANUP_STRATEGY =
                new IncrementalCleanupStrategy(5, false);
        /** Max number of keys pulled from queue for clean up upon state touch for any key. */
        private final int cleanupSize;
        /** Whether to run incremental cleanup per each processed record. */
        private final boolean runCleanupForEveryRecord;
        private IncrementalCleanupStrategy(int cleanupSize, boolean runCleanupForEveryRecord) {
            Preconditions.checkArgument(
                    cleanupSize > 0,
                    "Number of incrementally cleaned up state entries should be positive.");
            this.cleanupSize = cleanupSize;
            this.runCleanupForEveryRecord = runCleanupForEveryRecord;
        }
        public int getCleanupSize() {
            return cleanupSize;
        }
        public boolean runCleanupForEveryRecord() {
            return runCleanupForEveryRecord;
        }
    }
    /** Configuration of cleanup strategy using custom compaction filter in RocksDB. */
    public static class RocksdbCompactFilterCleanupStrategy
            implements CleanupStrategies.CleanupStrategy {
        private static final long serialVersionUID = 3109278796506988980L;
        static final RocksdbCompactFilterCleanupStrategy
                DEFAULT_ROCKSDB_COMPACT_FILTER_CLEANUP_STRATEGY =
                        new RocksdbCompactFilterCleanupStrategy(1000L);
        /**
         * Number of state entries to process by compaction filter before updating current
         * timestamp.
         */
        private final long queryTimeAfterNumEntries;
        private RocksdbCompactFilterCleanupStrategy(long queryTimeAfterNumEntries) {
            this.queryTimeAfterNumEntries = queryTimeAfterNumEntries;
        }
        public long getQueryTimeAfterNumEntries() {
            return queryTimeAfterNumEntries;
        }
    }
}
| |
/*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.web.servlet.view.xslt;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.util.Enumeration;
import java.util.Map;
import java.util.Properties;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.transform.ErrorListener;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.Templates;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.transform.URIResolver;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContextException;
import org.springframework.core.io.Resource;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.util.xml.SimpleTransformErrorListener;
import org.springframework.util.xml.TransformerUtils;
import org.springframework.web.servlet.view.AbstractUrlBasedView;
import org.springframework.web.util.WebUtils;
/**
* XSLT-driven View that allows for response context to be rendered as the
* result of an XSLT transformation.
*
* <p>The XSLT Source object is supplied as a parameter in the model and then
* {@link #locateSource detected} during response rendering. Users can either specify
* a specific entry in the model via the {@link #setSourceKey sourceKey} property or
* have Spring locate the Source object. This class also provides basic conversion
* of objects into Source implementations. See {@link #getSourceTypes() here}
* for more details.
*
* <p>All model parameters are passed to the XSLT Transformer as parameters.
* In addition the user can configure {@link #setOutputProperties output properties}
* to be passed to the Transformer.
*
* @author Rob Harrop
* @author Juergen Hoeller
* @since 2.0
*/
public class XsltView extends AbstractUrlBasedView {
	// Custom TransformerFactory implementation class; null -> platform default (see newTransformerFactory)
	private Class<? extends TransformerFactory> transformerFactoryClass;
	// Model attribute name holding the XSLT Source; null -> search the model by type (see locateSource)
	private String sourceKey;
	// Optional resolver for XSLT document() calls
	private URIResolver uriResolver;
	// Error/warning listener for transformations; defaults to log-and-rethrow
	private ErrorListener errorListener = new SimpleTransformErrorListener(logger);
	// Whether to set the "indent" output key on the transformer (default true)
	private boolean indent = true;
	// Extra transformer output properties; may be null
	private Properties outputProperties;
	// Whether to cache the compiled Templates once loaded (default true)
	private boolean cacheTemplates = true;
	// Created in initApplicationContext()
	private TransformerFactory transformerFactory;
	// Pre-compiled stylesheet, populated when cacheTemplates is true
	private Templates cachedTemplates;
	/**
	 * Specify the XSLT TransformerFactory class to use.
	 * <p>The default constructor of the specified class will be called
	 * to build the TransformerFactory for this view.
	 * @param transformerFactoryClass the TransformerFactory implementation class
	 * ({@code null} means the platform-default factory will be used)
	 */
	public void setTransformerFactoryClass(Class<? extends TransformerFactory> transformerFactoryClass) {
		this.transformerFactoryClass = transformerFactoryClass;
	}
	/**
	 * Set the name of the model attribute that represents the XSLT Source.
	 * If not specified, the model map will be searched for a matching value type.
	 * <p>The following source types are supported out of the box:
	 * {@link Source}, {@link Document}, {@link Node}, {@link Reader},
	 * {@link InputStream} and {@link Resource}.
	 * @param sourceKey the model attribute key to look up the Source under
	 * @see #getSourceTypes
	 * @see #convertSource
	 */
	public void setSourceKey(String sourceKey) {
		this.sourceKey = sourceKey;
	}
	/**
	 * Set the URIResolver used in the transform.
	 * <p>The URIResolver handles calls to the XSLT {@code document()} function.
	 * @param uriResolver the resolver to register on the TransformerFactory
	 */
	public void setUriResolver(URIResolver uriResolver) {
		this.uriResolver = uriResolver;
	}
/**
* Set an implementation of the {@link javax.xml.transform.ErrorListener}
* interface for custom handling of transformation errors and warnings.
* <p>If not set, a default
* {@link org.springframework.util.xml.SimpleTransformErrorListener} is
* used that simply logs warnings using the logger instance of the view class,
* and rethrows errors to discontinue the XML transformation.
* @see org.springframework.util.xml.SimpleTransformErrorListener
*/
public void setErrorListener(ErrorListener errorListener) {
this.errorListener = (errorListener != null ? errorListener : new SimpleTransformErrorListener(logger));
}
	/**
	 * Set whether the XSLT transformer may add additional whitespace when
	 * outputting the result tree.
	 * <p>Default is {@code true} (on); set this to {@code false} (off)
	 * to not specify an "indent" key, leaving the choice up to the stylesheet.
	 * @param indent whether to request indented output from the transformer
	 * @see javax.xml.transform.OutputKeys#INDENT
	 */
	public void setIndent(boolean indent) {
		this.indent = indent;
	}
	/**
	 * Set arbitrary transformer output properties to be applied to the stylesheet.
	 * <p>Any values specified here will override defaults that this view sets
	 * programmatically.
	 * @param outputProperties the output properties to apply (may be {@code null})
	 * @see javax.xml.transform.Transformer#setOutputProperty
	 */
	public void setOutputProperties(Properties outputProperties) {
		this.outputProperties = outputProperties;
	}
	/**
	 * Turn on/off the caching of the XSLT {@link Templates} instance.
	 * <p>The default value is "true". Only set this to "false" in development,
	 * where caching does not seriously impact performance.
	 * @param cacheTemplates whether to cache the compiled Templates instance
	 */
	public void setCacheTemplates(boolean cacheTemplates) {
		this.cacheTemplates = cacheTemplates;
	}
/**
* Initialize this XsltView's TransformerFactory.
*/
@Override
protected void initApplicationContext() throws BeansException {
this.transformerFactory = newTransformerFactory(this.transformerFactoryClass);
this.transformerFactory.setErrorListener(this.errorListener);
if (this.uriResolver != null) {
this.transformerFactory.setURIResolver(this.uriResolver);
}
if (this.cacheTemplates) {
this.cachedTemplates = loadTemplates();
}
}
/**
* Instantiate a new TransformerFactory for this view.
* <p>The default implementation simply calls
* {@link javax.xml.transform.TransformerFactory#newInstance()}.
* If a {@link #setTransformerFactoryClass "transformerFactoryClass"}
* has been specified explicitly, the default constructor of the
* specified class will be called instead.
* <p>Can be overridden in subclasses.
* @param transformerFactoryClass the specified factory class (if any)
* @return the new TransactionFactory instance
* @see #setTransformerFactoryClass
* @see #getTransformerFactory()
*/
protected TransformerFactory newTransformerFactory(Class<? extends TransformerFactory> transformerFactoryClass) {
if (transformerFactoryClass != null) {
try {
return ReflectionUtils.accessibleConstructor(transformerFactoryClass).newInstance();
}
catch (Exception ex) {
throw new TransformerFactoryConfigurationError(ex, "Could not instantiate TransformerFactory");
}
}
else {
return TransformerFactory.newInstance();
}
}
	/**
	 * Return the TransformerFactory that this XsltView uses.
	 * <p>Created once in {@link #initApplicationContext()}.
	 * @return the TransformerFactory (never {@code null})
	 */
	protected final TransformerFactory getTransformerFactory() {
		return this.transformerFactory;
	}
@Override
protected void renderMergedOutputModel(
Map<String, Object> model, HttpServletRequest request, HttpServletResponse response)
throws Exception {
Templates templates = this.cachedTemplates;
if (templates == null) {
templates = loadTemplates();
}
Transformer transformer = createTransformer(templates);
configureTransformer(model, response, transformer);
configureResponse(model, response, transformer);
Source source = null;
try {
source = locateSource(model);
if (source == null) {
throw new IllegalArgumentException("Unable to locate Source object in model: " + model);
}
transformer.transform(source, createResult(response));
}
finally {
closeSourceIfNecessary(source);
}
}
	/**
	 * Create the XSLT {@link Result} used to render the result of the transformation.
	 * <p>The default implementation creates a {@link StreamResult} wrapping the supplied
	 * HttpServletResponse's {@link HttpServletResponse#getOutputStream() OutputStream}.
	 * @param response current HTTP response
	 * @return the XSLT Result to use
	 * @throws Exception if the Result cannot be built
	 */
	protected Result createResult(HttpServletResponse response) throws Exception {
		// stream the transformation output directly into the servlet response body
		return new StreamResult(response.getOutputStream());
	}
/**
* <p>Locate the {@link Source} object in the supplied model,
* converting objects as required.
* The default implementation first attempts to look under the configured
* {@link #setSourceKey source key}, if any, before attempting to locate
* an object of {@link #getSourceTypes() supported type}.
* @param model the merged model Map
* @return the XSLT Source object (or {@code null} if none found)
* @throws Exception if an error occurred during locating the source
* @see #setSourceKey
* @see #convertSource
*/
protected Source locateSource(Map<String, Object> model) throws Exception {
if (this.sourceKey != null) {
return convertSource(model.get(this.sourceKey));
}
Object source = CollectionUtils.findValueOfType(model.values(), getSourceTypes());
return (source != null ? convertSource(source) : null);
}
/**
* Return the array of {@link Class Classes} that are supported when converting to an
* XSLT {@link Source}.
* <p>Currently supports {@link Source}, {@link Document}, {@link Node},
* {@link Reader}, {@link InputStream} and {@link Resource}.
* @return the supported source types
*/
protected Class<?>[] getSourceTypes() {
    // Types accepted by convertSource(), in no particular order.
    return new Class<?>[] {
            Source.class,
            Document.class,
            Node.class,
            Reader.class,
            InputStream.class,
            Resource.class
    };
}
/**
* Convert the supplied {@link Object} into an XSLT {@link Source} if the
* {@link Object} type is {@link #getSourceTypes() supported}.
* @param source the original source object
* @return the adapted XSLT Source
* @throws IllegalArgumentException if the given Object is not of a supported type
*/
protected Source convertSource(Object source) throws Exception {
    // Guard-return per supported type. Order matters: a Document is itself a
    // Node, so the Document branch must be tested before the Node branch.
    if (source instanceof Source) {
        return (Source) source;
    }
    if (source instanceof Document) {
        return new DOMSource(((Document) source).getDocumentElement());
    }
    if (source instanceof Node) {
        return new DOMSource((Node) source);
    }
    if (source instanceof Reader) {
        return new StreamSource((Reader) source);
    }
    if (source instanceof InputStream) {
        return new StreamSource((InputStream) source);
    }
    if (source instanceof Resource) {
        Resource resource = (Resource) source;
        // Supply the system id so relative URIs inside the document resolve.
        return new StreamSource(resource.getInputStream(), resource.getURI().toASCIIString());
    }
    throw new IllegalArgumentException("Value '" + source + "' cannot be converted to XSLT Source");
}
/**
* Configure the supplied {@link Transformer} instance.
* <p>The default implementation copies parameters from the model into the
* Transformer's {@link Transformer#setParameter parameter set}.
* This implementation also copies the {@link #setOutputProperties output properties}
* into the {@link Transformer} {@link Transformer#setOutputProperty output properties}.
* Indentation properties are set as well.
* @param model merged output Map (never {@code null})
* @param response current HTTP response
* @param transformer the target transformer
* @see #copyModelParameters(Map, Transformer)
* @see #copyOutputProperties(Transformer)
* @see #configureIndentation(Transformer)
*/
protected void configureTransformer(Map<String, Object> model, HttpServletResponse response, Transformer transformer) {
    // Expose every model attribute to the stylesheet as an XSLT parameter.
    copyModelParameters(model, transformer);
    // Apply the user-configured output properties (e.g. method, media-type, encoding).
    copyOutputProperties(transformer);
    // Applied last so the configured indent flag wins over any conflicting
    // "indent" entry among the copied output properties.
    configureIndentation(transformer);
}
/**
* Configure the indentation settings for the supplied {@link Transformer}.
* @param transformer the target transformer
* @see org.springframework.util.xml.TransformerUtils#enableIndenting(javax.xml.transform.Transformer)
* @see org.springframework.util.xml.TransformerUtils#disableIndenting(javax.xml.transform.Transformer)
*/
protected final void configureIndentation(Transformer transformer) {
    // Translate the boolean indent flag into the transformer's INDENT output property.
    if (!this.indent) {
        TransformerUtils.disableIndenting(transformer);
    }
    else {
        TransformerUtils.enableIndenting(transformer);
    }
}
/**
* Copy the configured output {@link Properties}, if any, into the
* {@link Transformer#setOutputProperty output property set} of the supplied
* {@link Transformer}.
* @param transformer the target transformer
*/
protected final void copyOutputProperties(Transformer transformer) {
    // Nothing to copy when no output properties were configured.
    if (this.outputProperties == null) {
        return;
    }
    // propertyNames() also yields keys from the Properties defaults chain.
    for (Enumeration<?> names = this.outputProperties.propertyNames(); names.hasMoreElements(); ) {
        String name = (String) names.nextElement();
        transformer.setOutputProperty(name, this.outputProperties.getProperty(name));
    }
}
/**
* Copy all entries from the supplied Map into the
* {@link Transformer#setParameter(String, Object) parameter set}
* of the supplied {@link Transformer}.
* @param model merged output Map (never {@code null})
* @param transformer the target transformer
*/
protected final void copyModelParameters(Map<String, Object> model, Transformer transformer) {
    // Register each model attribute as a stylesheet parameter under its own name.
    for (String attributeName : model.keySet()) {
        transformer.setParameter(attributeName, model.get(attributeName));
    }
}
/**
* Configure the supplied {@link HttpServletResponse}.
* <p>The default implementation of this method sets the
* {@link HttpServletResponse#setContentType content type} and
* {@link HttpServletResponse#setCharacterEncoding encoding}
* from the "media-type" and "encoding" output properties
* specified in the {@link Transformer}.
* @param model merged output Map (never {@code null})
* @param response current HTTP response
* @param transformer the target transformer
*/
protected void configureResponse(Map<String, Object> model, HttpServletResponse response, Transformer transformer) {
    // A "media-type" output property in the stylesheet overrides the view's content type.
    String mediaType = transformer.getOutputProperty(OutputKeys.MEDIA_TYPE);
    String contentType = (StringUtils.hasText(mediaType) ? mediaType : getContentType());
    // Append the stylesheet's "encoding" as a charset clause, but only when a
    // content type is present and does not already declare one.
    String encoding = transformer.getOutputProperty(OutputKeys.ENCODING);
    if (StringUtils.hasText(encoding) && contentType != null
            && !contentType.toLowerCase().contains(WebUtils.CONTENT_TYPE_CHARSET_PREFIX)) {
        contentType = contentType + WebUtils.CONTENT_TYPE_CHARSET_PREFIX + encoding;
    }
    response.setContentType(contentType);
}
/**
* Load the {@link Templates} instance for the stylesheet at the configured location.
*/
private Templates loadTemplates() throws ApplicationContextException {
    // The stylesheet source is always closed, whether compilation succeeds or fails.
    Source source = getStylesheetSource();
    try {
        Templates compiled = this.transformerFactory.newTemplates(source);
        if (logger.isDebugEnabled()) {
            logger.debug("Loading templates '" + compiled + "'");
        }
        return compiled;
    }
    catch (TransformerConfigurationException ex) {
        // Surface stylesheet compilation problems as a context-level failure.
        throw new ApplicationContextException("Can't load stylesheet from '" + getUrl() + "'", ex);
    }
    finally {
        closeSourceIfNecessary(source);
    }
}
/**
* Create the {@link Transformer} instance used to prefer the XSLT transformation.
* <p>The default implementation simply calls {@link Templates#newTransformer()}, and
* configures the {@link Transformer} with the custom {@link URIResolver} if specified.
* @param templates the XSLT Templates instance to create a Transformer for
* @return the Transformer object
* @throws TransformerConfigurationException in case of creation failure
*/
protected Transformer createTransformer(Templates templates) throws TransformerConfigurationException {
    // One fresh Transformer per render; Transformer instances are not thread-safe.
    Transformer transformer = templates.newTransformer();
    if (this.uriResolver == null) {
        return transformer;
    }
    // Install the custom resolver for document()/xsl:include URI lookups.
    transformer.setURIResolver(this.uriResolver);
    return transformer;
}
/**
* Get the XSLT {@link Source} for the XSLT template under the {@link #setUrl configured URL}.
* @return the Source object
*/
protected Source getStylesheetSource() {
    String stylesheetUrl = getUrl();
    if (logger.isDebugEnabled()) {
        logger.debug("Loading XSLT stylesheet from '" + stylesheetUrl + "'");
    }
    try {
        Resource stylesheet = getApplicationContext().getResource(stylesheetUrl);
        // The system id (resource URI) lets relative references inside the
        // stylesheet resolve correctly.
        return new StreamSource(stylesheet.getInputStream(), stylesheet.getURI().toASCIIString());
    }
    catch (IOException ex) {
        throw new ApplicationContextException("Can't load XSLT stylesheet from '" + stylesheetUrl + "'", ex);
    }
}
/**
* Close the underlying resource managed by the supplied {@link Source} if applicable.
* <p>Only works for {@link StreamSource StreamSources}.
* @param source the XSLT Source to close (may be {@code null})
*/
private void closeSourceIfNecessary(Source source) {
    // Only StreamSources wrap closeable resources; everything else is a no-op.
    if (!(source instanceof StreamSource)) {
        return;
    }
    StreamSource streamSource = (StreamSource) source;
    Reader reader = streamSource.getReader();
    if (reader != null) {
        try {
            reader.close();
        }
        catch (IOException ex) {
            // ignore: best-effort cleanup
        }
    }
    InputStream inputStream = streamSource.getInputStream();
    if (inputStream != null) {
        try {
            inputStream.close();
        }
        catch (IOException ex) {
            // ignore: best-effort cleanup
        }
    }
}
}
| |
package org.pdxfinder.services;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.openjson.JSONArray;
import com.github.openjson.JSONException;
import com.github.openjson.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.pdxfinder.graph.dao.OntologyTerm;
import org.pdxfinder.graph.repositories.OntologyTermRepository;
import org.pdxfinder.graph.repositories.SampleRepository;
import org.pdxfinder.rdbms.dao.MappingEntity;
import org.pdxfinder.rdbms.repositories.MappingEntityRepository;
import org.pdxfinder.services.dto.PaginationDTO;
import org.pdxfinder.services.mapping.CSV;
import org.pdxfinder.services.mapping.MappingContainer;
import org.pdxfinder.services.mapping.MappingEntityType;
import org.pdxfinder.services.mapping.Status;
import org.pdxfinder.services.zooma.*;
import org.pdxfinder.utils.DamerauLevenshteinAlgorithm;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Service;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;
/*
* Created by csaba on 09/07/2018.
*/
@Service
public class MappingService {
private final static Logger log = LoggerFactory.getLogger(MappingService.class);
private ObjectMapper mapper = new ObjectMapper();
public static final String URI = "http://www.pdxfinder.org/";
private static final String NAME = "pdx-finder";
private static final List<String> TOPIC = Arrays.asList("PDXFinder");
private static final String TYPE = "DATABASE";
private static final String EVIDENCE = "SUBMITTER_PROVIDED";
private static final String ACCURACY = "PRECISE";
private static final String ANNOTATOR = "Nathalie Conte";
@Value("${data-dir}")
private String rootDir;
@Value("${mappings.mappedTermUrl}")
private String knowledgBaseURL;
private SampleRepository sampleRepository;
private MappingEntityRepository mappingEntityRepository;
private MappingContainer container;
private boolean INITIALIZED = false;
private UtilityService utilityService;
private PaginationService paginationService;
private OntologyTermRepository ontologyTermRepository;
@Autowired
public MappingService(SampleRepository sampleRepository,
MappingEntityRepository mappingEntityRepository,
OntologyTermRepository ontologyTermRepository,
UtilityService utilityService,
PaginationService paginationService) {
this.sampleRepository = sampleRepository;
this.mappingEntityRepository = mappingEntityRepository;
this.ontologyTermRepository = ontologyTermRepository;
this.utilityService = utilityService;
this.paginationService = paginationService;
container = new MappingContainer();
}
public String getDiagnosisMappingKey(String dataSource, String diagnosis, String originTissue, String tumorType){
    // Key layout: <type>__<dataSource>__<diagnosis>__<originTissue>__<tumorType>,
    // then stripped of characters outside [a-zA-Z0-9 _-] and lower-cased.
    String rawKey = String.join("__",
            MappingEntityType.diagnosis.get(), dataSource, diagnosis, originTissue, tumorType);
    return rawKey.replaceAll("[^a-zA-Z0-9 _-]", "").toLowerCase();
}
public String getTreatmentMappingKey(String dataSource, String treatmentName){
    // Key layout: <type>__<dataSource>__<treatmentName>, sanitized and lower-cased
    // with the same character whitelist used for diagnosis keys.
    String rawKey = String.join("__",
            MappingEntityType.treatment.get(), dataSource, treatmentName);
    return rawKey.replaceAll("[^a-zA-Z0-9 _-]", "").toLowerCase();
}
public MappingEntity getDiagnosisMapping(String dataSource, String diagnosis, String originTissue, String tumorType) {
    // Lazily load the JSON rule files on first access.
    if (!INITIALIZED) {
        loadRules("json");
    }
    return container.getEntityById(
            getDiagnosisMappingKey(dataSource, diagnosis, originTissue, tumorType));
}
public MappingEntity getTreatmentMapping(String dataSource, String treatmentName) {
    // Lazily load the JSON rule files on first access.
    if (!INITIALIZED) {
        loadRules("json");
    }
    return container.getEntityById(getTreatmentMappingKey(dataSource, treatmentName));
}
public void saveMappingsToFile(String fileName, List<MappingEntity> maprules) {
    // Serialize under a single "mappings" key, mirroring the format loadRules() reads.
    Map<String, List<MappingEntity>> mappings = new HashMap<>();
    mappings.put("mappings", maprules);
    String json = JSONObject.wrap(mappings).toString();
    // try-with-resources closes (and flushes) the writer even when append() throws;
    // the previous version leaked the stream on failure.
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(fileName, false))) {
        writer.append(json);
    } catch (IOException e) {
        // Log through the class logger instead of printStackTrace().
        log.error("Failed to write mapping rules to {}", fileName, e);
    }
}
/**
* Loads rules from a source: file or h2
*
* @param source
*/
private void loadRules(String source) {
    // Defensive: the container is normally created in the constructor.
    if (container == null) container = new MappingContainer();
    log.info("Loading mapping rules");
    if (source.equals("json")) {
        String mappingRulesDir = rootDir + "/mapping";
        File folder = new File(mappingRulesDir);
        if (folder.exists()) {
            String diagnosisMappingsFilePath = mappingRulesDir + "/diagnosis_mappings.json";
            String treatmentMappingsFilePath = mappingRulesDir + "/treatment_mappings.json";
            if (new File(diagnosisMappingsFilePath).exists()) {
                loadDiagnosisMappings(diagnosisMappingsFilePath);
            } else {
                log.error("Diagnosis mappings file not found at {}", diagnosisMappingsFilePath);
            }
            if (new File(treatmentMappingsFilePath).exists()) {
                loadTreatmentMappings(treatmentMappingsFilePath);
            } else {
                log.error("Treatment mappings file not found at {}", treatmentMappingsFilePath);
            }
        } else {
            log.error("Mapping rules directory not found at {}", mappingRulesDir);
        }
    } else if (source.equals("h2")) {
        // Previously an empty branch that silently did nothing; make that visible.
        log.warn("Loading mapping rules from h2 is not implemented; no rules were loaded");
    } else {
        log.error("Couldn't load mapping rules, unknown source '{}'", source);
    }
    // Marked initialized even on failure so we do not retry on every lookup.
    INITIALIZED = true;
}
/**
* Populates the container with the diagnosis mapping rules
*
* @param file
*/
private void loadDiagnosisMappings(String file) {
    // Parse the {"mappings":[...]} file and register one diagnosis rule per row.
    String json = utilityService.parseFile(file);
    try {
        JSONObject job = new JSONObject(json);
        if (job.has("mappings")) {
            JSONArray rows = job.getJSONArray("mappings");
            for (int i = 0; i < rows.length(); i++) {
                JSONObject row = rows.getJSONObject(i);
                JSONObject mappingVal = row.getJSONObject("mappingValues");
                String dataSource = mappingVal.getString("DataSource");
                String sampleDiagnosis = mappingVal.getString("SampleDiagnosis").toLowerCase();
                String originTissue = mappingVal.getString("OriginTissue");
                String tumorType = mappingVal.getString("TumorType");
                String ontologyTerm = row.getString("mappedTermLabel");
                String mapType = row.getString("mapType");
                String justification = row.getString("justification");
                String mappedTermUrl = row.getString("mappedTermUrl");
                Long entityId = Long.parseLong(row.getString("entityId"));
                // Skip rows without a mapped term or a diagnosis. Null is tested
                // FIRST: the original tested equals("") before the null check,
                // which would have thrown an NPE instead of skipping the row.
                if (ontologyTerm == null || ontologyTerm.isEmpty()) continue;
                if (sampleDiagnosis == null || sampleDiagnosis.isEmpty()) continue;
                // Rewrite "...Malignant...Neoplasm..." labels to use "Cancer".
                // NOTE(review): sampleDiagnosis was lower-cased above, so this
                // case-sensitive pattern can never match — confirm intended casing.
                String updatedDiagnosis = sampleDiagnosis;
                String pattern = "(.*)Malignant(.*)Neoplasm(.*)";
                if (sampleDiagnosis.matches(pattern)) {
                    updatedDiagnosis = (sampleDiagnosis.replaceAll(pattern, "\t$1$2Cancer$3")).trim();
                    log.info("Updating label from mapping service of diagnosis '{}' with '{}'", sampleDiagnosis, updatedDiagnosis);
                }
                // Remove commas from the diagnosis.
                sampleDiagnosis = updatedDiagnosis.replaceAll(",", "");
                // The JSON may carry the literal string "null" for absent values.
                if (originTissue == null || originTissue.equals("null")) originTissue = "";
                if (tumorType == null || tumorType.equals("null")) tumorType = "";
                if (justification == null || justification.equals("null")) justification = "";
                // Normalize to lower case for key generation. (originTissue and
                // tumorType are non-null here; the original re-checked them redundantly.)
                if (dataSource != null) dataSource = dataSource.toLowerCase();
                originTissue = originTissue.toLowerCase();
                tumorType = tumorType.toLowerCase();
                sampleDiagnosis = sampleDiagnosis.toLowerCase();
                Map<String, String> mappingValues = new HashMap<>();
                mappingValues.put("DataSource", dataSource);
                mappingValues.put("SampleDiagnosis", sampleDiagnosis);
                mappingValues.put("OriginTissue", originTissue);
                mappingValues.put("TumorType", tumorType);
                MappingEntity me = new MappingEntity(MappingEntityType.diagnosis.get(), getDiagnosisMappingLabels(), mappingValues);
                me.setMappedTermLabel(ontologyTerm);
                me.setMapType(mapType);
                me.setJustification(justification);
                me.setEntityId(entityId);
                me.setMappedTermUrl(mappedTermUrl);
                me.setMappingKey(me.generateMappingKey());
                container.addEntity(me);
            }
        }
    } catch (JSONException e) {
        // Previously printStackTrace(); route through the class logger instead.
        log.error("Failed to load diagnosis mappings from {}", file, e);
    }
}
private void loadTreatmentMappings(String file) {
    // Parse the {"mappings":[...]} file and register one treatment rule per row.
    String json = utilityService.parseFile(file);
    try {
        JSONObject job = new JSONObject(json);
        if (job.has("mappings")) {
            JSONArray rows = job.getJSONArray("mappings");
            for (int i = 0; i < rows.length(); i++) {
                JSONObject row = rows.getJSONObject(i);
                JSONObject mappingVal = row.getJSONObject("mappingValues");
                String dataSource = mappingVal.getString("DataSource");
                String treatmentName = mappingVal.getString("TreatmentName").toLowerCase();
                String ontologyTerm = row.getString("mappedTermLabel");
                String mapType = row.getString("mapType");
                String justification = row.getString("justification");
                String mappedTermUrl = row.getString("mappedTermUrl");
                Long entityId = Long.parseLong(row.getString("entityId"));
                // Null tested FIRST: the original tested equals("") before the
                // null check, which would have thrown an NPE instead of skipping.
                if (ontologyTerm == null || ontologyTerm.isEmpty()) continue;
                // The JSON may carry the literal string "null" for absent values.
                if (justification == null || justification.equals("null")) justification = "";
                // Normalize for key generation.
                if (dataSource != null) dataSource = dataSource.toLowerCase();
                Map<String, String> mappingValues = new HashMap<>();
                mappingValues.put("DataSource", dataSource);
                mappingValues.put("TreatmentName", treatmentName);
                MappingEntity me = new MappingEntity(MappingEntityType.treatment.get(), getTreatmentMappingLabels(), mappingValues);
                me.setMappedTermLabel(ontologyTerm);
                me.setMapType(mapType);
                me.setJustification(justification);
                me.setEntityId(entityId);
                me.setMappedTermUrl(mappedTermUrl);
                me.setMappingKey(me.generateMappingKey());
                container.addEntity(me);
            }
        }
    } catch (JSONException e) {
        // Previously printStackTrace(); route through the class logger instead.
        log.error("Failed to load treatment mappings from {}", file, e);
    }
}
public Map<String, List<MappingEntity>> getMissingDiagnosisMappings(String ds) {
    // NOTE(review): the 'ds' filter is currently ignored — the original body held
    // an unused MappingContainer and an empty if/else on ds (removed here); all
    // entities without a mapped term label are returned regardless of data source.
    List<MappingEntity> mappingEntities = mappingEntityRepository.findByMappedTermLabel(null);
    Map<String, List<MappingEntity>> entityMap = new HashMap<>();
    entityMap.put("mappings", mappingEntities);
    return entityMap;
}
/*
public MappingContainer getSavedDiagnosisMappings(String ds){
if(!INITIALIZED){
loadRules("file");
}
//no filter, return everything
if(ds == null) return existingDiagnosisMappings;
MappingContainer mc = new MappingContainer();
List<MappingEntity> results = existingDiagnosisMappings.getMappings().values().stream().filter(
x -> x.getEntityType().equals("DIAGNOSIS") &&
x.getMappingValues().get("DataSource").equals(ds)).collect(Collectors.toList());
results.forEach(x -> {
mc.addEntity(x);
});
return mc;
}
*/
public MappingContainer getMappingsByDSAndType(List<String> ds, String type) {
    if (!INITIALIZED) {
        loadRules("json");
    }
    MappingContainer mc = new MappingContainer();
    for (MappingEntity me : container.getMappings().values()) {
        // equalsIgnoreCase replaces toLowerCase().equals(..): same ASCII behavior,
        // locale-safe, and tolerant of a null DataSource value (returns false
        // where the original would have thrown an NPE).
        if (!me.getEntityType().equalsIgnoreCase(type)) {
            continue;
        }
        for (String dataSource : ds) {
            if (dataSource.equalsIgnoreCase(me.getMappingValues().get("DataSource"))) {
                // Shallow field-by-field copy of the cached rule ("clone object
                // but purge keys" in the original author's words).
                MappingEntity copy = new MappingEntity();
                copy.setEntityId(me.getEntityId());
                copy.setEntityType(me.getEntityType());
                copy.setMappingLabels(me.getMappingLabels());
                copy.setMappingValues(me.getMappingValues());
                copy.setMappedTermUrl(me.getMappedTermUrl());
                copy.setMappedTermLabel(me.getMappedTermLabel());
                copy.setMapType(me.getMapType());
                copy.setJustification(me.getJustification());
                copy.setStatus(me.getStatus());
                copy.setSuggestedMappings(me.getSuggestedMappings());
                copy.setMappingKey(me.getMappingKey());
                mc.addEntity(copy);
            }
        }
    }
    return mc;
}
private List<MappingEntity> getSuggestionsForUnmappedEntity(MappingEntity me, MappingContainer mc) {
    // Rank all mapped entities of the same type by weighted Damerau-Levenshtein
    // distance to the unmapped entity; return the 10 closest (smallest score first).
    String entityType = me.getEntityType();
    List<MappingEntity> candidates = mc.getMappings().values().stream()
            .filter(x -> x.getEntityType().equals(entityType))
            .collect(Collectors.toList());
    DamerauLevenshteinAlgorithm dla = new DamerauLevenshteinAlgorithm(1, 1, 2, 2);
    // TreeMap keeps score buckets sorted ascending; the original built an
    // identical second TreeMap copy, which was redundant and is removed here.
    TreeMap<Integer, List<MappingEntity>> suggestionsByScore = new TreeMap<>();
    for (MappingEntity candidate : candidates) {
        int simIndex = 0;
        for (String label : candidate.getMappingLabels()) {
            simIndex += getSimilarityIndexComponent(dla, me.getEntityType(), label,
                    me.getMappingValues().get(label), candidate.getMappingValues().get(label));
        }
        // Autoboxing replaces the deprecated 'new Integer(simIndex)'.
        suggestionsByScore.computeIfAbsent(simIndex, k -> new ArrayList<>()).add(candidate);
    }
    // Collect up to 10 suggestions, best (lowest) scores first.
    List<MappingEntity> resultList = new ArrayList<>();
    int entityCounter = 0;
    collect:
    for (List<MappingEntity> bucket : suggestionsByScore.values()) {
        for (MappingEntity suggestion : bucket) {
            resultList.add(suggestion);
            entityCounter++;
            if (entityCounter >= 10) break collect;
        }
    }
    return resultList;
}
private int getSimilarityIndexComponent(DamerauLevenshteinAlgorithm dla, String entityType, String entityAttribute, String attribute1, String attribute2) {
    // equalsIgnoreCase replaces toUpperCase().equals(..): same result for these
    // ASCII constants and locale-safe.
    if ("DIAGNOSIS".equalsIgnoreCase(entityType)) {
        if (entityAttribute.equals("SampleDiagnosis")) {
            // The diagnosis text dominates the score (weight 5).
            return dla.execute(attribute1.toLowerCase(), attribute2.toLowerCase()) * 5;
        }
        if (entityAttribute.equals("OriginTissue")) {
            int diff = dla.execute(attribute1.toLowerCase(), attribute2.toLowerCase());
            // A very different origin tissue makes a poor suggestion: heavy penalty.
            if (diff > 4) return 50;
            return diff;
        }
        // Other attributes contribute at most 4, capped to 1 when very different.
        int diff = dla.execute(attribute1.toLowerCase(), attribute2.toLowerCase());
        if (diff > 4) return 1;
        return diff;
    }
    else if ("TREATMENT".equalsIgnoreCase(entityType)) {
        if (entityAttribute.equals("TreatmentName")) {
            // The treatment name dominates the score (weight 5).
            return dla.execute(attribute1.toLowerCase(), attribute2.toLowerCase()) * 5;
        }
        int diff = dla.execute(attribute1.toLowerCase(), attribute2.toLowerCase());
        if (diff > 4) return 1;
        return diff;
    }
    // Unknown entity types are effectively excluded with a huge distance.
    return 10000;
}
String getTypeKeyValues(MappingEntity me) {
    // Concatenate the lower-cased mapping values in label order; used as a
    // similarity key. Unknown types and null input yield an empty string.
    if (me == null) {
        return "";
    }
    // StringBuilder replaces repeated String += concatenation in the loop.
    StringBuilder key = new StringBuilder();
    switch (me.getEntityType()) {
        case "DIAGNOSIS":
            for (String label : getDiagnosisMappingLabels()) {
                key.append(me.getMappingValues().get(label).toLowerCase());
            }
            break;
        default:
            // Other entity types produce an empty key, as before.
            break;
    }
    return key.toString();
}
public List<String> getDiagnosisMappingLabels() {
    // Label order matters: getTypeKeyValues() concatenates values in this order.
    // Returns a mutable list, as before.
    return new ArrayList<>(Arrays.asList(
            "DataSource", "SampleDiagnosis", "OriginTissue", "TumorType"));
}
public List<String> getTreatmentMappingLabels() {
    // Labels that identify a treatment mapping rule. Returns a mutable list, as before.
    return new ArrayList<>(Arrays.asList("DataSource", "TreatmentName"));
}
private int getStringSimilarity(DamerauLevenshteinAlgorithm dla, String key1, String key2) {
    // Thin wrapper around the configured Damerau-Levenshtein edit distance.
    int distance = dla.execute(key1, key2);
    return distance;
}
public List<ZoomaEntity> transformMappingsForZooma() {
    // Fetch the knowledge-base rows and convert each mapping rule into one
    // ZoomaEntity per mapping label.
    JsonNode mappingRow = utilityService.readJsonURL(knowledgBaseURL);
    // NOTE(review): unchecked conversion — the payload is assumed to be {"row":[...]}.
    Map<String, List<Object>> dMappingRow = mapper.convertValue(mappingRow, Map.class);
    List<ZoomaEntity> zoomaEntities = new ArrayList<>();
    for (Object data : dMappingRow.get("row")) {
        MappingEntity mappingEntity = mapper.convertValue(data, MappingEntity.class);
        // Only the fields actually used below are extracted; the original also
        // read entityId/entityType/mappedTermLabel/mapType/justification/status
        // into unused locals.
        List<String> mappingLabels = mappingEntity.getMappingLabels();
        Map<String, String> mappingValues = mappingEntity.getMappingValues();
        String originTissue = mappingValues.get("OriginTissue");
        String tumorType = mappingValues.get("TumorType");
        String sampleDiagnosis = mappingValues.get("SampleDiagnosis");
        String dataSource = mappingValues.get("DataSource");
        String mappedTermUrl = mappingEntity.getMappedTermUrl();
        /* ZOOMA BIOLOGICAL-ENTITY DATA */
        Studies studies = new Studies(dataSource.toUpperCase(), null);
        String bioEntity = StringUtils.join(
                Arrays.asList(dataSource, sampleDiagnosis, originTissue, tumorType), "__"
        );
        BiologicalEntities biologicalEntities = new BiologicalEntities(bioEntity.toUpperCase(), studies, null);
        /* ZOOMA SEMANTIC-TAG DATA */
        List<String> semanticTag = Collections.singletonList(mappedTermUrl);
        /* ZOOMA PROVENANCE DATA */
        Source source = new Source(URI, NAME, TOPIC, TYPE);
        // NOTE(review): the annotation timestamp is hard-coded — confirm whether
        // it should reflect the actual export date.
        Provenance provenance = new Provenance(
                source,
                EVIDENCE,
                ACCURACY,
                ANNOTATOR,
                "2018-11-30 10:48"
        );
        for (String mappingLabel : mappingLabels) {
            /* ZOOMA PROPERTY DATA */
            Property property = new org.pdxfinder.services.zooma.Property(
                    mappingLabel,
                    StringUtils.upperCase(mappingValues.get(mappingLabel)));
            // Only the diagnosis property carries the ontology term as a semantic tag.
            List<String> annotations = mappingLabel.equals("SampleDiagnosis")
                    ? semanticTag
                    : new ArrayList<>();
            zoomaEntities.add(new ZoomaEntity(
                    biologicalEntities,
                    property,
                    annotations,
                    provenance
            ));
        }
    }
    return zoomaEntities;
}
public void saveUnmappedTreatment(String dataSource, String treatment) {
    // Record a treatment term we could not map so curators can review it later.
    List<String> mappingLabels = getTreatmentMappingLabels();
    // Typed map replaces the raw Map/HashMap of the original.
    Map<String, String> mappingValues = new HashMap<>();
    mappingValues.put("DataSource", dataSource.toLowerCase());
    mappingValues.put("TreatmentName", treatment);
    MappingEntity mappingEntity = new MappingEntity(MappingEntityType.treatment.get(), mappingLabels, mappingValues);
    saveUnmappedTerms(mappingEntity);
}
public void saveUnmappedDiagnosis(String dataSource, String diagnosis, String originTissue, String tumorType) {
    // Record a diagnosis we could not map so curators can review it later.
    // Reuse the shared label list instead of rebuilding it inline (consistency
    // with getDiagnosisMappingLabels()).
    List<String> mappingLabels = getDiagnosisMappingLabels();
    // Typed map replaces the raw Map/HashMap of the original.
    Map<String, String> mappingValues = new HashMap<>();
    mappingValues.put("DataSource", dataSource.toLowerCase());
    mappingValues.put("SampleDiagnosis", diagnosis);
    mappingValues.put("OriginTissue", originTissue);
    mappingValues.put("TumorType", tumorType);
    MappingEntity mappingEntity = new MappingEntity(MappingEntityType.diagnosis.get(), mappingLabels, mappingValues);
    saveUnmappedTerms(mappingEntity);
}
public void saveUnmappedTerms(MappingEntity mappingEntity) {
    // Normalize the entity and mark it as unmapped before generating its key.
    mappingEntity.setStatus(Status.unmapped.get());
    mappingEntity.setMappedTermLabel("-");
    mappingEntity.setDateCreated(new Date());
    mappingEntity.setEntityType(mappingEntity.getEntityType().toLowerCase());
    String mappingKey = mappingEntity.generateMappingKey();
    mappingEntity.setMappingKey(mappingKey);
    // Idempotent save: only insert if no rule with this key exists yet.
    if (mappingEntityRepository.findByMappingKey(mappingKey) == null) {
        mappingEntityRepository.save(mappingEntity);
        log.info("UNMAPPED TERM WAS SAVED: {}", mappingEntity.generateMappingKey());
    }
}
public void saveMappedTerms(List<MappingEntity> mappingEntities) {
    // Persist curator-validated mapping rules, skipping keys already stored.
    for (MappingEntity mappingEntity : mappingEntities) {
        // Clear the id so the store assigns a fresh one.
        mappingEntity.setEntityId(null);
        mappingEntity.setStatus(Status.validated.get());
        mappingEntity.setEntityType(mappingEntity.getEntityType().toLowerCase());
        String mappingKey = mappingEntity.getMappingKey();
        MappingEntity entity = mappingEntityRepository.findByMappingKey(mappingKey);
        if (entity == null) {
            mappingEntityRepository.save(mappingEntity);
            log.warn("{} was SAVED ", mappingKey);
        } else {
            // Fixed the confusing double negative "was not NOT SAVED".
            log.warn("{} was NOT saved: already found in the database ", mappingKey);
        }
    }
}
public void purgeMappingDatabase(){
    // Destructive: removes every mapping rule from the store.
    int ruleCount = mappingEntityRepository.findAll().size();
    log.warn("Deleting H2 database and all its {} mapping data", ruleCount);
    mappingEntityRepository.deleteAll();
}
public List<MappingEntity> loadMappingsFromFile(String jsonFile) {
    // Deserialize each entry under the "mappings" key into a MappingEntity,
    // stamping its lookup key and creation date.
    List<Map<String, Object>> mappings = utilityService.serializeJSONToMaps(jsonFile, "mappings");
    return mappings.stream()
            .map(mapping -> {
                MappingEntity entity = mapper.convertValue(mapping, MappingEntity.class);
                entity.setMappingKey(entity.generateMappingKey());
                entity.setDateCreated(new Date());
                return entity;
            })
            .collect(Collectors.toList());
}
public void readArchive(String entityType) {
    // Removed the unused local 'jsonKey' from the original.
    // NOTE(review): the directory listing's result is discarded — confirm
    // whether this method was meant to return or process the archived files.
    String mappingDirectory = rootDir + "/mapping/backup/" + entityType;
    utilityService.listAllFilesInADirectory(mappingDirectory);
}
public void writeMappingsToFile(String entityType) {
    // containsSpecialCharacters() actually returns TRUE for SAFE input (letters,
    // digits, '-', '_'), so a false result means illegal characters are present.
    if (!containsSpecialCharacters(entityType)) {
        throw new IllegalArgumentException("EntityType contains illegal characters");
    }
    String jsonKey = "mappings";
    String mappingFile = rootDir + "/mapping/" + entityType + "_mappings.json";
    // Back up the previous mapping file under a unique, timestamped name.
    String baseDirectory = rootDir + "/mapping/backup/";
    String backupPreviousMappingFile = baseDirectory + entityType + "/" +
            (new Date()).toString().replaceAll(" ", "-") + "-" + entityType + "_mappings.json";
    utilityService.moveFile(mappingFile, backupPreviousMappingFile);
    // Persist the latest non-"unmapped" terms from the database.
    List<MappingEntity> mappingEntities = mappingEntityRepository.findByEntityTypeAndStatusIsNot(entityType, "unmapped");
    // Typed map replaces the raw Map/HashMap of the original.
    Map<String, List<MappingEntity>> dataMap = new HashMap<>();
    dataMap.put(jsonKey, mappingEntities);
    try {
        String newFile = mapper.writeValueAsString(dataMap);
        utilityService.writeToFile(newFile, mappingFile, false);
    } catch (JsonProcessingException e) {
        // The original swallowed this silently; at minimum record the failure,
        // since the old file has already been moved away at this point.
        log.error("Failed to serialize {} mappings to JSON", entityType, e);
    }
}
// NOTE(review): the name is inverted — this returns TRUE when the input is SAFE
// (only letters, digits, '-' and '_') and FALSE when it DOES contain special
// characters. The caller writeMappingsToFile() relies on this inverted meaning
// (it throws on a false result), so rename both together if you fix it.
private boolean containsSpecialCharacters(String input){
    return input.matches("^[A-Za-z0-9-_]*$");
}
public PaginationDTO search(int page,
                            int size,
                            List<String> entityType,
                            String mappingLabel,
                            List<String> mappingValue,
                            String mappedTermLabel,
                            String mapType,
                            String mappedTermsOnly,
                            List<String> status) {
    // Page size must be positive; otherwise fall back to 10 per page.
    size = (size > 0) ? size : 10;
    // Pages are 1-based for callers but 0-based for Spring Data.
    int start = (page > 0) ? page - 1 : 0;
    // Declared at assignment; the original's 'Pageable pageable = null;' plus
    // the scattered sortColumn/direction locals were dead-weight.
    Sort sort = Sort.by(getSortDirection("asc"), "id");
    Pageable pageable = PageRequest.of(start, size, sort);
    Page<MappingEntity> mappingEntityPage = mappingEntityRepository.findByMultipleFilters(
            entityType, mappingLabel, mappingValue, mappedTermLabel, mapType, mappedTermsOnly, status, pageable);
    List<MappingEntity> mappingEntityList = new ArrayList<>();
    mappingEntityPage.forEach(mappingEntity -> {
        // Unmapped rows (label "-") get mapping suggestions computed on the fly.
        if (mappingEntity.getMappedTermLabel().equals("-")) {
            mappingEntity
                    .setSuggestedMappings(getSuggestionsForUnmappedEntity(
                            mappingEntity,
                            getMappedEntitiesByType(mappingEntity.getEntityType())));
        }
        mappingEntityList.add(mappingEntity);
    });
    PaginationDTO paginationDto = paginationService.initializeDTO(mappingEntityPage);
    paginationDto.setAdditionalProperty("mappings", mappingEntityList);
    return paginationDto;
}
public Sort.Direction getSortDirection(String sortDir) {
    // "desc" selects descending; anything else defaults to ascending.
    return (sortDir.equals("desc") ? Sort.Direction.DESC : Sort.Direction.ASC);
}
/**
 * Collects every entity of the given type that already has a map type assigned
 * into a fresh {@link MappingContainer}.
 *
 * @param entityType the entity type to load mapped entities for
 * @return a container holding all mapped entities of that type
 */
public MappingContainer getMappedEntitiesByType(String entityType) {
    MappingContainer container = new MappingContainer();
    mappingEntityRepository
            .findByEntityTypeAndMapTypeIsNotNull(entityType)
            .forEach(container::addEntity);
    return container;
}
/**
 * Builds a per-data-source summary of mapping statistics for an entity type.
 *
 * @param entityType the entity type to summarize
 * @return one map per data source with the counts listed in {@code columns}
 */
public List<Map> getMappingSummary(String entityType) {
    // Column names must line up positionally with the projection returned
    // by findMissingMappingStat.
    List<String> columns =
            Arrays.asList("DataSource", "Unmapped", "Mapped", "Validated", "Created", "Orphaned");
    List<Object[]> stats = mappingEntityRepository.findMissingMappingStat(entityType);
    return utilityService.objectArrayListToMapList(stats, columns);
}
/**
 * Loads a single mapping entity by its numeric id and attaches mapping
 * suggestions computed against all other mapped entities of the same type.
 *
 * <p>NOTE(review): suggestions are computed unconditionally here (the original
 * comment claimed "only if mapped term is missing", which the code never did).
 *
 * @param entityId the entity id (must be non-null and refer to an existing entity)
 * @return the entity with its suggested mappings populated
 */
public MappingEntity getMappingEntityById(Integer entityId) {
    // Direct unboxing instead of the original parseLong(String.valueOf(...)) round-trip.
    Long id = entityId.longValue();
    // .get() throws NoSuchElementException when the id is unknown — callers rely
    // on the entity existing. TODO(review): consider orElseThrow with a message.
    MappingEntity mappingEntity = mappingEntityRepository.findByEntityId(id).get();
    MappingContainer mappingContainer = getMappedEntitiesByType(mappingEntity.getEntityType());
    // Remove this entity from the container so it cannot suggest itself.
    mappingContainer.getMappings().remove(mappingEntity.getMappingKey());
    mappingEntity.setSuggestedMappings(
            getSuggestionsForUnmappedEntity(mappingEntity, mappingContainer));
    return mappingEntity;
}
/**
 * Checks whether a mapping entity with the given id exists.
 *
 * @param entityId the entity id to look up
 * @return true if an entity with that id is stored
 */
public boolean checkExistence(Long entityId) {
    boolean exists = mappingEntityRepository.existsByEntityId(entityId);
    return exists;
}
/**
 * Looks up a mapping entity by the combination of mapping key and entity id.
 *
 * @param mappingKey the mapping key
 * @param entityId   the entity id
 * @return the matching entity, or empty if none matches both criteria
 */
public Optional<MappingEntity> getByMappingKeyAndEntityId(String mappingKey, Long entityId) {
    return mappingEntityRepository.findByMappingKeyAndEntityId(mappingKey, entityId);
}
/**
 * Applies a bulk list of submitted mapping updates.
 *
 * <p>For every submitted record whose entity id exists, the stored entity's
 * editable fields are overwritten, its update timestamp refreshed, and the
 * result saved; unknown ids are echoed back unsaved. Afterwards the refreshed
 * mappings for the batch's entity type are written to the file system.
 *
 * <p>Fix(review): an empty (or null) input previously threw
 * IndexOutOfBoundsException on {@code submittedEntities.get(0)}; it now
 * returns an empty list.
 *
 * @param submittedEntities the records to apply; assumed to share one entity type
 * @return the saved (or echoed-back) entities, in input order
 */
public List<MappingEntity> updateRecords(List<MappingEntity> submittedEntities) {
    List<MappingEntity> savedEntities = new ArrayList<>();
    if (submittedEntities == null || submittedEntities.isEmpty()) {
        return savedEntities;
    }
    submittedEntities.forEach(newEntity -> {
        MappingEntity updated = mappingEntityRepository.findByEntityId(newEntity.getEntityId())
                .map(mappingEntity -> {
                    // Copy the editable fields from the submitted record.
                    mappingEntity.setDateUpdated(new Date());
                    mappingEntity.setStatus(newEntity.getStatus());
                    mappingEntity.setMappedTermLabel(newEntity.getMappedTermLabel());
                    mappingEntity.setMappedTermUrl(newEntity.getMappedTermUrl());
                    mappingEntity.setMapType(newEntity.getMapType());
                    mappingEntity.setJustification(newEntity.getJustification());
                    return mappingEntityRepository.save(mappingEntity);
                })
                // Unknown entity id: return the submitted record unsaved.
                .orElse(newEntity);
        savedEntities.add(updated);
    });
    /* WRITE updated mapped terms to the file system and backup old file */
    writeMappingsToFile(submittedEntities.get(0).getEntityType());
    return savedEntities;
}
/**
 * Returns all ontology terms of the given type.
 *
 * <p>Results are cached in the "ontologies" cache, keyed by {@code type};
 * repeated lookups for the same type skip the repository call.
 *
 * @param type the ontology term type to load
 * @return the terms of that type
 */
@Cacheable("ontologies")
public List<OntologyTerm> getOntologyTermsByType(String type) {
    return ontologyTermRepository.findByType(type);
}
/**
 * Applies curation decisions uploaded as CSV rows to stored mapping entities.
 *
 * <p>Each row must carry the entity id. If the row's decision column is "no",
 * the curator rejected the current mapping and the approved term/url from the
 * row replace it; in either case the entity is stamped as validated and saved.
 * Rows whose id is unknown contribute an empty placeholder entity, keeping the
 * result aligned with the input.
 *
 * <p>Fix(review): the decision comparison is now constant-first
 * ({@code CSV.no.get().equalsIgnoreCase(...)}), so a row with a missing
 * decision column no longer throws NPE — it is treated as "not rejected".
 *
 * @param csvData parsed CSV rows, one map of column-name to value per row
 * @return the updated entities, in input order
 */
public List<MappingEntity> processUploadedCSV(List<Map<String, String>> csvData) {
    List<MappingEntity> savedEntities = new ArrayList<>();
    csvData.forEach(eachData -> {
        // Retrieve the entityId from the csv data.
        Long entityId = Long.parseLong(eachData.get(CSV.entityId.get()));
        MappingEntity updated = mappingEntityRepository.findByEntityId(entityId)
                .map(mappingEntity -> {
                    // Decision "no": replace the mapped term with the approved one.
                    if (CSV.no.get().equalsIgnoreCase(eachData.get(CSV.decision.get()))) {
                        mappingEntity.setMappedTermLabel(eachData.get(CSV.mappedTerm.get()));
                        mappingEntity.setMappedTermUrl(eachData.get(CSV.mappedTermUrl.get()));
                    }
                    // Either way the entity has now been curator-reviewed.
                    mappingEntity.setDateUpdated(new Date());
                    mappingEntity.setStatus(Status.validated.get());
                    return mappingEntityRepository.save(mappingEntity);
                })
                .orElseGet(MappingEntity::new);
        savedEntities.add(updated);
    });
    return savedEntities;
}
/**
 * Sets the root directory used by this service.
 *
 * <p>NOTE(review): the consumers of {@code rootDir} are not visible in this
 * chunk — presumably the mapping-file read/write paths; confirm before relying
 * on this description.
 *
 * @param rootDir the root directory path
 */
public void setRootDir(String rootDir) {
    this.rootDir = rootDir;
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/bigtable/v2/bigtable.proto
package com.google.bigtable.v2;
/**
*
*
* <pre>
* Request message for client connection keep-alive and warming.
* </pre>
*
* Protobuf type {@code google.bigtable.v2.PingAndWarmRequest}
*/
// NOTE(review): protoc-generated class (see "DO NOT EDIT" header above) —
// do not hand-edit; change google/bigtable/v2/bigtable.proto and regenerate.
public final class PingAndWarmRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.bigtable.v2.PingAndWarmRequest)
    PingAndWarmRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use PingAndWarmRequest.newBuilder() to construct.
  private PingAndWarmRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Initializes the two string fields to their proto3 default (empty string).
  private PingAndWarmRequest() {
    name_ = "";
    appProfileId_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new PingAndWarmRequest();
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor, invoked via PARSER at the bottom of this class.
  private PingAndWarmRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: // field 1 (name), length-delimited
            {
              java.lang.String s = input.readStringRequireUtf8();
              name_ = s;
              break;
            }
          case 18: // field 2 (app_profile_id), length-delimited
            {
              java.lang.String s = input.readStringRequireUtf8();
              appProfileId_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.bigtable.v2.BigtableProto
        .internal_static_google_bigtable_v2_PingAndWarmRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.bigtable.v2.BigtableProto
        .internal_static_google_bigtable_v2_PingAndWarmRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.bigtable.v2.PingAndWarmRequest.class,
            com.google.bigtable.v2.PingAndWarmRequest.Builder.class);
  }
  public static final int NAME_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; converted lazily and cached (see getName).
  private volatile java.lang.Object name_;
  /**
   *
   *
   * <pre>
   * Required. The unique name of the instance to check permissions for as well as
   * respond. Values are of the form `projects/<project>/instances/<instance>`.
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The unique name of the instance to check permissions for as well as
   * respond. Values are of the form `projects/<project>/instances/<instance>`.
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int APP_PROFILE_ID_FIELD_NUMBER = 2;
  // Same lazy String/ByteString representation as name_.
  private volatile java.lang.Object appProfileId_;
  /**
   *
   *
   * <pre>
   * This value specifies routing for replication. If not specified, the
   * "default" application profile will be used.
   * </pre>
   *
   * <code>string app_profile_id = 2;</code>
   *
   * @return The appProfileId.
   */
  @java.lang.Override
  public java.lang.String getAppProfileId() {
    java.lang.Object ref = appProfileId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      appProfileId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * This value specifies routing for replication. If not specified, the
   * "default" application profile will be used.
   * </pre>
   *
   * <code>string app_profile_id = 2;</code>
   *
   * @return The bytes for appProfileId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getAppProfileIdBytes() {
    java.lang.Object ref = appProfileId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      appProfileId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(appProfileId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, appProfileId_);
    }
    unknownFields.writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(appProfileId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, appProfileId_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.bigtable.v2.PingAndWarmRequest)) {
      return super.equals(obj);
    }
    com.google.bigtable.v2.PingAndWarmRequest other =
        (com.google.bigtable.v2.PingAndWarmRequest) obj;
    if (!getName().equals(other.getName())) return false;
    if (!getAppProfileId().equals(other.getAppProfileId())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + APP_PROFILE_ID_FIELD_NUMBER;
    hash = (53 * hash) + getAppProfileId().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.bigtable.v2.PingAndWarmRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.bigtable.v2.PingAndWarmRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.bigtable.v2.PingAndWarmRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.bigtable.v2.PingAndWarmRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.bigtable.v2.PingAndWarmRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.bigtable.v2.PingAndWarmRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.bigtable.v2.PingAndWarmRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.bigtable.v2.PingAndWarmRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.bigtable.v2.PingAndWarmRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.bigtable.v2.PingAndWarmRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.bigtable.v2.PingAndWarmRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.bigtable.v2.PingAndWarmRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.bigtable.v2.PingAndWarmRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for client connection keep-alive and warming.
   * </pre>
   *
   * Protobuf type {@code google.bigtable.v2.PingAndWarmRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.bigtable.v2.PingAndWarmRequest)
      com.google.bigtable.v2.PingAndWarmRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.bigtable.v2.BigtableProto
          .internal_static_google_bigtable_v2_PingAndWarmRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.bigtable.v2.BigtableProto
          .internal_static_google_bigtable_v2_PingAndWarmRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.bigtable.v2.PingAndWarmRequest.class,
              com.google.bigtable.v2.PingAndWarmRequest.Builder.class);
    }
    // Construct using com.google.bigtable.v2.PingAndWarmRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      name_ = "";
      appProfileId_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.bigtable.v2.BigtableProto
          .internal_static_google_bigtable_v2_PingAndWarmRequest_descriptor;
    }
    @java.lang.Override
    public com.google.bigtable.v2.PingAndWarmRequest getDefaultInstanceForType() {
      return com.google.bigtable.v2.PingAndWarmRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.bigtable.v2.PingAndWarmRequest build() {
      com.google.bigtable.v2.PingAndWarmRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.bigtable.v2.PingAndWarmRequest buildPartial() {
      com.google.bigtable.v2.PingAndWarmRequest result =
          new com.google.bigtable.v2.PingAndWarmRequest(this);
      result.name_ = name_;
      result.appProfileId_ = appProfileId_;
      onBuilt();
      return result;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.bigtable.v2.PingAndWarmRequest) {
        return mergeFrom((com.google.bigtable.v2.PingAndWarmRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.bigtable.v2.PingAndWarmRequest other) {
      if (other == com.google.bigtable.v2.PingAndWarmRequest.getDefaultInstance()) return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      if (!other.getAppProfileId().isEmpty()) {
        appProfileId_ = other.appProfileId_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.bigtable.v2.PingAndWarmRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.bigtable.v2.PingAndWarmRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private java.lang.Object name_ = "";
    /**
     *
     *
     * <pre>
     * Required. The unique name of the instance to check permissions for as well as
     * respond. Values are of the form `projects/<project>/instances/<instance>`.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The unique name of the instance to check permissions for as well as
     * respond. Values are of the form `projects/<project>/instances/<instance>`.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The unique name of the instance to check permissions for as well as
     * respond. Values are of the form `projects/<project>/instances/<instance>`.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The unique name of the instance to check permissions for as well as
     * respond. Values are of the form `projects/<project>/instances/<instance>`.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The unique name of the instance to check permissions for as well as
     * respond. Values are of the form `projects/<project>/instances/<instance>`.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      onChanged();
      return this;
    }
    private java.lang.Object appProfileId_ = "";
    /**
     *
     *
     * <pre>
     * This value specifies routing for replication. If not specified, the
     * "default" application profile will be used.
     * </pre>
     *
     * <code>string app_profile_id = 2;</code>
     *
     * @return The appProfileId.
     */
    public java.lang.String getAppProfileId() {
      java.lang.Object ref = appProfileId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        appProfileId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * This value specifies routing for replication. If not specified, the
     * "default" application profile will be used.
     * </pre>
     *
     * <code>string app_profile_id = 2;</code>
     *
     * @return The bytes for appProfileId.
     */
    public com.google.protobuf.ByteString getAppProfileIdBytes() {
      java.lang.Object ref = appProfileId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        appProfileId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * This value specifies routing for replication. If not specified, the
     * "default" application profile will be used.
     * </pre>
     *
     * <code>string app_profile_id = 2;</code>
     *
     * @param value The appProfileId to set.
     * @return This builder for chaining.
     */
    public Builder setAppProfileId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      appProfileId_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * This value specifies routing for replication. If not specified, the
     * "default" application profile will be used.
     * </pre>
     *
     * <code>string app_profile_id = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearAppProfileId() {
      appProfileId_ = getDefaultInstance().getAppProfileId();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * This value specifies routing for replication. If not specified, the
     * "default" application profile will be used.
     * </pre>
     *
     * <code>string app_profile_id = 2;</code>
     *
     * @param value The bytes for appProfileId to set.
     * @return This builder for chaining.
     */
    public Builder setAppProfileIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      appProfileId_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.bigtable.v2.PingAndWarmRequest)
  }
  // @@protoc_insertion_point(class_scope:google.bigtable.v2.PingAndWarmRequest)
  private static final com.google.bigtable.v2.PingAndWarmRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.bigtable.v2.PingAndWarmRequest();
  }
  public static com.google.bigtable.v2.PingAndWarmRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser singleton; delegates to the wire-format constructor above.
  private static final com.google.protobuf.Parser<PingAndWarmRequest> PARSER =
      new com.google.protobuf.AbstractParser<PingAndWarmRequest>() {
        @java.lang.Override
        public PingAndWarmRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new PingAndWarmRequest(input, extensionRegistry);
        }
      };
  public static com.google.protobuf.Parser<PingAndWarmRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<PingAndWarmRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.bigtable.v2.PingAndWarmRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* ARX: Powerful Data Anonymization
* Copyright 2012 - 2015 Florian Kohlmayer, Fabian Prasser
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.deidentifier.arx.metric.v2;
import java.util.Arrays;
import java.util.Set;
import org.apache.poi.ss.formula.functions.T;
import org.deidentifier.arx.ARXConfiguration;
import org.deidentifier.arx.DataDefinition;
import org.deidentifier.arx.aggregates.HierarchyBuilder;
import org.deidentifier.arx.aggregates.HierarchyBuilderIntervalBased;
import org.deidentifier.arx.aggregates.HierarchyBuilderRedactionBased;
import org.deidentifier.arx.criteria.DPresence;
import org.deidentifier.arx.framework.check.groupify.HashGroupifyEntry;
import org.deidentifier.arx.framework.check.groupify.IHashGroupify;
import org.deidentifier.arx.framework.data.Data;
import org.deidentifier.arx.framework.data.GeneralizationHierarchy;
import org.deidentifier.arx.framework.lattice.Node;
import org.deidentifier.arx.metric.MetricConfiguration;
/**
* This class implements a variant of the Loss metric.
*
* @author Fabian Prasser
*/
public class MetricMDNMLoss extends AbstractMetricMultiDimensional {
    /** SUID. */
    private static final long serialVersionUID = -573670902335136600L;
    /** Total number of tuples, depends on existence of research subset. */
    private double tuples;
    /** Domain shares for each dimension (set during initialization). */
    private DomainShare[] shares;
    /** Weight applied to generalized values; derived from gsFactor in the constructor (1 for gsFactor <= 0.5). */
    private final double gFactor;
    /** User-supplied [0,1] factor balancing generalization (1) against suppression (0); 0.5 treats both equally. */
    private final double gsFactor;
    /** Weight applied to suppressed values; derived from gsFactor in the constructor (1 for gsFactor >= 0.5). */
    private final double sFactor;
    /**
     * Default constructor which treats all transformation methods equally
     * (gsFactor = 0.5) and aggregates per-dimension losses with the RANK function.
     */
    public MetricMDNMLoss(){
        this(0.5d, AggregateFunction.RANK);
    }
    /**
     * Default constructor which treats all transformation methods equally
     * (gsFactor = 0.5), using the given aggregate function.
     *
     * @param function the aggregate function used to combine per-dimension losses
     */
    public MetricMDNMLoss(AggregateFunction function){
        this(0.5d, function);
    }
/**
* A constructor that allows to define a factor weighting generalization and suppression.
*
* @param gsFactor A factor [0,1] weighting generalization and suppression.
* The default value is 0.5, which means that generalization
* and suppression will be treated equally. A factor of 0
* will favor suppression, and a factor of 1 will favor
* generalization. The values in between can be used for
* balancing both methods.
* @param function
*/
public MetricMDNMLoss(double gsFactor, AggregateFunction function){
super(false, false, function);
this.gsFactor = gsFactor;
this.sFactor = gsFactor < 0.5d ? 2d * gsFactor : 1d;
this.gFactor = gsFactor <= 0.5d ? 1d : 1d - 2d * (gsFactor - 0.5d);
}
/**
* Returns the configuration of this metric.
*
* @return
*/
public MetricConfiguration getConfiguration() {
return new MetricConfiguration(false, // monotonic
gsFactor, // gs-factor
false, // precomputed
0.0d, // precomputation threshold
this.getAggregateFunction() // aggregate function
);
}
    /**
     * Returns the factor used to weight generalized values.
     *
     * @return the generalization weight derived from gsFactor
     */
    public double getGeneralizationFactor() {
        return gFactor;
    }
    /**
     * Returns the factor weighting generalization and suppression.
     *
     * @return A factor [0,1] weighting generalization and suppression.
     *         The default value is 0.5, which means that generalization
     *         and suppression will be treated equally. A factor of 0
     *         will favor suppression, and a factor of 1 will favor
     *         generalization. The values in between can be used for
     *         balancing both methods.
     */
    public double getGeneralizationSuppressionFactor() {
        return gsFactor;
    }
    /** Returns the human-readable name of this metric. */
    @Override
    public String getName() {
        return "Loss";
    }
    /**
     * Returns the factor used to weight suppressed values.
     *
     * @return the suppression weight derived from gsFactor
     */
    public double getSuppressionFactor() {
        return sFactor;
    }
@Override
public String toString() {
return "Loss ("+gsFactor+"/"+gFactor+"/"+sFactor+")";
}
@Override
protected ILMultiDimensionalWithBound getInformationLossInternal(Node node, HashGroupifyEntry entry) {
// Init
double[] result = new double[getDimensions()];
int dimensions = getDimensions();
int[] transformation = node.getTransformation();
// Compute
for (int dimension = 0; dimension < dimensions; dimension++) {
int value = entry.key[dimension];
int level = transformation[dimension];
result[dimension] = (double) entry.count / shares[dimension].getShare(value, level);
}
// Return
return new ILMultiDimensionalWithBound(super.createInformationLoss(result));
}
    /**
     * Computes the information loss and a lower bound for a full groupify.
     *
     * <p>Non-suppressed classes contribute {@code count * share * gFactor};
     * suppressed (outlier) classes contribute the full count when the
     * suppression weight is 1, otherwise an interpolation between the share and
     * the full count weighted by sFactor. The bound accumulates every class as
     * if nothing were suppressed.
     *
     * @param node the transformation node being evaluated
     * @param g    the groupify holding all equivalence classes
     * @return the normalized per-dimension loss together with its lower bound
     */
    @Override
    protected ILMultiDimensionalWithBound getInformationLossInternal(Node node, IHashGroupify g) {
        // Prepare
        int[] transformation = node.getTransformation();
        int dimensions = transformation.length;
        double[] result = new double[dimensions]; // actual loss per dimension
        double[] bound = new double[dimensions];  // lower bound: generalization only
        // Compute NDS and lower bound
        HashGroupifyEntry m = g.getFirstEntry();
        while (m != null) {
            if (m.count > 0) { // skip empty classes
                for (int dimension = 0; dimension < dimensions; dimension++) {
                    int value = m.key[dimension];
                    int level = transformation[dimension];
                    double share = (double) m.count * shares[dimension].getShare(value, level);
                    // Outlier (suppressed) classes are penalized via sFactor.
                    result[dimension] += m.isNotOutlier ? share * gFactor :
                                         (sFactor == 1d ? m.count : share + sFactor * ((double) m.count - share));
                    bound[dimension] += share * gFactor;
                }
            }
            m = m.nextOrdered;
        }
        // Normalize (normalize() is defined elsewhere in this class hierarchy).
        for (int dimension = 0; dimension < dimensions; dimension++) {
            result[dimension] = normalize(result[dimension], dimension);
            bound[dimension] = normalize(bound[dimension], dimension);
        }
        // Return information loss and lower bound
        return new ILMultiDimensionalWithBound(super.createInformationLoss(result),
                                               super.createInformationLoss(bound));
    }
    /**
     * No lower bound can be derived from the transformation alone — the data
     * distribution (groupify) is required — so this returns null.
     */
    @Override
    protected AbstractILMultiDimensional getLowerBoundInternal(Node node) {
        return null;
    }
    /**
     * Computes a lower bound on the information loss by assuming that no
     * equivalence class is suppressed: only the generalization-weighted domain
     * shares are accumulated, then normalized per dimension.
     *
     * @param node the transformation node providing generalization levels
     * @param g    the groupify structure to iterate
     * @return the normalized lower bound
     */
    @Override
    protected AbstractILMultiDimensional getLowerBoundInternal(Node node,
                                                               IHashGroupify g) {
        // Prepare
        int[] transformation = node.getTransformation();
        int dimensions = transformation.length;
        double[] bound = new double[dimensions];
        // Compute lower bound
        HashGroupifyEntry m = g.getFirstEntry();
        while (m != null) {
            if (m.count>0) {
                for (int dimension=0; dimension<dimensions; dimension++){
                    int value = m.key[dimension];
                    int level = transformation[dimension];
                    double share = (double)m.count * shares[dimension].getShare(value, level);
                    bound[dimension] += share * gFactor;
                }
            }
            m = m.nextOrdered;
        }
        // Normalize
        for (int dimension=0; dimension<dimensions; dimension++){
            bound[dimension] = normalize(bound[dimension], dimension);
        }
        // Return
        return super.createInformationLoss(bound);
    }
    /**
     * Returns the per-dimension domain shares. For subclasses.
     *
     * @return the domain-share array, one entry per quasi-identifier
     */
    protected DomainShare[] getShares(){
        return this.shares;
    }
    /**
     * Initializes the metric: builds one {@link DomainShare} per attribute
     * (choosing a redaction-, interval- or materialized-based implementation
     * depending on the hierarchy builder), determines the number of tuples
     * (restricted to the research subset when a single d-presence criterion is
     * configured), and sets the [0,1] min/max range for normalization.
     *
     * @param definition  the data definition (hierarchies, builders)
     * @param input       the input data
     * @param hierarchies the materialized generalization hierarchies
     * @param config      the anonymization configuration
     * @throws IllegalStateException if more than one d-presence criterion is present
     */
    @SuppressWarnings("unchecked")
    @Override
    protected void initializeInternal(final DataDefinition definition,
                                      final Data input,
                                      final GeneralizationHierarchy[] hierarchies,
                                      final ARXConfiguration config) {
        // Prepare weights
        super.initializeInternal(definition, input, hierarchies, config);
        // Compute domain shares
        this.shares = new DomainShare[hierarchies.length];
        for (int i=0; i<shares.length; i++) {
            // Extract info
            String attribute = input.getHeader()[i];
            String[][] hierarchy = definition.getHierarchy(attribute);
            HierarchyBuilder<?> builder = definition.getHierarchyBuilder(attribute);
            // Create shares for redaction-based hierarchies
            if ((builder instanceof HierarchyBuilderRedactionBased) &&
                ((HierarchyBuilderRedactionBased<?>)builder).isDomainPropertiesAvailable()){
                shares[i] = new DomainShareRedaction((HierarchyBuilderRedactionBased<?>)builder);
                // Create shares for interval-based hierarchies
            } else if (builder instanceof HierarchyBuilderIntervalBased){
                shares[i] = new DomainShareInterval<T>((HierarchyBuilderIntervalBased<T>)builder,
                                                       hierarchies[i].getArray(),
                                                       input.getDictionary().getMapping()[i]);
                // Create fallback-shares for materialized hierarchies
            } else {
                shares[i] = new DomainShareMaterialized(hierarchy,
                                                        input.getDictionary().getMapping()[i],
                                                        hierarchies[i].getArray());
            }
        }
        // Determine total number of tuples
        this.tuples = input.getDataLength();
        if (config.containsCriterion(DPresence.class)) {
            Set<DPresence> criteria = config.getCriteria(DPresence.class);
            if (criteria.size() > 1) {
                throw new IllegalStateException("Only one d-presence criterion supported!");
            }
            // d-presence: only the research subset counts towards the total
            this.tuples = criteria.iterator().next().getSubset().getArray().length;
        }
        // Min and max
        double[] min = new double[shares.length];
        Arrays.fill(min, 0d);
        double[] max = new double[shares.length];
        Arrays.fill(max, 1d);
        super.setMin(min);
        super.setMax(max);
    }
/**
* Normalizes the aggregate.
*
* @param aggregate
* @param dimension
* @return
*/
protected double normalize(double aggregate, int dimension) {
double min = gFactor * tuples / shares[dimension].getDomainSize();
double max = tuples;
double result = (aggregate - min) / (max - min);
result = result >= 0d ? result : 0d;
return round(result);
}
}
| |
package org.docksidestage.compatible10x.dbflute.whitebox.bhv;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.dbflute.bhv.referrer.ConditionBeanSetupper;
import org.dbflute.bhv.referrer.ReferrerLoaderHandler;
import org.dbflute.cbean.result.ListResultBean;
import org.dbflute.cbean.scoping.OrQuery;
import org.dbflute.cbean.scoping.ScalarQuery;
import org.dbflute.cbean.scoping.SubQuery;
import org.dbflute.cbean.scoping.UnionQuery;
import org.dbflute.exception.SpecifyColumnTwoOrMoreColumnException;
import org.dbflute.exception.SpecifyColumnWithDerivedReferrerException;
import org.dbflute.exception.SpecifyDerivedReferrerTwoOrMoreException;
import org.dbflute.hook.CallbackContext;
import org.dbflute.hook.SqlLogHandler;
import org.dbflute.hook.SqlLogInfo;
import org.dbflute.util.DfTypeUtil;
import org.dbflute.util.Srl;
import org.docksidestage.compatible10x.dbflute.bsbhv.loader.LoaderOfMember;
import org.docksidestage.compatible10x.dbflute.bsentity.dbmeta.MemberServiceDbm;
import org.docksidestage.compatible10x.dbflute.bsentity.dbmeta.SummaryWithdrawalDbm;
import org.docksidestage.compatible10x.dbflute.cbean.MemberCB;
import org.docksidestage.compatible10x.dbflute.cbean.MemberLoginCB;
import org.docksidestage.compatible10x.dbflute.cbean.MemberServiceCB;
import org.docksidestage.compatible10x.dbflute.cbean.PurchaseCB;
import org.docksidestage.compatible10x.dbflute.cbean.SummaryWithdrawalCB;
import org.docksidestage.compatible10x.dbflute.exbhv.MemberBhv;
import org.docksidestage.compatible10x.dbflute.exbhv.MemberServiceBhv;
import org.docksidestage.compatible10x.dbflute.exbhv.SummaryWithdrawalBhv;
import org.docksidestage.compatible10x.dbflute.exentity.Member;
import org.docksidestage.compatible10x.dbflute.exentity.MemberLogin;
import org.docksidestage.compatible10x.unit.UnitContainerTestCase;
/**
 * Tests for behavior scalar-select: max/count/countDistinct/sum, combined with
 * DerivedReferrer, UnionQuery, relation columns, coalesce/round options and
 * illegal column specifications.
 * @author jflute
 * @since 0.6.0 (2008/01/16 Wednesday)
 */
public class WxBhvScalarSelectBasicTest extends UnitContainerTestCase {

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    private MemberBhv memberBhv;
    private MemberServiceBhv memberServiceBhv;
    private SummaryWithdrawalBhv summaryWithdrawalBhv;

    // ===================================================================================
    //                                                                               Basic
    //                                                                               =====
    public void test_ScalarSelect_max() {
        // ## Arrange ##
        // expected = birthdate of the youngest formalized member (ordered desc, first row)
        MemberCB cb = new MemberCB();
        cb.specify().columnBirthdate();
        cb.query().setMemberStatusCode_Equal_Formalized();
        cb.query().setBirthdate_IsNotNull();
        cb.query().addOrderBy_Birthdate_Desc();
        cb.fetchFirst(1);
        Date expected = memberBhv.selectEntityWithDeletedCheck(cb).getBirthdate();

        // ## Act ##
        memberBhv.scalarSelect(Date.class).max(new ScalarQuery<MemberCB>() {
            public void query(MemberCB cb) {
                cb.specify().columnBirthdate(); // *Point!
                cb.query().setMemberStatusCode_Equal_Formalized();
            }
        }).alwaysPresent(birthdate -> {
            /* ## Assert ## */
            assertEquals(expected, birthdate);
        });
    }

    // ===================================================================================
    //                                                                      (Unique) Count
    //                                                                      ==============
    public void test_ScalarSelect_count() {
        // ## Arrange ##
        MemberCB cb = new MemberCB();
        int countAll = memberBhv.selectCount(cb);

        // ## Act ##
        Integer scalarCount = memberBhv.scalarSelect(Integer.class).count(new ScalarQuery<MemberCB>() {
            public void query(MemberCB cb) {
                cb.specify().columnMemberId();
            }
        });

        // ## Assert ##
        assertEquals(countAll, scalarCount);
    }

    // ===================================================================================
    //                                                                      Count Distinct
    //                                                                      ==============
    public void test_ScalarSelect_countDistinct_basic() {
        // ## Arrange ##
        // expected = number of distinct status codes gathered in memory
        MemberCB cb = new MemberCB();
        ListResultBean<Member> memberList = memberBhv.selectList(cb);
        HashSet<String> statusSet = new HashSet<String>();
        for (Member member : memberList) {
            statusSet.add(member.getMemberStatusCode());
        }

        // ## Act ##
        Integer kindCount = memberBhv.scalarSelect(Integer.class).countDistinct(new ScalarQuery<MemberCB>() {
            public void query(MemberCB cb) {
                cb.specify().columnMemberStatusCode();
            }
        });

        // ## Assert ##
        assertEquals(statusSet.size(), kindCount);
    }

    public void test_ScalarSelect_countDistinct_noHist() {
        // ## Arrange ##
        // ## Act ##
        // no matching rows: countDistinct should yield zero, not null
        Integer kindCount = memberBhv.scalarSelect(Integer.class).countDistinct(new ScalarQuery<MemberCB>() {
            public void query(MemberCB cb) {
                cb.specify().columnMemberStatusCode();
                cb.query().setMemberName_Equal("no exist");
            }
        });

        // ## Assert ##
        assertEquals(0, kindCount);
    }

    // ===================================================================================
    //                                                                     DerivedReferrer
    //                                                                     ===============
    public void test_ScalarSelect_DerivedReferrer_basic() {
        // ## Arrange ##
        // expected = sum over members of max(purchase price) computed per entity
        MemberCB cb = new MemberCB();
        cb.specify().derivedPurchaseList().max(new SubQuery<PurchaseCB>() {
            public void query(PurchaseCB subCB) {
                subCB.specify().columnPurchasePrice();
                subCB.query().setPaymentCompleteFlg_Equal_True();
                subCB.query().setPurchasePrice_GreaterEqual(800);
            }
        }, Member.ALIAS_productKindCount, op -> op.coalesce(0));
        cb.query().setMemberStatusCode_Equal_Formalized();
        ListResultBean<Member> memberList = memberBhv.selectList(cb);
        Integer expected = 0;
        for (Member member : memberList) {
            Integer max = member.getProductKindCount();
            log(member.getMemberName() + " = " + max);
            expected = expected + member.getProductKindCount();
        }

        // ## Act ##
        Integer sum = memberBhv.scalarSelect(Integer.class).sum(new ScalarQuery<MemberCB>() {
            public void query(MemberCB cb) {
                cb.specify().derivedPurchaseList().max(new SubQuery<PurchaseCB>() {
                    public void query(PurchaseCB subCB) {
                        subCB.specify().columnPurchasePrice();
                        subCB.query().setPaymentCompleteFlg_Equal_True();
                        subCB.query().setPurchasePrice_GreaterEqual(800);
                    }
                }, null);
                cb.query().setMemberStatusCode_Equal_Formalized();
            }
        }).get();

        // ## Assert ##
        log("sum = " + sum);
        assertEquals(expected, sum);
    }

    public void test_ScalarSelect_DerivedReferrer_with_UnionQuery() {
        // ## Arrange ##
        MemberCB cb = new MemberCB();
        cb.specify().derivedPurchaseList().max(new SubQuery<PurchaseCB>() {
            public void query(PurchaseCB subCB) {
                subCB.specify().columnPurchasePrice();
                subCB.query().setPaymentCompleteFlg_Equal_True();
                subCB.query().setPurchasePrice_GreaterEqual(800);
            }
        }, Member.ALIAS_productKindCount, op -> op.coalesce(0));
        cb.orScopeQuery(new OrQuery<MemberCB>() {
            public void query(MemberCB orCB) {
                orCB.query().setMemberStatusCode_Equal_Withdrawal();
                orCB.query().setMemberName_PrefixSearch("S");
            }
        });
        ListResultBean<Member> memberList = memberBhv.selectList(cb);
        Integer expected = 0;
        for (Member member : memberList) {
            Integer max = member.getProductKindCount();
            log(member.getMemberName() + " = " + max);
            expected = expected + member.getProductKindCount();
        }

        // ## Act ##
        Integer sum = memberBhv.scalarSelect(Integer.class).sum(new ScalarQuery<MemberCB>() {
            public void query(MemberCB cb) {
                cb.specify().derivedPurchaseList().max(new SubQuery<PurchaseCB>() {
                    public void query(PurchaseCB subCB) {
                        subCB.specify().columnPurchasePrice();
                        subCB.query().setPaymentCompleteFlg_Equal_True();
                        subCB.query().setPurchasePrice_GreaterEqual(800);
                    }
                }, null);
                cb.query().setMemberStatusCode_Equal_Withdrawal();
                cb.union(new UnionQuery<MemberCB>() {
                    public void query(MemberCB unionCB) {
                        unionCB.query().setMemberName_PrefixSearch("S");
                    }
                });
            }
        }).get();

        // ## Assert ##
        // NOTE(review): only logged, no assertEquals here — presumably because the
        // union may duplicate rows; confirm whether an assertion was intended.
        log("sum = " + sum);
    }

    // ===================================================================================
    //                                                                          UnionQuery
    //                                                                          ==========
    public void test_ScalarSelect_with_UnionQuery_basic_sum() {
        // ## Arrange ##
        MemberCB cb = new MemberCB();
        cb.setupSelect_MemberServiceAsOne();
        ListResultBean<Member> memberList = memberBhv.selectList(cb);
        Integer expected = 0;
        for (Member member : memberList) {
            Integer pointCount = member.getMemberServiceAsOne().getServicePointCount();
            log("pointCount = " + pointCount);
            expected = expected + pointCount;
        }
        final Set<String> markSet = new HashSet<String>();
        // verify the issued SQL: PK and specified column present, others absent
        CallbackContext.setSqlLogHandlerOnThread(new SqlLogHandler() {
            public void handle(SqlLogInfo info) {
                MemberServiceDbm dbm = MemberServiceDbm.getInstance();
                String displaySql = info.getDisplaySql();
                assertTrue(Srl.contains(displaySql, dbm.columnMemberServiceId().getColumnDbName()));
                assertTrue(Srl.contains(displaySql, dbm.columnServicePointCount().getColumnDbName()));
                assertFalse(Srl.contains(displaySql, dbm.columnServiceRankCode().getColumnDbName()));
                markSet.add("handle");
            }
        });

        // ## Act ##
        try {
            Integer sum = memberServiceBhv.scalarSelect(Integer.class).sum(new ScalarQuery<MemberServiceCB>() {
                public void query(MemberServiceCB cb) {
                    cb.specify().columnServicePointCount();
                    cb.union(new UnionQuery<MemberServiceCB>() {
                        public void query(MemberServiceCB unionCB) {
                        }
                    });
                }
            }).get();

            // ## Assert ##
            log("sum = " + sum);
            assertEquals(expected, sum); // should be selected uniquely
            assertTrue(markSet.contains("handle"));
        } finally {
            CallbackContext.clearSqlLogHandlerOnThread();
        }
    }

    public void test_ScalarSelect_with_UnionQuery_PrimaryKey_sum() {
        // ## Arrange ##
        MemberCB cb = new MemberCB();
        cb.setupSelect_MemberServiceAsOne();
        ListResultBean<Member> memberList = memberBhv.selectList(cb);
        Integer expected = 0;
        for (Member member : memberList) {
            Integer pointCount = member.getMemberServiceAsOne().getServicePointCount();
            log("pointCount = " + pointCount);
            expected = expected + pointCount;
        }
        final Set<String> markSet = new HashSet<String>();
        // when the PK itself is specified, only the PK column should be selected
        CallbackContext.setSqlLogHandlerOnThread(new SqlLogHandler() {
            public void handle(SqlLogInfo info) {
                MemberServiceDbm dbm = MemberServiceDbm.getInstance();
                String displaySql = info.getDisplaySql();
                assertTrue(Srl.contains(displaySql, dbm.columnMemberServiceId().getColumnDbName()));
                assertFalse(Srl.contains(displaySql, dbm.columnServicePointCount().getColumnDbName()));
                assertFalse(Srl.contains(displaySql, dbm.columnServiceRankCode().getColumnDbName()));
                markSet.add("handle");
            }
        });

        // ## Act ##
        try {
            Integer sum = memberServiceBhv.scalarSelect(Integer.class).sum(new ScalarQuery<MemberServiceCB>() {
                public void query(MemberServiceCB cb) {
                    cb.specify().columnMemberServiceId();
                    cb.union(new UnionQuery<MemberServiceCB>() {
                        public void query(MemberServiceCB unionCB) {
                        }
                    });
                }
            }).get();

            // ## Assert ##
            log("sum = " + sum);
            assertTrue(markSet.contains("handle"));
        } finally {
            CallbackContext.clearSqlLogHandlerOnThread();
        }
    }

    public void test_ScalarSelect_with_UnionQuery_noPrimaryKey_sum() {
        // ## Arrange ##
        // expected comes from an equivalent per-member derived max on Member side
        MemberCB cb = new MemberCB();
        cb.specify().derivedPurchaseList().max(new SubQuery<PurchaseCB>() {
            public void query(PurchaseCB subCB) {
                subCB.specify().columnPurchasePrice();
            }
        }, Member.ALIAS_highestPurchasePrice);
        cb.query().setMemberStatusCode_Equal_Withdrawal();
        cb.query().addSpecifiedDerivedOrderBy_Desc(Member.ALIAS_highestPurchasePrice);
        ListResultBean<Member> memberList = memberBhv.selectList(cb);
        Integer expected = 0;
        for (Member member : memberList) {
            Integer maxPurchasePrice = member.getHighestPurchasePrice();
            log("maxPurchasePrice = " + maxPurchasePrice);
            expected = expected + maxPurchasePrice;
        }
        final Set<String> markSet = new HashSet<String>();
        // table without a PK: all columns should appear in the union SQL
        CallbackContext.setSqlLogHandlerOnThread(new SqlLogHandler() {
            public void handle(SqlLogInfo info) {
                String displaySql = info.getDisplaySql();
                SummaryWithdrawalDbm dbm = SummaryWithdrawalDbm.getInstance();
                assertTrue(Srl.contains(displaySql, dbm.columnMaxPurchasePrice().getColumnDbName()));
                assertTrue(Srl.contains(displaySql, dbm.columnWithdrawalDatetime().getColumnDbName()));
                markSet.add("handle");
            }
        });

        // ## Act ##
        try {
            Integer sum = summaryWithdrawalBhv.scalarSelect(Integer.class).sum(new ScalarQuery<SummaryWithdrawalCB>() {
                public void query(SummaryWithdrawalCB cb) {
                    cb.specify().columnMaxPurchasePrice();
                    cb.union(new UnionQuery<SummaryWithdrawalCB>() {
                        public void query(SummaryWithdrawalCB unionCB) {
                        }
                    });
                }
            }).get();

            // ## Assert ##
            log("sum = " + sum);
            assertEquals(expected, sum); // should be selected uniquely
            assertTrue(markSet.contains("handle"));
        } finally {
            CallbackContext.clearSqlLogHandlerOnThread();
        }
    }

    // ===================================================================================
    //                                                                     Relation Column
    //                                                                     ===============
    public void test_ScalarSelect_relation_basic() {
        // ## Arrange ##
        MemberCB cb = new MemberCB();
        cb.setupSelect_MemberStatus();
        ListResultBean<Member> memberList = memberBhv.selectList(cb);
        int expected = 0;
        for (Member member : memberList) {
            expected = expected + member.getMemberStatus().getDisplayOrder();
        }

        // ## Act ##
        Integer sum = memberBhv.scalarSelect(Integer.class).sum(new ScalarQuery<MemberCB>() {
            public void query(MemberCB cb) {
                cb.specify().specifyMemberStatus().columnDisplayOrder();
            }
        }).get();

        // ## Assert ##
        assertEquals(expected, sum);
    }

    public void test_ScalarSelect_relation_union() {
        // ## Arrange ##
        MemberCB cb = new MemberCB();
        cb.setupSelect_MemberStatus();
        ListResultBean<Member> memberList = memberBhv.selectList(cb);
        int expected = 0;
        for (Member member : memberList) {
            expected = expected + member.getMemberStatus().getDisplayOrder();
        }

        // ## Act ##
        Integer sum = memberBhv.scalarSelect(Integer.class).sum(new ScalarQuery<MemberCB>() {
            public void query(MemberCB cb) {
                cb.specify().specifyMemberStatus().columnDisplayOrder();
                cb.union(new UnionQuery<MemberCB>() {
                    public void query(MemberCB unionCB) {
                    }
                });
            }
        }).get();

        // ## Assert ##
        assertEquals(expected, sum);
    }

    public void test_ScalarSelect_relation_DerivedReferrer_basic() {
        // ## Arrange ##
        // expected = sum of the latest (max) login ID per member's status
        MemberCB cb = new MemberCB();
        cb.setupSelect_MemberStatus();
        ListResultBean<Member> memberList = memberBhv.selectList(cb);
        memberBhv.load(memberList, new ReferrerLoaderHandler<LoaderOfMember>() {
            public void handle(LoaderOfMember loader) {
                loader.pulloutMemberStatus().loadMemberLoginList(new ConditionBeanSetupper<MemberLoginCB>() {
                    public void setup(MemberLoginCB refCB) {
                        refCB.query().addOrderBy_MemberLoginId_Desc();
                    }
                });
            }
        });
        Long expected = 0L;
        for (Member member : memberList) {
            List<MemberLogin> loginList = member.getMemberStatus().getMemberLoginList();
            long currentId = !loginList.isEmpty() ? loginList.get(0).getMemberLoginId() : 0;
            expected = expected + currentId;
        }
        final Set<String> sqlSet = new HashSet<String>();
        CallbackContext.setSqlLogHandlerOnThread(new SqlLogHandler() {
            public void handle(SqlLogInfo info) {
                sqlSet.add(info.getDisplaySql());
            }
        });

        // ## Act ##
        try {
            Long sum = memberBhv.scalarSelect(Long.class).sum(new ScalarQuery<MemberCB>() {
                public void query(MemberCB cb) {
                    cb.specify().specifyMemberStatus().derivedMemberLoginList().max(new SubQuery<MemberLoginCB>() {
                        public void query(MemberLoginCB subCB) {
                            subCB.specify().columnMemberLoginId();
                        }
                    }, null);
                }
            }).get();

            // ## Assert ##
            assertEquals(expected, sum);
            String sql = sqlSet.iterator().next();
            assertContains(sql, "select sum((select max(sub1loc.MEMBER_LOGIN_ID)");
        } finally {
            CallbackContext.clearSqlLogHandlerOnThread();
        }
    }

    public void test_ScalarSelect_relation_DerivedReferrer_union() {
        // ## Arrange ##
        MemberCB cb = new MemberCB();
        cb.setupSelect_MemberStatus();
        ListResultBean<Member> memberList = memberBhv.selectList(cb);
        memberBhv.load(memberList, new ReferrerLoaderHandler<LoaderOfMember>() {
            public void handle(LoaderOfMember loader) {
                loader.pulloutMemberStatus().loadMemberLoginList(new ConditionBeanSetupper<MemberLoginCB>() {
                    public void setup(MemberLoginCB refCB) {
                        refCB.query().addOrderBy_MemberLoginId_Desc();
                    }
                });
            }
        });
        Long expected = 0L;
        for (Member member : memberList) {
            List<MemberLogin> loginList = member.getMemberStatus().getMemberLoginList();
            long currentId = !loginList.isEmpty() ? loginList.get(0).getMemberLoginId() : 0;
            expected = expected + currentId;
        }
        final Set<String> sqlSet = new HashSet<String>();
        CallbackContext.setSqlLogHandlerOnThread(new SqlLogHandler() {
            public void handle(SqlLogInfo info) {
                sqlSet.add(info.getDisplaySql());
            }
        });

        // ## Act ##
        try {
            Long sum = memberBhv.scalarSelect(Long.class).sum(new ScalarQuery<MemberCB>() {
                public void query(MemberCB cb) {
                    cb.specify().specifyMemberStatus().derivedMemberLoginList().max(new SubQuery<MemberLoginCB>() {
                        public void query(MemberLoginCB subCB) {
                            subCB.specify().columnMemberLoginId();
                            subCB.query().setMobileLoginFlg_Equal_True();
                            subCB.unionAll(new UnionQuery<MemberLoginCB>() {
                                public void query(MemberLoginCB unionCB) {
                                    unionCB.query().setMobileLoginFlg_Equal_False();
                                }
                            });
                        }
                    }, null);
                }
            }).get();

            // ## Assert ##
            assertEquals(expected, sum);
            String sql = sqlSet.iterator().next();
            assertContains(sql, "select sum((select max(sub1main.MEMBER_LOGIN_ID)");
            assertContains(sql, "union all ");
            assertContains(sql, "from (select sub1loc.LOGIN_MEMBER_STATUS_CODE, sub1loc.MEMBER_LOGIN_ID");
        } finally {
            CallbackContext.clearSqlLogHandlerOnThread();
        }
    }

    // ===================================================================================
    //                                                                             Illegal
    //                                                                             =======
    public void test_ScalarSelect_duplicated_basic() {
        // ## Arrange ##
        // ## Act ##
        // specifying two plain columns must be rejected
        try {
            memberBhv.scalarSelect(Date.class).max(new ScalarQuery<MemberCB>() {
                public void query(MemberCB cb) {
                    cb.specify().columnMemberAccount();
                    cb.specify().columnBirthdate();
                }
            });

            // ## Assert ##
            fail();
        } catch (SpecifyColumnTwoOrMoreColumnException e) {
            // OK
            log(e.getMessage());
        }
    }

    public void test_ScalarSelect_duplicated_both() {
        // ## Arrange ##
        // ## Act ##
        // mixing a plain column with a derived referrer must be rejected
        try {
            memberBhv.scalarSelect(Date.class).max(new ScalarQuery<MemberCB>() {
                public void query(MemberCB cb) {
                    cb.specify().columnMemberAccount();
                    cb.specify().derivedPurchaseList().max(new SubQuery<PurchaseCB>() {
                        public void query(PurchaseCB subCB) {
                            subCB.specify().columnPurchaseCount();
                        }
                    }, null);
                }
            });

            // ## Assert ##
            fail();
        } catch (SpecifyColumnWithDerivedReferrerException e) {
            // OK
            log(e.getMessage());
        }
    }

    public void test_ScalarSelect_duplicated_DerivedReferrer() {
        // ## Arrange ##
        // ## Act ##
        // specifying two derived referrers must be rejected
        try {
            memberBhv.scalarSelect(Date.class).max(new ScalarQuery<MemberCB>() {
                public void query(MemberCB cb) {
                    cb.specify().derivedPurchaseList().max(new SubQuery<PurchaseCB>() {
                        public void query(PurchaseCB subCB) {
                            subCB.specify().columnPurchaseCount();
                        }
                    }, null);
                    cb.specify().derivedPurchaseList().max(new SubQuery<PurchaseCB>() {
                        public void query(PurchaseCB subCB) {
                            subCB.specify().columnPurchasePrice();
                        }
                    }, null);
                }
            });

            // ## Assert ##
            fail();
        } catch (SpecifyDerivedReferrerTwoOrMoreException e) {
            // OK
            log(e.getMessage());
        }
    }

    // ===================================================================================
    //                                                                              Option
    //                                                                              ======
    public void test_ScalarSelect_option_basic() {
        // ## Arrange ##
        int coalesce = 7849238;

        // ## Act ##
        // no matching rows: the coalesce option supplies the fallback value
        Integer max = memberBhv.scalarSelect(Integer.class).max(new ScalarQuery<MemberCB>() {
            public void query(MemberCB cb) {
                cb.specify().columnMemberId();
                cb.query().setMemberStatusCode_Equal_Formalized();
                cb.query().setMemberName_Equal("no exist");
            }
        }, op -> op.coalesce(coalesce)).get();

        // ## Assert ##
        assertEquals(Integer.valueOf(coalesce), max);
    }

    public void test_ScalarSelect_option_date() {
        // ## Arrange ##
        String coalesce = "2011-12-12";

        // ## Act ##
        Date birthdate = memberBhv.scalarSelect(Date.class).max(new ScalarQuery<MemberCB>() {
            public void query(MemberCB cb) {
                cb.specify().columnBirthdate();
                cb.query().setMemberStatusCode_Equal_Formalized();
                cb.query().setMemberName_Equal("no exist");
            }
        }, op -> op.coalesce(coalesce)).get();

        // ## Assert ##
        assertEquals(coalesce, DfTypeUtil.toString(birthdate, "yyyy-MM-dd"));
    }

    public void test_ScalarSelect_option_DerivedReferrer_basic() {
        // ## Arrange ##
        int coalesce = 7849238;

        // ## Act ##
        Integer max = memberBhv.scalarSelect(Integer.class).max(new ScalarQuery<MemberCB>() {
            public void query(MemberCB cb) {
                cb.specify().derivedPurchaseList().avg(new SubQuery<PurchaseCB>() {
                    public void query(PurchaseCB subCB) {
                        subCB.specify().columnPurchasePrice();
                    }
                }, null);
                cb.query().setMemberStatusCode_Equal_Formalized();
                cb.query().setMemberName_Equal("no exist");
            }
        }, op -> op.coalesce(coalesce)).get();

        // ## Assert ##
        assertEquals(Integer.valueOf(coalesce), max);
    }

    public void test_ScalarSelect_option_DerivedReferrer_severalFunction() {
        // ## Arrange ##
        int coalesce = 7849238;

        // ## Act ##
        // chained options: coalesce then round
        Integer max = memberBhv.scalarSelect(Integer.class).max(new ScalarQuery<MemberCB>() {
            public void query(MemberCB cb) {
                cb.specify().derivedPurchaseList().avg(new SubQuery<PurchaseCB>() {
                    public void query(PurchaseCB subCB) {
                        subCB.specify().columnPurchasePrice();
                    }
                }, null);
                cb.query().setMemberStatusCode_Equal_Formalized();
                cb.query().setMemberName_Equal("no exist");
            }
        }, op -> op.coalesce(coalesce).round(2)).get();

        // ## Assert ##
        assertEquals(Integer.valueOf(coalesce), max);
    }
}
| |
package gov.nih.nci.evs.testUtil.ui;
import java.io.*;
import java.net.*;
import java.util.*;
/**
* <!-- LICENSE_TEXT_START -->
* Copyright 2008-2015 NGIT. This software was developed in conjunction
* with the National Cancer Institute, and so to the extent government
* employees are co-authors, any rights in such works shall be subject
* to Title 17 of the United States Code, section 105.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the disclaimer of Article 3,
* below. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 2. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by NGIT and the National
* Cancer Institute." If no such end-user documentation is to be
* included, this acknowledgment shall appear in the software itself,
* wherever such third-party acknowledgments normally appear.
* 3. The names "The National Cancer Institute", "NCI" and "NGIT" must
* not be used to endorse or promote products derived from this software.
* 4. This license does not authorize the incorporation of this software
* into any third party proprietary programs. This license does not
* authorize the recipient to use any trademarks owned by either NCI
* or NGIT
* 5. THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE) ARE
* DISCLAIMED. IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE,
* NGIT, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* <!-- LICENSE_TEXT_END -->
*/
/**
 * Mutable holder for the parameters of a single EVS test case
 * (search type, coding-scheme identification, target/algorithm/match text,
 * and relationship traversal settings). Acts as a factory for
 * {@link TestCase} instances via {@link #createTestCase}.
 *
 * @author EVS Team
 * @version 1.0
 *
 * Modification history:
 *     Initial implementation kim.ong@ngc.com
 *
 */
public class TestCaseData
{
// Variable declaration
	private int searchType;        // search type discriminator
	private String scheme;         // coding scheme label
	private String version;        // coding scheme version
	private String codingSchemeName;
	private String codingSchemeURI;
	private String namespace;
	private String target;         // value to search for
	private String algorithm;      // match algorithm identifier
	private String matchText;
	private String propertyName;
	private String relationshipName;
	private String source;
	private String rela;           // relationship attribute
	private String direction;      // traversal direction

// Default constructor
	public TestCaseData() {
	}

// Constructor
	/**
	 * Fully-initializing constructor; parameters map one-to-one onto the fields.
	 */
	public TestCaseData(
	    int searchType,
		String scheme,
		String version,
		String codingSchemeName,
		String codingSchemeURI,
		String namespace,
		String target,
		String algorithm,
		String matchText,
		String propertyName,
		String relationshipName,
		String source,
		String rela,
		String direction) {
		this.searchType = searchType;
		this.scheme = scheme;
		this.version = version;
		this.codingSchemeName = codingSchemeName;
		this.codingSchemeURI = codingSchemeURI;
		this.namespace = namespace;
		this.target = target;
		this.algorithm = algorithm;
		this.matchText = matchText;
		this.propertyName = propertyName;
		this.relationshipName = relationshipName;
		this.source = source;
		this.rela = rela;
		this.direction = direction;
	}

// Set methods
	public void setSearchType(int searchType) {
		this.searchType = searchType;
	}

	public void setScheme(String scheme) {
		this.scheme = scheme;
	}

	public void setVersion(String version) {
		this.version = version;
	}

	public void setCodingSchemeName(String codingSchemeName) {
		this.codingSchemeName = codingSchemeName;
	}

	public void setCodingSchemeURI(String codingSchemeURI) {
		this.codingSchemeURI = codingSchemeURI;
	}

	public void setNamespace(String namespace) {
		this.namespace = namespace;
	}

	public void setTarget(String target) {
		this.target = target;
	}

	public void setAlgorithm(String algorithm) {
		this.algorithm = algorithm;
	}

	public void setMatchText(String matchText) {
		this.matchText = matchText;
	}

	public void setPropertyName(String propertyName) {
		this.propertyName = propertyName;
	}

	public void setRelationshipName(String relationshipName) {
		this.relationshipName = relationshipName;
	}

	public void setSource(String source) {
		this.source = source;
	}

	public void setRela(String rela) {
		this.rela = rela;
	}

	public void setDirection(String direction) {
		this.direction = direction;
	}

// Get methods
	public int getSearchType() {
		return this.searchType;
	}

	public String getScheme() {
		return this.scheme;
	}

	public String getVersion() {
		return this.version;
	}

	public String getCodingSchemeName() {
		return this.codingSchemeName;
	}

	public String getCodingSchemeURI() {
		return this.codingSchemeURI;
	}

	public String getNamespace() {
		return this.namespace;
	}

	public String getTarget() {
		return this.target;
	}

	public String getAlgorithm() {
		return this.algorithm;
	}

	public String getMatchText() {
		return this.matchText;
	}

	public String getPropertyName() {
		return this.propertyName;
	}

	public String getRelationshipName() {
		return this.relationshipName;
	}

	public String getSource() {
		return this.source;
	}

	public String getRela() {
		return this.rela;
	}

	public String getDirection() {
		return this.direction;
	}

	/**
	 * Builds a {@link TestCase} from this data plus the given run metadata.
	 * The validation method, matched string and link-data arguments are left
	 * null, and the assertion flag defaults to true.
	 *
	 * @param testNumber  sequential test number
	 * @param methodName  name of the test method being exercised
	 * @param int_obj     currently unused — NOTE(review): confirm whether this
	 *                    parameter was meant to be forwarded to TestCase
	 * @param browserLink browser URL associated with the test
	 * @return a new TestCase populated from this instance's fields
	 */
	public TestCase createTestCase(
		int testNumber,
		String methodName,
		Integer int_obj,
		String browserLink) {
        String validationMethod = null;
        String matchedString = null;
        boolean assertion = true;
		return new TestCase(
			testNumber,
			searchType,
			methodName,
			browserLink,
			this.scheme,
			this.codingSchemeName,
			this.codingSchemeURI,
			this.version,
			this.namespace,
			this.target,
			this.algorithm,
			validationMethod,
			this.matchText,
			this.propertyName,
			this.relationshipName,
			this.source,
			this.rela,
			this.direction,
			null,
			matchedString,
			assertion);
	}
}
| |
package org.diorite.impl.inventory;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.diorite.impl.connection.packets.play.client.PacketPlayClientWindowClick;
import org.diorite.impl.connection.packets.play.server.PacketPlayServerSetSlot;
import org.diorite.impl.connection.packets.play.server.PacketPlayServerWindowItems;
import org.diorite.impl.entity.PlayerImpl;
import org.diorite.impl.inventory.item.ItemStackImpl;
import org.diorite.impl.inventory.item.ItemStackImplArray;
import org.diorite.entity.Player;
import org.diorite.inventory.InventoryType;
import org.diorite.inventory.PlayerInventory;
import org.diorite.inventory.item.BaseItemStack;
import org.diorite.inventory.item.ItemStack;
import org.diorite.inventory.slot.Slot;
import org.diorite.material.Material;
import org.diorite.utils.DioriteUtils;
/**
 * Player inventory implementation backed by a single atomic {@link ItemStackImplArray}.
 * <p>
 * The flat content array mirrors the vanilla player window layout: index 0 is the
 * crafting result, followed by crafting grid, armor (indices 5-8 per the getters
 * below), main equipment and hotbar. Sub-inventory views (armor/crafting/eq/hotbar)
 * are declared at the bottom of the class and share this array.
 */
public class PlayerInventoryImpl extends InventoryImpl<PlayerImpl> implements PlayerInventory
{
    // Window id/slot id -1 address the item attached to the player's cursor.
    private static final short CURSOR_SLOT = - 1;
    private static final int CURSOR_WINDOW = - 1;
    private final int windowId;
    private final PlayerImpl holder;
    private final DragControllerImpl drag = new DragControllerImpl();
    // Single flat array holding every slot of the player window.
    private final ItemStackImplArray content = ItemStackImplArray.create(InventoryType.PLAYER.getSize());
    // Per-index slot *type* metadata (result/crafting/armor/container/hotbar).
    private final Slot[] slots = new Slot[InventoryType.PLAYER.getSize()];
    private final AtomicReference<ItemStackImpl> cursorItem = new AtomicReference<>();
    private boolean wasCursorNotNull; // used only by softUpdate

    // Instance initializer: assigns a slot type to every index of the window.
    // NOTE(review): the armor fill covers getSize() - 1 indices while the armor
    // accessors below span four content indices (5-8) — confirm that
    // InventoryType.PLAYER_ARMOR.getSize() is defined so the ranges line up.
    {
        int i = 0;
        this.slots[i] = Slot.BASE_RESULT_SLOT;
        Arrays.fill(this.slots, 1, i + InventoryType.PLAYER_CRAFTING.getSize(), Slot.BASE_CRAFTING_SLOT);
        i += InventoryType.PLAYER_CRAFTING.getSize();
        Arrays.fill(this.slots, i, (i + InventoryType.PLAYER_ARMOR.getSize()) - 1, Slot.BASE_ARMOR_SLOT);
        i += InventoryType.PLAYER_ARMOR.getSize() - 1;
        Arrays.fill(this.slots, i, (i + InventoryType.PLAYER_EQ.getSize()), Slot.BASE_CONTAINER_SLOT);
        i += InventoryType.PLAYER_EQ.getSize();
        Arrays.fill(this.slots, i, (i + InventoryType.PLAYER_HOTBAR.getSize()), Slot.BASE_HOTBAR_SLOT);
    }

    /**
     * @param holder   owning player.
     * @param windowId protocol window id; 0 denotes the player's own inventory window.
     */
    public PlayerInventoryImpl(final PlayerImpl holder, final int windowId)
    {
        super(holder);
        this.windowId = windowId;
        this.holder = holder;
        if (windowId == 0) // Owner of inventory always must be in viewers to be able to update
        {
            this.viewers.add(holder);
        }
    }

    /**
     * Finds the first empty slot, preferring the hotbar over the main equipment area.
     *
     * @return absolute slot index, or -1 when the inventory is full.
     */
    @Override
    public int firstEmpty()
    {
        int i = this.hotbar.firstEmpty();
        int offset = this.hotbar.getSlotOffset();
        if (i == - 1)
        {
            // Hotbar full — fall back to the main equipment area.
            i = this.eq.firstEmpty();
            if (i == - 1)
            {
                return - 1;
            }
            offset = this.eq.getSlotOffset();
        }
        // Translate the sub-inventory index back into an absolute slot index.
        return offset + i;
    }

    /** Returns the slot type metadata for an absolute index (outside-click aware). */
    @Override
    public Slot getSlot(final int slot)
    {
        return (slot == PacketPlayClientWindowClick.SLOT_NOT_NEEDED) ? Slot.BASE_OUTSIDE_SLOT : this.slots[slot];
    }

    /**
     * Finds the first slot containing the given material, hotbar first.
     *
     * @return absolute slot index, or -1 when not found.
     */
    @Override
    public int first(final Material material)
    {
        int i = this.hotbar.first(material);
        int offset = this.hotbar.getSlotOffset();
        if (i == - 1)
        {
            i = this.eq.first(material);
            if (i == - 1)
            {
                return - 1;
            }
            offset = this.eq.getSlotOffset();
        }
        return offset + i;
    }

    /**
     * Finds the first slot containing a matching item, hotbar first.
     *
     * @param withAmount when true the stack amount must match too.
     * @return absolute slot index, or -1 when not found.
     */
    @Override
    public int first(final ItemStack item, final boolean withAmount)
    {
        int i = this.hotbar.first(item, withAmount);
        int offset = this.hotbar.getSlotOffset();
        if (i == - 1)
        {
            i = this.eq.first(item, withAmount);
            if (i == - 1)
            {
                return - 1;
            }
            offset = this.eq.getSlotOffset();
        }
        return offset + i;
    }

    /**
     * Finds the first matching slot at or after {@code startIndex}, hotbar first.
     * <p>
     * NOTE(review): the offset arithmetic below accumulates {@code eq.getSlotOffset()}
     * onto the hotbar offset and compares {@code start} against that sum — this looks
     * inconsistent with the other {@code first} overloads; verify against callers
     * (notably {@link #add}) before relying on exact semantics.
     *
     * @return absolute slot index, or -1 when not found.
     */
    @Override
    public int first(final ItemStack item, final int startIndex, final boolean withAmount)
    {
        int offset = this.hotbar.getSlotOffset();
        int start = (startIndex >= offset) ? (startIndex - offset) : startIndex;
        int i = (start >= this.hotbar.size()) ? - 1 : this.hotbar.first(item, start, withAmount);
        if (i == - 1)
        {
            offset += this.eq.getSlotOffset();
            start = (startIndex >= offset) ? (startIndex - offset) : (startIndex);
            i = (start >= offset) ? - 1 : this.eq.first(item, start - this.eq.getSlotOffset(), withAmount);
            if ((i >= this.eq.size()) || (i == - 1))
            {
                return - 1;
            }
            i += this.eq.getSlotOffset();
            return i;
        }
        return offset + i;
    }

    /**
     * Adds the given stacks, first topping up existing partial stacks of the same
     * item, then filling empty slots; oversized stacks are split at the material's
    * max stack size.
     *
     * @param items stacks to add; must contain no nulls (mutated in place when split).
     * @return stacks that did not fit, or an empty array when everything fit.
     */
    @Override
    public ItemStack[] add(final ItemStack... items)
    {
        Validate.noNullElements(items, "Item cannot be null");
        final ItemStack[] leftover = new ItemStack[items.length];
        boolean fully = true;
        for (int i = 0; i < items.length; i++)
        {
            final ItemStack item = items[i];
            // firstPartial == -2 is a sentinel meaning "no more partial stacks, use empty slots".
            int firstPartial = - 1;
            while (true)
            {
                if (firstPartial != - 2)
                {
                    firstPartial = this.first(item, firstPartial + 1, false);
                }
                if ((firstPartial == - 1) || (firstPartial == - 2))
                {
                    // No partial stack available — place into the first empty slot.
                    final int firstFree = this.firstEmpty();
                    if (firstFree == - 1)
                    {
                        // Inventory full: report this stack as leftover.
                        leftover[i] = item;
                        fully = false;
                        break;
                    }
                    if (item.getAmount() > item.getMaterial().getMaxStack())
                    {
                        // Split off one full stack and keep distributing the remainder.
                        final ItemStack stack = new BaseItemStack(item);
                        stack.setAmount(item.getMaterial().getMaxStack());
                        this.setItem(firstFree, stack);
                        item.setAmount(item.getAmount() - item.getMaterial().getMaxStack());
                    }
                    else
                    {
                        this.setItem(firstFree, item);
                        break;
                    }
                }
                else
                {
                    final ItemStack itemStack = this.getItem(firstPartial);
                    if (itemStack.getAmount() >= itemStack.getMaterial().getMaxStack())
                    {
                        // Already full — not actually partial; keep scanning.
                        if (firstPartial == (this.fullEq.size() - 1))
                        {
                            firstPartial = - 2;
                        }
                        continue;
                    }
                    final int amount = item.getAmount();
                    final int partialAmount = itemStack.getAmount();
                    final int maxAmount = itemStack.getMaterial().getMaxStack();
                    if ((amount + partialAmount) <= maxAmount)
                    {
                        // Fits entirely into the partial stack.
                        itemStack.setAmount(amount + partialAmount);
                        break;
                    }
                    // Top up the partial stack and continue with the remainder.
                    itemStack.setAmount(maxAmount);
                    item.setAmount((amount + partialAmount) - maxAmount);
                    if (firstPartial == (this.fullEq.size() - 1))
                    {
                        firstPartial = - 2;
                    }
                }
            }
        }
        return fully ? DioriteUtils.EMPTY_ITEM_STACK : leftover;
    }

    /** @return item currently attached to the cursor, or null. */
    @Override
    public ItemStack getCursorItem()
    {
        return this.cursorItem.get();
    }

    /** Replaces the cursor item, returning the previous one. */
    @Override
    public ItemStackImpl setCursorItem(final ItemStack cursorItem)
    {
        return this.cursorItem.getAndSet(ItemStackImpl.wrap(cursorItem));
    }

    @Override
    public DragControllerImpl getDragController()
    {
        return this.drag;
    }

    /**
     * Atomically swaps the cursor item only if it currently equals {@code excepted}.
     *
     * @return true when the swap happened.
     */
    @Override
    public boolean atomicReplaceCursorItem(final ItemStack excepted, final ItemStack cursorItem) throws IllegalArgumentException
    {
        ItemStackImpl.validate(excepted);
        return this.cursorItem.compareAndSet((ItemStackImpl) excepted, ItemStackImpl.wrap(cursorItem));
    }

    // Armor accessors: content indices 5-8 are helmet, chestplate, leggings, boots.
    @Override
    public ItemStack getHelmet()
    {
        return this.content.get(5);
    }
    @Override
    public ItemStack getChestplate()
    {
        return this.content.get(6);
    }
    @Override
    public ItemStack getLeggings()
    {
        return this.content.get(7);
    }
    @Override
    public ItemStack getBoots()
    {
        return this.content.get(8);
    }
    @Override
    public ItemStack setHelmet(final ItemStack helmet)
    {
        return this.content.getAndSet(5, ItemStackImpl.wrap(helmet));
    }
    @Override
    public ItemStack setChestplate(final ItemStack chestplate)
    {
        return this.content.getAndSet(6, ItemStackImpl.wrap(chestplate));
    }
    @Override
    public ItemStack setLeggings(final ItemStack leggings)
    {
        return this.content.getAndSet(7, ItemStackImpl.wrap(leggings));
    }
    @Override
    public ItemStack setBoots(final ItemStack boots)
    {
        return this.content.getAndSet(8, ItemStackImpl.wrap(boots));
    }

    // Compare-and-set variants of the armor setters; swap only when the current
    // item equals the expected one.
    @Override
    public boolean replaceHelmet(final ItemStack excepted, final ItemStack helmet) throws IllegalArgumentException
    {
        ItemStackImpl.validate(excepted);
        return this.content.compareAndSet(5, (ItemStackImpl) excepted, ItemStackImpl.wrap(helmet));
    }
    @Override
    public boolean replaceChestplate(final ItemStack excepted, final ItemStack chestplate) throws IllegalArgumentException
    {
        ItemStackImpl.validate(excepted);
        return this.content.compareAndSet(6, (ItemStackImpl) excepted, ItemStackImpl.wrap(chestplate));
    }
    @Override
    public boolean replaceLeggings(final ItemStack excepted, final ItemStack leggings) throws IllegalArgumentException
    {
        ItemStackImpl.validate(excepted);
        return this.content.compareAndSet(7, (ItemStackImpl) excepted, ItemStackImpl.wrap(leggings));
    }
    @Override
    public boolean replaceBoots(final ItemStack excepted, final ItemStack boots) throws IllegalArgumentException
    {
        ItemStackImpl.validate(excepted);
        return this.content.compareAndSet(8, (ItemStackImpl) excepted, ItemStackImpl.wrap(boots));
    }

    /** @return item in the held hotbar slot, or null when this inventory has no holder. */
    @Override
    public ItemStack getItemInHand()
    {
        if (this.holder == null)
        {
            return null;
        }
        return this.hotbar.getArray().get(this.holder.getHeldItemSlot());
    }

    /**
     * Replaces the held item, returning the previous one (null when no holder).
     * NOTE(review): uses the held slot as an index into the full content array,
     * while {@link #getItemInHand} indexes the hotbar sub-array — confirm both
     * resolve to the same physical slot.
     */
    @Override
    public ItemStack setItemInHand(final ItemStack stack)
    {
        if (this.holder == null)
        {
            return null;
        }
        final int i = this.holder.getHeldItemSlot();
        return this.content.getAndSet(i, ItemStackImpl.wrap(stack));
    }

    /** Compare-and-set variant of {@link #setItemInHand}; false when no holder. */
    @Override
    public boolean replaceItemInHand(final ItemStack excepted, final ItemStack stack) throws IllegalArgumentException
    {
        ItemStackImpl.validate(excepted);
        return (this.holder != null) && this.content.compareAndSet(this.holder.getHeldItemSlot(), (ItemStackImpl) excepted, ItemStackImpl.wrap(stack));
    }

    /** @return held hotbar slot index, or -1 when this inventory has no holder. */
    @Override
    public int getHeldItemSlot()
    {
        if (this.holder == null)
        {
            return - 1;
        }
        return this.holder.getHeldItemSlot();
    }

    /** No-op when this inventory has no holder. */
    @Override
    public void setHeldItemSlot(final int slot)
    {
        if (this.holder == null)
        {
            return;
        }
        this.holder.setHeldItemSlot(slot);
    }

    // Crafting result lives at content index 0.
    @Override
    public ItemStack getResult()
    {
        return this.content.get(0);
    }
    @Override
    public ItemStack setResult(final ItemStack result)
    {
        return this.content.getAndSet(0, ItemStackImpl.wrap(result));
    }
    @Override
    public boolean replaceResult(final ItemStack excepted, final ItemStack result)
    {
        ItemStackImpl.validate(excepted);
        return this.content.compareAndSet(0, (ItemStackImpl) excepted, ItemStackImpl.wrap(result));
    }

    /**
     * NOTE(review): returns a snapshot of *everything* after the result slot
     * (length content.length() - 1), not just the crafting grid — verify this
     * is the intended contract for "crafting slots".
     */
    @Override
    public ItemStack[] getCraftingSlots()
    {
        return this.content.getSubArray(1).toArray(new ItemStack[this.content.length() - 1]);
    }

    /** @return the backing atomic content array (shared with sub-inventories). */
    @Override
    public ItemStackImplArray getArray()
    {
        return this.content;
    }

    /**
     * Sends the full window contents to the given viewer.
     *
     * @throws IllegalArgumentException when the player is not a viewer of this inventory.
     */
    @Override
    public void update(final Player player) throws IllegalArgumentException
    {
        if (! this.viewers.contains(player))
        {
            throw new IllegalArgumentException("Player must be a viewer of inventory.");
        }
        ((PlayerImpl) player).getNetworkManager().sendPacket(new PacketPlayServerWindowItems(this.windowId, this.content));
    }

    /**
     * Incremental update: re-sends the cursor item only when it changed (became
     * null after being non-null, or is marked dirty), normalizing empty/air
     * stacks to null, then delegates slot updates to the superclass.
     */
    @Override
    public void softUpdate()
    {
        ItemStackImpl cursor = this.cursorItem.get();
        if ((this.wasCursorNotNull && (cursor == null)) || ((cursor != null) && cursor.isDirty()))
        {
            if (cursor != null)
            {
                if ((cursor.getAmount() == 0) || (Material.AIR.simpleEquals(cursor.getMaterial())))
                {
                    // Empty/air stacks are represented as a null cursor on the wire.
                    this.atomicReplaceCursorItem(cursor, null);
                    cursor = null;
                }
                else
                {
                    cursor.setClean();
                    this.wasCursorNotNull = true;
                }
            }
            this.holder.getNetworkManager().sendPacket(new PacketPlayServerSetSlot(CURSOR_WINDOW, CURSOR_SLOT, cursor));
        }
        super.softUpdate();
    }

    /** Sends the full contents to every current viewer. */
    @Override
    public void update()
    {
        this.viewers.forEach(this::update);
    }

    @Override
    public Player getEquipmentHolder()
    {
        return this.holder;
    }

    @Override
    public PlayerImpl getHolder()
    {
        return this.holder;
    }

    @Override
    public String toString()
    {
        return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE).appendSuper(super.toString()).append("holder", this.holder).append("content", this.content).toString();
    }

    // Sub-inventory views over the shared content array. Declared after the
    // methods that reference them; instance field initializers still run before
    // the constructor body, so usage from methods is safe post-construction.
    private final PlayerArmorInventoryImpl armor = new PlayerArmorInventoryImpl(this);
    private final PlayerCraftingInventoryImpl crafting = new PlayerCraftingInventoryImpl(this);
    private final PlayerFullEqInventoryImpl fullEq = new PlayerFullEqInventoryImpl(this);
    private final PlayerEqInventoryImpl eq = new PlayerEqInventoryImpl(this);
    private final PlayerHotbarInventoryImpl hotbar = new PlayerHotbarInventoryImpl(this);

    @Override
    public PlayerArmorInventoryImpl getArmorInventory()
    {
        return this.armor;
    }
    @Override
    public PlayerCraftingInventoryImpl getCraftingInventory()
    {
        return this.crafting;
    }
    @Override
    public PlayerFullEqInventoryImpl getFullEqInventory()
    {
        return this.fullEq;
    }
    @Override
    public PlayerEqInventoryImpl getEqInventory()
    {
        return this.eq;
    }
    @Override
    public PlayerHotbarInventoryImpl getHotbarInventory()
    {
        return this.hotbar;
    }

    @Override
    public int getWindowId()
    {
        return this.windowId;
    }

    /** The player window starts at absolute slot 0. */
    @Override
    public int getSlotOffset()
    {
        return 0;
    }
}
| |
/*
* Copyright 2014 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.client.channel;
import java.lang.reflect.Field;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import org.junit.Assert;
import org.junit.Test;
import org.kaaproject.kaa.client.AbstractKaaClient;
import org.kaaproject.kaa.client.channel.impl.channels.DefaultOperationsChannel;
import org.kaaproject.kaa.client.persistence.KaaClientState;
import org.kaaproject.kaa.client.transport.AbstractHttpClient;
import org.kaaproject.kaa.common.TransportType;
import org.kaaproject.kaa.common.endpoint.security.KeyUtil;
import org.kaaproject.kaa.common.endpoint.security.MessageEncoderDecoder;
import org.mockito.Mockito;
/**
 * Unit tests for {@link DefaultOperationsChannel}: verifies supported transport
 * types, request/response multiplexing during sync, failover notification on
 * transport errors, and that a shut-down channel performs no further work.
 */
public class DefaultOperationsChannelTest {

    /** Transport types and directions the HTTP long-poll operations channel must report. */
    private static final Map<TransportType, ChannelDirection> SUPPORTED_TYPES = new HashMap<TransportType, ChannelDirection>();
    static {
        SUPPORTED_TYPES.put(TransportType.PROFILE, ChannelDirection.BIDIRECTIONAL);
        SUPPORTED_TYPES.put(TransportType.CONFIGURATION, ChannelDirection.BIDIRECTIONAL);
        SUPPORTED_TYPES.put(TransportType.NOTIFICATION, ChannelDirection.BIDIRECTIONAL);
        SUPPORTED_TYPES.put(TransportType.USER, ChannelDirection.BIDIRECTIONAL);
        SUPPORTED_TYPES.put(TransportType.EVENT, ChannelDirection.DOWN);
    }

    // Synchronous stand-in for the channel's executor so tests run deterministically.
    public ScheduledExecutorService fakeExecutor = new FakeExecutorService();

    /**
     * Test double that runs on the fake executor, records the configured
     * (de)multiplexers for later verification, and forces the channel's
     * "stopped" flag after the first response so long-polling terminates.
     */
    class DefaultOperationsChannelFake extends DefaultOperationsChannel {
        private final int wantedNumberOfInvocations;
        private KaaDataDemultiplexer demultiplexer;
        private KaaDataMultiplexer multiplexer;

        public DefaultOperationsChannelFake(AbstractKaaClient client,
                                            KaaClientState state, int wantedNumberOfInvocations) {
            super(client, state);
            this.wantedNumberOfInvocations = wantedNumberOfInvocations;
        }

        @Override
        protected ScheduledExecutorService createExecutor() {
            // Still invoke the real factory for its side effects, but hand the
            // channel the synchronous fake.
            super.createExecutor();
            return fakeExecutor;
        }

        @Override
        public void setDemultiplexer(KaaDataDemultiplexer demultiplexer) {
            this.demultiplexer = demultiplexer;
            super.setDemultiplexer(demultiplexer);
        }

        @Override
        public void setMultiplexer(KaaDataMultiplexer multiplexer) {
            this.multiplexer = multiplexer;
            super.setMultiplexer(multiplexer);
        }

        @Override
        public void onResponse(byte [] response) {
            super.onResponse(response);
            try {
                // Force the private "stopped" flag so the long-poll loop exits
                // after a single iteration.
                Field field = DefaultOperationsChannel.class.getDeclaredField("stopped");
                field.setAccessible(true);
                field.setBoolean(this, true);
            } catch (Exception e) {
                // Preserve the original exception as the cause rather than
                // flattening it to a (possibly null) message string.
                throw new AssertionError("Failed to force-stop channel via reflection", e);
            }
        }

        @Override
        public LinkedHashMap<String, byte[]> createRequest(Map<TransportType, ChannelDirection> types) {
            // Exercise the real request builder, but return an empty request body.
            super.createRequest(types);
            return new LinkedHashMap<>();
        }

        /** Asserts the expected number of compile/process round trips occurred. */
        public void verify() throws Exception {
            Mockito.verify(multiplexer, Mockito.times(wantedNumberOfInvocations)).compileRequest(Mockito.anyMap());
            Mockito.verify(demultiplexer, Mockito.times(wantedNumberOfInvocations)).processResponse(Mockito.any(byte [].class));
        }
    }

    /** Channel must expose the fixed transport map, HTTP protocol id and its well-known id. */
    @Test
    public void testChannelGetters() {
        AbstractKaaClient client = Mockito.mock(AbstractKaaClient.class);
        KaaClientState state = Mockito.mock(KaaClientState.class);
        KaaDataChannel channel = new DefaultOperationsChannel(client, state);

        Assert.assertEquals(SUPPORTED_TYPES, channel.getSupportedTransportTypes());
        Assert.assertEquals(TransportProtocolIdConstants.HTTP_TRANSPORT_ID, channel.getTransportProtocolId());
        Assert.assertEquals("default_operations_long_poll_channel", channel.getId());
    }

    /** Two sync() calls plus one syncAll() must yield three request/response cycles. */
    @Test
    public void testChannelSync() throws Exception {
        KaaChannelManager manager = Mockito.mock(KaaChannelManager.class);
        AbstractHttpClient httpClient = Mockito.mock(AbstractHttpClient.class);
        Mockito.when(
                httpClient.executeHttpRequest(Mockito.anyString(),
                        Mockito.any(LinkedHashMap.class), Mockito.anyBoolean())).thenReturn(
                new byte[] { 5, 5, 5 });
        MessageEncoderDecoder encDec = Mockito.mock(MessageEncoderDecoder.class);
        Mockito.when(httpClient.getEncoderDecoder()).thenReturn(encDec);
        AbstractKaaClient client = Mockito.mock(AbstractKaaClient.class);
        Mockito.when(
                client.createHttpClient(Mockito.anyString(),
                        Mockito.any(PrivateKey.class),
                        Mockito.any(PublicKey.class),
                        Mockito.any(PublicKey.class))).thenReturn(httpClient);
        Mockito.when(client.getChannelManager()).thenReturn(manager);
        KaaClientState state = Mockito.mock(KaaClientState.class);
        KaaDataMultiplexer multiplexer = Mockito.mock(KaaDataMultiplexer.class);
        Mockito.when(multiplexer.compileRequest(Mockito.anyMap())).thenReturn(new byte [] { 1, 1 ,1 });
        KaaDataDemultiplexer demultiplexer = Mockito.mock(KaaDataDemultiplexer.class);
        DefaultOperationsChannelFake channel = new DefaultOperationsChannelFake(client, state, 3);
        TransportConnectionInfo server = IPTransportInfoTest.createTestServerInfo(ServerType.OPERATIONS, TransportProtocolIdConstants.HTTP_TRANSPORT_ID,
                "localhost", 9889, KeyUtil.generateKeyPair().getPublic());

        // Null assignments first to exercise the setter's null handling.
        channel.setDemultiplexer(null);
        channel.setDemultiplexer(demultiplexer);
        channel.setMultiplexer(null);
        channel.setMultiplexer(multiplexer);
        channel.setServer(server);

        // BOOTSTRAP is unsupported (counts as one of the three polls only via setServer);
        // CONFIGURATION and syncAll trigger the remaining cycles.
        channel.sync(TransportType.BOOTSTRAP);
        channel.sync(TransportType.CONFIGURATION);
        channel.syncAll();

        channel.verify();
    }

    /** A failing HTTP transport must be reported to the channel manager exactly once. */
    @Test
    public void testServerFailed() throws Exception {
        KaaChannelManager manager = Mockito.mock(KaaChannelManager.class);
        MessageEncoderDecoder encDec = Mockito.mock(MessageEncoderDecoder.class);
        AbstractHttpClient httpClient = Mockito.mock(AbstractHttpClient.class);
        Mockito.when(
                httpClient.executeHttpRequest(Mockito.anyString(),
                        Mockito.any(LinkedHashMap.class), Mockito.anyBoolean())).thenThrow(new Exception());
        Mockito.when(httpClient.getEncoderDecoder()).thenReturn(encDec);
        AbstractKaaClient client = Mockito.mock(AbstractKaaClient.class);
        Mockito.when(
                client.createHttpClient(Mockito.anyString(),
                        Mockito.any(PrivateKey.class),
                        Mockito.any(PublicKey.class),
                        Mockito.any(PublicKey.class))).thenReturn(httpClient);
        Mockito.when(client.getChannelManager()).thenReturn(manager);
        KaaClientState state = Mockito.mock(KaaClientState.class);
        KaaDataMultiplexer multiplexer = Mockito.mock(KaaDataMultiplexer.class);
        KaaDataDemultiplexer demultiplexer = Mockito.mock(KaaDataDemultiplexer.class);
        DefaultOperationsChannelFake channel = new DefaultOperationsChannelFake(client, state, 1);
        channel.setDemultiplexer(demultiplexer);
        channel.setMultiplexer(multiplexer);
        TransportConnectionInfo server = IPTransportInfoTest.createTestServerInfo(ServerType.OPERATIONS, TransportProtocolIdConstants.HTTP_TRANSPORT_ID,
                "localhost", 9889, KeyUtil.generateKeyPair().getPublic());

        // setServer starts polling on the synchronous fake executor; the stubbed
        // transport failure must be propagated to onServerFailed.
        channel.setServer(server);

        Mockito.verify(manager, Mockito.times(1)).onServerFailed(Mockito.any(TransportConnectionInfo.class));
    }

    /** After shutdown() no sync or server change may trigger any request/response work. */
    @Test
    public void testShutdown() throws Exception {
        KaaChannelManager manager = Mockito.mock(KaaChannelManager.class);
        AbstractHttpClient httpClient = Mockito.mock(AbstractHttpClient.class);
        Mockito.when(
                httpClient.executeHttpRequest(Mockito.anyString(),
                        Mockito.any(LinkedHashMap.class), Mockito.anyBoolean())).thenThrow(new Exception());
        AbstractKaaClient client = Mockito.mock(AbstractKaaClient.class);
        Mockito.when(
                client.createHttpClient(Mockito.anyString(),
                        Mockito.any(PrivateKey.class),
                        Mockito.any(PublicKey.class),
                        Mockito.any(PublicKey.class))).thenReturn(httpClient);
        Mockito.when(client.getChannelManager()).thenReturn(manager);
        KaaClientState state = Mockito.mock(KaaClientState.class);
        KaaDataMultiplexer multiplexer = Mockito.mock(KaaDataMultiplexer.class);
        KaaDataDemultiplexer demultiplexer = Mockito.mock(KaaDataDemultiplexer.class);
        DefaultOperationsChannelFake channel = new DefaultOperationsChannelFake(client, state, 0);
        channel.syncAll();
        channel.setDemultiplexer(demultiplexer);
        channel.setMultiplexer(multiplexer);
        channel.shutdown();
        TransportConnectionInfo server = IPTransportInfoTest.createTestServerInfo(ServerType.OPERATIONS, TransportProtocolIdConstants.HTTP_TRANSPORT_ID,
                "localhost", 9889, KeyUtil.generateKeyPair().getPublic());
        channel.setServer(server);
        channel.sync(TransportType.EVENT);
        channel.syncAll();

        // Expect zero invocations on both mocks.
        channel.verify();
    }
}
| |
package in.twizmwaz.cardinal.module.modules.wools;
import in.twizmwaz.cardinal.GameHandler;
import in.twizmwaz.cardinal.chat.ChatConstant;
import in.twizmwaz.cardinal.chat.LocalizedChatMessage;
import in.twizmwaz.cardinal.chat.UnlocalizedChatMessage;
import in.twizmwaz.cardinal.event.CardinalDeathEvent;
import in.twizmwaz.cardinal.event.ScoreboardUpdateEvent;
import in.twizmwaz.cardinal.event.SnowflakeChangeEvent;
import in.twizmwaz.cardinal.event.objective.ObjectiveCompleteEvent;
import in.twizmwaz.cardinal.event.objective.ObjectiveTouchEvent;
import in.twizmwaz.cardinal.module.GameObjective;
import in.twizmwaz.cardinal.module.modules.gameScoreboard.GameObjectiveScoreboardHandler;
import in.twizmwaz.cardinal.module.modules.regions.type.BlockRegion;
import in.twizmwaz.cardinal.module.modules.snowflakes.Snowflakes;
import in.twizmwaz.cardinal.module.modules.team.TeamModule;
import in.twizmwaz.cardinal.util.ChatUtils;
import in.twizmwaz.cardinal.util.FireworkUtil;
import in.twizmwaz.cardinal.util.MiscUtils;
import in.twizmwaz.cardinal.util.TeamUtils;
import org.apache.commons.lang.WordUtils;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.DyeColor;
import org.bukkit.Material;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.HandlerList;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockPlaceEvent;
import org.bukkit.event.entity.PlayerDeathEvent;
import org.bukkit.event.inventory.CraftItemEvent;
import org.bukkit.event.inventory.InventoryClickEvent;
import org.bukkit.event.player.PlayerMoveEvent;
import org.bukkit.event.player.PlayerPickupItemEvent;
import org.bukkit.inventory.ItemStack;
import org.bukkit.material.Wool;
import org.bukkit.util.Vector;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
/**
 * "Capture the wool" objective: a team must obtain wool of a specific
 * {@link DyeColor} and place it on a designated monument block.
 * <p>
 * Tracks per-player touches (first pickup announcements), team proximity to
 * completion, and completion itself; fires {@link ObjectiveTouchEvent} /
 * {@link ObjectiveCompleteEvent} for other modules (scoreboard, snowflakes).
 */
public class WoolObjective implements GameObjective {

    private final TeamModule team;
    private final String name;
    private final String id;
    private final DyeColor color;
    // Monument block where the wool must be placed to complete the objective.
    private final BlockRegion place;
    // Whether crafting this wool color is allowed.
    private final boolean craftable;
    // Whether this objective appears on the scoreboard / announces progress.
    private final boolean show;
    // Known wool source location; null disables location-based proximity.
    private Vector location;
    private double proximity;
    private Set<UUID> playersTouched;
    private boolean touched;
    private boolean complete;
    private GameObjectiveScoreboardHandler scoreboardHandler;

    protected WoolObjective(final TeamModule team, final String name, final String id, final DyeColor color, final BlockRegion place, final boolean craftable, final boolean show, final Vector location) {
        this.team = team;
        this.name = name;
        this.id = id;
        this.color = color;
        this.place = place;
        this.craftable = craftable;
        this.show = show;
        this.location = location;
        this.proximity = Double.POSITIVE_INFINITY;
        this.playersTouched = new HashSet<>();
        this.scoreboardHandler = new GameObjectiveScoreboardHandler(this);
    }

    @Override
    public void unload() {
        HandlerList.unregisterAll(this);
    }

    @Override
    public TeamModule getTeam() {
        return team;
    }

    @Override
    public String getName() {
        return this.name;
    }

    @Override
    public String getId() {
        return id;
    }

    @Override
    public boolean isTouched() {
        return touched;
    }

    @Override
    public boolean isComplete() {
        return complete;
    }

    @Override
    public boolean showOnScoreboard() {
        return show;
    }

    public DyeColor getColor() {
        return color;
    }

    @Override
    public GameObjectiveScoreboardHandler getScoreboardHandler() {
        return scoreboardHandler;
    }

    /**
     * Shared bookkeeping for both wool-pickup handlers: announces a player's
     * first touch, updates the objective's touched flag and proximity, and
     * fires an {@link ObjectiveTouchEvent}.
     *
     * @param player       the team member who obtained the wool
     * @param fallbackFrom position used for proximity when no wool source
     *                     {@code location} is configured; may be null to keep
     *                     the previous proximity in that case
     */
    private void registerTouch(Player player, Vector fallbackFrom) {
        boolean touchMessage = false;
        if (!this.playersTouched.contains(player.getUniqueId())) {
            this.playersTouched.add(player.getUniqueId());
            if (this.show && !this.complete) {
                TeamUtils.getTeamChannel(team).sendLocalizedMessage(new UnlocalizedChatMessage(ChatColor.GRAY + "{0}", new LocalizedChatMessage(ChatConstant.UI_OBJECTIVE_PICKED, team.getColor() + player.getName() + ChatColor.GRAY, MiscUtils.convertDyeColorToChatColor(color) + name.toUpperCase().replaceAll("_", " ") + ChatColor.GRAY)));
                touchMessage = true;
            }
        }
        boolean firstTouch = !this.touched;
        this.touched = true;
        // Proximity is locked in on the objective's first touch only.
        if (firstTouch && location != null) {
            proximity = location.distance(place.getVector());
        } else if (firstTouch && fallbackFrom != null) {
            proximity = fallbackFrom.distance(place.getVector());
        }
        ObjectiveTouchEvent touchEvent = new ObjectiveTouchEvent(this, player, firstTouch, touchMessage);
        Bukkit.getServer().getPluginManager().callEvent(touchEvent);
    }

    /** Detects wool obtained via inventory clicks (e.g. chests). */
    @EventHandler
    public void onWoolPickup(InventoryClickEvent event) {
        Player player = (Player) event.getWhoClicked();
        if (this.complete || !GameHandler.getGameHandler().getMatch().isRunning()) {
            return;
        }
        try {
            if (event.getCurrentItem().getType() == Material.WOOL
                    && event.getCurrentItem().getData().getData() == color.getData()
                    && TeamUtils.getTeamByPlayer(player) == team) {
                // No fallback position for clicks: proximity only updates when a
                // wool source location is configured (matches original behavior).
                registerTouch(player, null);
            }
        } catch (NullPointerException ignored) {
            // Clicks on empty slots yield null items; deliberately best-effort.
        }
    }

    /** Detects wool picked up from the ground. */
    @EventHandler
    public void onWoolPickup(PlayerPickupItemEvent event) {
        Player player = event.getPlayer();
        if (this.complete || !GameHandler.getGameHandler().getMatch().isRunning()) {
            return;
        }
        try {
            if (event.getItem().getItemStack().getType() == Material.WOOL
                    && event.getItem().getItemStack().getData().getData() == color.getData()
                    && TeamUtils.getTeamByPlayer(player) == team) {
                // Ground pickups fall back to the player's position for proximity.
                registerTouch(player, player.getLocation().toVector());
            }
        } catch (NullPointerException ignored) {
            // Item/stack data may be absent; deliberately best-effort.
        }
    }

    /** Death clears the player's personal touch so a later pickup re-announces. */
    @EventHandler
    public void onPlayerDeath(PlayerDeathEvent event) {
        // Set semantics guarantee a single remove suffices (the original looped needlessly).
        playersTouched.remove(event.getEntity().getUniqueId());
    }

    /**
     * Completes the objective when the correct team places the correct wool on
     * the monument block; cancels any other placement attempt on that block
     * with an explanatory warning.
     */
    @EventHandler(priority = EventPriority.HIGHEST)
    public void onBlockPlace(BlockPlaceEvent event) {
        if (event.getBlock().equals(place.getBlock())) {
            if (event.getBlock().getType().equals(Material.WOOL)) {
                if (((Wool) event.getBlock().getState().getData()).getColor().equals(color)) {
                    if (TeamUtils.getTeamByPlayer(event.getPlayer()) == team) {
                        this.complete = true;
                        if (this.show) ChatUtils.getGlobalChannel().sendLocalizedMessage(new UnlocalizedChatMessage(ChatColor.WHITE + "{0}", new LocalizedChatMessage(ChatConstant.UI_OBJECTIVE_PLACED, team.getColor() + event.getPlayer().getName() + ChatColor.WHITE, team.getCompleteName() + ChatColor.WHITE, MiscUtils.convertDyeColorToChatColor(color) + name.toUpperCase().replaceAll("_", " ") + ChatColor.WHITE)));
                        FireworkUtil.spawnFirework(event.getPlayer().getLocation(), event.getPlayer().getWorld());
                        ObjectiveCompleteEvent compEvent = new ObjectiveCompleteEvent(this, event.getPlayer());
                        Bukkit.getServer().getPluginManager().callEvent(compEvent);
                        event.setCancelled(false);
                    } else {
                        // Right wool, wrong team.
                        event.setCancelled(true);
                        if (this.show) ChatUtils.sendWarningMessage(event.getPlayer(), "You may not complete the other team's objective.");
                    }
                } else {
                    // Wool of the wrong color.
                    event.setCancelled(true);
                    if (this.show) ChatUtils.sendWarningMessage(event.getPlayer(), new LocalizedChatMessage(ChatConstant.ERROR_BLOCK_PLACE, MiscUtils.convertDyeColorToChatColor(color) + color.name().toUpperCase().replaceAll("_", " ") + " WOOL" + ChatColor.RED));
                }
            } else {
                // Not wool at all.
                event.setCancelled(true);
                if (this.show) ChatUtils.sendWarningMessage(event.getPlayer(), new LocalizedChatMessage(ChatConstant.ERROR_BLOCK_PLACE, MiscUtils.convertDyeColorToChatColor(color) + color.name().toUpperCase().replaceAll("_", " ") + " WOOL" + ChatColor.RED));
            }
        }
    }

    /** The monument block itself may never be broken. */
    @EventHandler
    public void onBlockBreak(BlockBreakEvent event) {
        if (event.getBlock().equals(place.getBlock())) {
            event.setCancelled(true);
        }
    }

    /** Blocks crafting of this wool color unless the map allows it. */
    @EventHandler
    public void onCraftWool(CraftItemEvent event) {
        if (event.getRecipe().getResult().equals(new ItemStack(Material.WOOL, 1, color.getData())) && !this.craftable) {
            event.setCancelled(true);
        }
    }

    /**
     * Before the first touch, a kill by the wool's team may improve the team's
     * recorded proximity to the wool source.
     */
    @EventHandler
    public void onCardinalDeath(CardinalDeathEvent event) {
        if (event.getKiller() != null && location != null && GameHandler.getGameHandler().getMatch().isRunning() && !this.touched && TeamUtils.getTeamByPlayer(event.getKiller()) != null && TeamUtils.getTeamByPlayer(event.getKiller()) == this.team) {
            if (event.getKiller().getLocation().toVector().distance(location) < proximity) {
                proximity = event.getKiller().getLocation().toVector().distance(location);
                Bukkit.getServer().getPluginManager().callEvent(new ScoreboardUpdateEvent());
            }
        }
    }

    /**
     * After a touch, placing this wool closer to the monument (e.g. bridging
     * toward it) improves the recorded proximity.
     */
    @EventHandler(priority = EventPriority.MONITOR)
    public void onSafetyPlace(BlockPlaceEvent event) {
        if (!event.isCancelled() && this.touched) {
            if (event.getBlock().getType().equals(Material.WOOL)) {
                if (((Wool) event.getBlock().getState().getData()).getColor().equals(color)) {
                    if (TeamUtils.getTeamByPlayer(event.getPlayer()) == team) {
                        if (event.getBlockPlaced().getLocation().distance(place.getLocation()) < proximity) {
                            proximity = event.getBlockPlaced().getLocation().distance(place.getLocation());
                            Bukkit.getServer().getPluginManager().callEvent(new ScoreboardUpdateEvent());
                        }
                        Bukkit.getServer().getPluginManager().callEvent(new ScoreboardUpdateEvent());
                    }
                }
            }
        }
    }

    public double getProximity() {
        return proximity;
    }

    /** Proximity is only meaningful when a wool source location is configured. */
    public boolean showProximity() {
        return location != null;
    }

    /** Awards snowflakes for this objective's announced first touches. */
    @EventHandler
    public void onWoolTouch(ObjectiveTouchEvent event) {
        if (event.getObjective().equals(this) && event.displayTouchMessage()) {
            Bukkit.getServer().getPluginManager().callEvent(new SnowflakeChangeEvent(event.getPlayer(), Snowflakes.ChangeReason.WOOL_TOUCH, 8, MiscUtils.convertDyeColorToChatColor(color) + name.toUpperCase().replaceAll("_", " ") + ChatColor.GRAY));
        }
    }

    /** Awards snowflakes for completing this objective. */
    @EventHandler
    public void onWoolPlace(ObjectiveCompleteEvent event) {
        if (event.getObjective().equals(this) && event.getObjective().showOnScoreboard()) {
            Bukkit.getServer().getPluginManager().callEvent(new SnowflakeChangeEvent(event.getPlayer(), Snowflakes.ChangeReason.WOOL_PLACE, 15, MiscUtils.convertDyeColorToChatColor(color) + name.toUpperCase().replaceAll("_", " ") + ChatColor.GRAY));
        }
    }
}
| |
package io.cattle.platform.agent.instance.service.impl;
import io.cattle.platform.agent.instance.dao.AgentInstanceDao;
import io.cattle.platform.agent.instance.factory.AgentInstanceFactory;
import io.cattle.platform.agent.instance.service.AgentInstanceManager;
import io.cattle.platform.agent.instance.service.InstanceNicLookup;
import io.cattle.platform.agent.instance.service.NetworkServiceInfo;
import io.cattle.platform.core.constants.AgentConstants;
import io.cattle.platform.core.constants.CommonStatesConstants;
import io.cattle.platform.core.constants.InstanceConstants;
import io.cattle.platform.core.dao.GenericResourceDao;
import io.cattle.platform.core.model.Account;
import io.cattle.platform.core.model.Agent;
import io.cattle.platform.core.model.Instance;
import io.cattle.platform.core.model.NetworkServiceProvider;
import io.cattle.platform.core.model.Nic;
import io.cattle.platform.core.model.Vnet;
import io.cattle.platform.deferred.util.DeferredUtils;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.object.process.ObjectProcessManager;
import io.cattle.platform.object.resource.ResourceMonitor;
import io.cattle.platform.object.resource.ResourcePredicate;
import io.cattle.platform.util.type.CollectionUtils;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import javax.inject.Inject;
import org.apache.commons.lang3.StringUtils;
public class AgentInstanceManagerImpl implements AgentInstanceManager {
ObjectManager objectManager;
ObjectProcessManager processManager;
AgentInstanceFactory agentInstanceFactory;
AgentInstanceDao agentInstanceDao;
GenericResourceDao genericResourceDao;
ResourceMonitor resourceMonitor;
List<InstanceNicLookup> nicLookups;
/**
 * Convenience overload: resolves agent instances for the nic while excluding
 * network-agent system containers.
 */
@Override
public Map<NetworkServiceProvider, Instance> getAgentInstances(Nic nic) {
    // Delegate to the two-argument overload with network agents excluded.
    return this.getAgentInstances(nic, false);
}
/**
 * Resolves the agent instance for each network-service provider on the nic's
 * network, creating a new system-container agent instance (or starting a
 * stopped existing one) as needed.
 *
 * @param nic                 the network interface to resolve providers for
 * @param includeNetworkAgent when false, requests made *by* a network-agent
 *                            system container are short-circuited to avoid
 *                            agents managing agents
 * @return map of provider to agent instance; empty when the nic has no vnet/
 *         network, the owning instance is missing, or its account is inactive
 */
@Override
public Map<NetworkServiceProvider, Instance> getAgentInstances(Nic nic, boolean includeNetworkAgent) {
    Map<NetworkServiceProvider, Instance> result = new HashMap<NetworkServiceProvider, Instance>();
    Vnet vnet = objectManager.loadResource(Vnet.class, nic.getVnetId());
    // Guard: nic must be attached to a vnet and a network.
    if (vnet == null || nic.getNetworkId() == null) {
        return result;
    }
    Instance instance = objectManager.loadResource(Instance.class, nic.getInstanceId());
    if (instance == null) {
        return result;
    }
    // Only act for accounts that are activating or active.
    Account account = objectManager.loadResource(Account.class, instance.getAccountId());
    List<String> goodStates = Arrays.asList(CommonStatesConstants.ACTIVATING, CommonStatesConstants.ACTIVE);
    if (account == null || !goodStates.contains(account.getState())) {
        return result;
    }
    // Skip requests originating from a network-agent container itself, unless
    // explicitly included by the caller.
    if (!includeNetworkAgent && instance.getAgentId() != null) {
        if (StringUtils.equalsIgnoreCase(instance.getSystemContainer(),
                InstanceConstants.SYSTEM_CONTAINER_NETWORK_AGENT)) {
            return result;
        }
    }
    for (NetworkServiceProvider provider : agentInstanceDao.getProviders(nic.getNetworkId())) {
        if (result.containsKey(provider)) {
            continue;
        }
        Instance agentInstance = agentInstanceDao.getAgentInstance(provider, nic);
        if (agentInstance == null) {
            // No agent yet: build a privileged network-agent system container
            // bound to this vnet, with the agent bind-mount volume attached.
            agentInstance = agentInstanceFactory.newBuilder()
                    .withNetworkServiceProvider(provider)
                    .withInstance(instance)
                    .withAccountId(instance.getAccountId())
                    .withPrivileged(true).forVnetId(nic.getVnetId())
                    .withSystemContainerType(InstanceConstants.SYSTEM_CONTAINER_NETWORK_AGENT)
                    .withParameters(CollectionUtils.asMap(InstanceConstants.FIELD_DATA_VOLUMES,
                            Arrays.asList(AgentConstants.AGENT_INSTANCE_BIND_MOUNT)))
                    .build();
        } else {
            // Existing agent: make sure it is running.
            start(agentInstance);
        }
        if (agentInstance != null) {
            result.put(provider, agentInstance);
        }
    }
    return result;
}
protected void start(final Instance agentInstance) {
if (InstanceConstants.STATE_STOPPED.equals(agentInstance.getState())) {
DeferredUtils.nest(new Callable<Object>() {
@Override
public Object call() throws Exception {
processManager.scheduleProcessInstance(InstanceConstants.PROCESS_START, agentInstance, null);
return null;
}
});
}
}
@Override
public NetworkServiceInfo getNetworkService(Instance instance, String kind, boolean waitForStart) {
if (instance == null || kind == null) {
return null;
}
NetworkServiceInfo info = agentInstanceDao.getNetworkServiceInfo(instance.getId(), kind);
if (info == null) {
return null;
}
Map<NetworkServiceProvider, Instance> instances = getAgentInstances(info.getClientNic());
for (Map.Entry<NetworkServiceProvider, Instance> entry : instances.entrySet()) {
if (entry.getKey().getId().equals(info.getNetworkServiceProvider().getId())) {
info.setAgentInstance(entry.getValue());
break;
}
}
if (info.getAgentInstance() == null) {
return info;
}
if (waitForStart) {
start(info.getAgentInstance());
instance = resourceMonitor.waitFor(info.getAgentInstance(), new ResourcePredicate<Instance>() {
@Override
public boolean evaluate(Instance obj) {
return InstanceConstants.STATE_RUNNING.equals(obj.getState());
}
@Override
public String getMessage() {
return "running";
}
});
info.setAgentInstance(instance);
}
agentInstanceDao.populateNicAndIp(info);
return info;
}
@Override
public List<? extends Agent> getAgents(NetworkServiceProvider provider) {
return agentInstanceDao.getAgents(provider);
}
@Override
public List<? extends Nic> getNicsFromResource(Object resource) {
if (resource instanceof Nic) {
return Arrays.asList((Nic) resource);
}
List<? extends Nic> nics = null;
for (InstanceNicLookup lookup : nicLookups) {
nics = lookup.getNics(resource);
if (nics != null) {
break;
}
}
return nics;
}
public ObjectManager getObjectManager() {
return objectManager;
}
@Inject
public void setObjectManager(ObjectManager objectManager) {
this.objectManager = objectManager;
}
public AgentInstanceFactory getAgentInstanceFactory() {
return agentInstanceFactory;
}
@Inject
public void setAgentInstanceFactory(AgentInstanceFactory agentInstanceFactory) {
this.agentInstanceFactory = agentInstanceFactory;
}
public AgentInstanceDao getAgentInstanceDao() {
return agentInstanceDao;
}
@Inject
public void setAgentInstanceDao(AgentInstanceDao agentInstanceDao) {
this.agentInstanceDao = agentInstanceDao;
}
public GenericResourceDao getGenericResourceDao() {
return genericResourceDao;
}
@Inject
public void setGenericResourceDao(GenericResourceDao genericResourceDao) {
this.genericResourceDao = genericResourceDao;
}
public ObjectProcessManager getProcessManager() {
return processManager;
}
@Inject
public void setProcessManager(ObjectProcessManager processManager) {
this.processManager = processManager;
}
public ResourceMonitor getResourceMonitor() {
return resourceMonitor;
}
@Inject
public void setResourceMonitor(ResourceMonitor resourceMonitor) {
this.resourceMonitor = resourceMonitor;
}
public List<InstanceNicLookup> getNicLookups() {
return nicLookups;
}
public void setNicLookups(List<InstanceNicLookup> nicLookups) {
this.nicLookups = nicLookups;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.text;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamField;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Currency;
import java.util.Locale;
/**
* Encapsulates the set of symbols (such as the decimal separator, the grouping
* separator, and so on) needed by {@code DecimalFormat} to format numbers.
* {@code DecimalFormat} internally creates an instance of
* {@code DecimalFormatSymbols} from its locale data. If you need to change any
* of these symbols, you can get the {@code DecimalFormatSymbols} object from
* your {@code DecimalFormat} and modify it.
*
* @see java.util.Locale
* @see DecimalFormat
*/
public final class DecimalFormatSymbols implements Cloneable, Serializable {

    private static final long serialVersionUID = 5772796243397350300L;

    /*
     * Indices into patternChars for each configurable symbol. These are
     * compile-time constants shared by every instance, so they are declared
     * static final rather than per-instance final fields (the original stored
     * a redundant copy of each index in every DecimalFormatSymbols object).
     * They never were part of the serialized form: serialPersistentFields
     * below defines the stream layout explicitly.
     */
    private static final int ZeroDigit = 0, Digit = 1, DecimalSeparator = 2,
            GroupingSeparator = 3, PatternSeparator = 4, Percent = 5,
            PerMill = 6, Exponent = 7, MonetaryDecimalSeparator = 8,
            MinusSign = 9;

    // Single-character symbols, indexed by the constants above. Package
    // access so DecimalFormat can read it directly; rebuilt by readObject
    // on deserialization.
    transient char[] patternChars;

    private transient Currency currency;

    private transient Locale locale;

    private String infinity, NaN, currencySymbol, intlCurrencySymbol;

    /**
     * Constructs a new {@code DecimalFormatSymbols} containing the symbols for
     * the default locale. Best practice is to create a {@code DecimalFormat}
     * and then to get the {@code DecimalFormatSymbols} from that object by
     * calling {@link DecimalFormat#getDecimalFormatSymbols()}.
     */
    public DecimalFormatSymbols() {
        this(Locale.getDefault());
    }

    /**
     * Constructs a new DecimalFormatSymbols containing the symbols for the
     * specified Locale. Best practice is to create a {@code DecimalFormat}
     * and then to get the {@code DecimalFormatSymbols} from that object by
     * calling {@link DecimalFormat#getDecimalFormatSymbols()}.
     *
     * @param locale
     *            the locale.
     */
    public DecimalFormatSymbols(Locale locale) {
        this(locale, new com.ibm.icu.text.DecimalFormatSymbols(locale));
    }

    // ICU delegate that supplied the locale data; not serialized, so it is
    // null after deserialization.
    private transient com.ibm.icu.text.DecimalFormatSymbols icuSymbols;

    /**
     * Package-private constructor that copies all symbols out of the given
     * ICU symbols object. Locales without a country get the pseudo-currency
     * "XXX" (ISO 4217 "no currency").
     */
    DecimalFormatSymbols(Locale locale,
            com.ibm.icu.text.DecimalFormatSymbols icuSymbols) {
        this.icuSymbols = icuSymbols;
        infinity = icuSymbols.getInfinity();
        NaN = icuSymbols.getNaN();
        this.locale = locale;
        currencySymbol = icuSymbols.getCurrencySymbol();
        intlCurrencySymbol = icuSymbols.getInternationalCurrencySymbol();
        if (locale.getCountry().length() == 0) {
            currency = Currency.getInstance("XXX"); //$NON-NLS-1$
        } else {
            currency = Currency.getInstance(locale);
        }
        patternChars = new char[10];
        patternChars[ZeroDigit] = icuSymbols.getZeroDigit();
        patternChars[Digit] = icuSymbols.getDigit();
        patternChars[DecimalSeparator] = icuSymbols.getDecimalSeparator();
        patternChars[GroupingSeparator] = icuSymbols.getGroupingSeparator();
        patternChars[PatternSeparator] = icuSymbols.getPatternSeparator();
        patternChars[Percent] = icuSymbols.getPercent();
        patternChars[PerMill] = icuSymbols.getPerMill();
        // ICU models the exponent separator as a string; only its first
        // character is representable here.
        patternChars[Exponent] = icuSymbols.getExponentSeparator().charAt(0);
        patternChars[MonetaryDecimalSeparator] = icuSymbols
                .getMonetaryDecimalSeparator();
        patternChars[MinusSign] = icuSymbols.getMinusSign();
    }

    /**
     * Returns a new {@code DecimalFormatSymbols} with the same symbols as this
     * {@code DecimalFormatSymbols}.
     *
     * @return a shallow copy of this {@code DecimalFormatSymbols}.
     *
     * @see java.lang.Cloneable
     */
    @Override
    public Object clone() {
        try {
            DecimalFormatSymbols symbols = (DecimalFormatSymbols) super.clone();
            // patternChars is mutable via the setters, so the copy needs its
            // own array; everything else is immutable or safely shared.
            symbols.patternChars = patternChars.clone();
            return symbols;
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            return null;
        }
    }

    /**
     * Compares the specified object to this {@code DecimalFormatSymbols} and
     * indicates if they are equal. In order to be equal, {@code object} must be
     * an instance of {@code DecimalFormatSymbols} and contain the same symbols.
     *
     * @param object
     *            the object to compare with this object.
     * @return {@code true} if the specified object is equal to this
     *         {@code DecimalFormatSymbols}; {@code false} otherwise.
     * @see #hashCode
     */
    @Override
    public boolean equals(Object object) {
        if (this == object) {
            return true;
        }
        if (!(object instanceof DecimalFormatSymbols)) {
            return false;
        }
        DecimalFormatSymbols obj = (DecimalFormatSymbols) object;
        return Arrays.equals(patternChars, obj.patternChars)
                && infinity.equals(obj.infinity) && NaN.equals(obj.NaN)
                && currencySymbol.equals(obj.currencySymbol)
                && intlCurrencySymbol.equals(obj.intlCurrencySymbol);
    }

    /**
     * Returns the currency.
     * <p>
     * {@code null} is returned if {@code setInternationalCurrencySymbol()} has
     * been previously called with a value that is not a valid ISO 4217 currency
     * code.
     * <p>
     *
     * @return the currency that was set in the constructor or by calling
     *         {@code setCurrency()} or {@code setInternationalCurrencySymbol()},
     *         or {@code null} if an invalid currency was set.
     * @see #setCurrency(Currency)
     * @see #setInternationalCurrencySymbol(String)
     */
    public Currency getCurrency() {
        return currency;
    }

    /**
     * Returns the international currency symbol.
     *
     * @return the international currency symbol as string.
     */
    public String getInternationalCurrencySymbol() {
        return intlCurrencySymbol;
    }

    /**
     * Returns the currency symbol.
     *
     * @return the currency symbol as string.
     */
    public String getCurrencySymbol() {
        return currencySymbol;
    }

    /**
     * Returns the character which represents the decimal point in a number.
     *
     * @return the decimal separator character.
     */
    public char getDecimalSeparator() {
        return patternChars[DecimalSeparator];
    }

    /**
     * Returns the character which represents a single digit in a format
     * pattern.
     *
     * @return the digit pattern character.
     */
    public char getDigit() {
        return patternChars[Digit];
    }

    /**
     * Returns the character used as the thousands separator in a number.
     *
     * @return the thousands separator character.
     */
    public char getGroupingSeparator() {
        return patternChars[GroupingSeparator];
    }

    /**
     * Returns the string which represents infinity.
     *
     * @return the infinity symbol as a string.
     */
    public String getInfinity() {
        return infinity;
    }

    /**
     * Returns the localized pattern characters used by DecimalFormat for
     * pattern parsing, in index order.
     */
    String getLocalPatternChars() {
        // Don't include the MonetaryDecimalSeparator or the MinusSign
        return new String(patternChars, 0, patternChars.length - 2);
    }

    /**
     * Returns the minus sign character.
     *
     * @return the minus sign as a character.
     */
    public char getMinusSign() {
        return patternChars[MinusSign];
    }

    /**
     * Returns the character which represents the decimal point in a monetary
     * value.
     *
     * @return the monetary decimal point as a character.
     */
    public char getMonetaryDecimalSeparator() {
        return patternChars[MonetaryDecimalSeparator];
    }

    /**
     * Returns the string which represents NaN.
     *
     * @return the symbol NaN as a string.
     */
    public String getNaN() {
        return NaN;
    }

    /**
     * Returns the character which separates the positive and negative patterns
     * in a format pattern.
     *
     * @return the pattern separator character.
     */
    public char getPatternSeparator() {
        return patternChars[PatternSeparator];
    }

    /**
     * Returns the percent character.
     *
     * @return the percent character.
     */
    public char getPercent() {
        return patternChars[Percent];
    }

    /**
     * Returns the per mill sign character.
     *
     * @return the per mill sign character.
     */
    public char getPerMill() {
        return patternChars[PerMill];
    }

    /**
     * Returns the character which represents zero.
     *
     * @return the zero character.
     */
    public char getZeroDigit() {
        return patternChars[ZeroDigit];
    }

    // Package-private accessor for the exponent character used by
    // DecimalFormat (serialized as "exponential").
    char getExponential() {
        return patternChars[Exponent];
    }

    @Override
    public int hashCode() {
        return new String(patternChars).hashCode() + infinity.hashCode()
                + NaN.hashCode() + currencySymbol.hashCode()
                + intlCurrencySymbol.hashCode();
    }

    /**
     * Sets the currency.
     * <p>
     * The international currency symbol and the currency symbol are updated,
     * but the min and max number of fraction digits stays the same.
     * <p>
     *
     * @param currency
     *            the new currency.
     * @throws NullPointerException
     *             if {@code currency} is {@code null}.
     */
    public void setCurrency(Currency currency) {
        if (currency == null) {
            throw new NullPointerException();
        }
        // Identity comparison is sufficient: Currency.getInstance returns
        // canonical instances.
        if (currency == this.currency) {
            return;
        }
        this.currency = currency;
        intlCurrencySymbol = currency.getCurrencyCode();
        currencySymbol = currency.getSymbol(locale);
    }

    /**
     * Sets the international currency symbol.
     * <p>
     * The currency and currency symbol are also updated if {@code value} is a
     * valid ISO4217 currency code.
     * <p>
     * The min and max number of fraction digits stay the same.
     *
     * @param value
     *            the currency code.
     */
    public void setInternationalCurrencySymbol(String value) {
        if (value == null) {
            currency = null;
            intlCurrencySymbol = null;
            return;
        }

        if (value.equals(intlCurrencySymbol)) {
            return;
        }

        try {
            currency = Currency.getInstance(value);
            currencySymbol = currency.getSymbol(locale);
        } catch (IllegalArgumentException e) {
            // Not a valid ISO 4217 code: keep the symbol but record that there
            // is no corresponding Currency (see getCurrency()).
            currency = null;
        }
        intlCurrencySymbol = value;
    }

    /**
     * Sets the currency symbol.
     *
     * @param value
     *            the currency symbol.
     */
    public void setCurrencySymbol(String value) {
        currencySymbol = value;
    }

    /**
     * Sets the character which represents the decimal point in a number.
     *
     * @param value
     *            the decimal separator character.
     */
    public void setDecimalSeparator(char value) {
        patternChars[DecimalSeparator] = value;
    }

    /**
     * Sets the character which represents a single digit in a format pattern.
     *
     * @param value
     *            the digit character.
     */
    public void setDigit(char value) {
        patternChars[Digit] = value;
    }

    /**
     * Sets the character used as the thousands separator in a number.
     *
     * @param value
     *            the grouping separator character.
     */
    public void setGroupingSeparator(char value) {
        patternChars[GroupingSeparator] = value;
    }

    /**
     * Sets the string which represents infinity.
     *
     * @param value
     *            the string representing infinity.
     */
    public void setInfinity(String value) {
        infinity = value;
    }

    /**
     * Sets the minus sign character.
     *
     * @param value
     *            the minus sign character.
     */
    public void setMinusSign(char value) {
        patternChars[MinusSign] = value;
    }

    /**
     * Sets the character which represents the decimal point in a monetary
     * value.
     *
     * @param value
     *            the monetary decimal separator character.
     */
    public void setMonetaryDecimalSeparator(char value) {
        patternChars[MonetaryDecimalSeparator] = value;
    }

    /**
     * Sets the string which represents NaN.
     *
     * @param value
     *            the string representing NaN.
     */
    public void setNaN(String value) {
        NaN = value;
    }

    /**
     * Sets the character which separates the positive and negative patterns in
     * a format pattern.
     *
     * @param value
     *            the pattern separator character.
     */
    public void setPatternSeparator(char value) {
        patternChars[PatternSeparator] = value;
    }

    /**
     * Sets the percent character.
     *
     * @param value
     *            the percent character.
     */
    public void setPercent(char value) {
        patternChars[Percent] = value;
    }

    /**
     * Sets the per mill sign character.
     *
     * @param value
     *            the per mill character.
     */
    public void setPerMill(char value) {
        patternChars[PerMill] = value;
    }

    /**
     * Sets the character which represents zero.
     *
     * @param value
     *            the zero digit character.
     */
    public void setZeroDigit(char value) {
        patternChars[ZeroDigit] = value;
    }

    // Package-private mutator for the exponent character (serialized as
    // "exponential").
    void setExponential(char value) {
        patternChars[Exponent] = value;
    }

    // Explicit serialized form: individual char/String fields rather than the
    // transient patternChars array, for compatibility with the reference
    // implementation's stream layout.
    private static final ObjectStreamField[] serialPersistentFields = {
            new ObjectStreamField("currencySymbol", String.class), //$NON-NLS-1$
            new ObjectStreamField("decimalSeparator", Character.TYPE), //$NON-NLS-1$
            new ObjectStreamField("digit", Character.TYPE), //$NON-NLS-1$
            new ObjectStreamField("exponential", Character.TYPE), //$NON-NLS-1$
            new ObjectStreamField("groupingSeparator", Character.TYPE), //$NON-NLS-1$
            new ObjectStreamField("infinity", String.class), //$NON-NLS-1$
            new ObjectStreamField("intlCurrencySymbol", String.class), //$NON-NLS-1$
            new ObjectStreamField("minusSign", Character.TYPE), //$NON-NLS-1$
            new ObjectStreamField("monetarySeparator", Character.TYPE), //$NON-NLS-1$
            new ObjectStreamField("NaN", String.class), //$NON-NLS-1$
            new ObjectStreamField("patternSeparator", Character.TYPE), //$NON-NLS-1$
            new ObjectStreamField("percent", Character.TYPE), //$NON-NLS-1$
            new ObjectStreamField("perMill", Character.TYPE), //$NON-NLS-1$
            new ObjectStreamField("serialVersionOnStream", Integer.TYPE), //$NON-NLS-1$
            new ObjectStreamField("zeroDigit", Character.TYPE), //$NON-NLS-1$
            new ObjectStreamField("locale", Locale.class), }; //$NON-NLS-1$

    // Flattens patternChars into the per-symbol stream fields declared above.
    private void writeObject(ObjectOutputStream stream) throws IOException {
        ObjectOutputStream.PutField fields = stream.putFields();
        fields.put("currencySymbol", currencySymbol); //$NON-NLS-1$
        fields.put("decimalSeparator", getDecimalSeparator()); //$NON-NLS-1$
        fields.put("digit", getDigit()); //$NON-NLS-1$
        fields.put("exponential", getExponential()); //$NON-NLS-1$
        fields.put("groupingSeparator", getGroupingSeparator()); //$NON-NLS-1$
        fields.put("infinity", infinity); //$NON-NLS-1$
        fields.put("intlCurrencySymbol", intlCurrencySymbol); //$NON-NLS-1$
        fields.put("minusSign", getMinusSign()); //$NON-NLS-1$
        fields.put("monetarySeparator", getMonetaryDecimalSeparator()); //$NON-NLS-1$
        fields.put("NaN", NaN); //$NON-NLS-1$
        fields.put("patternSeparator", getPatternSeparator()); //$NON-NLS-1$
        fields.put("percent", getPercent()); //$NON-NLS-1$
        fields.put("perMill", getPerMill()); //$NON-NLS-1$
        fields.put("serialVersionOnStream", 1); //$NON-NLS-1$
        fields.put("zeroDigit", getZeroDigit()); //$NON-NLS-1$
        fields.put("locale", locale); //$NON-NLS-1$
        stream.writeFields();
    }

    // Rebuilds patternChars from the per-symbol stream fields. Version-0
    // streams predate the monetary separator and exponent fields and get
    // defaults derived from the decimal separator / 'E'. Note: icuSymbols is
    // not restored and stays null after deserialization.
    private void readObject(ObjectInputStream stream) throws IOException,
            ClassNotFoundException {
        ObjectInputStream.GetField fields = stream.readFields();
        patternChars = new char[10];
        currencySymbol = (String) fields.get("currencySymbol", ""); //$NON-NLS-1$ //$NON-NLS-2$
        setDecimalSeparator(fields.get("decimalSeparator", '.')); //$NON-NLS-1$
        setDigit(fields.get("digit", '#')); //$NON-NLS-1$
        setGroupingSeparator(fields.get("groupingSeparator", ',')); //$NON-NLS-1$
        infinity = (String) fields.get("infinity", ""); //$NON-NLS-1$ //$NON-NLS-2$
        intlCurrencySymbol = (String) fields.get("intlCurrencySymbol", ""); //$NON-NLS-1$ //$NON-NLS-2$
        setMinusSign(fields.get("minusSign", '-')); //$NON-NLS-1$
        NaN = (String) fields.get("NaN", ""); //$NON-NLS-1$ //$NON-NLS-2$
        setPatternSeparator(fields.get("patternSeparator", ';')); //$NON-NLS-1$
        setPercent(fields.get("percent", '%')); //$NON-NLS-1$
        setPerMill(fields.get("perMill", '\u2030')); //$NON-NLS-1$
        setZeroDigit(fields.get("zeroDigit", '0')); //$NON-NLS-1$
        locale = (Locale) fields.get("locale", null); //$NON-NLS-1$
        if (fields.get("serialVersionOnStream", 0) == 0) { //$NON-NLS-1$
            setMonetaryDecimalSeparator(getDecimalSeparator());
            setExponential('E');
        } else {
            setMonetaryDecimalSeparator(fields.get("monetarySeparator", '.')); //$NON-NLS-1$
            setExponential(fields.get("exponential", 'E')); //$NON-NLS-1$
        }
        try {
            currency = Currency.getInstance(intlCurrencySymbol);
        } catch (IllegalArgumentException e) {
            // Stream carried a non-ISO symbol; mirror
            // setInternationalCurrencySymbol and leave currency null.
            currency = null;
        }
    }

    // Package-private accessor used by DecimalFormat for currency lookups.
    Locale getLocale(){
        return locale;
    }

    // Package-private accessor; may be null after deserialization (see
    // readObject).
    com.ibm.icu.text.DecimalFormatSymbols getIcuSymbols() {
        return icuSymbols;
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.appservice.implementation;
import com.microsoft.azure.management.appservice.AppServicePlanRestrictions;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.rest.serializer.JsonFlatten;
import com.microsoft.azure.Resource;
/**
 * Premier add-on offer.
 */
@JsonFlatten
public class PremierAddOnOfferInner extends Resource {
    /** SKU of the offer. */
    @JsonProperty(value = "properties.sku")
    private String sku;

    /** Product of the offer. */
    @JsonProperty(value = "properties.product")
    private String product;

    /** Vendor of the offer. */
    @JsonProperty(value = "properties.vendor")
    private String vendor;

    /** Name of the offer. */
    @JsonProperty(value = "properties.name")
    private String premierAddOnOfferName;

    /**
     * <code>true</code> if a promotion code is required; otherwise
     * <code>false</code>.
     */
    @JsonProperty(value = "properties.promoCodeRequired")
    private Boolean promoCodeRequired;

    /** Quota of the offer. */
    @JsonProperty(value = "properties.quota")
    private Integer quota;

    /**
     * App Service plans this offer is restricted to. Possible values include:
     * 'None', 'Free', 'Shared', 'Basic', 'Standard', 'Premium'.
     */
    @JsonProperty(value = "properties.webHostingPlanRestrictions")
    private AppServicePlanRestrictions webHostingPlanRestrictions;

    /** Privacy policy URL. */
    @JsonProperty(value = "properties.privacyPolicyUrl")
    private String privacyPolicyUrl;

    /** Legal terms URL. */
    @JsonProperty(value = "properties.legalTermsUrl")
    private String legalTermsUrl;

    /** Marketplace publisher. */
    @JsonProperty(value = "properties.marketplacePublisher")
    private String marketplacePublisher;

    /** Marketplace offer. */
    @JsonProperty(value = "properties.marketplaceOffer")
    private String marketplaceOffer;

    /**
     * Gets the SKU.
     *
     * @return the sku value
     */
    public String sku() {
        return sku;
    }

    /**
     * Sets the SKU.
     *
     * @param sku the sku value to set
     * @return the PremierAddOnOfferInner object itself.
     */
    public PremierAddOnOfferInner withSku(String sku) {
        this.sku = sku;
        return this;
    }

    /**
     * Gets the product.
     *
     * @return the product value
     */
    public String product() {
        return product;
    }

    /**
     * Sets the product.
     *
     * @param product the product value to set
     * @return the PremierAddOnOfferInner object itself.
     */
    public PremierAddOnOfferInner withProduct(String product) {
        this.product = product;
        return this;
    }

    /**
     * Gets the vendor.
     *
     * @return the vendor value
     */
    public String vendor() {
        return vendor;
    }

    /**
     * Sets the vendor.
     *
     * @param vendor the vendor value to set
     * @return the PremierAddOnOfferInner object itself.
     */
    public PremierAddOnOfferInner withVendor(String vendor) {
        this.vendor = vendor;
        return this;
    }

    /**
     * Gets the offer name.
     *
     * @return the premierAddOnOfferName value
     */
    public String premierAddOnOfferName() {
        return premierAddOnOfferName;
    }

    /**
     * Sets the offer name.
     *
     * @param premierAddOnOfferName the premierAddOnOfferName value to set
     * @return the PremierAddOnOfferInner object itself.
     */
    public PremierAddOnOfferInner withPremierAddOnOfferName(String premierAddOnOfferName) {
        this.premierAddOnOfferName = premierAddOnOfferName;
        return this;
    }

    /**
     * Gets whether a promotion code is required.
     *
     * @return the promoCodeRequired value
     */
    public Boolean promoCodeRequired() {
        return promoCodeRequired;
    }

    /**
     * Sets whether a promotion code is required.
     *
     * @param promoCodeRequired the promoCodeRequired value to set
     * @return the PremierAddOnOfferInner object itself.
     */
    public PremierAddOnOfferInner withPromoCodeRequired(Boolean promoCodeRequired) {
        this.promoCodeRequired = promoCodeRequired;
        return this;
    }

    /**
     * Gets the quota.
     *
     * @return the quota value
     */
    public Integer quota() {
        return quota;
    }

    /**
     * Sets the quota.
     *
     * @param quota the quota value to set
     * @return the PremierAddOnOfferInner object itself.
     */
    public PremierAddOnOfferInner withQuota(Integer quota) {
        this.quota = quota;
        return this;
    }

    /**
     * Gets the App Service plan restrictions.
     *
     * @return the webHostingPlanRestrictions value
     */
    public AppServicePlanRestrictions webHostingPlanRestrictions() {
        return webHostingPlanRestrictions;
    }

    /**
     * Sets the App Service plan restrictions.
     *
     * @param webHostingPlanRestrictions the webHostingPlanRestrictions value to set
     * @return the PremierAddOnOfferInner object itself.
     */
    public PremierAddOnOfferInner withWebHostingPlanRestrictions(AppServicePlanRestrictions webHostingPlanRestrictions) {
        this.webHostingPlanRestrictions = webHostingPlanRestrictions;
        return this;
    }

    /**
     * Gets the privacy policy URL.
     *
     * @return the privacyPolicyUrl value
     */
    public String privacyPolicyUrl() {
        return privacyPolicyUrl;
    }

    /**
     * Sets the privacy policy URL.
     *
     * @param privacyPolicyUrl the privacyPolicyUrl value to set
     * @return the PremierAddOnOfferInner object itself.
     */
    public PremierAddOnOfferInner withPrivacyPolicyUrl(String privacyPolicyUrl) {
        this.privacyPolicyUrl = privacyPolicyUrl;
        return this;
    }

    /**
     * Gets the legal terms URL.
     *
     * @return the legalTermsUrl value
     */
    public String legalTermsUrl() {
        return legalTermsUrl;
    }

    /**
     * Sets the legal terms URL.
     *
     * @param legalTermsUrl the legalTermsUrl value to set
     * @return the PremierAddOnOfferInner object itself.
     */
    public PremierAddOnOfferInner withLegalTermsUrl(String legalTermsUrl) {
        this.legalTermsUrl = legalTermsUrl;
        return this;
    }

    /**
     * Gets the marketplace publisher.
     *
     * @return the marketplacePublisher value
     */
    public String marketplacePublisher() {
        return marketplacePublisher;
    }

    /**
     * Sets the marketplace publisher.
     *
     * @param marketplacePublisher the marketplacePublisher value to set
     * @return the PremierAddOnOfferInner object itself.
     */
    public PremierAddOnOfferInner withMarketplacePublisher(String marketplacePublisher) {
        this.marketplacePublisher = marketplacePublisher;
        return this;
    }

    /**
     * Gets the marketplace offer.
     *
     * @return the marketplaceOffer value
     */
    public String marketplaceOffer() {
        return marketplaceOffer;
    }

    /**
     * Sets the marketplace offer.
     *
     * @param marketplaceOffer the marketplaceOffer value to set
     * @return the PremierAddOnOfferInner object itself.
     */
    public PremierAddOnOfferInner withMarketplaceOffer(String marketplaceOffer) {
        this.marketplaceOffer = marketplaceOffer;
        return this;
    }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.anonymousToInner;
import com.intellij.codeInsight.ChangeContextUtil;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.impl.PsiDiamondTypeUtil;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.FileTypeUtils;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.HelpID;
import com.intellij.refactoring.RefactoringActionHandler;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.refactoring.util.classMembers.ElementNeedsThis;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
public class AnonymousToInnerHandler implements RefactoringActionHandler {
private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.anonymousToInner.AnonymousToInnerHandler");
// User-visible name of this refactoring, reused for dialogs/error hints.
static final String REFACTORING_NAME = RefactoringBundle.message("anonymousToInner.refactoring.name");
// Refactoring state, populated by invoke(...) and consumed by doRefactoring().
private Project myProject;
private PsiManager myManager;
// The anonymous class being converted.
private PsiAnonymousClass myAnonClass;
// The class that will receive the new inner class.
private PsiClass myTargetClass;
// Name chosen for the new inner class (from the dialog).
protected String myNewClassName;
// Variables used by the anonymous class, as collected by collectUsedVariables
// and possibly edited in the dialog.
private VariableInfo[] myVariableInfos;
// Whether the created inner class should be static (from the dialog).
protected boolean myMakeStatic;
// Type parameters the new class must declare; filled by
// calculateTypeParametersToCreate() during doRefactoring().
private final Set<PsiTypeParameter> myTypeParametersToCreate = new LinkedHashSet<PsiTypeParameter>();
/**
 * Element-based entry point: delegates to the editor-based entry point when
 * exactly one element is selected and it is an anonymous class.
 */
public void invoke(@NotNull Project project, @NotNull PsiElement[] elements, DataContext dataContext) {
    if (elements.length != 1) {
        return;
    }
    if (elements[0] instanceof PsiAnonymousClass) {
        invoke(project, CommonDataKeys.EDITOR.getData(dataContext), (PsiAnonymousClass)elements[0]);
    }
}
/**
 * Editor-based entry point: locates the anonymous class at the caret and
 * starts the refactoring, reporting an error hint when the caret is not on an
 * anonymous class or the anonymous class is an enum constant initializer.
 */
public void invoke(@NotNull final Project project, Editor editor, final PsiFile file, DataContext dataContext) {
    if (!CommonRefactoringUtil.checkReadOnlyStatus(project, file)) return;

    final int caretOffset = editor.getCaretModel().getOffset();
    editor.getScrollingModel().scrollToCaret(ScrollType.MAKE_VISIBLE);

    final PsiAnonymousClass anonymousClass = findAnonymousClass(file, caretOffset);
    if (anonymousClass == null) {
        showErrorMessage(editor, RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message("error.wrong.caret.position.anonymous")));
        return;
    }
    if (anonymousClass.getParent() instanceof PsiEnumConstant) {
        showErrorMessage(editor, RefactoringBundle.getCannotRefactorMessage("Enum constant can't be converted to inner class"));
        return;
    }
    invoke(project, editor, anonymousClass);
}
/**
 * Shows the given message as a refactoring error hint in the editor, titled
 * with this refactoring's name and linked to its help topic.
 */
private void showErrorMessage(Editor editor, String message) {
    CommonRefactoringUtil.showErrorHint(myProject, editor, message, REFACTORING_NAME, HelpID.ANONYMOUS_TO_INNER);
}
/**
 * Main entry point: validates the anonymous class and its target container,
 * collects the captured variables, shows the options dialog and then performs
 * the refactoring inside a write-action command.
 */
public void invoke(final Project project, Editor editor, final PsiAnonymousClass anonymousClass) {
    myProject = project;
    myManager = PsiManager.getInstance(myProject);
    myAnonClass = anonymousClass;

    // The anonymous class must extend/implement a resolvable type.
    PsiClassType baseRef = myAnonClass.getBaseClassType();
    if (baseRef.resolve() == null) {
        String message = RefactoringBundle.message("error.cannot.resolve", baseRef.getCanonicalText());
        showErrorMessage(editor, message);
        return;
    }

    PsiElement targetContainer = findTargetContainer(myAnonClass);
    if (FileTypeUtils.isInServerPageFile(targetContainer) && targetContainer instanceof PsiFile) {
        String message = RefactoringBundle.message("error.not.supported.for.jsp", REFACTORING_NAME);
        showErrorMessage(editor, message);
        return;
    }
    LOG.assertTrue(targetContainer instanceof PsiClass);
    myTargetClass = (PsiClass) targetContainer;

    if (!CommonRefactoringUtil.checkReadOnlyStatus(project, myTargetClass)) return;

    Map<PsiVariable, VariableInfo> variableInfoMap = new LinkedHashMap<PsiVariable, VariableInfo>();
    collectUsedVariables(variableInfoMap, myAnonClass);
    final VariableInfo[] infos = variableInfoMap.values().toArray(new VariableInfo[variableInfoMap.size()]);
    myVariableInfos = infos;
    // Sort so vararg (ellipsis-typed) variables come last, otherwise keeping
    // the collection order.
    Arrays.sort(myVariableInfos, new Comparator<VariableInfo>() {
        @Override
        public int compare(VariableInfo o1, VariableInfo o2) {
            final boolean vararg1 = o1.variable.getType() instanceof PsiEllipsisType;
            final boolean vararg2 = o2.variable.getType() instanceof PsiEllipsisType;
            if (vararg1 != vararg2) {
                return vararg1 ? 1 : -1;
            }
            // FIX: the previous comparator returned only 1/-1 and never 0,
            // violating the Comparator contract (compare(x, x) must be 0 and
            // sgn(compare(x, y)) == -sgn(compare(y, x))); Arrays.sort may then
            // throw "Comparison method violates its general contract!".
            return Integer.compare(ArrayUtil.find(infos, o1), ArrayUtil.find(infos, o2));
        }
    });

    if (!showRefactoringDialog()) return;

    CommandProcessor.getInstance().executeCommand(
        myProject, new Runnable() {
            public void run() {
                final Runnable action = new Runnable() {
                    public void run() {
                        try {
                            doRefactoring();
                        } catch (IncorrectOperationException e) {
                            LOG.error(e);
                        }
                    }
                };
                // PSI mutation must happen inside a write action.
                ApplicationManager.getApplication().runWriteAction(action);
            }
        },
        REFACTORING_NAME,
        null
    );
}
/**
 * Opens the options dialog and, when the user confirms, records the chosen
 * class name, variable handling and static-ness.
 *
 * @return {@code false} when the user cancels the dialog.
 */
protected boolean showRefactoringDialog() {
    final boolean targetIsInterface = myTargetClass.isInterface();
    final boolean requiresOuterInstance = needsThis() || PsiUtil.isInnerClass(myTargetClass);
    final boolean forceStatic = requiresOuterInstance || targetIsInterface;
    final AnonymousToInnerDialog dialog =
        new AnonymousToInnerDialog(myProject, myAnonClass, myVariableInfos, forceStatic);
    if (!dialog.showAndGet()) {
        return false;
    }
    myNewClassName = dialog.getClassName();
    myVariableInfos = dialog.getVariableInfos();
    // Static only when no outer instance is needed; interface members are
    // implicitly static regardless of the checkbox.
    myMakeStatic = !requiresOuterInstance && (targetIsInterface || dialog.isMakeStatic());
    return true;
}
/**
 * Carries out the refactoring: creates the new inner class, adds it to the
 * target class, and replaces the anonymous {@code new} expression with an
 * instantiation of the new class (passing the captured variables).
 */
private void doRefactoring() throws IncorrectOperationException {
    calculateTypeParametersToCreate();
    PsiClass aClass = createClass(myNewClassName);
    myTargetClass.add(aClass);

    PsiNewExpression newExpr = (PsiNewExpression) myAnonClass.getParent();
    // Local, single-threaded string assembly: StringBuilder instead of the
    // needlessly synchronized StringBuffer.
    @NonNls StringBuilder buf = new StringBuilder();
    buf.append("new ");
    buf.append(aClass.getName());

    // Re-declare the type parameters the anonymous class borrowed from its
    // surrounding context as type arguments of the instantiation.
    if (!myTypeParametersToCreate.isEmpty()) {
        buf.append("<");
        boolean firstTypeParameter = true;
        for (PsiTypeParameter typeParameter : myTypeParametersToCreate) {
            if (!firstTypeParameter) buf.append(", ");
            firstTypeParameter = false;
            buf.append(typeParameter.getName());
        }
        buf.append(">");
    }

    // Pass every captured variable the user chose as a constructor argument.
    buf.append("(");
    boolean isFirstParameter = true;
    for (VariableInfo info : myVariableInfos) {
        if (info.passAsParameter) {
            if (isFirstParameter) {
                isFirstParameter = false;
            }
            else {
                buf.append(",");
            }
            buf.append(info.variable.getName());
        }
    }
    buf.append(")");

    PsiNewExpression newClassExpression =
        (PsiNewExpression)JavaPsiFacade.getInstance(myManager.getProject()).getElementFactory().createExpressionFromText(buf.toString(), null);
    newClassExpression = (PsiNewExpression)newExpr.replace(newClassExpression);
    // Collapse "new Foo<T>()" to "new Foo<>()" where the language level allows.
    if (PsiDiamondTypeUtil.canCollapseToDiamond(newClassExpression, newClassExpression, newClassExpression.getType())) {
        PsiDiamondTypeUtil.replaceExplicitWithDiamond(newClassExpression.getClassOrAnonymousClassReference().getParameterList());
    }
}
/**
 * Walks up the PSI tree from the element at {@code offset} and returns the
 * first enclosing anonymous class — either the class element itself or one
 * attached to an enclosing {@code new} expression.
 *
 * @return the anonymous class, or {@code null} when none encloses the offset.
 */
@Nullable
public static PsiAnonymousClass findAnonymousClass(PsiFile file, int offset) {
    for (PsiElement current = file.findElementAt(offset); current != null; current = current.getParent()) {
        if (current instanceof PsiAnonymousClass) {
            return (PsiAnonymousClass) current;
        }
        if (current instanceof PsiNewExpression) {
            final PsiAnonymousClass anonymous = ((PsiNewExpression) current).getAnonymousClass();
            if (anonymous != null) {
                return anonymous;
            }
        }
    }
    return null;
}
/**
 * Returns the closest enclosing element the generated class can be added to:
 * the nearest named (non-anonymous) class, or failing that the containing
 * file.
 */
public static PsiElement findTargetContainer(PsiAnonymousClass anonClass) {
    for (PsiElement candidate = anonClass.getParent(); ; candidate = candidate.getParent()) {
        final boolean namedClass = candidate instanceof PsiClass && !(candidate instanceof PsiAnonymousClass);
        if (namedClass || candidate instanceof PsiFile) {
            return candidate;
        }
    }
}
/**
 * Records every local variable (not a field) declared outside the anonymous
 * class that is referenced without a qualifier inside {@code scope} — i.e.
 * the variables the anonymous class captures.
 */
private void collectUsedVariables(final Map<PsiVariable, VariableInfo> variableInfoMap,
                                  PsiElement scope) {
    scope.accept(new JavaRecursiveElementWalkingVisitor() {
        @Override public void visitReferenceExpression(PsiReferenceExpression expression) {
            // Only unqualified references can point at captured locals.
            if (expression.getQualifierExpression() == null) {
                PsiElement refElement = expression.resolve();
                if (refElement instanceof PsiVariable && !(refElement instanceof PsiField)) {
                    PsiVariable var = (PsiVariable)refElement;
                    final PsiClass containingClass = PsiTreeUtil.getParentOfType(var, PsiClass.class);
                    // Captured only if declared outside the anonymous class itself.
                    if (PsiTreeUtil.isAncestor(containingClass, myAnonClass, true)) {
                        saveVariable(variableInfoMap, var, expression);
                    }
                }
            }
            super.visitReferenceExpression(expression);
        }
    });
}
// Lazily computed result of needsThis(); null until the first evaluation.
private Boolean cachedNeedsThis = null;

/**
 * Whether the anonymous class needs an enclosing instance of the target
 * class — i.e. it uses outer members, or its constructor-argument list
 * contains an explicit {@code this}. The result is cached after the first
 * call.
 */
public boolean needsThis() {
    if(cachedNeedsThis == null) {

        ElementNeedsThis memberNeedsThis = new ElementNeedsThis(myTargetClass, myAnonClass);
        myAnonClass.accept(memberNeedsThis);

        // Looks for a textual "this" in the constructor arguments; reference
        // expressions are deliberately not descended into.
        class HasExplicitThis extends JavaRecursiveElementWalkingVisitor {
            boolean hasExplicitThis = false;
            @Override public void visitReferenceExpression(PsiReferenceExpression expression) {
            }
            @Override public void visitThisExpression(PsiThisExpression expression) {
                hasExplicitThis = true;
            }
        }
        final HasExplicitThis hasExplicitThis = new HasExplicitThis();
        PsiExpressionList argList = myAnonClass.getArgumentList();
        if (argList != null) argList.accept(hasExplicitThis);

        cachedNeedsThis = memberNeedsThis.usesMembers() || hasExplicitThis.hasExplicitThis;
    }
    return cachedNeedsThis.booleanValue();
}
/**
 * Registers a usage of a captured variable, creating its
 * {@link VariableInfo} on first sight. Any usage outside initializer code
 * forces the variable to be stored in a field of the generated class.
 */
private void saveVariable(Map<PsiVariable, VariableInfo> variableInfoMap,
                          PsiVariable var,
                          PsiReferenceExpression usage) {
    VariableInfo info = variableInfoMap.get(var);
    if (info == null) {
        info = new VariableInfo(var);
        variableInfoMap.put(var, info);
    }
    if (!isUsedInInitializer(usage)) {
        info.saveInField = true;
    }
}
/**
 * True when {@code usage} lies in code that will be moved into the generated
 * constructor: the anonymous class's own argument list, or an instance
 * initializer of the anonymous class.
 */
private boolean isUsedInInitializer(PsiElement usage) {
    for (PsiElement parent = usage.getParent(); !myAnonClass.equals(parent); parent = parent.getParent()) {
        if (parent instanceof PsiExpressionList && myAnonClass.equals(parent.getParent())) {
            // Directly inside "new Base(...)" — the constructor arguments.
            return true;
        }
        if (parent instanceof PsiClassInitializer
            && myAnonClass.equals(((PsiClassInitializer) parent).getContainingClass())) {
            // Class initializers will be moved to the generated constructor.
            return true;
        }
    }
    return false;
}
/**
 * Builds the new inner/nested class from the anonymous class: copies the
 * body, re-declares borrowed type parameters, sets visibility and
 * static-ness, wires the extends/implements clause, and generates a
 * constructor when captured variables, super-call arguments, or a throwing
 * super constructor require one.
 */
private PsiClass createClass(String name) throws IncorrectOperationException {
    PsiElementFactory factory = JavaPsiFacade.getInstance(myAnonClass.getProject()).getElementFactory();
    CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(myProject);
    final PsiNewExpression newExpression = (PsiNewExpression) myAnonClass.getParent();
    final PsiMethod superConstructor = newExpression.resolveConstructor();

    PsiClass aClass = factory.createClass(name);
    final PsiTypeParameterList typeParameterList = aClass.getTypeParameterList();
    LOG.assertTrue(typeParameterList != null);
    // Re-declare type parameters the anonymous class used from its context.
    for (PsiTypeParameter typeParameter : myTypeParametersToCreate) {
        typeParameterList.add((typeParameter));
    }
    if (!myTargetClass.isInterface()) {
        PsiUtil.setModifierProperty(aClass, PsiModifier.PRIVATE, true);
        PsiModifierListOwner owner = PsiTreeUtil.getParentOfType(myAnonClass, PsiModifierListOwner.class);
        // Inside a static context the generated class must be static too.
        if (owner != null && owner.hasModifierProperty(PsiModifier.STATIC)) {
            PsiUtil.setModifierProperty(aClass, PsiModifier.STATIC, true);
        }
    } else {
        PsiUtil.setModifierProperty(aClass, PsiModifier.PACKAGE_LOCAL, true);
    }

    PsiJavaCodeReferenceElement baseClassRef = myAnonClass.getBaseClassReference();
    PsiClass baseClass = (PsiClass)baseClassRef.resolve();
    // No extends/implements clause when the base is java.lang.Object.
    if (baseClass == null || !CommonClassNames.JAVA_LANG_OBJECT.equals(baseClass.getQualifiedName())) {
        PsiReferenceList refList = baseClass != null && baseClass.isInterface() ?
            aClass.getImplementsList() :
            aClass.getExtendsList();
        if (refList != null) refList.add(baseClassRef);
    }

    // Rename captured-variable references before the body is copied so the
    // copy already uses the field/parameter names.
    renameReferences(myAnonClass);
    copyClassBody(myAnonClass, aClass, myVariableInfos.length > 0);

    if (myVariableInfos.length > 0) {
        createFields(aClass);
    }

    PsiExpressionList argList = newExpression.getArgumentList();
    assert argList != null;
    PsiExpression[] originalExpressions = argList.getExpressions();
    final PsiReferenceList superConstructorThrowsList =
        superConstructor != null && superConstructor.getThrowsList().getReferencedTypes().length > 0
        ? superConstructor.getThrowsList()
        : null;
    if (myVariableInfos.length > 0 || originalExpressions.length > 0 || superConstructorThrowsList != null) {
        PsiMethod constructor = factory.createConstructor();
        if (superConstructorThrowsList != null) {
            constructor.getThrowsList().replace(superConstructorThrowsList);
        }
        if (originalExpressions.length > 0) {
            createSuperStatement(constructor, originalExpressions);
        }
        if (myVariableInfos.length > 0) {
            fillParameterList(constructor);
            createAssignmentStatements(constructor);

            appendInitializers(constructor);
        }

        constructor = (PsiMethod) codeStyleManager.reformat(constructor);
        aClass.add(constructor);
    }

    if (!needsThis() && myMakeStatic && !myTargetClass.isInterface()) {
        PsiUtil.setModifierProperty(aClass, PsiModifier.STATIC, true);
    }
    PsiElement lastChild = aClass.getLastChild();
    // Drop the trailing ";" that factory-created classes may carry.
    if (lastChild instanceof PsiJavaToken && ((PsiJavaToken)lastChild).getTokenType() == JavaTokenType.SEMICOLON) {
        lastChild.delete();
    }

    return aClass;
}
/**
 * Moves instance initializers and instance-field initializers of the
 * anonymous class into the generated constructor, preserving their original
 * source order.
 */
private void appendInitializers(final PsiMethod constructor) throws IncorrectOperationException {
    PsiCodeBlock constructorBody = constructor.getBody();
    assert constructorBody != null;

    List<PsiElement> toAdd = new ArrayList<PsiElement>();
    for (PsiClassInitializer initializer : myAnonClass.getInitializers()) {
        if (!initializer.hasModifierProperty(PsiModifier.STATIC)) {
            toAdd.add(initializer);
        }
    }
    for (PsiField field : myAnonClass.getFields()) {
        if (!field.hasModifierProperty(PsiModifier.STATIC) && field.getInitializer() != null) {
            toAdd.add(field);
        }
    }
    // Run in textual order, as the JLS prescribes for instance initializers.
    Collections.sort(toAdd, new Comparator<PsiElement>() {
        public int compare(PsiElement e1, PsiElement e2) {
            return e1.getTextRange().getStartOffset() - e2.getTextRange().getStartOffset();
        }
    });
    for (PsiElement element : toAdd) {
        if (element instanceof PsiClassInitializer) {
            PsiClassInitializer initializer = (PsiClassInitializer) element;
            final PsiCodeBlock initializerBody = initializer.getBody();
            PsiElement firstBodyElement = initializerBody.getFirstBodyElement();
            if (firstBodyElement != null) {
                constructorBody.addRange(firstBodyElement, initializerBody.getLastBodyElement());
            }
        } else {
            PsiField field = (PsiField) element;
            // Turn "Type f = init;" into a constructor statement "f = init;"
            // by templating "f = 0;" and swapping in the real initializer.
            final PsiExpressionStatement statement = (PsiExpressionStatement)JavaPsiFacade.getInstance(myManager.getProject())
                .getElementFactory()
                .createStatementFromText(field.getName() + "= 0;", null);
            PsiExpression rightExpression = ((PsiAssignmentExpression) statement.getExpression()).getRExpression();
            assert rightExpression != null;
            PsiExpression fieldInitializer = field.getInitializer();
            assert fieldInitializer != null;
            rightExpression.replace(fieldInitializer);
            constructorBody.add(statement);
        }
    }
}
/**
 * Copies everything between the braces of {@code sourceClass} into
 * {@code targetClass}. When the initializers are being relocated into a
 * generated constructor, the copied instance initializers and
 * instance-field initializers are deleted from the copy afterwards.
 */
private static void copyClassBody(PsiClass sourceClass,
                                  PsiClass targetClass,
                                  boolean appendInitializersToConstructor) throws IncorrectOperationException {
    PsiElement lbrace = sourceClass.getLBrace();
    PsiElement rbrace = sourceClass.getRBrace();
    if (lbrace != null) {
        targetClass.addRange(lbrace.getNextSibling(), rbrace != null ? rbrace.getPrevSibling() : sourceClass.getLastChild());
        if (appendInitializersToConstructor) { //see SCR 41692
            final PsiClassInitializer[] initializers = targetClass.getInitializers();
            for (PsiClassInitializer initializer : initializers) {
                if (!initializer.hasModifierProperty(PsiModifier.STATIC)) initializer.delete();
            }
            final PsiField[] fields = targetClass.getFields();
            for (PsiField field : fields) {
                PsiExpression initializer = field.getInitializer();
                if (!field.hasModifierProperty(PsiModifier.STATIC) && initializer != null) {
                    initializer.delete();
                }
            }
        }
    }
}
/**
 * Appends one constructor parameter for every captured variable the user
 * chose to pass as a parameter, in capture order.
 */
private void fillParameterList(PsiMethod constructor) throws IncorrectOperationException {
    final PsiElementFactory factory = JavaPsiFacade.getInstance(constructor.getProject()).getElementFactory();
    final PsiParameterList parameters = constructor.getParameterList();
    for (VariableInfo info : myVariableInfos) {
        if (!info.passAsParameter) {
            continue;
        }
        parameters.add(factory.createParameter(info.parameterName, info.variable.getType()));
    }
}
/**
 * Adds a {@code final} field to the generated class for every captured
 * variable that must outlive the constructor; vararg (ellipsis) types are
 * stored as plain arrays.
 */
private void createFields(PsiClass aClass) throws IncorrectOperationException {
    final PsiElementFactory factory = JavaPsiFacade.getInstance(myManager.getProject()).getElementFactory();
    for (VariableInfo info : myVariableInfos) {
        if (!info.saveInField) {
            continue;
        }
        PsiType fieldType = info.variable.getType();
        if (fieldType instanceof PsiEllipsisType) {
            fieldType = ((PsiEllipsisType) fieldType).toArrayType();
        }
        final PsiField field = factory.createField(info.fieldName, fieldType);
        PsiUtil.setModifierProperty(field, PsiModifier.FINAL, true);
        aClass.add(field);
    }
}
/**
 * For every captured variable stored in a field, appends a constructor
 * statement assigning the field from the matching parameter; when the
 * variable is not passed as a parameter the right-hand side is removed,
 * leaving an incomplete assignment for the user to fill in.
 */
private void createAssignmentStatements(PsiMethod constructor) throws IncorrectOperationException {
    PsiElementFactory factory = JavaPsiFacade.getInstance(constructor.getProject()).getElementFactory();
    for (VariableInfo info : myVariableInfos) {
        if (info.saveInField) {
            // Template "field = a;", qualified with "this." when the parameter
            // name shadows the field name.
            @NonNls String text = info.fieldName + "=a;";
            boolean useThis = info.passAsParameter && info.parameterName.equals(info.fieldName);
            if (useThis) {
                text = "this." + text;
            }

            PsiExpressionStatement statement = (PsiExpressionStatement)factory.createStatementFromText(text, null);
            statement = (PsiExpressionStatement)CodeStyleManager.getInstance(myProject).reformat(statement);

            // in order for "..." trick to work, the statement must be added to constructor first
            PsiCodeBlock constructorBody = constructor.getBody();
            assert constructorBody != null;
            statement = (PsiExpressionStatement)constructorBody.add(statement);

            PsiAssignmentExpression assignment = (PsiAssignmentExpression)statement.getExpression();
            PsiReferenceExpression rExpr = (PsiReferenceExpression)assignment.getRExpression();
            assert rExpr != null;
            if (info.passAsParameter) {
                rExpr.replace(factory.createExpressionFromText(info.parameterName, null));
            }
            else {
                rExpr.delete();
            }
        }
    }
}
/**
 * Rewrites references to captured variables inside {@code scope}: outside
 * initializer code they are renamed to the generated field; inside
 * initializer code they are renamed to the constructor parameter, but only
 * when the variable is actually passed as one.
 */
private void renameReferences(PsiElement scope) throws IncorrectOperationException {
    final PsiElementFactory factory = JavaPsiFacade.getInstance(myManager.getProject()).getElementFactory();
    for (VariableInfo info : myVariableInfos) {
        for (PsiReference reference : ReferencesSearch.search(info.variable, new LocalSearchScope(scope))) {
            final PsiElement ref = reference.getElement();
            final PsiIdentifier identifier = (PsiIdentifier) ((PsiJavaCodeReferenceElement) ref).getReferenceNameElement();
            assert identifier != null;
            final boolean renameToFieldName = !isUsedInInitializer(ref);
            // A reference in initializer code keeps its original name unless
            // the variable is also passed as a constructor parameter.
            if (renameToFieldName || info.passAsParameter) {
                identifier.replace(factory.createIdentifier(renameToFieldName ? info.fieldName : info.parameterName));
            }
        }
    }
}
/**
 * Adds a {@code super(...)} call to the generated constructor, copying the
 * original constructor arguments and rewriting any {@code this} inside them
 * to {@code TargetClass.this} so they still resolve after the move.
 */
private void createSuperStatement(PsiMethod constructor, PsiExpression[] paramExpressions) throws IncorrectOperationException {
    PsiCodeBlock body = constructor.getBody();
    assert body != null;
    final PsiElementFactory factory = JavaPsiFacade.getInstance(constructor.getProject()).getElementFactory();

    PsiStatement statement = factory.createStatementFromText("super();", null);
    statement = (PsiStatement) CodeStyleManager.getInstance(myProject).reformat(statement);
    statement = (PsiStatement) body.add(statement);

    PsiMethodCallExpression methodCall = (PsiMethodCallExpression) ((PsiExpressionStatement) statement).getExpression();
    PsiExpressionList exprList = methodCall.getArgumentList();

    {
        // Build a "TargetClass.this" expression ("A" is a placeholder whose
        // qualifier is replaced with a reference to the target class).
        final PsiThisExpression qualifiedThis =
            (PsiThisExpression) factory.createExpressionFromText("A.this", null);
        final PsiJavaCodeReferenceElement targetClassRef = factory.createClassReferenceElement(myTargetClass);
        PsiJavaCodeReferenceElement thisQualifier = qualifiedThis.getQualifier();
        assert thisQualifier != null;
        thisQualifier.replace(targetClassRef);

        // Copy each argument, preserving its context-dependent references.
        for (PsiExpression expr : paramExpressions) {
            ChangeContextUtil.encodeContextInfo(expr, true);
            final PsiElement newExpr = exprList.add(expr);
            ChangeContextUtil.decodeContextInfo(newExpr, myTargetClass, qualifiedThis);
        }
    }

    // Rewrites the remaining bare "this" expressions in the copied arguments;
    // reference expressions are deliberately not descended into.
    class SupersConvertor extends JavaRecursiveElementVisitor {
        @Override public void visitThisExpression(PsiThisExpression expression) {
            try {
                final PsiThisExpression qualifiedThis =
                    (PsiThisExpression) factory.createExpressionFromText("A.this", null);
                final PsiJavaCodeReferenceElement targetClassRef = factory.createClassReferenceElement(myTargetClass);
                PsiJavaCodeReferenceElement thisQualifier = qualifiedThis.getQualifier();
                assert thisQualifier != null;
                thisQualifier.replace(targetClassRef);
                expression.replace(qualifiedThis);
            } catch (IncorrectOperationException e) {
                LOG.error(e);
            }
        }

        @Override public void visitReferenceExpression(PsiReferenceExpression expression) {
        }
    }
    final SupersConvertor supersConvertor = new SupersConvertor();
    methodCall.getArgumentList().accept(supersConvertor);
}
/**
 * Collects the type parameters that are referenced inside the anonymous
 * class but declared outside it; the generated class must re-declare them.
 */
private void calculateTypeParametersToCreate () {
    myAnonClass.accept(new JavaRecursiveElementWalkingVisitor() {
        @Override public void visitReferenceElement(PsiJavaCodeReferenceElement reference) {
            super.visitReferenceElement(reference);
            final PsiElement target = reference.resolve();
            if (!(target instanceof PsiTypeParameter)) {
                return;
            }
            final PsiTypeParameterListOwner owner = ((PsiTypeParameter) target).getOwner();
            // A parameter declared within the anonymous class itself needs no
            // re-declaration.
            if (owner != null && !PsiTreeUtil.isAncestor(myAnonClass, owner, false)) {
                myTypeParametersToCreate.add((PsiTypeParameter) target);
            }
        }
    });
}
}
| |
package se.emilsjolander.stickylistheaders;
import java.util.LinkedList;
import java.util.List;
import android.content.Context;
import android.database.DataSetObserver;
import android.graphics.drawable.Drawable;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Checkable;
import android.widget.ListAdapter;
/**
* A {@link ListAdapter} which wraps a {@link StickyListHeadersAdapter} and
* automatically handles wrapping the result of
* {@link StickyListHeadersAdapter#getView(int, android.view.View, android.view.ViewGroup)}
* and
* {@link StickyListHeadersAdapter#getHeaderView(int, android.view.View, android.view.ViewGroup)}
* appropriately.
*
* @author Jake Wharton (jakewharton@gmail.com)
*/
/**
 * Wraps a {@link StickyListHeadersAdapter} so each row is delivered as a
 * {@link WrapperView} bundling the item view with (optionally) its header
 * and a divider. Most {@link ListAdapter} calls are forwarded unchanged to
 * the wrapped adapter.
 */
class AdapterWrapper extends BaseAdapter implements StickyListHeadersAdapter {

    /** Callback fired when a header view rendered by this wrapper is tapped. */
    interface OnHeaderClickListener {
        void onHeaderClick(View header, int itemPosition, long headerId);
    }

    StickyListHeadersAdapter mDelegate;
    // Detached header views kept for reuse; cleared on data-set invalidation.
    private final List<View> mHeaderCache = new LinkedList<View>();
    private final Context mContext;
    private Drawable mDivider;
    private int mDividerHeight;
    private OnHeaderClickListener mOnHeaderClickListener;
    // Forwards the delegate's data-set events through this wrapper.
    private DataSetObserver mDataSetObserver = new DataSetObserver() {

        @Override
        public void onInvalidated() {
            // Cached headers were built from the old data; drop them.
            mHeaderCache.clear();
            AdapterWrapper.super.notifyDataSetInvalidated();
        }

        @Override
        public void onChanged() {
            AdapterWrapper.super.notifyDataSetChanged();
        }
    };

    AdapterWrapper(Context context,
            StickyListHeadersAdapter delegate) {
        this.mContext = context;
        this.mDelegate = delegate;
        // NOTE(review): the observer is never unregistered — presumably the
        // wrapper lives as long as the delegate; confirm against callers.
        delegate.registerDataSetObserver(mDataSetObserver);
    }

    /** Sets the divider drawn between rows and triggers a re-layout. */
    void setDivider(Drawable divider, int dividerHeight) {
        this.mDivider = divider;
        this.mDividerHeight = dividerHeight;
        notifyDataSetChanged();
    }

    @Override
    public boolean areAllItemsEnabled() {
        return mDelegate.areAllItemsEnabled();
    }

    @Override
    public boolean isEnabled(int position) {
        return mDelegate.isEnabled(position);
    }

    @Override
    public int getCount() {
        return mDelegate.getCount();
    }

    @Override
    public Object getItem(int position) {
        return mDelegate.getItem(position);
    }

    @Override
    public long getItemId(int position) {
        return mDelegate.getItemId(position);
    }

    @Override
    public boolean hasStableIds() {
        return mDelegate.hasStableIds();
    }

    @Override
    public int getItemViewType(int position) {
        return mDelegate.getItemViewType(position);
    }

    @Override
    public int getViewTypeCount() {
        return mDelegate.getViewTypeCount();
    }

    @Override
    public boolean isEmpty() {
        return mDelegate.isEmpty();
    }

    /**
     * Will recycle header from {@link WrapperView} if it exists
     */
    private void recycleHeaderIfExists(WrapperView wv) {
        View header = wv.mHeader;
        if (header != null) {
            // reset the headers visibility when adding it to the cache
            header.setVisibility(View.VISIBLE);
            mHeaderCache.add(header);
        }
    }

    /**
     * Get a header view. This optionally pulls a header from the supplied
     * {@link WrapperView} and will also recycle the divider if it exists.
     */
    private View configureHeader(WrapperView wv, final int position) {
        View header = wv.mHeader == null ? popHeader() : wv.mHeader;
        header = mDelegate.getHeaderView(position, header, wv);
        if (header == null) {
            throw new NullPointerException("Header view must not be null.");
        }
        //if the header isn't clickable, the listselector will be drawn on top of the header
        header.setClickable(true);
        header.setOnClickListener(new OnClickListener() {

            @Override
            public void onClick(View v) {
                if(mOnHeaderClickListener != null){
                    long headerId = mDelegate.getHeaderId(position);
                    mOnHeaderClickListener.onHeaderClick(v, position, headerId);
                }
            }
        });
        return header;
    }

    /** Pops a recycled header from the cache, or null when the cache is empty. */
    private View popHeader() {
        if(mHeaderCache.size() > 0) {
            return mHeaderCache.remove(0);
        }
        return null;
    }

    /** Returns {@code true} if the previous position has the same header ID. */
    private boolean previousPositionHasSameHeader(int position) {
        return position != 0
                && mDelegate.getHeaderId(position) == mDelegate
                        .getHeaderId(position - 1);
    }

    /**
     * Builds the wrapped row: the delegate's item view, plus a header when
     * this position starts a new header group, plus the configured divider.
     */
    @Override
    public WrapperView getView(int position, View convertView, ViewGroup parent) {
        WrapperView wv = (convertView == null) ? new WrapperView(mContext) : (WrapperView) convertView;
        View item = mDelegate.getView(position, wv.mItem, parent);
        View header = null;
        if (previousPositionHasSameHeader(position)) {
            // Same group as the row above: no header here; recycle any old one.
            recycleHeaderIfExists(wv);
        } else {
            header = configureHeader(wv, position);
        }
        // Swap the wrapper type so Checkable items keep working with ListView's
        // choice modes (and vice versa for non-Checkable items).
        if((item instanceof Checkable) && !(wv instanceof CheckableWrapperView)) {
            // Need to create Checkable subclass of WrapperView for ListView to work correctly
            wv = new CheckableWrapperView(mContext);
        } else if(!(item instanceof Checkable) && (wv instanceof CheckableWrapperView)) {
            wv = new WrapperView(mContext);
        }
        wv.update(item, header, mDivider, mDividerHeight);
        return wv;
    }

    public void setOnHeaderClickListener(OnHeaderClickListener onHeaderClickListener){
        this.mOnHeaderClickListener = onHeaderClickListener;
    }

    // NOTE(review): delegating equals/hashCode to mDelegate makes equality
    // asymmetric between the wrapper and the delegate — presumably intentional
    // so consumers treat the wrapper as "the same adapter"; confirm.
    @Override
    public boolean equals(Object o) {
        return mDelegate.equals(o);
    }

    @Override
    public View getDropDownView(int position, View convertView, ViewGroup parent) {
        return ((BaseAdapter) mDelegate).getDropDownView(position, convertView, parent);
    }

    @Override
    public int hashCode() {
        return mDelegate.hashCode();
    }

    @Override
    public void notifyDataSetChanged() {
        // Forward to the delegate; its observer brings the event back to us.
        ((BaseAdapter) mDelegate).notifyDataSetChanged();
    }

    @Override
    public void notifyDataSetInvalidated() {
        ((BaseAdapter) mDelegate).notifyDataSetInvalidated();
    }

    @Override
    public String toString() {
        return mDelegate.toString();
    }

    @Override
    public View getHeaderView(int position, View convertView, ViewGroup parent) {
        return mDelegate.getHeaderView(position, convertView, parent);
    }

    @Override
    public long getHeaderId(int position) {
        return mDelegate.getHeaderId(position);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.rmcontainer;
import java.util.EnumSet;
import java.util.List;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import org.apache.commons.lang.time.DateUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.server.api.protocolrecords.NMContainerStatus;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppRunningOnNodeEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptContainerAllocatedEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptContainerFinishedEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeCleanContainerEvent;
import org.apache.hadoop.yarn.state.InvalidStateTransitonException;
import org.apache.hadoop.yarn.state.MultipleArcTransition;
import org.apache.hadoop.yarn.state.SingleArcTransition;
import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
@SuppressWarnings({"unchecked", "rawtypes"})
public class RMContainerImpl implements RMContainer {
private static final Log LOG = LogFactory.getLog(RMContainerImpl.class);

// Lifecycle state machine shared by all containers; each instance gets its
// own state machine via stateMachineFactory.make(this) in the constructor.
private static final StateMachineFactory<RMContainerImpl, RMContainerState,
                                          RMContainerEventType, RMContainerEvent>
  stateMachineFactory = new StateMachineFactory<RMContainerImpl,
      RMContainerState, RMContainerEventType, RMContainerEvent>(
    RMContainerState.NEW)

  // Transitions from NEW state
  .addTransition(RMContainerState.NEW, RMContainerState.ALLOCATED,
      RMContainerEventType.START, new ContainerStartedTransition())
  .addTransition(RMContainerState.NEW, RMContainerState.KILLED,
      RMContainerEventType.KILL)
  .addTransition(RMContainerState.NEW, RMContainerState.RESERVED,
      RMContainerEventType.RESERVED, new ContainerReservedTransition())
  // RECOVER may land in RUNNING or COMPLETED depending on the NM report.
  .addTransition(RMContainerState.NEW,
      EnumSet.of(RMContainerState.RUNNING, RMContainerState.COMPLETED),
      RMContainerEventType.RECOVER, new ContainerRecoveredTransition())

  // Transitions from RESERVED state
  .addTransition(RMContainerState.RESERVED, RMContainerState.RESERVED,
      RMContainerEventType.RESERVED, new ContainerReservedTransition())
  .addTransition(RMContainerState.RESERVED, RMContainerState.ALLOCATED,
      RMContainerEventType.START, new ContainerStartedTransition())
  .addTransition(RMContainerState.RESERVED, RMContainerState.KILLED,
      RMContainerEventType.KILL) // nothing to do
  .addTransition(RMContainerState.RESERVED, RMContainerState.RELEASED,
      RMContainerEventType.RELEASED) // nothing to do

  // Transitions from ALLOCATED state
  .addTransition(RMContainerState.ALLOCATED, RMContainerState.ACQUIRED,
      RMContainerEventType.ACQUIRED, new AcquiredTransition())
  .addTransition(RMContainerState.ALLOCATED, RMContainerState.EXPIRED,
      RMContainerEventType.EXPIRE, new FinishedTransition())
  .addTransition(RMContainerState.ALLOCATED, RMContainerState.KILLED,
      RMContainerEventType.KILL, new FinishedTransition())

  // Transitions from ACQUIRED state
  .addTransition(RMContainerState.ACQUIRED, RMContainerState.RUNNING,
      RMContainerEventType.LAUNCHED, new LaunchedTransition())
  .addTransition(RMContainerState.ACQUIRED, RMContainerState.COMPLETED,
      RMContainerEventType.FINISHED, new ContainerFinishedAtAcquiredState())
  .addTransition(RMContainerState.ACQUIRED, RMContainerState.RELEASED,
      RMContainerEventType.RELEASED, new KillTransition())
  .addTransition(RMContainerState.ACQUIRED, RMContainerState.EXPIRED,
      RMContainerEventType.EXPIRE, new KillTransition())
  .addTransition(RMContainerState.ACQUIRED, RMContainerState.KILLED,
      RMContainerEventType.KILL, new KillTransition())

  // Transitions from RUNNING state
  .addTransition(RMContainerState.RUNNING, RMContainerState.COMPLETED,
      RMContainerEventType.FINISHED, new FinishedTransition())
  .addTransition(RMContainerState.RUNNING, RMContainerState.KILLED,
      RMContainerEventType.KILL, new KillTransition())
  .addTransition(RMContainerState.RUNNING, RMContainerState.RELEASED,
      RMContainerEventType.RELEASED, new KillTransition())
  // A running container no longer expires; the event is ignored.
  .addTransition(RMContainerState.RUNNING, RMContainerState.RUNNING,
      RMContainerEventType.EXPIRE)

  // Transitions from COMPLETED state
  .addTransition(RMContainerState.COMPLETED, RMContainerState.COMPLETED,
      EnumSet.of(RMContainerEventType.EXPIRE, RMContainerEventType.RELEASED,
          RMContainerEventType.KILL))

  // Transitions from EXPIRED state
  .addTransition(RMContainerState.EXPIRED, RMContainerState.EXPIRED,
      EnumSet.of(RMContainerEventType.RELEASED, RMContainerEventType.KILL))

  // Transitions from RELEASED state
  .addTransition(RMContainerState.RELEASED, RMContainerState.RELEASED,
      EnumSet.of(RMContainerEventType.EXPIRE, RMContainerEventType.RELEASED,
          RMContainerEventType.KILL, RMContainerEventType.FINISHED))

  // Transitions from KILLED state
  .addTransition(RMContainerState.KILLED, RMContainerState.KILLED,
      EnumSet.of(RMContainerEventType.EXPIRE, RMContainerEventType.RELEASED,
          RMContainerEventType.KILL, RMContainerEventType.FINISHED))

  // create the topology tables
  .installTopology();

private final StateMachine<RMContainerState, RMContainerEventType,
                                             RMContainerEvent> stateMachine;
// Guards the mutable state below (finishTime, finishedStatus, resourceRequests, ...).
private final ReadLock readLock;
private final WriteLock writeLock;
private final ContainerId containerId;
private final ApplicationAttemptId appAttemptId;
private final NodeId nodeId;
private final Container container;
private final RMContext rmContext;
private final EventHandler eventHandler;
private final ContainerAllocationExpirer containerAllocationExpirer;
private final String user;

// Reservation bookkeeping, set by ContainerReservedTransition.
private Resource reservedResource;
private NodeId reservedNode;
private Priority reservedPriority;
private long creationTime;
private long finishTime;
// Final status reported when the container finishes; null while running.
private ContainerStatus finishedStatus;
private boolean isAMContainer;
private List<ResourceRequest> resourceRequests;
/** Convenience constructor stamping the creation time with "now". */
public RMContainerImpl(Container container,
    ApplicationAttemptId appAttemptId, NodeId nodeId, String user,
    RMContext rmContext) {
  this(container, appAttemptId, nodeId, user, rmContext, System
      .currentTimeMillis());
}
/**
 * Full constructor: instantiates the per-container state machine, wires the
 * container to the RM dispatcher and allocation expirer, and publishes the
 * "container created/started" events to the history writer and metrics
 * publisher.
 */
public RMContainerImpl(Container container,
    ApplicationAttemptId appAttemptId, NodeId nodeId,
    String user, RMContext rmContext, long creationTime) {
  this.stateMachine = stateMachineFactory.make(this);
  this.containerId = container.getId();
  this.nodeId = nodeId;
  this.container = container;
  this.appAttemptId = appAttemptId;
  this.user = user;
  this.creationTime = creationTime;
  this.rmContext = rmContext;
  this.eventHandler = rmContext.getDispatcher().getEventHandler();
  this.containerAllocationExpirer = rmContext.getContainerAllocationExpirer();
  this.isAMContainer = false;
  this.resourceRequests = null;

  // One lock pair guards all mutable state of this container.
  ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
  this.readLock = lock.readLock();
  this.writeLock = lock.writeLock();

  rmContext.getRMApplicationHistoryWriter().containerStarted(this);
  rmContext.getSystemMetricsPublisher().containerCreated(
      this, this.creationTime);
}
/** Returns the immutable container id (no lock needed: final field). */
@Override
public ContainerId getContainerId() {
  return this.containerId;
}

/** Returns the attempt this container was allocated to (final field). */
@Override
public ApplicationAttemptId getApplicationAttemptId() {
  return this.appAttemptId;
}

/** Returns the underlying {@link Container} record (final field). */
@Override
public Container getContainer() {
  return this.container;
}
/**
 * Returns the current lifecycle state under the read lock (the state
 * machine is mutated by event handling on other threads).
 */
@Override
public RMContainerState getState() {
  // Lock acquired before try — the pattern the accessors below should follow.
  this.readLock.lock();

  try {
    return this.stateMachine.getCurrentState();
  } finally {
    this.readLock.unlock();
  }
}
// NOTE(review): the reservation fields below are read without the read lock
// while transitions may write them — presumably benign reference reads, but
// inconsistent with the locked accessors; confirm.

/** Resource amount currently reserved on a node, or null if none. */
@Override
public Resource getReservedResource() {
  return reservedResource;
}

/** Node the reservation was made on, or null if none. */
@Override
public NodeId getReservedNode() {
  return reservedNode;
}

/** Priority of the reservation, or null if none. */
@Override
public Priority getReservedPriority() {
  return reservedPriority;
}

/** Resource actually allocated to this container. */
@Override
public Resource getAllocatedResource() {
  return container.getResource();
}

/** Node this container was allocated on. */
@Override
public NodeId getAllocatedNode() {
  return container.getNodeId();
}

/** Priority the container was allocated at. */
@Override
public Priority getAllocatedPriority() {
  return container.getPriority();
}

/** Creation timestamp (millis) fixed at construction. */
@Override
public long getCreationTime() {
  return creationTime;
}
@Override
public long getFinishTime() {
try {
readLock.lock();
return finishTime;
} finally {
readLock.unlock();
}
}
@Override
public String getDiagnosticsInfo() {
try {
readLock.lock();
if (getFinishedStatus() != null) {
return getFinishedStatus().getDiagnostics();
} else {
return null;
}
} finally {
readLock.unlock();
}
}
@Override
public String getLogURL() {
try {
readLock.lock();
StringBuilder logURL = new StringBuilder();
logURL.append(WebAppUtils.getHttpSchemePrefix(rmContext
.getYarnConfiguration()));
logURL.append(WebAppUtils.getRunningLogURL(
container.getNodeHttpAddress(), ConverterUtils.toString(containerId),
user));
return logURL.toString();
} finally {
readLock.unlock();
}
}
@Override
public int getContainerExitStatus() {
try {
readLock.lock();
if (getFinishedStatus() != null) {
return getFinishedStatus().getExitStatus();
} else {
return 0;
}
} finally {
readLock.unlock();
}
}
@Override
public ContainerState getContainerState() {
try {
readLock.lock();
if (getFinishedStatus() != null) {
return getFinishedStatus().getState();
} else {
return ContainerState.RUNNING;
}
} finally {
readLock.unlock();
}
}
@Override
public List<ResourceRequest> getResourceRequests() {
try {
readLock.lock();
return resourceRequests;
} finally {
readLock.unlock();
}
}
public void setResourceRequests(List<ResourceRequest> requests) {
try {
writeLock.lock();
this.resourceRequests = requests;
} finally {
writeLock.unlock();
}
}
  // Containers are identified by their ContainerId in logs.
  @Override
  public String toString() {
    return containerId.toString();
  }
@Override
public boolean isAMContainer() {
try {
readLock.lock();
return isAMContainer;
} finally {
readLock.unlock();
}
}
public void setAMContainer(boolean isAMContainer) {
try {
writeLock.lock();
this.isAMContainer = isAMContainer;
} finally {
writeLock.unlock();
}
}
@Override
public void handle(RMContainerEvent event) {
LOG.debug("Processing " + event.getContainerId() + " of type " + event.getType());
try {
writeLock.lock();
RMContainerState oldState = getState();
try {
stateMachine.doTransition(event.getType(), event);
} catch (InvalidStateTransitonException e) {
LOG.error("Can't handle this event at current state", e);
LOG.error("Invalid event " + event.getType() +
" on container " + this.containerId);
}
if (oldState != getState()) {
LOG.info(event.getContainerId() + " Container Transitioned from "
+ oldState + " to " + getState());
}
}
finally {
writeLock.unlock();
}
}
  // Final status from the NM; null until a finish/kill transition runs.
  public ContainerStatus getFinishedStatus() {
    return finishedStatus;
  }
  /** No-op base transition; subclasses override transition() as needed. */
  private static class BaseTransition implements
      SingleArcTransition<RMContainerImpl, RMContainerEvent> {

    @Override
    public void transition(RMContainerImpl cont, RMContainerEvent event) {
    }
  }
  /**
   * Handles recovery after an RM restart from the NM-reported container
   * status: a COMPLETE report replays FinishedTransition and lands in
   * COMPLETED; a RUNNING report notifies the app and lands in RUNNING.
   */
  private static final class ContainerRecoveredTransition
      implements
      MultipleArcTransition<RMContainerImpl, RMContainerEvent, RMContainerState> {
    @Override
    public RMContainerState transition(RMContainerImpl container,
        RMContainerEvent event) {
      NMContainerStatus report =
          ((RMContainerRecoverEvent) event).getContainerReport();
      if (report.getContainerState().equals(ContainerState.COMPLETE)) {
        ContainerStatus status =
            ContainerStatus.newInstance(report.getContainerId(),
                report.getContainerState(), report.getDiagnostics(),
                report.getContainerExitStatus());

        // Reuse the normal finish path (metrics, history, app notification)
        // by synthesizing a FINISHED event.
        new FinishedTransition().transition(container,
            new RMContainerFinishedEvent(container.containerId, status,
                RMContainerEventType.FINISHED));
        return RMContainerState.COMPLETED;
      } else if (report.getContainerState().equals(ContainerState.RUNNING)) {
        // Tell the app
        container.eventHandler.handle(new RMAppRunningOnNodeEvent(container
            .getApplicationAttemptId().getApplicationId(), container.nodeId));
        return RMContainerState.RUNNING;
      } else {
        // This can never happen.
        LOG.warn("RMContainer received unexpected recover event with container"
            + " state " + report.getContainerState() + " while recovering.");
        return RMContainerState.RUNNING;
      }
    }
  }
private static final class ContainerReservedTransition extends
BaseTransition {
@Override
public void transition(RMContainerImpl container, RMContainerEvent event) {
RMContainerReservedEvent e = (RMContainerReservedEvent)event;
container.reservedResource = e.getReservedResource();
container.reservedNode = e.getReservedNode();
container.reservedPriority = e.getReservedPriority();
}
}
  /** Notifies the application attempt that a container was allocated. */
  private static final class ContainerStartedTransition extends
      BaseTransition {
    @Override
    public void transition(RMContainerImpl container, RMContainerEvent event) {
      container.eventHandler.handle(new RMAppAttemptContainerAllocatedEvent(
          container.appAttemptId));
    }
  }
  /**
   * Runs when the AM acquires the container: frees the originating
   * ResourceRequests, starts the launch-expiry timer, and informs the app
   * that it is running on this node.
   */
  private static final class AcquiredTransition extends BaseTransition {
    @Override
    public void transition(RMContainerImpl container, RMContainerEvent event) {
      // Clear ResourceRequest stored in RMContainer
      container.setResourceRequests(null);
      // Register with containerAllocationExpirer.
      container.containerAllocationExpirer.register(container.getContainerId());
      // Tell the app
      container.eventHandler.handle(new RMAppRunningOnNodeEvent(container
          .getApplicationAttemptId().getApplicationId(), container.nodeId));
    }
  }
  /** The container launched in time: cancel the launch-expiry timer. */
  private static final class LaunchedTransition extends BaseTransition {
    @Override
    public void transition(RMContainerImpl container, RMContainerEvent event) {
      // Unregister from containerAllocationExpirer.
      container.containerAllocationExpirer.unregister(container
          .getContainerId());
    }
  }
private static class FinishedTransition extends BaseTransition {
@Override
public void transition(RMContainerImpl container, RMContainerEvent event) {
RMContainerFinishedEvent finishedEvent = (RMContainerFinishedEvent) event;
container.finishTime = System.currentTimeMillis();
container.finishedStatus = finishedEvent.getRemoteContainerStatus();
// Inform AppAttempt
// container.getContainer() can return null when a RMContainer is a
// reserved container
updateAttemptMetrics(container);
container.eventHandler.handle(new RMAppAttemptContainerFinishedEvent(
container.appAttemptId, finishedEvent.getRemoteContainerStatus(),
container.getAllocatedNode()));
container.rmContext.getRMApplicationHistoryWriter().containerFinished(
container);
container.rmContext.getSystemMetricsPublisher().containerFinished(
container, container.finishTime);
}
private static void updateAttemptMetrics(RMContainerImpl container) {
// If this is a preempted container, update preemption metrics
Resource resource = container.getContainer().getResource();
RMAppAttempt rmAttempt = container.rmContext.getRMApps()
.get(container.getApplicationAttemptId().getApplicationId())
.getCurrentAppAttempt();
if (ContainerExitStatus.PREEMPTED == container.finishedStatus
.getExitStatus()) {
rmAttempt.getRMAppAttemptMetrics().updatePreemptionInfo(resource,
container);
}
if (rmAttempt != null) {
long usedMillis = container.finishTime - container.creationTime;
long memorySeconds = resource.getMemory()
* usedMillis / DateUtils.MILLIS_PER_SECOND;
long vcoreSeconds = resource.getVirtualCores()
* usedMillis / DateUtils.MILLIS_PER_SECOND;
rmAttempt.getRMAppAttemptMetrics()
.updateAggregateAppResourceUsage(memorySeconds,vcoreSeconds);
}
}
}
  /**
   * Finish while ACQUIRED: the launch-expiry timer is still armed, so
   * cancel it before running the common finish path.
   */
  private static final class ContainerFinishedAtAcquiredState extends
      FinishedTransition {
    @Override
    public void transition(RMContainerImpl container, RMContainerEvent event) {
      // Unregister from containerAllocationExpirer.
      container.containerAllocationExpirer.unregister(container
          .getContainerId());

      // Inform AppAttempt
      super.transition(container, event);
    }
  }
  /**
   * Kill: cancel the launch-expiry timer, tell the node to clean up the
   * container, then run the common finish path.
   */
  private static final class KillTransition extends FinishedTransition {

    @Override
    public void transition(RMContainerImpl container, RMContainerEvent event) {
      // Unregister from containerAllocationExpirer.
      container.containerAllocationExpirer.unregister(container
          .getContainerId());

      // Inform node
      container.eventHandler.handle(new RMNodeCleanContainerEvent(
          container.nodeId, container.containerId));

      // Inform appAttempt
      super.transition(container, event);
    }
  }
  /**
   * Builds a point-in-time ContainerReport snapshot under the read lock so
   * all fields come from a consistent state.
   */
  @Override
  public ContainerReport createContainerReport() {
    this.readLock.lock();
    ContainerReport containerReport = null;
    try {
      containerReport = ContainerReport.newInstance(this.getContainerId(),
          this.getAllocatedResource(), this.getAllocatedNode(),
          this.getAllocatedPriority(), this.getCreationTime(),
          this.getFinishTime(), this.getDiagnosticsInfo(), this.getLogURL(),
          this.getContainerExitStatus(), this.getContainerState(),
          this.getNodeHttpAddress());
    } finally {
      this.readLock.unlock();
    }
    return containerReport;
  }
@Override
public String getNodeHttpAddress() {
try {
readLock.lock();
if (container.getNodeHttpAddress() != null) {
StringBuilder httpAddress = new StringBuilder();
httpAddress.append(WebAppUtils.getHttpSchemePrefix(rmContext
.getYarnConfiguration()));
httpAddress.append(container.getNodeHttpAddress());
return httpAddress.toString();
} else {
return null;
}
} finally {
readLock.unlock();
}
}
}
| |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.backends.jglfw;
import static com.badlogic.jglfw.Glfw.*;
import java.awt.Color;
import java.awt.FlowLayout;
import java.awt.event.WindowEvent;
import java.awt.event.WindowFocusListener;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.OverlayLayout;
import javax.swing.SwingUtilities;
import javax.swing.border.EmptyBorder;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.InputProcessor;
import com.badlogic.gdx.InputEventQueue;
import com.badlogic.gdx.Input.Buttons;
import com.badlogic.gdx.utils.IntSet;
import com.badlogic.jglfw.GlfwCallbackAdapter;
/** An implementation of the {@link Input} interface hooking GLFW panel for input.
* @author mzechner
* @author Nathan Sweet */
public class JglfwInput implements Input {
	final JglfwApplication app;
	final InputEventQueue processorQueue; // non-null only when constructed with queueEvents=true
	InputProcessor processor;
	int pressedKeys = 0; // count of currently-held keys, for isKeyPressed(ANY_KEY)
	boolean keyJustPressed = false;
	boolean[] justPressedKeys = new boolean[256]; // per-frame flags; cleared in update()
	boolean justTouched;
	int deltaX, deltaY; // mouse movement since the last event; reset in update()
	long currentEventTime;
	/**
	 * Hooks GLFW input into the application. The anonymous InputProcessor
	 * updates this class's state (pressed counts, deltas, justTouched) and
	 * forwards each event to the user-set {@link #processor}, requesting a
	 * render on every event so non-continuous rendering wakes up. When
	 * queueEvents is true, events are buffered in an InputEventQueue and
	 * drained on update(); otherwise they are delivered directly.
	 */
	public JglfwInput (final JglfwApplication app, boolean queueEvents) {
		this.app = app;

		InputProcessor inputProcessor = new InputProcessor() {
			private int mouseX, mouseY; // last cursor position, for delta computation

			public boolean keyDown (int keycode) {
				pressedKeys++;
				keyJustPressed = true;
				justPressedKeys[keycode] = true;
				app.graphics.requestRendering();
				return processor != null ? processor.keyDown(keycode) : false;
			}

			public boolean keyUp (int keycode) {
				pressedKeys--;
				app.graphics.requestRendering();
				return processor != null ? processor.keyUp(keycode) : false;
			}

			public boolean keyTyped (char character) {
				app.graphics.requestRendering();
				return processor != null ? processor.keyTyped(character) : false;
			}

			public boolean touchDown (int screenX, int screenY, int pointer, int button) {
				justTouched = true;
				app.graphics.requestRendering();
				return processor != null ? processor.touchDown(screenX, screenY, pointer, button) : false;
			}

			public boolean touchUp (int screenX, int screenY, int pointer, int button) {
				app.graphics.requestRendering();
				return processor != null ? processor.touchUp(screenX, screenY, pointer, button) : false;
			}

			public boolean touchDragged (int screenX, int screenY, int pointer) {
				deltaX = screenX - mouseX;
				deltaY = screenY - mouseY;
				mouseX = screenX;
				mouseY = screenY;
				app.graphics.requestRendering();
				// NOTE(review): forwards pointer 0 regardless of the incoming
				// pointer argument — desktop mouse has a single pointer.
				return processor != null ? processor.touchDragged(mouseX, mouseY, 0) : false;
			}

			public boolean mouseMoved (int screenX, int screenY) {
				deltaX = screenX - mouseX;
				deltaY = screenY - mouseY;
				mouseX = screenX;
				mouseY = screenY;
				app.graphics.requestRendering();
				return processor != null ? processor.mouseMoved(mouseX, mouseY) : false;
			}

			public boolean scrolled (int amount) {
				app.graphics.requestRendering();
				return processor != null ? processor.scrolled(amount) : false;
			}
		};

		if (queueEvents)
			inputProcessor = processorQueue = new InputEventQueue(inputProcessor);
		else
			processorQueue = null;
		app.getCallbacks().add(new GlfwInputProcessor(inputProcessor));
	}
	/**
	 * Per-frame bookkeeping: resets deltas and the just-touched/just-pressed
	 * flags, then either drains queued events or polls GLFW directly.
	 */
	public void update () {
		deltaX = 0;
		deltaY = 0;
		justTouched = false;
		if (keyJustPressed) {
			keyJustPressed = false;
			for (int i = 0; i < justPressedKeys.length; i++) {
				justPressedKeys[i] = false;
			}
		}

		if (processorQueue != null)
			processorQueue.drain(); // Main loop is handled elsewhere and events are queued.
		else {
			currentEventTime = System.nanoTime();
			glfwPollEvents(); // Use GLFW main loop to process events.
		}
	}
	// Desktop has no accelerometer; all axes report 0.
	public float getAccelerometerX () {
		return 0;
	}

	public float getAccelerometerY () {
		return 0;
	}

	public float getAccelerometerZ () {
		return 0;
	}
	// Cursor position queried straight from GLFW. The desktop mouse is the
	// only pointer, so any pointer index > 0 reports 0.
	public int getX () {
		return glfwGetCursorPosX(app.graphics.window);
	}

	public int getX (int pointer) {
		return pointer > 0 ? 0 : getX();
	}

	public int getY () {
		return glfwGetCursorPosY(app.graphics.window);
	}

	public int getY (int pointer) {
		return pointer > 0 ? 0 : getY();
	}

	// Movement since the last mouse event; reset each frame in update().
	public int getDeltaX () {
		return deltaX;
	}

	public int getDeltaX (int pointer) {
		return pointer > 0 ? 0 : deltaX;
	}

	public int getDeltaY () {
		return deltaY;
	}

	public int getDeltaY (int pointer) {
		return pointer > 0 ? 0 : deltaY;
	}
	// "Touched" on desktop means any of the first three mouse buttons is down.
	public boolean isTouched () {
		return glfwGetMouseButton(app.graphics.window, 0) || glfwGetMouseButton(app.graphics.window, 1)
			|| glfwGetMouseButton(app.graphics.window, 2);
	}

	public boolean isTouched (int pointer) {
		return pointer > 0 ? false : isTouched();
	}

	public boolean justTouched () {
		return justTouched;
	}

	public boolean isButtonPressed (int button) {
		return glfwGetMouseButton(app.graphics.window, button);
	}
	public boolean isKeyPressed (int key) {
		if (key == Input.Keys.ANY_KEY) return pressedKeys > 0;
		// SYM maps to either super (cmd/windows) key.
		if (key == Input.Keys.SYM)
			return glfwGetKey(app.graphics.window, GLFW_KEY_LEFT_SUPER) || glfwGetKey(app.graphics.window, GLFW_KEY_RIGHT_SUPER);
		return glfwGetKey(app.graphics.window, getJglfwKeyCode(key));
	}
@Override
public boolean isKeyJustPressed (int key) {
if (key == Input.Keys.ANY_KEY) {
return keyJustPressed;
}
if (key < 0 || key > 256) {
return false;
}
return justPressedKeys[key];
}
	// The following are no-ops or fixed values on desktop: there is no
	// on-screen keyboard, vibrator, compass, or rotation sensor.
	public void setOnscreenKeyboardVisible (boolean visible) {
	}

	public void vibrate (int milliseconds) {
	}

	public void vibrate (long[] pattern, int repeat) {
	}

	public void cancelVibrate () {
	}

	public float getAzimuth () {
		return 0;
	}

	public float getPitch () {
		return 0;
	}

	public float getRoll () {
		return 0;
	}

	public void getRotationMatrix (float[] matrix) {
	}

	// Queued mode reports the queue's per-event time; direct mode reports
	// the time stamped before the last glfwPollEvents() in update().
	public long getCurrentEventTime () {
		return processorQueue != null ? processorQueue.getCurrentEventTime() : currentEventTime;
	}

	// Back/menu keys are Android concepts; never catchable on desktop.
	public void setCatchBackKey (boolean catchBack) {
	}

	public boolean isCatchBackKey () {
		return false;
	}

	public void setCatchMenuKey (boolean catchMenu) {
	}

	@Override
	public boolean isCatchMenuKey () {
		return false;
	}

	public void setInputProcessor (InputProcessor processor) {
		this.processor = processor;
	}

	public InputProcessor getInputProcessor () {
		return processor;
	}

	public boolean isPeripheralAvailable (Peripheral peripheral) {
		return peripheral == Peripheral.HardwareKeyboard;
	}

	public int getRotation () {
		return 0;
	}

	public Orientation getNativeOrientation () {
		return Orientation.Landscape;
	}

	// Cursor capture hides the cursor and locks it to the window.
	public void setCursorCatched (boolean captured) {
		glfwSetInputMode(app.graphics.window, GLFW_CURSOR_MODE, captured ? GLFW_CURSOR_CAPTURED : GLFW_CURSOR_NORMAL);
	}

	public boolean isCursorCatched () {
		return glfwGetInputMode(app.graphics.window, GLFW_CURSOR_MODE) == GLFW_CURSOR_CAPTURED;
	}

	public void setCursorPosition (int x, int y) {
		glfwSetCursorPos(app.graphics.window, x, y);
	}
	/**
	 * Shows a modal Swing OK/Cancel dialog with a single text field and a
	 * gray placeholder label that hides while the field has text. Runs on
	 * the EDT; OK delivers the field contents to the listener, anything
	 * else (Cancel, close) delivers canceled().
	 */
	public void getTextInput (final TextInputListener listener, final String title, final String text, final String hint) {
		SwingUtilities.invokeLater(new Runnable() {
			public void run () {
				JPanel panel = new JPanel(new FlowLayout());

				JPanel textPanel = new JPanel() {
					// Overlapping children (field + placeholder) must not be
					// optimized-drawn, or the overlay would not repaint correctly.
					public boolean isOptimizedDrawingEnabled () {
						return false;
					};
				};
				textPanel.setLayout(new OverlayLayout(textPanel));
				panel.add(textPanel);

				final JTextField textField = new JTextField(20);
				textField.setText(text);
				textField.setAlignmentX(0.0f);
				textPanel.add(textField);

				final JLabel placeholderLabel = new JLabel(hint);
				placeholderLabel.setForeground(Color.GRAY);
				placeholderLabel.setAlignmentX(0.0f);
				textPanel.add(placeholderLabel, 0);

				// Hide the placeholder as soon as the field has any text.
				textField.getDocument().addDocumentListener(new DocumentListener() {
					public void removeUpdate (DocumentEvent event) {
						this.updated();
					}

					public void insertUpdate (DocumentEvent event) {
						this.updated();
					}

					public void changedUpdate (DocumentEvent event) {
						this.updated();
					}

					private void updated () {
						placeholderLabel.setVisible(textField.getText().length() == 0);
					}
				});

				JOptionPane pane = new JOptionPane(panel, JOptionPane.QUESTION_MESSAGE, JOptionPane.OK_CANCEL_OPTION, null, null,
					null);

				pane.setComponentOrientation(JOptionPane.getRootFrame().getComponentOrientation());
				pane.selectInitialValue();

				// Align the placeholder with the field's text by reusing the
				// field's border insets.
				placeholderLabel.setBorder(new EmptyBorder(textField.getBorder().getBorderInsets(textField)));

				JDialog dialog = pane.createDialog(null, title);
				dialog.addWindowFocusListener(new WindowFocusListener() {
					public void windowLostFocus (WindowEvent arg0) {
					}

					public void windowGainedFocus (WindowEvent arg0) {
						textField.requestFocusInWindow();
					}
				});
				dialog.setVisible(true);
				dialog.dispose();

				Object selectedValue = pane.getValue();

				if (selectedValue != null && (selectedValue instanceof Integer) && (Integer)selectedValue == JOptionPane.OK_OPTION)
					listener.input(textField.getText());
				else
					listener.canceled();
			}
		});
	}
static char characterForKeyCode (int key) {
// Map certain key codes to character codes.
switch (key) {
case Keys.BACKSPACE:
return 8;
case Keys.TAB:
return '\t';
case Keys.FORWARD_DEL:
return 127;
}
return 0;
}
	/** Translates a GLFW key code into the corresponding libGDX
	 * {@link Input.Keys} constant. Keys with no libGDX equivalent (world
	 * keys, lock keys, print-screen/pause, F13-F25) map to Keys.UNKNOWN.
	 * NOTE(review): the parameter is named lwjglKeyCode but receives GLFW
	 * key codes — rename candidate. */
	static public int getGdxKeyCode (int lwjglKeyCode) {
		switch (lwjglKeyCode) {
		case GLFW_KEY_SPACE:
			return Input.Keys.SPACE;
		case GLFW_KEY_APOSTROPHE:
			return Input.Keys.APOSTROPHE;
		case GLFW_KEY_COMMA:
			return Input.Keys.COMMA;
		case GLFW_KEY_MINUS:
			return Input.Keys.MINUS;
		case GLFW_KEY_PERIOD:
			return Input.Keys.PERIOD;
		case GLFW_KEY_SLASH:
			return Input.Keys.SLASH;
		case GLFW_KEY_0:
			return Input.Keys.NUM_0;
		case GLFW_KEY_1:
			return Input.Keys.NUM_1;
		case GLFW_KEY_2:
			return Input.Keys.NUM_2;
		case GLFW_KEY_3:
			return Input.Keys.NUM_3;
		case GLFW_KEY_4:
			return Input.Keys.NUM_4;
		case GLFW_KEY_5:
			return Input.Keys.NUM_5;
		case GLFW_KEY_6:
			return Input.Keys.NUM_6;
		case GLFW_KEY_7:
			return Input.Keys.NUM_7;
		case GLFW_KEY_8:
			return Input.Keys.NUM_8;
		case GLFW_KEY_9:
			return Input.Keys.NUM_9;
		case GLFW_KEY_SEMICOLON:
			return Input.Keys.SEMICOLON;
		case GLFW_KEY_EQUAL:
			return Input.Keys.EQUALS;
		case GLFW_KEY_A:
			return Input.Keys.A;
		case GLFW_KEY_B:
			return Input.Keys.B;
		case GLFW_KEY_C:
			return Input.Keys.C;
		case GLFW_KEY_D:
			return Input.Keys.D;
		case GLFW_KEY_E:
			return Input.Keys.E;
		case GLFW_KEY_F:
			return Input.Keys.F;
		case GLFW_KEY_G:
			return Input.Keys.G;
		case GLFW_KEY_H:
			return Input.Keys.H;
		case GLFW_KEY_I:
			return Input.Keys.I;
		case GLFW_KEY_J:
			return Input.Keys.J;
		case GLFW_KEY_K:
			return Input.Keys.K;
		case GLFW_KEY_L:
			return Input.Keys.L;
		case GLFW_KEY_M:
			return Input.Keys.M;
		case GLFW_KEY_N:
			return Input.Keys.N;
		case GLFW_KEY_O:
			return Input.Keys.O;
		case GLFW_KEY_P:
			return Input.Keys.P;
		case GLFW_KEY_Q:
			return Input.Keys.Q;
		case GLFW_KEY_R:
			return Input.Keys.R;
		case GLFW_KEY_S:
			return Input.Keys.S;
		case GLFW_KEY_T:
			return Input.Keys.T;
		case GLFW_KEY_U:
			return Input.Keys.U;
		case GLFW_KEY_V:
			return Input.Keys.V;
		case GLFW_KEY_W:
			return Input.Keys.W;
		case GLFW_KEY_X:
			return Input.Keys.X;
		case GLFW_KEY_Y:
			return Input.Keys.Y;
		case GLFW_KEY_Z:
			return Input.Keys.Z;
		case GLFW_KEY_LEFT_BRACKET:
			return Input.Keys.LEFT_BRACKET;
		case GLFW_KEY_BACKSLASH:
			return Input.Keys.BACKSLASH;
		case GLFW_KEY_RIGHT_BRACKET:
			return Input.Keys.RIGHT_BRACKET;
		case GLFW_KEY_GRAVE_ACCENT:
			return Input.Keys.GRAVE;
		case GLFW_KEY_WORLD_1:
		case GLFW_KEY_WORLD_2:
			return Input.Keys.UNKNOWN;
		case GLFW_KEY_ESCAPE:
			return Input.Keys.ESCAPE;
		case GLFW_KEY_ENTER:
			return Input.Keys.ENTER;
		case GLFW_KEY_TAB:
			return Input.Keys.TAB;
		case GLFW_KEY_BACKSPACE:
			return Input.Keys.BACKSPACE;
		case GLFW_KEY_INSERT:
			return Input.Keys.INSERT;
		case GLFW_KEY_DELETE:
			return Input.Keys.FORWARD_DEL;
		case GLFW_KEY_RIGHT:
			return Input.Keys.RIGHT;
		case GLFW_KEY_LEFT:
			return Input.Keys.LEFT;
		case GLFW_KEY_DOWN:
			return Input.Keys.DOWN;
		case GLFW_KEY_UP:
			return Input.Keys.UP;
		case GLFW_KEY_PAGE_UP:
			return Input.Keys.PAGE_UP;
		case GLFW_KEY_PAGE_DOWN:
			return Input.Keys.PAGE_DOWN;
		case GLFW_KEY_HOME:
			return Input.Keys.HOME;
		case GLFW_KEY_END:
			return Input.Keys.END;
		case GLFW_KEY_CAPS_LOCK:
		case GLFW_KEY_SCROLL_LOCK:
		case GLFW_KEY_NUM_LOCK:
		case GLFW_KEY_PRINT_SCREEN:
		case GLFW_KEY_PAUSE:
			return Input.Keys.UNKNOWN;
		case GLFW_KEY_F1:
			return Input.Keys.F1;
		case GLFW_KEY_F2:
			return Input.Keys.F2;
		case GLFW_KEY_F3:
			return Input.Keys.F3;
		case GLFW_KEY_F4:
			return Input.Keys.F4;
		case GLFW_KEY_F5:
			return Input.Keys.F5;
		case GLFW_KEY_F6:
			return Input.Keys.F6;
		case GLFW_KEY_F7:
			return Input.Keys.F7;
		case GLFW_KEY_F8:
			return Input.Keys.F8;
		case GLFW_KEY_F9:
			return Input.Keys.F9;
		case GLFW_KEY_F10:
			return Input.Keys.F10;
		case GLFW_KEY_F11:
			return Input.Keys.F11;
		case GLFW_KEY_F12:
			return Input.Keys.F12;
		case GLFW_KEY_F13:
		case GLFW_KEY_F14:
		case GLFW_KEY_F15:
		case GLFW_KEY_F16:
		case GLFW_KEY_F17:
		case GLFW_KEY_F18:
		case GLFW_KEY_F19:
		case GLFW_KEY_F20:
		case GLFW_KEY_F21:
		case GLFW_KEY_F22:
		case GLFW_KEY_F23:
		case GLFW_KEY_F24:
		case GLFW_KEY_F25:
			return Input.Keys.UNKNOWN;
		case GLFW_KEY_KP_0:
			return Input.Keys.NUMPAD_0;
		case GLFW_KEY_KP_1:
			return Input.Keys.NUMPAD_1;
		case GLFW_KEY_KP_2:
			return Input.Keys.NUMPAD_2;
		case GLFW_KEY_KP_3:
			return Input.Keys.NUMPAD_3;
		case GLFW_KEY_KP_4:
			return Input.Keys.NUMPAD_4;
		case GLFW_KEY_KP_5:
			return Input.Keys.NUMPAD_5;
		case GLFW_KEY_KP_6:
			return Input.Keys.NUMPAD_6;
		case GLFW_KEY_KP_7:
			return Input.Keys.NUMPAD_7;
		case GLFW_KEY_KP_8:
			return Input.Keys.NUMPAD_8;
		case GLFW_KEY_KP_9:
			return Input.Keys.NUMPAD_9;
		// Keypad keys collapse onto their main-row libGDX equivalents.
		case GLFW_KEY_KP_DECIMAL:
			return Input.Keys.PERIOD;
		case GLFW_KEY_KP_DIVIDE:
			return Input.Keys.SLASH;
		case GLFW_KEY_KP_MULTIPLY:
			return Input.Keys.STAR;
		case GLFW_KEY_KP_SUBTRACT:
			return Input.Keys.MINUS;
		case GLFW_KEY_KP_ADD:
			return Input.Keys.PLUS;
		case GLFW_KEY_KP_ENTER:
			return Input.Keys.ENTER;
		case GLFW_KEY_KP_EQUAL:
			return Input.Keys.EQUALS;
		case GLFW_KEY_LEFT_SHIFT:
			return Input.Keys.SHIFT_LEFT;
		case GLFW_KEY_LEFT_CONTROL:
			return Input.Keys.CONTROL_LEFT;
		case GLFW_KEY_LEFT_ALT:
			return Input.Keys.ALT_LEFT;
		case GLFW_KEY_LEFT_SUPER:
			return Input.Keys.SYM;
		case GLFW_KEY_RIGHT_SHIFT:
			return Input.Keys.SHIFT_RIGHT;
		case GLFW_KEY_RIGHT_CONTROL:
			return Input.Keys.CONTROL_RIGHT;
		case GLFW_KEY_RIGHT_ALT:
			return Input.Keys.ALT_RIGHT;
		case GLFW_KEY_RIGHT_SUPER:
			return Input.Keys.SYM;
		case GLFW_KEY_MENU:
			return Input.Keys.MENU;
		default:
			return Input.Keys.UNKNOWN;
		}
	}
	/** Translates a libGDX {@link Input.Keys} constant into a GLFW key code;
	 * unmapped codes return 0.
	 * NOTE(review): the mapping is not a perfect inverse of getGdxKeyCode —
	 * Keys.SLASH/STAR/MINUS/PLUS map to the KEYPAD variants and Keys.PERIOD
	 * to the main-row key; verify callers (isKeyPressed) accept this. */
	static public int getJglfwKeyCode (int gdxKeyCode) {
		switch (gdxKeyCode) {
		case Input.Keys.SPACE:
			return GLFW_KEY_SPACE;
		case Input.Keys.APOSTROPHE:
			return GLFW_KEY_APOSTROPHE;
		case Input.Keys.COMMA:
			return GLFW_KEY_COMMA;
		case Input.Keys.PERIOD:
			return GLFW_KEY_PERIOD;
		case Input.Keys.NUM_0:
			return GLFW_KEY_0;
		case Input.Keys.NUM_1:
			return GLFW_KEY_1;
		case Input.Keys.NUM_2:
			return GLFW_KEY_2;
		case Input.Keys.NUM_3:
			return GLFW_KEY_3;
		case Input.Keys.NUM_4:
			return GLFW_KEY_4;
		case Input.Keys.NUM_5:
			return GLFW_KEY_5;
		case Input.Keys.NUM_6:
			return GLFW_KEY_6;
		case Input.Keys.NUM_7:
			return GLFW_KEY_7;
		case Input.Keys.NUM_8:
			return GLFW_KEY_8;
		case Input.Keys.NUM_9:
			return GLFW_KEY_9;
		case Input.Keys.SEMICOLON:
			return GLFW_KEY_SEMICOLON;
		case Input.Keys.EQUALS:
			return GLFW_KEY_EQUAL;
		case Input.Keys.A:
			return GLFW_KEY_A;
		case Input.Keys.B:
			return GLFW_KEY_B;
		case Input.Keys.C:
			return GLFW_KEY_C;
		case Input.Keys.D:
			return GLFW_KEY_D;
		case Input.Keys.E:
			return GLFW_KEY_E;
		case Input.Keys.F:
			return GLFW_KEY_F;
		case Input.Keys.G:
			return GLFW_KEY_G;
		case Input.Keys.H:
			return GLFW_KEY_H;
		case Input.Keys.I:
			return GLFW_KEY_I;
		case Input.Keys.J:
			return GLFW_KEY_J;
		case Input.Keys.K:
			return GLFW_KEY_K;
		case Input.Keys.L:
			return GLFW_KEY_L;
		case Input.Keys.M:
			return GLFW_KEY_M;
		case Input.Keys.N:
			return GLFW_KEY_N;
		case Input.Keys.O:
			return GLFW_KEY_O;
		case Input.Keys.P:
			return GLFW_KEY_P;
		case Input.Keys.Q:
			return GLFW_KEY_Q;
		case Input.Keys.R:
			return GLFW_KEY_R;
		case Input.Keys.S:
			return GLFW_KEY_S;
		case Input.Keys.T:
			return GLFW_KEY_T;
		case Input.Keys.U:
			return GLFW_KEY_U;
		case Input.Keys.V:
			return GLFW_KEY_V;
		case Input.Keys.W:
			return GLFW_KEY_W;
		case Input.Keys.X:
			return GLFW_KEY_X;
		case Input.Keys.Y:
			return GLFW_KEY_Y;
		case Input.Keys.Z:
			return GLFW_KEY_Z;
		case Input.Keys.LEFT_BRACKET:
			return GLFW_KEY_LEFT_BRACKET;
		case Input.Keys.BACKSLASH:
			return GLFW_KEY_BACKSLASH;
		case Input.Keys.RIGHT_BRACKET:
			return GLFW_KEY_RIGHT_BRACKET;
		case Input.Keys.GRAVE:
			return GLFW_KEY_GRAVE_ACCENT;
		case Input.Keys.ESCAPE:
			return GLFW_KEY_ESCAPE;
		case Input.Keys.ENTER:
			return GLFW_KEY_ENTER;
		case Input.Keys.TAB:
			return GLFW_KEY_TAB;
		case Input.Keys.BACKSPACE:
			return GLFW_KEY_BACKSPACE;
		case Input.Keys.INSERT:
			return GLFW_KEY_INSERT;
		case Input.Keys.FORWARD_DEL:
			return GLFW_KEY_DELETE;
		case Input.Keys.RIGHT:
			return GLFW_KEY_RIGHT;
		case Input.Keys.LEFT:
			return GLFW_KEY_LEFT;
		case Input.Keys.DOWN:
			return GLFW_KEY_DOWN;
		case Input.Keys.UP:
			return GLFW_KEY_UP;
		case Input.Keys.PAGE_UP:
			return GLFW_KEY_PAGE_UP;
		case Input.Keys.PAGE_DOWN:
			return GLFW_KEY_PAGE_DOWN;
		case Input.Keys.HOME:
			return GLFW_KEY_HOME;
		case Input.Keys.END:
			return GLFW_KEY_END;
		case Input.Keys.F1:
			return GLFW_KEY_F1;
		case Input.Keys.F2:
			return GLFW_KEY_F2;
		case Input.Keys.F3:
			return GLFW_KEY_F3;
		case Input.Keys.F4:
			return GLFW_KEY_F4;
		case Input.Keys.F5:
			return GLFW_KEY_F5;
		case Input.Keys.F6:
			return GLFW_KEY_F6;
		case Input.Keys.F7:
			return GLFW_KEY_F7;
		case Input.Keys.F8:
			return GLFW_KEY_F8;
		case Input.Keys.F9:
			return GLFW_KEY_F9;
		case Input.Keys.F10:
			return GLFW_KEY_F10;
		case Input.Keys.F11:
			return GLFW_KEY_F11;
		case Input.Keys.F12:
			return GLFW_KEY_F12;
		case Input.Keys.NUMPAD_0:
			return GLFW_KEY_KP_0;
		case Input.Keys.NUMPAD_1:
			return GLFW_KEY_KP_1;
		case Input.Keys.NUMPAD_2:
			return GLFW_KEY_KP_2;
		case Input.Keys.NUMPAD_3:
			return GLFW_KEY_KP_3;
		case Input.Keys.NUMPAD_4:
			return GLFW_KEY_KP_4;
		case Input.Keys.NUMPAD_5:
			return GLFW_KEY_KP_5;
		case Input.Keys.NUMPAD_6:
			return GLFW_KEY_KP_6;
		case Input.Keys.NUMPAD_7:
			return GLFW_KEY_KP_7;
		case Input.Keys.NUMPAD_8:
			return GLFW_KEY_KP_8;
		case Input.Keys.NUMPAD_9:
			return GLFW_KEY_KP_9;
		case Input.Keys.SLASH:
			return GLFW_KEY_KP_DIVIDE;
		case Input.Keys.STAR:
			return GLFW_KEY_KP_MULTIPLY;
		case Input.Keys.MINUS:
			return GLFW_KEY_KP_SUBTRACT;
		case Input.Keys.PLUS:
			return GLFW_KEY_KP_ADD;
		case Input.Keys.SHIFT_LEFT:
			return GLFW_KEY_LEFT_SHIFT;
		case Input.Keys.CONTROL_LEFT:
			return GLFW_KEY_LEFT_CONTROL;
		case Input.Keys.ALT_LEFT:
			return GLFW_KEY_LEFT_ALT;
		case Input.Keys.SYM:
			return GLFW_KEY_LEFT_SUPER;
		case Input.Keys.SHIFT_RIGHT:
			return GLFW_KEY_RIGHT_SHIFT;
		case Input.Keys.CONTROL_RIGHT:
			return GLFW_KEY_RIGHT_CONTROL;
		case Input.Keys.ALT_RIGHT:
			return GLFW_KEY_RIGHT_ALT;
		case Input.Keys.MENU:
			return GLFW_KEY_MENU;
		default:
			return 0;
		}
	}
	/** Receives GLFW input and calls InputProcessor methods.
	 * @author Nathan Sweet */
	static class GlfwInputProcessor extends GlfwCallbackAdapter {
		private int mouseX, mouseY, mousePressed; // mousePressed: count of currently-down buttons
		private char lastCharacter; // last typed char, replayed for key-repeat
		private InputProcessor processor;

		public GlfwInputProcessor (InputProcessor processor) {
			if (processor == null) throw new IllegalArgumentException("processor cannot be null.");
			this.processor = processor;
		}

		/**
		 * Key events: PRESS forwards keyDown and synthesizes a keyTyped for
		 * keys with a character equivalent; RELEASE forwards keyUp; REPEAT
		 * replays the last typed character, if any.
		 */
		public void key (long window, int key, int action) {
			switch (action) {
			case GLFW_PRESS:
				key = getGdxKeyCode(key);
				processor.keyDown(key);

				lastCharacter = 0;
				char character = characterForKeyCode(key);
				if (character != 0) character(window, character);
				break;

			case GLFW_RELEASE:
				processor.keyUp(getGdxKeyCode(key));
				break;

			case GLFW_REPEAT:
				if (lastCharacter != 0) processor.keyTyped(lastCharacter);
				break;
			}
		}

		public void character (long window, char character) {
			// 0xf700-0xf7ff is a private-use range for function keys; skip it.
			if ((character & 0xff00) == 0xf700) return;
			lastCharacter = character;
			processor.keyTyped(character);
		}

		public void scroll (long window, double scrollX, double scrollY) {
			// Invert sign so scrolling down yields a positive amount.
			processor.scrolled((int)-Math.signum(scrollY));
		}

		private int toGdxButton (int button) {
			if (button == 0) return Buttons.LEFT;
			if (button == 1) return Buttons.RIGHT;
			if (button == 2) return Buttons.MIDDLE;
			if (button == 3) return Buttons.BACK;
			if (button == 4) return Buttons.FORWARD;
			return -1;
		}

		public void mouseButton (long window, int button, boolean pressed) {
			int gdxButton = toGdxButton(button);
			if (button != -1 && gdxButton == -1) return; // Ignore unknown button.

			if (pressed) {
				mousePressed++;
				processor.touchDown(mouseX, mouseY, 0, gdxButton);
			} else {
				// Clamp at 0 so unmatched releases cannot go negative.
				mousePressed = Math.max(0, mousePressed - 1);
				processor.touchUp(mouseX, mouseY, 0, gdxButton);
			}
		}

		public void cursorPos (long window, int x, int y) {
			mouseX = x;
			mouseY = y;
			// With any button held the move is a drag, otherwise a hover move.
			if (mousePressed > 0)
				processor.touchDragged(x, y, 0);
			else
				processor.mouseMoved(x, y);
		}
	}
	// Desktop has no gyroscope; all axes report 0.
	@Override
	public float getGyroscopeX () {
		return 0;
	}

	@Override
	public float getGyroscopeY () {
		return 0;
	}

	@Override
	public float getGyroscopeZ () {
		return 0;
	}
}
| |
// Copyright 2016 Twitter. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.twitter.heron.statemgr.zookeeper.curator;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import com.google.protobuf.Message;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.framework.api.BackgroundCallback;
import org.apache.curator.framework.api.CuratorEvent;
import org.apache.curator.framework.api.DeleteBuilder;
import org.apache.curator.framework.recipes.locks.InterProcessSemaphoreMutex;
import org.apache.curator.retry.ExponentialBackoffRetry;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.Watcher;
import com.twitter.heron.api.generated.TopologyAPI;
import com.twitter.heron.common.basics.Pair;
import com.twitter.heron.proto.ckptmgr.CheckpointManager;
import com.twitter.heron.proto.scheduler.Scheduler;
import com.twitter.heron.proto.system.ExecutionEnvironment;
import com.twitter.heron.proto.system.PackingPlans;
import com.twitter.heron.proto.system.PhysicalPlans;
import com.twitter.heron.proto.tmaster.TopologyMaster;
import com.twitter.heron.spi.common.Config;
import com.twitter.heron.spi.common.Context;
import com.twitter.heron.spi.common.Key;
import com.twitter.heron.spi.statemgr.Lock;
import com.twitter.heron.spi.statemgr.WatchCallback;
import com.twitter.heron.spi.utils.NetworkUtils;
import com.twitter.heron.statemgr.FileSystemStateManager;
import com.twitter.heron.statemgr.zookeeper.ZkContext;
import com.twitter.heron.statemgr.zookeeper.ZkUtils;
import com.twitter.heron.statemgr.zookeeper.ZkWatcherCallback;
public class CuratorStateManager extends FileSystemStateManager {
  private static final Logger LOG = Logger.getLogger(CuratorStateManager.class.getName());

  // Curator client for the ZooKeeper ensemble; created and started in initialize().
  private CuratorFramework client;
  // Effective ZK connection string; may be rewritten to point at a local ssh tunnel.
  private String connectionString;
  // Whether the scheduler runs as a service; decides if its location node is ephemeral.
  private boolean isSchedulerService;
  // Tunnel processes to destroy on close(); empty when no tunnel is needed.
  private List<Process> tunnelProcesses;
  private Config config;

  /**
   * Initializes the state manager: resolves the ZK connection string (optionally via an
   * ssh tunnel), starts the Curator client, blocks until it is connected and, if
   * configured, creates the directory tree in ZooKeeper.
   *
   * @param newConfig heron config carrying zookeeper / state-manager settings
   * @throws IllegalArgumentException if a required tunnel could not be established
   * @throws RuntimeException if the client cannot connect within the configured timeout
   */
  @Override
  public void initialize(Config newConfig) {
    super.initialize(newConfig);
    this.config = newConfig;
    this.connectionString = Context.stateManagerConnectionString(newConfig);
    this.isSchedulerService = Context.schedulerService(newConfig);
    this.tunnelProcesses = new ArrayList<>();
    NetworkUtils.TunnelConfig tunnelConfig =
        NetworkUtils.TunnelConfig.build(config, NetworkUtils.HeronSystem.STATE_MANAGER);
    if (tunnelConfig.isTunnelNeeded()) {
      Pair<String, List<Process>> tunneledResults = setupZkTunnel(tunnelConfig);
      String newConnectionString = tunneledResults.first;
      if (newConnectionString.isEmpty()) {
        throw new IllegalArgumentException("Failed to connect to tunnel host '"
            + tunnelConfig.getTunnelHost() + "'");
      }
      // Use the new connection string
      connectionString = newConnectionString;
      tunnelProcesses.addAll(tunneledResults.second);
    }
    // Start it
    client = getCuratorClient();
    LOG.info("Starting Curator client connecting to: " + connectionString);
    client.start();
    try {
      if (!client.blockUntilConnected(ZkContext.connectionTimeoutMs(newConfig),
          TimeUnit.MILLISECONDS)) {
        throw new RuntimeException("Failed to initialize CuratorClient");
      }
    } catch (InterruptedException e) {
      // Restore the interrupt flag before surfacing the failure so callers can
      // still observe the interruption.
      Thread.currentThread().interrupt();
      throw new RuntimeException("Failed to initialize CuratorClient", e);
    }
    if (ZkContext.isInitializeTree(newConfig)) {
      initTree();
    }
  }

  /**
   * Lock backed by {@code InterProcessSemaphoreMutex}. Guaranteed to atomically get a
   * distributed ephemeral lock backed by zookeeper. The lock should be explicitly released to
   * avoid unnecessary waiting by other threads waiting on it.
   */
  private final class DistributedLock implements Lock {
    private String path;
    private InterProcessSemaphoreMutex lock;

    private DistributedLock(CuratorFramework client, String path) {
      this.path = path;
      this.lock = new InterProcessSemaphoreMutex(client, path);
    }

    /**
     * Attempts to acquire the distributed lock within the given timeout.
     *
     * @return true if the lock was acquired, false on timeout
     * @throws InterruptedException if interrupted while waiting
     */
    @Override
    public boolean tryLock(long timeout, TimeUnit unit) throws InterruptedException {
      try {
        return this.lock.acquire(timeout, unit);
      } catch (InterruptedException e) {
        throw e;
        // SUPPRESS CHECKSTYLE IllegalCatch
      } catch (Exception e) {
        throw new RuntimeException("Error while trying to acquire distributed lock at " + path, e);
      }
    }

    /** Releases the distributed lock; must be called by the thread that acquired it. */
    @Override
    public void unlock() {
      try {
        this.lock.release();
        // SUPPRESS CHECKSTYLE IllegalCatch
      } catch (Exception e) {
        throw new RuntimeException("Error while trying to release distributed lock at " + path, e);
      }
    }
  }

  /**
   * Builds (but does not start) the Curator client for {@link #connectionString}.
   *
   * @return a configured, unstarted {@code CuratorFramework}
   */
  protected CuratorFramework getCuratorClient() {
    // these are reasonable arguments for the ExponentialBackoffRetry. The first
    // retry will wait 1 second - the second will wait up to 2 seconds - the
    // third will wait up to 4 seconds.
    ExponentialBackoffRetry retryPolicy = new ExponentialBackoffRetry(
        ZkContext.retryIntervalMs(config), ZkContext.retryCount(config));
    // using the CuratorFrameworkFactory.builder() gives fine grained control
    // over creation options. See the CuratorFrameworkFactory.Builder javadoc
    // details
    return CuratorFrameworkFactory.builder()
        .connectString(connectionString)
        .retryPolicy(retryPolicy)
        .connectionTimeoutMs(ZkContext.connectionTimeoutMs(config))
        .sessionTimeoutMs(ZkContext.sessionTimeoutMs(config))
        // etc. etc.
        .build();
  }

  /**
   * Sets up an ssh tunnel to the ZK ensemble; overridable for testing.
   *
   * @return pair of (tunneled connection string, tunnel processes)
   */
  protected Pair<String, List<Process>> setupZkTunnel(NetworkUtils.TunnelConfig tunnelConfig) {
    return ZkUtils.setupZkTunnel(config, tunnelConfig);
  }

  /**
   * Creates the container nodes for every {@code StateLocation} so later reads/writes
   * find their parent directories in place.
   */
  protected void initTree() {
    // Make necessary directories
    for (StateLocation location : StateLocation.values()) {
      LOG.fine(String.format("%s directory: %s", location.getName(), getStateDirectory(location)));
    }
    try {
      for (StateLocation location : StateLocation.values()) {
        client.createContainers(getStateDirectory(location));
      }
      // Suppress it since createContainers() throws Exception
      // SUPPRESS CHECKSTYLE IllegalCatch
    } catch (Exception e) {
      throw new RuntimeException("Failed to initialize tree", e);
    }
    LOG.info("Directory tree initialized.");
  }

  /** Closes the Curator client and destroys any ssh tunnel processes. */
  @Override
  public void close() {
    if (client != null) {
      LOG.info("Closing the CuratorClient to: " + connectionString);
      client.close();
    }
    // Close the tunneling
    LOG.info("Closing the tunnel processes");
    if (tunnelProcesses != null) {
      for (Process process : tunnelProcesses) {
        process.destroy();
      }
    }
  }

  /** @return the effective connection string (possibly the tunnel endpoint) */
  public String getConnectionString() {
    return connectionString;
  }

  // Make utils class protected for easy unit testing

  /**
   * Checks whether a node exists at {@code path}. The check itself is synchronous; the
   * result is delivered through an already-completed future.
   */
  @Override
  protected ListenableFuture<Boolean> nodeExists(String path) {
    final SettableFuture<Boolean> result = SettableFuture.create();
    try {
      LOG.info("Checking existence of path: " + path);
      result.set(client.checkExists().forPath(path) != null);
      // Suppress it since forPath() throws Exception
      // SUPPRESS CHECKSTYLE IllegalCatch
    } catch (Exception e) {
      result.setException(new RuntimeException("Could not check Exist", e));
    }
    return result;
  }

  /** Creates a node for the given state location and topology; see the overload below. */
  protected ListenableFuture<Boolean> createNode(
      StateLocation location, String topologyName,
      byte[] data,
      boolean isEphemeral) {
    return createNode(getStatePath(location, topologyName), data, isEphemeral);
  }

  /**
   * Creates a node at {@code path} holding {@code data}.
   *
   * @param isEphemeral true to create an EPHEMERAL node (removed when the session ends),
   *                    false for a PERSISTENT one
   * @return future completed with true on success, or failed with the creation error
   */
  @VisibleForTesting
  protected ListenableFuture<Boolean> createNode(
      String path,
      byte[] data,
      boolean isEphemeral) {
    final SettableFuture<Boolean> result = SettableFuture.create();
    try {
      client.create().
          withMode(isEphemeral ? CreateMode.EPHEMERAL : CreateMode.PERSISTENT)
          .forPath(path, data);
      LOG.info("Created node for path: " + path);
      result.set(true);
      // Suppress it since forPath() throws Exception
      // SUPPRESS CHECKSTYLE IllegalCatch
    } catch (Exception e) {
      result.setException(new RuntimeException("Could not createNode:", e));
    }
    return result;
  }

  /**
   * Deletes the node at {@code path}. A missing node (NONODE) is treated as success so
   * deletion is idempotent.
   *
   * @param deleteChildrenIfNecessary true to recursively delete children first
   */
  @Override
  protected ListenableFuture<Boolean> deleteNode(String path, boolean deleteChildrenIfNecessary) {
    final SettableFuture<Boolean> result = SettableFuture.create();
    try {
      DeleteBuilder deleteBuilder = client.delete();
      if (deleteChildrenIfNecessary) {
        deleteBuilder = (DeleteBuilder) deleteBuilder.deletingChildrenIfNeeded();
      }
      deleteBuilder.withVersion(-1).forPath(path);
      LOG.info("Deleted node for path: " + path);
      result.set(true);
    } catch (KeeperException e) {
      if (KeeperException.Code.NONODE.equals(e.code())) {
        result.set(true);
      } else {
        result.setException(new RuntimeException("Could not deleteNode", e));
      }
      // Suppress it since forPath() throws Exception
      // SUPPRESS CHECKSTYLE IllegalCatch
    } catch (Exception e) {
      result.setException(new RuntimeException("Could not deleteNode", e));
    }
    return result;
  }

  /**
   * Asynchronously reads and parses the protobuf stored at {@code path}, registering
   * {@code watcher} for subsequent changes of the node.
   *
   * @param watcher callback invoked on later changes to the node
   * @param path zookeeper path to read
   * @param builder protobuf builder used to parse the node's payload
   * @return future completed with the parsed message, or failed if the read/parse fails
   */
  @Override
  protected <M extends Message> ListenableFuture<M> getNodeData(
      WatchCallback watcher,
      String path,
      final Message.Builder builder) {
    final SettableFuture<M> future = SettableFuture.create();
    Watcher wc = ZkWatcherCallback.makeZkWatcher(watcher);
    BackgroundCallback cb = new BackgroundCallback() {
      @Override
      @SuppressWarnings("unchecked") // we don't know what M is until runtime
      public void processResult(CuratorFramework aClient, CuratorEvent event) throws Exception {
        // Bug fix: the original condition used the non-short-circuiting '&'
        // ("event != null & (data = event.getData()) != null"), so a null event
        // still dereferenced event.getData() — and event.getPath() in the error
        // branch — throwing NPE instead of failing the future cleanly.
        byte[] data = (event == null) ? null : event.getData();
        if (data != null) {
          builder.mergeFrom(data);
          future.set((M) builder.build());
        } else {
          future.setException(new RuntimeException("Failed to fetch data from path: "
              + (event == null ? "unknown" : event.getPath())));
        }
      }
    };
    try {
      client.getData().usingWatcher(wc).inBackground(cb).forPath(path);
      // Suppress it since forPath() throws Exception
      // SUPPRESS CHECKSTYLE IllegalCatch
    } catch (Exception e) {
      future.setException(new RuntimeException("Could not getNodeData", e));
    }
    return future;
  }

  /** @return a {@link DistributedLock} bound to the given zookeeper path */
  @Override
  protected Lock getLock(String path) {
    return new DistributedLock(this.client, path);
  }

  @Override
  public ListenableFuture<Boolean> setTMasterLocation(
      TopologyMaster.TMasterLocation location,
      String topologyName) {
    // ephemeral: the location should vanish when the TMaster's session dies
    return createNode(StateLocation.TMASTER_LOCATION, topologyName, location.toByteArray(), true);
  }

  @Override
  public ListenableFuture<Boolean> setMetricsCacheLocation(
      TopologyMaster.MetricsCacheLocation location,
      String topologyName) {
    // ephemeral: the location should vanish when the MetricsCache's session dies
    return createNode(
        StateLocation.METRICSCACHE_LOCATION, topologyName, location.toByteArray(), true);
  }

  @Override
  public ListenableFuture<Boolean> setExecutionState(
      ExecutionEnvironment.ExecutionState executionState,
      String topologyName) {
    return createNode(
        StateLocation.EXECUTION_STATE, topologyName, executionState.toByteArray(), false);
  }

  @Override
  public ListenableFuture<Boolean> setTopology(
      TopologyAPI.Topology topology,
      String topologyName) {
    return createNode(StateLocation.TOPOLOGY, topologyName, topology.toByteArray(), false);
  }

  @Override
  public ListenableFuture<Boolean> setPhysicalPlan(
      PhysicalPlans.PhysicalPlan physicalPlan,
      String topologyName) {
    return createNode(StateLocation.PHYSICAL_PLAN, topologyName, physicalPlan.toByteArray(), false);
  }

  @Override
  public ListenableFuture<Boolean> setPackingPlan(
      PackingPlans.PackingPlan packingPlan,
      String topologyName) {
    return createNode(StateLocation.PACKING_PLAN, topologyName, packingPlan.toByteArray(), false);
  }

  @Override
  public ListenableFuture<Boolean> setStatefulCheckpoints(
      CheckpointManager.StatefulConsistentCheckpoints checkpoint,
      String topologyName) {
    return createNode(StateLocation.STATEFUL_CHECKPOINT, topologyName,
        checkpoint.toByteArray(), false);
  }

  @Override
  public ListenableFuture<Boolean> setSchedulerLocation(
      Scheduler.SchedulerLocation location,
      String topologyName) {
    // if isService, set the node as ephemeral node; set as persistent node otherwise
    return createNode(StateLocation.SCHEDULER_LOCATION, topologyName,
        location.toByteArray(),
        isSchedulerService);
  }

  @Override
  public ListenableFuture<Boolean> deleteTMasterLocation(String topologyName) {
    // It is a EPHEMERAL node and would be removed automatically
    final SettableFuture<Boolean> result = SettableFuture.create();
    result.set(true);
    return result;
  }

  @Override
  public ListenableFuture<Boolean> deleteMetricsCacheLocation(String topologyName) {
    // It is a EPHEMERAL node and would be removed automatically
    final SettableFuture<Boolean> result = SettableFuture.create();
    result.set(true);
    return result;
  }

  @Override
  public ListenableFuture<Boolean> deleteSchedulerLocation(String topologyName) {
    // if scheduler is service, the znode is ephemeral and it's deleted automatically
    if (isSchedulerService) {
      final SettableFuture<Boolean> result = SettableFuture.create();
      result.set(true);
      return result;
    } else {
      return deleteNode(getStatePath(StateLocation.SCHEDULER_LOCATION, topologyName), false);
    }
  }

  /**
   * Command-line entry point.
   * Usage: {@code <topology_name> <zookeeper_hostname>}.
   */
  public static void main(String[] args) throws ExecutionException, InterruptedException,
      IllegalAccessException, ClassNotFoundException, InstantiationException {
    if (args.length < 2) {
      throw new RuntimeException("Expects arguments: <topology_name> <zookeeper_hostname>");
    }
    String zookeeperHostname = args[1];
    Config config = Config.newBuilder()
        .put(Key.STATEMGR_ROOT_PATH, "/storm/heron/states")
        .put(Key.STATEMGR_CONNECTION_STRING, zookeeperHostname)
        .build();
    CuratorStateManager stateManager = new CuratorStateManager();
    stateManager.doMain(args, config);
  }
}
| |
package de.zib.gndms.model.common;
/*
* Copyright 2008-2011 Zuse Institute Berlin (ZIB)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import de.zib.gndms.common.model.gorfx.types.FutureTime;
import de.zib.gndms.common.model.gorfx.types.Quote;
import de.zib.gndms.stuff.copy.Copier;
import de.zib.gndms.stuff.copy.Copyable;
import de.zib.gndms.stuff.copy.Copyable.CopyMode;
import org.apache.openjpa.persistence.Externalizer;
import org.apache.openjpa.persistence.Factory;
import org.jetbrains.annotations.NotNull;
import org.joda.time.DateTime;
import org.joda.time.format.ISODateTimeFormat;
import javax.persistence.Embeddable;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;
import java.io.Serializable;
/**
*
* A PersistentContract can be transformed to a Quote
*
*
* @see de.zib.gndms.common.model.gorfx.types.Quote
*
* @author try ste fan pla nti kow zib
* @version $Id$
*
* User: stepn Date: 24.11.2008 Time: 15:22:43
*/
@Embeddable @Copyable(CopyMode.MOLD)
public class PersistentContract implements Serializable {

    private static final long serialVersionUID = -7695057432890400329L;

    // Instant at which the underlying quote was accepted; all relative (future)
    // times of the quote are fixed against this instant.
    private DateTime accepted;
    // Absolute deadline by which the task must be finished.
    private DateTime deadline;
    // Instant until which the task's result remains valid.
    private DateTime resultValidity;
    // expected size of task in case of a transfer or staging
    private Long expectedSize;

    /**
     * Accepts {@code quote} at the given time stamp and converts it into a persistent contract.
     *
     * @param dt the acceptance time to record (the quote is mutated as a side effect)
     * @param quote the quote to accept
     * @return the persistent contract derived from the accepted quote
     */
    public static PersistentContract acceptQuoteAt( DateTime dt, Quote quote ) {
        quote.setAccepted( dt );
        return acceptQuoteAsIs( quote );
    }

    /**
     * Accepts {@code quote} using the current time as the acceptance time stamp.
     *
     * @param quote the quote to accept
     * @return the persistent contract derived from the accepted quote
     */
    public static PersistentContract acceptQuoteNow( Quote quote ) {
        return acceptQuoteAt( new DateTime(), quote );
    }

    /**
     * Creates a persistent-contract from this contract by fixing future time using the
     * accepted time stamp.
     *
     * @return A persistent-contract representing this contract.
     * <p/>
     * todo maybe set fixed values here
     * @note The created contract may be invalid wrt its jpa constraints.
     * @param quote the accepted quote whose (possibly relative) times are to be fixed
     */
    @SuppressWarnings( { "FeatureEnvy" } )
    protected static PersistentContract acceptQuoteAsIs( Quote quote ) {
        PersistentContract pc = new PersistentContract();
        pc.setAccepted( quote.getAccepted() );
        DateTime fixedDeadline = null;
        if ( quote.hasDeadline() ) {
            // the deadline is fixed relative to the acceptance time
            fixedDeadline = quote.getDeadline().fixedWith( quote.getAccepted() ).getFixedTime();
            pc.setDeadline( fixedDeadline );
        }
        if ( quote.hasResultValidity() ) {
            DateTime fixedResultValidity;
            // result validity is fixed relative to the deadline when one exists,
            // otherwise relative to the acceptance time
            if ( fixedDeadline != null )
                fixedResultValidity = quote.getResultValidity().fixedWith( fixedDeadline ).getFixedTime();
            else {
                fixedResultValidity = quote.getResultValidity().fixedWith( quote.getAccepted() ).getFixedTime();
            }
            pc.setResultValidity( fixedResultValidity );
        }
        if ( quote.hasExpectedSize() )
            pc.setExpectedSize( quote.getExpectedSize() );
        return pc;
    }

    /**
     * Copies this contract's state into {@code instance} (deep copies of all fields).
     *
     * @see de.zib.gndms.stuff.mold.Molder
     */
    public void mold(final @NotNull PersistentContract instance) {
        instance.accepted = Copier.copy(true, accepted);
        instance.deadline = Copier.copy(true, deadline);
        instance.resultValidity = Copier.copy(true, resultValidity);
        // Bug fix: the original called setExpectedSize(...) on `this` (a self-assignment
        // no-op), so the molded instance never received expectedSize.
        instance.setExpectedSize(Copier.copy(true, getExpectedSize()));
    }

    /**
     * Transforms {@code this} to a {@code TransientContract} and returns the newly created instance.
     * Transformation is done by setting all fields of the {@code TransientContract} instance
     * to the field values of {@code this}.
     *
     * @return a corresponding Quote object out of {@code this}.
     */
    @SuppressWarnings({ "FeatureEnvy" })
    public @NotNull
    Quote toTransientContract() {
        final Quote tc = new Quote();
        final DateTime acceptedDt = new DateTime(getAccepted());
        tc.setAccepted(acceptedDt);
        tc.setDeadline(FutureTime.atFixedTime(getDeadline()));
        tc.setResultValidity( FutureTime.atFixedTime( resultValidity ));
        if (hasExpectedSize())
            tc.setExpectedSize(getExpectedSize());
        return tc;
    }

    /**
     * Compares {@link #deadline} and {@link #resultValidity} and returns the latest of both times.
     *
     * @return the latest time, when comparing {@link #deadline} and {@link #resultValidity}
     */
    @Transient
    public DateTime getCurrentTerminationTime() {
        final DateTime curDeadline = getDeadline();
        final DateTime curRV = getResultValidity();
        return curDeadline.compareTo(curRV) > 0 ? curDeadline : curRV;
    }

    /**
     * Returns a clone of {@link #accepted}
     *
     * @return a clone of {@link #accepted}
     */
    @Temporal(value = TemporalType.TIMESTAMP )
    @Factory( "de.zib.gndms.model.util.JodaTimeForJPA.toDateTime" )
    @Externalizer( "de.zib.gndms.model.util.JodaTimeForJPA.fromDateTime" )
    public DateTime getAccepted() {
        return nullSafeClone(accepted);
    }

    /**
     * Sets {@link #accepted}. Note that a clone of {@code acceptedParam} will be stored
     *
     * @param acceptedParam a chosen value for {@link #accepted}
     */
    public void setAccepted(final DateTime acceptedParam) {
        accepted = nullSafeClone(acceptedParam);
    }

    /**
     * Returns a clone of {@link #deadline}
     *
     * @return a clone of {@link #deadline}
     */
    @Temporal(value = TemporalType.TIMESTAMP)
    @Factory( "de.zib.gndms.model.util.JodaTimeForJPA.toDateTime" )
    @Externalizer( "de.zib.gndms.model.util.JodaTimeForJPA.fromDateTime" )
    public DateTime getDeadline() {
        return nullSafeClone(deadline);
    }

    /**
     * Sets {@link #deadline}. Note that a clone of {@code deadlineParam} will be stored
     *
     * @param deadlineParam a chosen value for {@link #deadline}
     */
    public void setDeadline(final DateTime deadlineParam) {
        deadline = nullSafeClone(deadlineParam);
    }

    /**
     * Returns a clone of {@link #resultValidity}
     *
     * @return a clone of {@link #resultValidity}
     */
    @Temporal(value = TemporalType.TIMESTAMP)
    @Factory( "de.zib.gndms.model.util.JodaTimeForJPA.toDateTime" )
    @Externalizer( "de.zib.gndms.model.util.JodaTimeForJPA.fromDateTime" )
    public DateTime getResultValidity() {
        return nullSafeClone(resultValidity);
    }

    /**
     * Sets {@link #resultValidity}. Note that a clone of {@code resultValidityParam} will be stored
     *
     * @param resultValidityParam a chosen value for {@link #resultValidity}
     */
    public void setResultValidity(final DateTime resultValidityParam) {
        resultValidity = nullSafeClone(resultValidityParam);
    }

    /** @return true if an expected size has been set */
    public boolean hasExpectedSize( ) {
        return getExpectedSize() != null;
    }

    /** @return the expected task size, or {@code null} if unknown */
    public Long getExpectedSize() {
        return expectedSize;
    }

    /**
     * Sets {@link #expectedSize}.
     *
     * @param expectedSizeParam the expected task size; may be {@code null} (unknown)
     * @throws IllegalArgumentException if a negative size is given
     */
    public void setExpectedSize(final Long expectedSizeParam) {
        if( expectedSizeParam != null )
            if ( expectedSizeParam < 0)
                throw new IllegalArgumentException();
        expectedSize = expectedSizeParam;
    }

    /**
     * Checks if this contract is valid. A valid contract has to meet the following requirements
     *
     * accepted, deadline and resultValidity != null
     * accepted < deadline
     * accepted < resultValidity
     *
     * If strict also
     * deadline < resultValidity
     * must apply.
     *
     * @param strict Activates strict (s.a.) checking
     * @return True if the contact is valid.
     */
    public boolean isValid( boolean strict ) {
        return accepted != null
            && deadline != null
            && resultValidity != null
            && accepted.compareTo( deadline ) < 0
            && accepted.compareTo( resultValidity ) < 0
            && ( !strict || deadline.compareTo( resultValidity ) < 0 );
    }

    /** @return a human-readable summary of all contract fields in ISO date format */
    @Override
    public String toString( ) {
        final Long theExpectedSize = getExpectedSize();
        return "Accepted: " + isoForDateTime( accepted )
            + "; Deadline: " + isoForDateTime( deadline )
            + "; ResultValidity: " + isoForDateTime( resultValidity )
            + "; ExpectedSize: "
            + ( theExpectedSize != null ? theExpectedSize.toString() : "null" );
    }

    /**
     * Returns either {@code null} if {@code cal} is null or if not, a clone of {@code cal}
     *
     * @param cal a DateTime which may be cloned.
     * @return a defensive copy of {@code cal}, or {@code null}
     */
    private static DateTime nullSafeClone(DateTime cal) {
        return cal == null ? null : new DateTime( cal );
    }

    /**
     * Returns a String representation of {@code cal}, using {@link org.joda.time.format.ISODateTimeFormat},
     * or "null" if {@code cal==null} is {@code true}.
     *
     * @param cal a DateTime to be printed as a String, in ISO format
     * @return the ISO-formatted date-time string, or the literal "null"
     */
    private static String isoForDateTime( DateTime cal ) {
        if( cal == null )
            return "null";
        return ISODateTimeFormat.dateTime().print( cal );
    }
}
| |
// Generated by xsd compiler for android/java
// DO NOT CHANGE!
package ebay.apis.eblbasecomponents;
import java.io.Serializable;
import com.leansoft.nano.annotation.*;
import java.util.List;
import java.util.Date;
/**
*
* Details about a specific shipping service.
*
*/
public class ShippingServiceDetailsType implements Serializable {
private static final long serialVersionUID = -1L;
// The fields below map one-to-one to XML elements of the eBay
// ShippingServiceDetailsType schema (see the @Element annotations).
// All fields are optional in the schema, hence the boxed/nullable types.
// NOTE(review): this class is generated from the WSDL ("DO NOT CHANGE!") —
// regenerate rather than hand-edit if the schema changes.
@Element(name = "Description")
private String description;
@Element(name = "ExpeditedService")
private Boolean expeditedService;
@Element(name = "InternationalService")
private Boolean internationalService;
@Element(name = "ShippingService")
private String shippingService;
@Element(name = "ShippingServiceID")
private Integer shippingServiceID;
@Element(name = "ShippingTimeMax")
private Integer shippingTimeMax;
@Element(name = "ShippingTimeMin")
private Integer shippingTimeMin;
@Element(name = "ShippingServiceCode")
private ShippingServiceCodeType shippingServiceCode;
@Element(name = "ServiceType")
private List<ShippingTypeCodeType> serviceType;
@Element(name = "ShippingPackage")
private List<ShippingPackageCodeType> shippingPackage;
@Element(name = "DimensionsRequired")
private Boolean dimensionsRequired;
@Element(name = "ValidForSellingFlow")
private Boolean validForSellingFlow;
@Element(name = "SurchargeApplicable")
private Boolean surchargeApplicable;
@Element(name = "ShippingCarrier")
private List<ShippingCarrierCodeType> shippingCarrier;
@Element(name = "CODService")
private Boolean codService;
@Element(name = "DeprecationDetails")
private List<AnnouncementMessageType> deprecationDetails;
@Element(name = "MappedToShippingServiceID")
private Integer mappedToShippingServiceID;
@Element(name = "CostGroupFlat")
private String costGroupFlat;
@Element(name = "ShippingServicePackageDetails")
private List<ShippingServicePackageDetailsType> shippingServicePackageDetails;
@Element(name = "WeightRequired")
private Boolean weightRequired;
@Element(name = "DetailVersion")
private String detailVersion;
@Element(name = "UpdateTime")
private Date updateTime;
@Element(name = "ShippingCategory")
private String shippingCategory;
/**
 * Returns the display string applications can use to present this shipping
 * service option in a user-friendly format (such as in a drop-down list).
 *
 * @return the human-readable description of the shipping service
 */
public String getDescription() {
return this.description;
}
/**
 * Sets the display string used to present this shipping service option.
 *
 * @param description the human-readable description of the shipping service
 */
public void setDescription(String description) {
this.description = description;
}
/**
 * Indicates whether this is an expedited shipping service (Get It Fast).
 * Only returned for sites with the Get It Fast feature enabled, and only if true.
 *
 * @return true if the service is expedited; may be null when not returned
 */
public Boolean getExpeditedService() {
return this.expeditedService;
}
/**
 * Sets the expedited-service (Get It Fast) flag.
 *
 * @param expeditedService true if the service is expedited
 */
public void setExpeditedService(Boolean expeditedService) {
this.expeditedService = expeditedService;
}
/**
 * Indicates whether this is an international shipping service. An international
 * option is required when an item ships from one country (origin) to another
 * (destination).
 *
 * @return true if the service is international; may be null when not returned
 */
public Boolean getInternationalService() {
return this.internationalService;
}
/**
 * Sets the international-service flag.
 *
 * @param internationalService true if the service is international
 */
public void setInternationalService(Boolean internationalService) {
this.internationalService = internationalService;
}
/**
 * Returns the name of the shipping service option. The
 * ShippingServiceDetails.ValidForSellingFlow flag must also be present; otherwise
 * the option is no longer valid and cannot be offered to buyers in a listing.
 *
 * @return the shipping service option name
 */
public String getShippingService() {
return this.shippingService;
}
/**
 * Sets the name of the shipping service option.
 *
 * @param shippingService the shipping service option name
 */
public void setShippingService(String shippingService) {
this.shippingService = shippingService;
}
/**
 * Returns the numeric identifier of the service. A value greater than 50000
 * represents an international shipping service (confirmed by
 * InternationalShippingService being true). Useful for efficient lookups and for
 * applications migrated from the legacy XML API.
 *
 * @return the numeric shipping service identifier
 */
public Integer getShippingServiceID() {
return this.shippingServiceID;
}
/**
 * Sets the numeric identifier of the service.
 *
 * @param shippingServiceID the numeric shipping service identifier
 */
public void setShippingServiceID(Integer shippingServiceID) {
this.shippingServiceID = shippingServiceID;
}
/**
 * Returns the maximum guaranteed number of days the carrier will take to ship
 * (excluding seller handling time). Always returned when ExpeditedService is true
 * or when defined for the service.
 *
 * @return maximum shipping time in days
 */
public Integer getShippingTimeMax() {
return this.shippingTimeMax;
}
/**
 * Sets the maximum guaranteed shipping time.
 *
 * @param shippingTimeMax maximum shipping time in days
 */
public void setShippingTimeMax(Integer shippingTimeMax) {
this.shippingTimeMax = shippingTimeMax;
}
/**
 * Returns the minimum guaranteed number of days the carrier will take to ship
 * (excluding seller handling time). Always returned when ExpeditedService is true
 * or when defined for the service.
 *
 * @return minimum shipping time in days
 */
public Integer getShippingTimeMin() {
return this.shippingTimeMin;
}
/**
 * Sets the minimum guaranteed shipping time.
 *
 * @param shippingTimeMin minimum shipping time in days
 */
public void setShippingTimeMin(Integer shippingTimeMin) {
this.shippingTimeMin = shippingTimeMin;
}
/**
 * Reserved by eBay for future use.
 *
 * @return the shipping service code
 */
public ShippingServiceCodeType getShippingServiceCode() {
return this.shippingServiceCode;
}
/**
 * Reserved by eBay for future use.
 *
 * @param shippingServiceCode the shipping service code
 */
public void setShippingServiceCode(ShippingServiceCodeType shippingServiceCode) {
this.shippingServiceCode = shippingServiceCode;
}
/**
 * Returns the types of shipping that this shipping service supports.
 *
 * @return the supported shipping types
 */
public List<ShippingTypeCodeType> getServiceType() {
return this.serviceType;
}
/**
 * Sets the types of shipping that this shipping service supports.
 *
 * @param serviceType the supported shipping types
 */
public void setServiceType(List<ShippingTypeCodeType> serviceType) {
this.serviceType = serviceType;
}
/**
 * Returns the kinds of packages supported by this shipping service.
 *
 * @return the supported package kinds
 */
public List<ShippingPackageCodeType> getShippingPackage() {
return this.shippingPackage;
}
/**
 * Sets the kinds of packages supported by this shipping service.
 *
 * @param shippingPackage the supported package kinds
 */
public void setShippingPackage(List<ShippingPackageCodeType> shippingPackage) {
this.shippingPackage = shippingPackage;
}
/**
 * Indicates whether the service requires the seller to provide package
 * dimensions. Only returned when required, and then always as 'true'.
 *
 * @return true if package dimensions are required; may be null when not returned
 */
public Boolean getDimensionsRequired() {
return this.dimensionsRequired;
}
/**
 * Sets the dimensions-required flag.
 *
 * @param dimensionsRequired true if package dimensions are required
 */
public void setDimensionsRequired(Boolean dimensionsRequired) {
this.dimensionsRequired = dimensionsRequired;
}
/**
 * Indicates whether the shipping service option can be used in an
 * Add/Revise/Relist API call. If 'false', the option is not currently supported.
 *
 * @return true if usable in Add/Revise/Relist calls
 */
public Boolean getValidForSellingFlow() {
return this.validForSellingFlow;
}
/**
 * Sets whether the option can be used in Add/Revise/Relist API calls.
 *
 * @param validForSellingFlow true if usable in Add/Revise/Relist calls
 */
public void setValidForSellingFlow(Boolean validForSellingFlow) {
this.validForSellingFlow = validForSellingFlow;
}
/**
 * Indicates whether a surcharge applies for any region this service ships to.
 *
 * @return true if a surcharge applies
 */
public Boolean getSurchargeApplicable() {
return this.surchargeApplicable;
}
/**
 * Sets the surcharge-applicable flag.
 *
 * @param surchargeApplicable true if a surcharge applies
 */
public void setSurchargeApplicable(Boolean surchargeApplicable) {
this.surchargeApplicable = surchargeApplicable;
}
/**
 * Returns the kinds of carrier supported by this shipping service.
 *
 * @return the supported carriers
 */
public List<ShippingCarrierCodeType> getShippingCarrier() {
return this.shippingCarrier;
}
/**
 * Sets the kinds of carrier supported by this shipping service.
 *
 * @param shippingCarrier the supported carriers
 */
public void setShippingCarrier(List<ShippingCarrierCodeType> shippingCarrier) {
this.shippingCarrier = shippingCarrier;
}
/**
 * Applies to the Italy site (site ID 101) only. Indicates whether the service is
 * a Cash On Delivery shipping service. Returned only if true.
 *
 * @return true if the service is Cash On Delivery; may be null when not returned
 */
public Boolean getCodService() {
return this.codService;
}
/**
 * Sets the Cash On Delivery flag (Italy site only).
 *
 * @param codService true if the service is Cash On Delivery
 */
public void setCodService(Boolean codService) {
this.codService = codService;
}
/**
 * Returns the announcements describing deprecation of this shipping service.
 *
 * <p>See also {@link #getMappedToShippingServiceID()}. If this container is
 * empty, there is no mapping for this shipping service and the shipping
 * service will be dropped from the listing without an accompanying warning
 * message from the eBay API.
 *
 * @return the list of deprecation announcement messages
 */
public List<AnnouncementMessageType> getDeprecationDetails() {
    return this.deprecationDetails;
}
/**
 * Sets the announcements describing deprecation of this shipping service.
 *
 * <p>See also {@link #setMappedToShippingServiceID(Integer)}. If this
 * container is empty, there is no mapping for this shipping service and the
 * shipping service will be dropped from the listing without an accompanying
 * warning message from the eBay API.
 *
 * @param deprecationDetails the list of deprecation announcement messages
 */
public void setDeprecationDetails(List<AnnouncementMessageType> deprecationDetails) {
    this.deprecationDetails = deprecationDetails;
}
/**
 * Returns the ID of the replacement shipping service used when this shipping
 * service is deprecated.
 *
 * <p>See also {@link #getDeprecationDetails()}.
 *
 * @return the ID of the replacement shipping service
 */
public Integer getMappedToShippingServiceID() {
    return this.mappedToShippingServiceID;
}
/**
 * Sets the ID of the replacement shipping service used when this shipping
 * service is deprecated.
 *
 * <p>See also {@link #setDeprecationDetails(List)}.
 *
 * @param mappedToShippingServiceID the ID of the replacement shipping service
 */
public void setMappedToShippingServiceID(Integer mappedToShippingServiceID) {
    this.mappedToShippingServiceID = mappedToShippingServiceID;
}
/**
 * Returns the shipping service group to which this shipping service belongs,
 * if any.
 *
 * <p>Valid values are found in CostGroupFlatCodeType.
 *
 * @return the cost group name, or {@code null} if not returned
 */
public String getCostGroupFlat() {
    return this.costGroupFlat;
}
/**
 * Sets the shipping service group to which this shipping service belongs.
 *
 * <p>Valid values are found in CostGroupFlatCodeType.
 *
 * @param costGroupFlat the cost group name
 */
public void setCostGroupFlat(String costGroupFlat) {
    this.costGroupFlat = costGroupFlat;
}
/**
 * Returns the shipping-package level details for the enclosing shipping
 * service.
 *
 * <p>This complex type replaces the existing ShippingPackage type.
 *
 * @return the list of package-level details for this shipping service
 */
public List<ShippingServicePackageDetailsType> getShippingServicePackageDetails() {
    return this.shippingServicePackageDetails;
}
/**
 * Sets the shipping-package level details for the enclosing shipping service.
 *
 * <p>This complex type replaces the existing ShippingPackage type.
 *
 * @param shippingServicePackageDetails the list of package-level details for
 *     this shipping service
 */
public void setShippingServicePackageDetails(List<ShippingServicePackageDetailsType> shippingServicePackageDetails) {
    this.shippingServicePackageDetails = shippingServicePackageDetails;
}
/**
 * Returns whether a weight specification is required for this service.
 *
 * <p>If {@code true}, a seller who selects this package type for the listing
 * and then offers this shipping service must specify WeightMajor and
 * WeightMinor in the item definition. If not returned, WeightRequired is
 * false.
 *
 * @return {@code true} if the item weight must be specified
 */
public Boolean getWeightRequired() {
    return this.weightRequired;
}
/**
 * Sets whether a weight specification is required for this service.
 *
 * <p>If {@code true}, a seller who selects this package type for the listing
 * and then offers this shipping service must specify WeightMajor and
 * WeightMinor in the item definition. If not returned, WeightRequired is
 * false.
 *
 * @param weightRequired {@code true} if the item weight must be specified
 */
public void setWeightRequired(Boolean weightRequired) {
    this.weightRequired = weightRequired;
}
/**
 * Returns the latest version number for this field.
 *
 * <p>The version can be used to determine if and when to refresh cached
 * client data.
 *
 * @return the latest version number for this field
 */
public String getDetailVersion() {
    return this.detailVersion;
}
/**
 * Sets the latest version number for this field.
 *
 * <p>The version can be used to determine if and when to refresh cached
 * client data.
 *
 * @param detailVersion the latest version number for this field
 */
public void setDetailVersion(String detailVersion) {
    this.detailVersion = detailVersion;
}
/**
 * Returns the time in GMT that the feature flags for the details were last
 * updated.
 *
 * <p>This timestamp can be used to determine if and when to refresh cached
 * client data.
 *
 * @return the last update time, in GMT
 */
public Date getUpdateTime() {
    return this.updateTime;
}
/**
 * Sets the time in GMT that the feature flags for the details were last
 * updated.
 *
 * <p>This timestamp can be used to determine if and when to refresh cached
 * client data.
 *
 * @param updateTime the last update time, in GMT
 */
public void setUpdateTime(Date updateTime) {
    this.updateTime = updateTime;
}
/**
 * Returns the shipping category.
 *
 * <p>Shipping categories include the following: ECONOMY, STANDARD, EXPEDITED,
 * ONE_DAY, PICKUP, OTHER, and NONE. International shipping services are
 * generally grouped into the NONE category. For more information on these
 * shipping categories, see the
 * <a href="http://pages.ebay.com/sellerinformation/shipping/chooseservice.html">Shipping Basics</a>
 * page on the eBay Shipping Center site.
 *
 * @return the shipping category name
 */
public String getShippingCategory() {
    return this.shippingCategory;
}
/**
 * Sets the shipping category.
 *
 * <p>Shipping categories include the following: ECONOMY, STANDARD, EXPEDITED,
 * ONE_DAY, PICKUP, OTHER, and NONE. International shipping services are
 * generally grouped into the NONE category. For more information on these
 * shipping categories, see the
 * <a href="http://pages.ebay.com/sellerinformation/shipping/chooseservice.html">Shipping Basics</a>
 * page on the eBay Shipping Center site.
 *
 * @param shippingCategory the shipping category name
 */
public void setShippingCategory(String shippingCategory) {
    this.shippingCategory = shippingCategory;
}
}
| |
/*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.test.utils.robolectric;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.test.utils.robolectric.RobolectricUtil.runMainLooperUntil;
import android.os.Looper;
import androidx.media3.common.Player;
import androidx.media3.common.Timeline;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import androidx.media3.exoplayer.ExoPlaybackException;
import androidx.media3.exoplayer.ExoPlayer;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import org.checkerframework.checker.nullness.compatqual.NullableType;
/**
* Helper methods to block the calling thread until the provided {@link ExoPlayer} instance reaches
* a particular state.
*/
@UnstableApi
public class TestPlayerRunHelper {

  private TestPlayerRunHelper() {}

  /**
   * Runs tasks of the main {@link Looper} until {@link Player#getPlaybackState()} matches the
   * expected state or a playback error occurs.
   *
   * <p>If a playback error occurs it will be thrown wrapped in an {@link IllegalStateException}.
   *
   * @param player The {@link Player}.
   * @param expectedState The expected {@link Player.State}.
   * @throws TimeoutException If the {@link RobolectricUtil#DEFAULT_TIMEOUT_MS default timeout} is
   *     exceeded.
   */
  public static void runUntilPlaybackState(Player player, @Player.State int expectedState)
      throws TimeoutException {
    verifyMainTestThread(player);
    runMainLooperUntil(
        () -> player.getPlaybackState() == expectedState || player.getPlayerError() != null);
    if (player.getPlayerError() != null) {
      throw new IllegalStateException(player.getPlayerError());
    }
  }

  /**
   * Runs tasks of the main {@link Looper} until {@link Player#getPlayWhenReady()} matches the
   * expected value or a playback error occurs.
   *
   * <p>If a playback error occurs it will be thrown wrapped in an {@link IllegalStateException}.
   *
   * @param player The {@link Player}.
   * @param expectedPlayWhenReady The expected value for {@link Player#getPlayWhenReady()}.
   * @throws TimeoutException If the {@link RobolectricUtil#DEFAULT_TIMEOUT_MS default timeout} is
   *     exceeded.
   */
  public static void runUntilPlayWhenReady(Player player, boolean expectedPlayWhenReady)
      throws TimeoutException {
    verifyMainTestThread(player);
    runMainLooperUntil(
        () ->
            player.getPlayWhenReady() == expectedPlayWhenReady || player.getPlayerError() != null);
    if (player.getPlayerError() != null) {
      throw new IllegalStateException(player.getPlayerError());
    }
  }

  /**
   * Runs tasks of the main {@link Looper} until {@link Player#getCurrentTimeline()} matches the
   * expected timeline or a playback error occurs.
   *
   * <p>If a playback error occurs it will be thrown wrapped in an {@link IllegalStateException}.
   *
   * @param player The {@link Player}.
   * @param expectedTimeline The expected {@link Timeline}.
   * @throws TimeoutException If the {@link RobolectricUtil#DEFAULT_TIMEOUT_MS default timeout} is
   *     exceeded.
   */
  public static void runUntilTimelineChanged(Player player, Timeline expectedTimeline)
      throws TimeoutException {
    verifyMainTestThread(player);
    runMainLooperUntil(
        () ->
            expectedTimeline.equals(player.getCurrentTimeline())
                || player.getPlayerError() != null);
    if (player.getPlayerError() != null) {
      throw new IllegalStateException(player.getPlayerError());
    }
  }

  /**
   * Runs tasks of the main {@link Looper} until a timeline change or a playback error occurs.
   *
   * <p>If a playback error occurs it will be thrown wrapped in an {@link IllegalStateException}.
   *
   * @param player The {@link Player}.
   * @return The new {@link Timeline}.
   * @throws TimeoutException If the {@link RobolectricUtil#DEFAULT_TIMEOUT_MS default timeout} is
   *     exceeded.
   */
  public static Timeline runUntilTimelineChanged(Player player) throws TimeoutException {
    verifyMainTestThread(player);
    AtomicReference<@NullableType Timeline> receivedTimeline = new AtomicReference<>();
    Player.Listener listener =
        new Player.Listener() {
          @Override
          public void onTimelineChanged(Timeline timeline, int reason) {
            receivedTimeline.set(timeline);
          }
        };
    player.addListener(listener);
    runMainLooperUntil(() -> receivedTimeline.get() != null || player.getPlayerError() != null);
    player.removeListener(listener);
    if (player.getPlayerError() != null) {
      throw new IllegalStateException(player.getPlayerError());
    }
    return checkNotNull(receivedTimeline.get());
  }

  /**
   * Runs tasks of the main {@link Looper} until {@link
   * Player.Listener#onPositionDiscontinuity(Player.PositionInfo, Player.PositionInfo, int)} is
   * called with the specified {@link Player.DiscontinuityReason} or a playback error occurs.
   *
   * <p>If a playback error occurs it will be thrown wrapped in an {@link IllegalStateException}.
   *
   * @param player The {@link Player}.
   * @param expectedReason The expected {@link Player.DiscontinuityReason}.
   * @throws TimeoutException If the {@link RobolectricUtil#DEFAULT_TIMEOUT_MS default timeout} is
   *     exceeded.
   */
  public static void runUntilPositionDiscontinuity(
      Player player, @Player.DiscontinuityReason int expectedReason) throws TimeoutException {
    verifyMainTestThread(player);
    AtomicBoolean receivedCallback = new AtomicBoolean(false);
    Player.Listener listener =
        new Player.Listener() {
          @Override
          public void onPositionDiscontinuity(
              Player.PositionInfo oldPosition, Player.PositionInfo newPosition, int reason) {
            if (reason == expectedReason) {
              receivedCallback.set(true);
            }
          }
        };
    player.addListener(listener);
    runMainLooperUntil(() -> receivedCallback.get() || player.getPlayerError() != null);
    player.removeListener(listener);
    if (player.getPlayerError() != null) {
      throw new IllegalStateException(player.getPlayerError());
    }
  }

  /**
   * Runs tasks of the main {@link Looper} until a player error occurs.
   *
   * @param player The {@link Player}.
   * @return The raised {@link ExoPlaybackException}.
   * @throws TimeoutException If the {@link RobolectricUtil#DEFAULT_TIMEOUT_MS default timeout} is
   *     exceeded.
   */
  public static ExoPlaybackException runUntilError(ExoPlayer player) throws TimeoutException {
    verifyMainTestThread(player);
    runMainLooperUntil(() -> player.getPlayerError() != null);
    return checkNotNull(player.getPlayerError());
  }

  /**
   * Runs tasks of the main {@link Looper} until {@link
   * ExoPlayer.AudioOffloadListener#onExperimentalOffloadSchedulingEnabledChanged} is called or a
   * playback error occurs.
   *
   * <p>If a playback error occurs it will be thrown wrapped in an {@link IllegalStateException}.
   *
   * @param player The {@link Player}.
   * @return The new offloadSchedulingEnabled state.
   * @throws TimeoutException If the {@link RobolectricUtil#DEFAULT_TIMEOUT_MS default timeout} is
   *     exceeded.
   */
  public static boolean runUntilReceiveOffloadSchedulingEnabledNewState(ExoPlayer player)
      throws TimeoutException {
    verifyMainTestThread(player);
    AtomicReference<@NullableType Boolean> offloadSchedulingEnabledReceiver =
        new AtomicReference<>();
    ExoPlayer.AudioOffloadListener listener =
        new ExoPlayer.AudioOffloadListener() {
          @Override
          public void onExperimentalOffloadSchedulingEnabledChanged(
              boolean offloadSchedulingEnabled) {
            offloadSchedulingEnabledReceiver.set(offloadSchedulingEnabled);
          }
        };
    player.addAudioOffloadListener(listener);
    runMainLooperUntil(
        () -> offloadSchedulingEnabledReceiver.get() != null || player.getPlayerError() != null);
    player.removeAudioOffloadListener(listener);
    if (player.getPlayerError() != null) {
      throw new IllegalStateException(player.getPlayerError());
    }
    return checkNotNull(offloadSchedulingEnabledReceiver.get());
  }

  /**
   * Runs tasks of the main {@link Looper} until {@link
   * ExoPlayer.AudioOffloadListener#onExperimentalSleepingForOffloadChanged(boolean)} is called or a
   * playback error occurs.
   *
   * <p>If a playback error occurs it will be thrown wrapped in an {@link IllegalStateException}.
   *
   * @param player The {@link Player}.
   * @param expectedSleepForOffload The expected sleep of offload state.
   * @throws TimeoutException If the {@link RobolectricUtil#DEFAULT_TIMEOUT_MS default timeout} is
   *     exceeded.
   */
  public static void runUntilSleepingForOffload(ExoPlayer player, boolean expectedSleepForOffload)
      throws TimeoutException {
    verifyMainTestThread(player);
    AtomicBoolean receiverCallback = new AtomicBoolean(false);
    ExoPlayer.AudioOffloadListener listener =
        new ExoPlayer.AudioOffloadListener() {
          @Override
          public void onExperimentalSleepingForOffloadChanged(boolean sleepingForOffload) {
            if (sleepingForOffload == expectedSleepForOffload) {
              receiverCallback.set(true);
            }
          }
        };
    player.addAudioOffloadListener(listener);
    runMainLooperUntil(() -> receiverCallback.get() || player.getPlayerError() != null);
    // Remove the listener so it does not leak onto the shared player instance, matching the
    // cleanup performed by the other helpers in this class.
    player.removeAudioOffloadListener(listener);
    if (player.getPlayerError() != null) {
      throw new IllegalStateException(player.getPlayerError());
    }
  }

  /**
   * Runs tasks of the main {@link Looper} until the {@link Player.Listener#onRenderedFirstFrame}
   * callback is called or a playback error occurs.
   *
   * <p>If a playback error occurs it will be thrown wrapped in an {@link IllegalStateException}.
   *
   * @param player The {@link Player}.
   * @throws TimeoutException If the {@link RobolectricUtil#DEFAULT_TIMEOUT_MS default timeout} is
   *     exceeded.
   */
  public static void runUntilRenderedFirstFrame(ExoPlayer player) throws TimeoutException {
    verifyMainTestThread(player);
    AtomicBoolean receivedCallback = new AtomicBoolean(false);
    Player.Listener listener =
        new Player.Listener() {
          @Override
          public void onRenderedFirstFrame() {
            receivedCallback.set(true);
          }
        };
    player.addListener(listener);
    runMainLooperUntil(() -> receivedCallback.get() || player.getPlayerError() != null);
    player.removeListener(listener);
    if (player.getPlayerError() != null) {
      throw new IllegalStateException(player.getPlayerError());
    }
  }

  /**
   * Calls {@link Player#play()}, runs tasks of the main {@link Looper} until the {@code player}
   * reaches the specified position or a playback error occurs, and then pauses the {@code player}.
   *
   * <p>If a playback error occurs it will be thrown wrapped in an {@link IllegalStateException}.
   *
   * @param player The {@link Player}.
   * @param mediaItemIndex The index of the media item.
   * @param positionMs The position within the media item, in milliseconds.
   * @throws TimeoutException If the {@link RobolectricUtil#DEFAULT_TIMEOUT_MS default timeout} is
   *     exceeded.
   */
  public static void playUntilPosition(ExoPlayer player, int mediaItemIndex, long positionMs)
      throws TimeoutException {
    verifyMainTestThread(player);
    Looper applicationLooper = Util.getCurrentOrMainLooper();
    AtomicBoolean messageHandled = new AtomicBoolean(false);
    player
        .createMessage(
            (messageType, payload) -> {
              // Block playback thread until pause command has been sent from test thread.
              ConditionVariable blockPlaybackThreadCondition = new ConditionVariable();
              player
                  .getClock()
                  .createHandler(applicationLooper, /* callback= */ null)
                  .post(
                      () -> {
                        player.pause();
                        messageHandled.set(true);
                        blockPlaybackThreadCondition.open();
                      });
              try {
                player.getClock().onThreadBlocked();
                blockPlaybackThreadCondition.block();
              } catch (InterruptedException e) {
                // Ignore.
              }
            })
        .setPosition(mediaItemIndex, positionMs)
        .send();
    player.play();
    runMainLooperUntil(() -> messageHandled.get() || player.getPlayerError() != null);
    if (player.getPlayerError() != null) {
      throw new IllegalStateException(player.getPlayerError());
    }
  }

  /**
   * Calls {@link Player#play()}, runs tasks of the main {@link Looper} until the {@code player}
   * reaches the specified media item or a playback error occurs, and then pauses the {@code
   * player}.
   *
   * <p>If a playback error occurs it will be thrown wrapped in an {@link IllegalStateException}.
   *
   * @param player The {@link Player}.
   * @param mediaItemIndex The index of the media item.
   * @throws TimeoutException If the {@link RobolectricUtil#DEFAULT_TIMEOUT_MS default timeout} is
   *     exceeded.
   */
  public static void playUntilStartOfMediaItem(ExoPlayer player, int mediaItemIndex)
      throws TimeoutException {
    playUntilPosition(player, mediaItemIndex, /* positionMs= */ 0);
  }

  /**
   * Runs tasks of the main {@link Looper} until the player completely handled all previously issued
   * commands on the internal playback thread.
   *
   * @param player The {@link Player}.
   * @throws TimeoutException If the {@link RobolectricUtil#DEFAULT_TIMEOUT_MS default timeout} is
   *     exceeded.
   */
  public static void runUntilPendingCommandsAreFullyHandled(ExoPlayer player)
      throws TimeoutException {
    verifyMainTestThread(player);
    // Send message to player that will arrive after all other pending commands. Thus, the message
    // execution on the app thread will also happen after all other pending command
    // acknowledgements have arrived back on the app thread.
    AtomicBoolean receivedMessageCallback = new AtomicBoolean(false);
    player
        .createMessage((type, data) -> receivedMessageCallback.set(true))
        .setLooper(Util.getCurrentOrMainLooper())
        .send();
    runMainLooperUntil(receivedMessageCallback::get);
  }

  // Asserts that both the calling thread and the player's application thread are the main looper,
  // since all helpers in this class rely on running the main looper to make progress.
  private static void verifyMainTestThread(Player player) {
    if (Looper.myLooper() != Looper.getMainLooper()
        || player.getApplicationLooper() != Looper.getMainLooper()) {
      throw new IllegalStateException();
    }
  }
}
| |
/**
*/
package gluemodel.substationStandard.LNNodes.LNGroupP.impl;
import gluemodel.substationStandard.Dataclasses.ACD;
import gluemodel.substationStandard.Dataclasses.ACT;
import gluemodel.substationStandard.Dataclasses.ASG;
import gluemodel.substationStandard.Dataclasses.ING;
import gluemodel.substationStandard.Dataclasses.SPS;
import gluemodel.substationStandard.LNNodes.LNGroupP.LNGroupPPackage;
import gluemodel.substationStandard.LNNodes.LNGroupP.PFRC;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>PFRC</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link gluemodel.substationStandard.LNNodes.LNGroupP.impl.PFRCImpl#getStr <em>Str</em>}</li>
* <li>{@link gluemodel.substationStandard.LNNodes.LNGroupP.impl.PFRCImpl#getOp <em>Op</em>}</li>
* <li>{@link gluemodel.substationStandard.LNNodes.LNGroupP.impl.PFRCImpl#getBlkV <em>Blk V</em>}</li>
* <li>{@link gluemodel.substationStandard.LNNodes.LNGroupP.impl.PFRCImpl#getStrVal <em>Str Val</em>}</li>
* <li>{@link gluemodel.substationStandard.LNNodes.LNGroupP.impl.PFRCImpl#getBlkVal <em>Blk Val</em>}</li>
* <li>{@link gluemodel.substationStandard.LNNodes.LNGroupP.impl.PFRCImpl#getOpDiTmms <em>Op Di Tmms</em>}</li>
* <li>{@link gluemodel.substationStandard.LNNodes.LNGroupP.impl.PFRCImpl#getRsDiTmms <em>Rs Di Tmms</em>}</li>
* </ul>
*
* @generated
*/
public class PFRCImpl extends GroupPImpl implements PFRC {
    // EMF-generated implementation: each feature is a cross-document reference cached in a
    // field, lazily resolved from its proxy on first access, with change notifications fired
    // through eNotify. Do not hand-edit code marked @generated; it will be overwritten on
    // regeneration.

    /**
     * The cached value of the '{@link #getStr() <em>Str</em>}' reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getStr()
     * @generated
     * @ordered
     */
    protected ACD str;
    /**
     * The cached value of the '{@link #getOp() <em>Op</em>}' reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getOp()
     * @generated
     * @ordered
     */
    protected ACT op;
    /**
     * The cached value of the '{@link #getBlkV() <em>Blk V</em>}' reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getBlkV()
     * @generated
     * @ordered
     */
    protected SPS blkV;
    /**
     * The cached value of the '{@link #getStrVal() <em>Str Val</em>}' reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getStrVal()
     * @generated
     * @ordered
     */
    protected ASG strVal;
    /**
     * The cached value of the '{@link #getBlkVal() <em>Blk Val</em>}' reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getBlkVal()
     * @generated
     * @ordered
     */
    protected ASG blkVal;
    /**
     * The cached value of the '{@link #getOpDiTmms() <em>Op Di Tmms</em>}' reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getOpDiTmms()
     * @generated
     * @ordered
     */
    protected ING opDiTmms;
    /**
     * The cached value of the '{@link #getRsDiTmms() <em>Rs Di Tmms</em>}' reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getRsDiTmms()
     * @generated
     * @ordered
     */
    protected ING rsDiTmms;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected PFRCImpl() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * Identifies this object's metamodel class for the EMF reflective API.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return LNGroupPPackage.Literals.PFRC;
    }
    /**
     * <!-- begin-user-doc -->
     * Resolves the cached reference from its proxy if needed, firing a RESOLVE
     * notification when the resolved object differs from the proxy. The same
     * pattern is used by every getter in this class.
     * <!-- end-user-doc -->
     * @generated
     */
    public ACD getStr() {
        if (str != null && str.eIsProxy()) {
            InternalEObject oldStr = (InternalEObject)str;
            str = (ACD)eResolveProxy(oldStr);
            if (str != oldStr) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PFRC__STR, oldStr, str));
            }
        }
        return str;
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the cached reference without proxy resolution.
     * <!-- end-user-doc -->
     * @generated
     */
    public ACD basicGetStr() {
        return str;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setStr(ACD newStr) {
        ACD oldStr = str;
        str = newStr;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PFRC__STR, oldStr, str));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ACT getOp() {
        if (op != null && op.eIsProxy()) {
            InternalEObject oldOp = (InternalEObject)op;
            op = (ACT)eResolveProxy(oldOp);
            if (op != oldOp) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PFRC__OP, oldOp, op));
            }
        }
        return op;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ACT basicGetOp() {
        return op;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setOp(ACT newOp) {
        ACT oldOp = op;
        op = newOp;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PFRC__OP, oldOp, op));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public SPS getBlkV() {
        if (blkV != null && blkV.eIsProxy()) {
            InternalEObject oldBlkV = (InternalEObject)blkV;
            blkV = (SPS)eResolveProxy(oldBlkV);
            if (blkV != oldBlkV) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PFRC__BLK_V, oldBlkV, blkV));
            }
        }
        return blkV;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public SPS basicGetBlkV() {
        return blkV;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setBlkV(SPS newBlkV) {
        SPS oldBlkV = blkV;
        blkV = newBlkV;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PFRC__BLK_V, oldBlkV, blkV));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ASG getStrVal() {
        if (strVal != null && strVal.eIsProxy()) {
            InternalEObject oldStrVal = (InternalEObject)strVal;
            strVal = (ASG)eResolveProxy(oldStrVal);
            if (strVal != oldStrVal) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PFRC__STR_VAL, oldStrVal, strVal));
            }
        }
        return strVal;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ASG basicGetStrVal() {
        return strVal;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setStrVal(ASG newStrVal) {
        ASG oldStrVal = strVal;
        strVal = newStrVal;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PFRC__STR_VAL, oldStrVal, strVal));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ASG getBlkVal() {
        if (blkVal != null && blkVal.eIsProxy()) {
            InternalEObject oldBlkVal = (InternalEObject)blkVal;
            blkVal = (ASG)eResolveProxy(oldBlkVal);
            if (blkVal != oldBlkVal) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PFRC__BLK_VAL, oldBlkVal, blkVal));
            }
        }
        return blkVal;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ASG basicGetBlkVal() {
        return blkVal;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setBlkVal(ASG newBlkVal) {
        ASG oldBlkVal = blkVal;
        blkVal = newBlkVal;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PFRC__BLK_VAL, oldBlkVal, blkVal));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ING getOpDiTmms() {
        if (opDiTmms != null && opDiTmms.eIsProxy()) {
            InternalEObject oldOpDiTmms = (InternalEObject)opDiTmms;
            opDiTmms = (ING)eResolveProxy(oldOpDiTmms);
            if (opDiTmms != oldOpDiTmms) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PFRC__OP_DI_TMMS, oldOpDiTmms, opDiTmms));
            }
        }
        return opDiTmms;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ING basicGetOpDiTmms() {
        return opDiTmms;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setOpDiTmms(ING newOpDiTmms) {
        ING oldOpDiTmms = opDiTmms;
        opDiTmms = newOpDiTmms;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PFRC__OP_DI_TMMS, oldOpDiTmms, opDiTmms));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ING getRsDiTmms() {
        if (rsDiTmms != null && rsDiTmms.eIsProxy()) {
            InternalEObject oldRsDiTmms = (InternalEObject)rsDiTmms;
            rsDiTmms = (ING)eResolveProxy(oldRsDiTmms);
            if (rsDiTmms != oldRsDiTmms) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupPPackage.PFRC__RS_DI_TMMS, oldRsDiTmms, rsDiTmms));
            }
        }
        return rsDiTmms;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ING basicGetRsDiTmms() {
        return rsDiTmms;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setRsDiTmms(ING newRsDiTmms) {
        ING oldRsDiTmms = rsDiTmms;
        rsDiTmms = newRsDiTmms;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, LNGroupPPackage.PFRC__RS_DI_TMMS, oldRsDiTmms, rsDiTmms));
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective read access: dispatches on featureID; when {@code resolve} is
     * false the raw (possibly unresolved proxy) value is returned.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case LNGroupPPackage.PFRC__STR:
                if (resolve) return getStr();
                return basicGetStr();
            case LNGroupPPackage.PFRC__OP:
                if (resolve) return getOp();
                return basicGetOp();
            case LNGroupPPackage.PFRC__BLK_V:
                if (resolve) return getBlkV();
                return basicGetBlkV();
            case LNGroupPPackage.PFRC__STR_VAL:
                if (resolve) return getStrVal();
                return basicGetStrVal();
            case LNGroupPPackage.PFRC__BLK_VAL:
                if (resolve) return getBlkVal();
                return basicGetBlkVal();
            case LNGroupPPackage.PFRC__OP_DI_TMMS:
                if (resolve) return getOpDiTmms();
                return basicGetOpDiTmms();
            case LNGroupPPackage.PFRC__RS_DI_TMMS:
                if (resolve) return getRsDiTmms();
                return basicGetRsDiTmms();
        }
        return super.eGet(featureID, resolve, coreType);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective write access: dispatches on featureID to the typed setter.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case LNGroupPPackage.PFRC__STR:
                setStr((ACD)newValue);
                return;
            case LNGroupPPackage.PFRC__OP:
                setOp((ACT)newValue);
                return;
            case LNGroupPPackage.PFRC__BLK_V:
                setBlkV((SPS)newValue);
                return;
            case LNGroupPPackage.PFRC__STR_VAL:
                setStrVal((ASG)newValue);
                return;
            case LNGroupPPackage.PFRC__BLK_VAL:
                setBlkVal((ASG)newValue);
                return;
            case LNGroupPPackage.PFRC__OP_DI_TMMS:
                setOpDiTmms((ING)newValue);
                return;
            case LNGroupPPackage.PFRC__RS_DI_TMMS:
                setRsDiTmms((ING)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
    /**
     * <!-- begin-user-doc -->
     * Resets a feature to its default (null for all references of this class).
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case LNGroupPPackage.PFRC__STR:
                setStr((ACD)null);
                return;
            case LNGroupPPackage.PFRC__OP:
                setOp((ACT)null);
                return;
            case LNGroupPPackage.PFRC__BLK_V:
                setBlkV((SPS)null);
                return;
            case LNGroupPPackage.PFRC__STR_VAL:
                setStrVal((ASG)null);
                return;
            case LNGroupPPackage.PFRC__BLK_VAL:
                setBlkVal((ASG)null);
                return;
            case LNGroupPPackage.PFRC__OP_DI_TMMS:
                setOpDiTmms((ING)null);
                return;
            case LNGroupPPackage.PFRC__RS_DI_TMMS:
                setRsDiTmms((ING)null);
                return;
        }
        super.eUnset(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * A reference feature is "set" when its cached field is non-null.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case LNGroupPPackage.PFRC__STR:
                return str != null;
            case LNGroupPPackage.PFRC__OP:
                return op != null;
            case LNGroupPPackage.PFRC__BLK_V:
                return blkV != null;
            case LNGroupPPackage.PFRC__STR_VAL:
                return strVal != null;
            case LNGroupPPackage.PFRC__BLK_VAL:
                return blkVal != null;
            case LNGroupPPackage.PFRC__OP_DI_TMMS:
                return opDiTmms != null;
            case LNGroupPPackage.PFRC__RS_DI_TMMS:
                return rsDiTmms != null;
        }
        return super.eIsSet(featureID);
    }
} //PFRCImpl
| |
/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.bitcoin.kits;
import com.google.bitcoin.core.*;
import com.google.bitcoin.net.discovery.DnsDiscovery;
import com.google.bitcoin.store.BlockStoreException;
import com.google.bitcoin.store.SPVBlockStore;
import com.google.bitcoin.store.WalletProtobufSerializer;
import com.google.common.util.concurrent.AbstractIdleService;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.concurrent.TimeUnit;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
/**
* <p>Utility class that wraps the boilerplate needed to set up a new SPV bitcoinj app. Instantiate it with a directory
* and file prefix, optionally configure a few things, then use start or startAndWait. The object will construct and
* configure a {@link BlockChain}, {@link SPVBlockStore}, {@link Wallet} and {@link PeerGroup}. Depending on the value
* of the blockingStartup property, startup will be considered complete once the block chain has fully synchronized,
* so it can take a while.</p>
*
* <p>To add listeners and modify the objects that are constructed, you can either do that by overriding the
* {@link #onSetupCompleted()} method (which will run on a background thread) and make your changes there,
* or by waiting for the service to start and then accessing the objects from wherever you want. However, you cannot
* access the objects this class creates until startup is complete.</p>
*
* <p>The asynchronous design of this class may seem puzzling (just use {@link #startAndWait()} if you don't want that).
* It is to make it easier to fit bitcoinj into GUI apps, which require a high degree of responsiveness on their main
* thread which handles all the animation and user interaction. Even when blockingStart is false, initializing bitcoinj
* means doing potentially blocking file IO, generating keys and other potentially intensive operations. By running it
* on a background thread, there's no risk of accidentally causing UI lag.</p>
*
* <p>Note that {@link #startAndWait()} can throw an unchecked {@link com.google.common.util.concurrent.UncheckedExecutionException}
* if anything goes wrong during startup - you should probably handle it and use {@link Exception#getCause()} to figure
* out what went wrong more precisely. Same thing if you use the async start() method.</p>
*/
public class WalletAppKit extends AbstractIdleService {
    private final String filePrefix;
    private final NetworkParameters params;
    private volatile BlockChain vChain;
    private volatile SPVBlockStore vStore;
    private volatile Wallet vWallet;
    private volatile PeerGroup vPeerGroup;
    private final File directory;
    private volatile File vWalletFile;
    private boolean useAutoSave = true;
    private PeerAddress[] peerAddresses;
    private PeerEventListener downloadListener;
    private boolean autoStop = true;
    private InputStream checkpoints;
    private boolean blockingStartup = true;
    private String userAgent, version;

    /**
     * Creates a kit that will store its chain and wallet files in {@code directory},
     * named {@code filePrefix + ".spvchain"} and {@code filePrefix + ".wallet"}.
     */
    public WalletAppKit(NetworkParameters params, File directory, String filePrefix) {
        this.params = checkNotNull(params);
        this.directory = checkNotNull(directory);
        this.filePrefix = checkNotNull(filePrefix);
    }

    /** Will only connect to the given addresses. Cannot be called after startup. */
    public WalletAppKit setPeerNodes(PeerAddress... addresses) {
        checkState(state() == State.NEW, "Cannot call after startup");
        this.peerAddresses = addresses;
        return this;
    }

    /** Will only connect to localhost. Cannot be called after startup. */
    public WalletAppKit connectToLocalHost() {
        try {
            final InetAddress localHost = InetAddress.getLocalHost();
            return setPeerNodes(new PeerAddress(localHost, params.getPort()));
        } catch (UnknownHostException e) {
            // Borked machine with no loopback adapter configured properly.
            throw new RuntimeException(e);
        }
    }

    /** If true, the wallet will save itself to disk automatically whenever it changes. */
    public WalletAppKit setAutoSave(boolean value) {
        checkState(state() == State.NEW, "Cannot call after startup");
        useAutoSave = value;
        return this;
    }

    /**
     * If you want to learn about the sync process, you can provide a listener here. For instance, a
     * {@link DownloadListener} is a good choice. Note that this listener is only wired up for the
     * non-blocking startup path; blocking startup currently uses its own internal listener (see TODO
     * in {@link #startUp()}).
     */
    public WalletAppKit setDownloadListener(PeerEventListener listener) {
        this.downloadListener = listener;
        return this;
    }

    /** If true, will register a shutdown hook to stop the library. Defaults to true. */
    public WalletAppKit setAutoStop(boolean autoStop) {
        this.autoStop = autoStop;
        return this;
    }

    /**
     * If set, the file is expected to contain a checkpoints file calculated with BuildCheckpoints. It makes initial
     * block sync faster for new users - please refer to the documentation on the bitcoinj website for further details.
     */
    public WalletAppKit setCheckpoints(InputStream checkpoints) {
        this.checkpoints = checkNotNull(checkpoints);
        return this;
    }

    /**
     * If true (the default) then the startup of this service won't be considered complete until the network has been
     * brought up, peer connections established and the block chain synchronised. Therefore {@link #startAndWait()} can
     * potentially take a very long time. If false, then startup is considered complete once the network activity
     * begins and peer connections/block chain sync will continue in the background.
     */
    public WalletAppKit setBlockingStartup(boolean blockingStartup) {
        this.blockingStartup = blockingStartup;
        return this;
    }

    /**
     * Sets the string that will appear in the subver field of the version message.
     * @param userAgent A short string that should be the name of your app, e.g. "My Wallet"
     * @param version A short string that contains the version number, e.g. "1.0-BETA"
     */
    public WalletAppKit setUserAgent(String userAgent, String version) {
        this.userAgent = checkNotNull(userAgent);
        this.version = checkNotNull(version);
        return this;
    }

    /**
     * <p>Override this to load all wallet extensions if any are necessary.</p>
     *
     * <p>When this is called, chain(), store(), and peerGroup() will return the created objects, however they are not
     * initialized/started</p>
     */
    protected void addWalletExtensions() throws Exception { }

    /**
     * This method is invoked on a background thread after all objects are initialised, but before the peer group
     * or block chain download is started. You can tweak the objects configuration here.
     */
    protected void onSetupCompleted() { }

    /**
     * Builds the block store, chain, wallet and peer group, then starts networking.
     * Runs on a background thread (AbstractIdleService contract); if {@code blockingStartup}
     * is true this does not return until the chain has fully synchronised.
     */
    @Override
    protected void startUp() throws Exception {
        // Runs in a separate thread.
        if (!directory.exists()) {
            // mkdirs (not mkdir) so that a missing parent directory does not fail startup.
            if (!directory.mkdirs()) {
                throw new IOException("Could not create directory " + directory.getAbsolutePath());
            }
        }
        FileInputStream walletStream = null;
        try {
            File chainFile = new File(directory, filePrefix + ".spvchain");
            boolean chainFileExists = chainFile.exists();
            vWalletFile = new File(directory, filePrefix + ".wallet");
            // A wallet without its chain file means the chain was deleted: replay it into the wallet.
            boolean shouldReplayWallet = vWalletFile.exists() && !chainFileExists;
            vStore = new SPVBlockStore(params, chainFile);
            if (!chainFileExists && checkpoints != null) {
                // Ugly hack! We have to create the wallet once here to learn the earliest key time, and then throw it
                // away. The reason is that wallet extensions might need access to peergroups/chains/etc so we have to
                // create the wallet later, but we need to know the time early here before we create the BlockChain
                // object.
                long time = Long.MAX_VALUE;
                if (vWalletFile.exists()) {
                    Wallet wallet = new Wallet(params);
                    FileInputStream stream = new FileInputStream(vWalletFile);
                    try {
                        new WalletProtobufSerializer().readWallet(WalletProtobufSerializer.parseToProto(stream), wallet);
                        time = wallet.getEarliestKeyCreationTime();
                    } finally {
                        // BUGFIX: this stream was previously never closed (file handle leak).
                        stream.close();
                    }
                }
                CheckpointManager.checkpoint(params, checkpoints, vStore, time);
            }
            vChain = new BlockChain(params, vStore);
            vPeerGroup = new PeerGroup(params, vChain);
            if (this.userAgent != null)
                vPeerGroup.setUserAgent(userAgent, version);
            if (vWalletFile.exists()) {
                walletStream = new FileInputStream(vWalletFile);
                vWallet = new Wallet(params);
                addWalletExtensions(); // All extensions must be present before we deserialize
                new WalletProtobufSerializer().readWallet(WalletProtobufSerializer.parseToProto(walletStream), vWallet);
                if (shouldReplayWallet)
                    vWallet.clearTransactions(0);
            } else {
                vWallet = new Wallet(params);
                vWallet.addKey(new ECKey());
                addWalletExtensions();
            }
            if (useAutoSave) vWallet.autosaveToFile(vWalletFile, 1, TimeUnit.SECONDS, null);
            // Set up peer addresses or discovery first, so if wallet extensions try to broadcast a transaction
            // before we're actually connected the broadcast waits for an appropriate number of connections.
            if (peerAddresses != null) {
                for (PeerAddress addr : peerAddresses) vPeerGroup.addAddress(addr);
                peerAddresses = null;
            } else {
                vPeerGroup.addPeerDiscovery(new DnsDiscovery(params));
            }
            vChain.addWallet(vWallet);
            vPeerGroup.addWallet(vWallet);
            onSetupCompleted();
            if (blockingStartup) {
                vPeerGroup.startAndWait();
                // Make sure we shut down cleanly.
                installShutdownHook();
                // TODO: Be able to use the provided download listener when doing a blocking startup.
                final DownloadListener listener = new DownloadListener();
                vPeerGroup.startBlockChainDownload(listener);
                listener.await();
            } else {
                Futures.addCallback(vPeerGroup.start(), new FutureCallback<State>() {
                    @Override
                    public void onSuccess(State result) {
                        final PeerEventListener l = downloadListener == null ? new DownloadListener() : downloadListener;
                        vPeerGroup.startBlockChainDownload(l);
                    }

                    @Override
                    public void onFailure(Throwable t) {
                        throw new RuntimeException(t);
                    }
                });
            }
        } catch (BlockStoreException e) {
            throw new IOException(e);
        } finally {
            if (walletStream != null) walletStream.close();
        }
    }

    /** Registers a JVM shutdown hook that stops this service, when autoStop is enabled. */
    private void installShutdownHook() {
        if (autoStop) Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override public void run() {
                try {
                    WalletAppKit.this.stopAndWait();
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
        });
    }

    /**
     * Stops networking, persists the wallet one final time and closes the block store.
     * Runs on a background thread (AbstractIdleService contract).
     */
    @Override
    protected void shutDown() throws Exception {
        // Runs in a separate thread.
        try {
            vPeerGroup.stopAndWait();
            vWallet.saveToFile(vWalletFile);
            vStore.close();

            vPeerGroup = null;
            vWallet = null;
            vStore = null;
            vChain = null;
        } catch (BlockStoreException e) {
            throw new IOException(e);
        }
    }

    /** Network parameters this kit was constructed with; available at any time. */
    public NetworkParameters params() {
        return params;
    }

    /** The constructed block chain; only available once startup has begun. */
    public BlockChain chain() {
        checkState(state() == State.STARTING || state() == State.RUNNING, "Cannot call until startup is complete");
        return vChain;
    }

    /** The constructed SPV block store; only available once startup has begun. */
    public SPVBlockStore store() {
        checkState(state() == State.STARTING || state() == State.RUNNING, "Cannot call until startup is complete");
        return vStore;
    }

    /** The loaded or newly created wallet; only available once startup has begun. */
    public Wallet wallet() {
        checkState(state() == State.STARTING || state() == State.RUNNING, "Cannot call until startup is complete");
        return vWallet;
    }

    /** The peer group managing network connections; only available once startup has begun. */
    public PeerGroup peerGroup() {
        checkState(state() == State.STARTING || state() == State.RUNNING, "Cannot call until startup is complete");
        return vPeerGroup;
    }

    /** Directory holding the chain and wallet files; available at any time. */
    public File directory() {
        return directory;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.util;
import java.io.IOException;
import java.util.Arrays;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
/**
* BitSet of fixed length (numBits), backed by accessible ({@link #getBits})
* long[], accessed with an int index, implementing {@link Bits} and
* {@link DocIdSet}. If you need to manage more than 2.1B bits, use
* {@link LongBitSet}.
*
* @lucene.internal
*/
public final class FixedBitSet extends BitSet implements MutableBits, Accountable {

  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(FixedBitSet.class);

  /**
   * If the given {@link FixedBitSet} is large enough to hold {@code numBits},
   * returns the given bits, otherwise returns a new {@link FixedBitSet} which
   * can hold the requested number of bits.
   *
   * <p>
   * <b>NOTE:</b> the returned bitset reuses the underlying {@code long[]} of
   * the given {@code bits} if possible. Also, calling {@link #length()} on the
   * returned bits may return a value greater than {@code numBits}.
   */
  public static FixedBitSet ensureCapacity(FixedBitSet bits, int numBits) {
    if (numBits < bits.length()) {
      return bits;
    } else {
      int numWords = bits2words(numBits);
      long[] arr = bits.getBits();
      if (numWords >= arr.length) {
        arr = ArrayUtil.grow(arr, numWords + 1);
      }
      return new FixedBitSet(arr, arr.length << 6);
    }
  }

  /** returns the number of 64 bit words it would take to hold numBits */
  public static int bits2words(int numBits) {
    int numLong = numBits >>> 6;
    // round up if numBits is not an exact multiple of 64
    if ((numBits & 63) != 0) {
      numLong++;
    }
    return numLong;
  }

  /**
   * Returns the popcount or cardinality of the intersection of the two sets.
   * Neither set is modified.
   */
  public static long intersectionCount(FixedBitSet a, FixedBitSet b) {
    return BitUtil.pop_intersect(a.bits, b.bits, 0, Math.min(a.numWords, b.numWords));
  }

  /**
   * Returns the popcount or cardinality of the union of the two sets. Neither
   * set is modified.
   */
  public static long unionCount(FixedBitSet a, FixedBitSet b) {
    long tot = BitUtil.pop_union(a.bits, b.bits, 0, Math.min(a.numWords, b.numWords));
    // add the bits of whichever set extends beyond the common prefix
    if (a.numWords < b.numWords) {
      tot += BitUtil.pop_array(b.bits, a.numWords, b.numWords - a.numWords);
    } else if (a.numWords > b.numWords) {
      tot += BitUtil.pop_array(a.bits, b.numWords, a.numWords - b.numWords);
    }
    return tot;
  }

  /**
   * Returns the popcount or cardinality of "a and not b" or
   * "intersection(a, not(b))". Neither set is modified.
   */
  public static long andNotCount(FixedBitSet a, FixedBitSet b) {
    long tot = BitUtil.pop_andnot(a.bits, b.bits, 0, Math.min(a.numWords, b.numWords));
    // bits of a beyond b's words are trivially "and not b"
    if (a.numWords > b.numWords) {
      tot += BitUtil.pop_array(a.bits, b.numWords, a.numWords - b.numWords);
    }
    return tot;
  }

  final long[] bits;     // backing words; words at index >= numWords are ignored
  final int numBits;     // logical length in bits
  final int numWords;    // number of words actually used = bits2words(numBits)

  /** Creates a bit set able to hold {@code numBits} bits, all initially clear. */
  public FixedBitSet(int numBits) {
    this.numBits = numBits;
    bits = new long[bits2words(numBits)];
    numWords = bits.length;
  }

  /**
   * Wraps an existing {@code long[]} as a bit set of {@code numBits} bits.
   * The array is used directly (no copy) and may be longer than required.
   *
   * @throws IllegalArgumentException if the array is too small for {@code numBits}
   */
  public FixedBitSet(long[] storedBits, int numBits) {
    this.numWords = bits2words(numBits);
    if (numWords > storedBits.length) {
      throw new IllegalArgumentException("The given long array is too small to hold " + numBits + " bits");
    }
    this.numBits = numBits;
    this.bits = storedBits;
  }

  @Override
  public int length() {
    return numBits;
  }

  @Override
  public long ramBytesUsed() {
    return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(bits);
  }

  /** Expert. */
  public long[] getBits() {
    return bits;
  }

  @Override
  public int cardinality() {
    // BUGFIX: count only the numWords words in use. The backing array may be
    // longer than numWords (long[]-taking constructor), and counting those
    // extra words would include bits outside this set.
    return (int) BitUtil.pop_array(bits, 0, numWords);
  }

  @Override
  public boolean get(int index) {
    assert index >= 0 && index < numBits: "index=" + index + ", numBits=" + numBits;
    int i = index >> 6;               // div 64
    // signed shift will keep a negative index and force an
    // array-index-out-of-bounds-exception, removing the need for an explicit check.
    long bitmask = 1L << index;       // << only uses the low 6 bits of index
    return (bits[i] & bitmask) != 0;
  }

  /** Sets the bit at the provided index. */
  public void set(int index) {
    assert index >= 0 && index < numBits: "index=" + index + ", numBits=" + numBits;
    int wordNum = index >> 6;      // div 64
    long bitmask = 1L << index;
    bits[wordNum] |= bitmask;
  }

  /** Sets the bit and returns its previous value. */
  public boolean getAndSet(int index) {
    assert index >= 0 && index < numBits;
    int wordNum = index >> 6;      // div 64
    long bitmask = 1L << index;
    boolean val = (bits[wordNum] & bitmask) != 0;
    bits[wordNum] |= bitmask;
    return val;
  }

  @Override
  public void clear(int index) {
    assert index >= 0 && index < numBits;
    int wordNum = index >> 6;
    long bitmask = 1L << index;
    bits[wordNum] &= ~bitmask;
  }

  /** Clears the bit and returns its previous value. */
  public boolean getAndClear(int index) {
    assert index >= 0 && index < numBits;
    int wordNum = index >> 6;      // div 64
    long bitmask = 1L << index;
    boolean val = (bits[wordNum] & bitmask) != 0;
    bits[wordNum] &= ~bitmask;
    return val;
  }

  @Override
  public int nextSetBit(int index) {
    assert index >= 0 && index < numBits : "index=" + index + ", numBits=" + numBits;
    int i = index >> 6;
    long word = bits[i] >> index;  // skip all the bits to the right of index
    if (word!=0) {
      return index + Long.numberOfTrailingZeros(word);
    }
    while(++i < numWords) {
      word = bits[i];
      if (word != 0) {
        return (i<<6) + Long.numberOfTrailingZeros(word);
      }
    }
    return DocIdSetIterator.NO_MORE_DOCS;
  }

  @Override
  public int prevSetBit(int index) {
    assert index >= 0 && index < numBits: "index=" + index + " numBits=" + numBits;
    int i = index >> 6;
    final int subIndex = index & 0x3f;  // index within the word
    long word = (bits[i] << (63-subIndex));  // skip all the bits to the left of index
    if (word != 0) {
      return (i << 6) + subIndex - Long.numberOfLeadingZeros(word); // See LUCENE-3197
    }
    while (--i >= 0) {
      word = bits[i];
      if (word !=0 ) {
        return (i << 6) + 63 - Long.numberOfLeadingZeros(word);
      }
    }
    return -1;
  }

  @Override
  public void or(DocIdSetIterator iter) throws IOException {
    // fast path when the iterator wraps another FixedBitSet (also avoids
    // calling getFixedBitSetOrNull twice)
    final FixedBitSet other = BitSetIterator.getFixedBitSetOrNull(iter);
    if (other != null) {
      assertUnpositioned(iter);
      or(other);
    } else {
      super.or(iter);
    }
  }

  /** this = this OR other */
  public void or(FixedBitSet other) {
    or(other.bits, other.numWords);
  }

  private void or(final long[] otherArr, final int otherNumWords) {
    assert otherNumWords <= numWords : "numWords=" + numWords + ", otherNumWords=" + otherNumWords;
    final long[] thisArr = this.bits;
    int pos = Math.min(numWords, otherNumWords);
    while (--pos >= 0) {
      thisArr[pos] |= otherArr[pos];
    }
  }

  /** this = this XOR other */
  public void xor(FixedBitSet other) {
    xor(other.bits, other.numWords);
  }

  /** Does in-place XOR of the bits provided by the iterator. */
  public void xor(DocIdSetIterator iter) throws IOException {
    assertUnpositioned(iter);
    final FixedBitSet other = BitSetIterator.getFixedBitSetOrNull(iter);
    if (other != null) {
      xor(other);
    } else {
      int doc;
      while ((doc = iter.nextDoc()) < numBits) {
        flip(doc);
      }
    }
  }

  private void xor(long[] otherBits, int otherNumWords) {
    assert otherNumWords <= numWords : "numWords=" + numWords + ", other.numWords=" + otherNumWords;
    final long[] thisBits = this.bits;
    int pos = Math.min(numWords, otherNumWords);
    while (--pos >= 0) {
      thisBits[pos] ^= otherBits[pos];
    }
  }

  @Override
  public void and(DocIdSetIterator iter) throws IOException {
    final FixedBitSet other = BitSetIterator.getFixedBitSetOrNull(iter);
    if (other != null) {
      assertUnpositioned(iter);
      and(other);
    } else {
      super.and(iter);
    }
  }

  /** returns true if the sets have any elements in common */
  public boolean intersects(FixedBitSet other) {
    int pos = Math.min(numWords, other.numWords);
    while (--pos>=0) {
      if ((bits[pos] & other.bits[pos]) != 0) return true;
    }
    return false;
  }

  /** this = this AND other */
  public void and(FixedBitSet other) {
    and(other.bits, other.numWords);
  }

  private void and(final long[] otherArr, final int otherNumWords) {
    final long[] thisArr = this.bits;
    int pos = Math.min(this.numWords, otherNumWords);
    while(--pos >= 0) {
      thisArr[pos] &= otherArr[pos];
    }
    // anything beyond the other set's words is ANDed with implicit zeros
    if (this.numWords > otherNumWords) {
      Arrays.fill(thisArr, otherNumWords, this.numWords, 0L);
    }
  }

  @Override
  public void andNot(DocIdSetIterator iter) throws IOException {
    final FixedBitSet other = BitSetIterator.getFixedBitSetOrNull(iter);
    if (other != null) {
      assertUnpositioned(iter);
      andNot(other);
    } else {
      super.andNot(iter);
    }
  }

  /** this = this AND NOT other */
  public void andNot(FixedBitSet other) {
    // BUGFIX: use other.numWords, consistent with or()/xor()/and(). Using
    // other.bits.length could include backing-array words beyond the other
    // set's logical size.
    andNot(other.bits, other.numWords);
  }

  private void andNot(final long[] otherArr, final int otherNumWords) {
    final long[] thisArr = this.bits;
    int pos = Math.min(this.numWords, otherNumWords);
    while(--pos >= 0) {
      thisArr[pos] &= ~otherArr[pos];
    }
  }

  // NOTE: no .isEmpty() here because that's trappy (ie,
  // typically isEmpty is low cost, but this one wouldn't
  // be)

  /** Flips a range of bits
   *
   * @param startIndex lower index
   * @param endIndex one-past the last bit to flip
   */
  public void flip(int startIndex, int endIndex) {
    assert startIndex >= 0 && startIndex < numBits;
    assert endIndex >= 0 && endIndex <= numBits;
    if (endIndex <= startIndex) {
      return;
    }
    int startWord = startIndex >> 6;
    int endWord = (endIndex-1) >> 6;
    /*** Grrr, java shifting wraps around so -1L>>>64 == -1
     * for that reason, make sure not to use endmask if the bits to flip will
     * be zero in the last word (redefine endWord to be the last changed...)
    long startmask = -1L << (startIndex & 0x3f);     // example: 11111...111000
    long endmask = -1L >>> (64-(endIndex & 0x3f));   // example: 00111...111111
    ***/
    long startmask = -1L << startIndex;
    long endmask = -1L >>> -endIndex;  // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
    if (startWord == endWord) {
      bits[startWord] ^= (startmask & endmask);
      return;
    }
    bits[startWord] ^= startmask;
    for (int i=startWord+1; i<endWord; i++) {
      bits[i] = ~bits[i];
    }
    bits[endWord] ^= endmask;
  }

  /** Flip the bit at the provided index. */
  public void flip(int index) {
    assert index >= 0 && index < numBits: "index=" + index + " numBits=" + numBits;
    int wordNum = index >> 6;      // div 64
    int bit = index & 0x3f;        // mod 64
    long bitmask = 1L << bit;
    bits[wordNum] ^= bitmask;
  }

  /** Sets a range of bits
   *
   * @param startIndex lower index
   * @param endIndex one-past the last bit to set
   */
  public void set(int startIndex, int endIndex) {
    assert startIndex >= 0 && startIndex < numBits;
    assert endIndex >= 0 && endIndex <= numBits;
    if (endIndex <= startIndex) {
      return;
    }
    int startWord = startIndex >> 6;
    int endWord = (endIndex-1) >> 6;
    long startmask = -1L << startIndex;
    long endmask = -1L >>> -endIndex;  // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
    if (startWord == endWord) {
      bits[startWord] |= (startmask & endmask);
      return;
    }
    bits[startWord] |= startmask;
    Arrays.fill(bits, startWord+1, endWord, -1L);
    bits[endWord] |= endmask;
  }

  @Override
  public void clear(int startIndex, int endIndex) {
    assert startIndex >= 0 && startIndex < numBits : "startIndex=" + startIndex + ", numBits=" + numBits;
    assert endIndex >= 0 && endIndex <= numBits : "endIndex=" + endIndex + ", numBits=" + numBits;
    if (endIndex <= startIndex) {
      return;
    }
    int startWord = startIndex >> 6;
    int endWord = (endIndex-1) >> 6;
    long startmask = -1L << startIndex;
    long endmask = -1L >>> -endIndex;  // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
    // invert masks since we are clearing
    startmask = ~startmask;
    endmask = ~endmask;
    if (startWord == endWord) {
      bits[startWord] &= (startmask | endmask);
      return;
    }
    bits[startWord] &= startmask;
    Arrays.fill(bits, startWord+1, endWord, 0L);
    bits[endWord] &= endmask;
  }

  @Override
  public FixedBitSet clone() {
    long[] bits = new long[this.bits.length];
    System.arraycopy(this.bits, 0, bits, 0, bits.length);
    return new FixedBitSet(bits, numBits);
  }

  /** returns true if both sets have the same bits set */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof FixedBitSet)) {
      return false;
    }
    FixedBitSet other = (FixedBitSet) o;
    if (numBits != other.length()) {
      return false;
    }
    return Arrays.equals(bits, other.bits);
  }

  @Override
  public int hashCode() {
    long h = 0;
    for (int i = numWords; --i>=0;) {
      h ^= bits[i];
      h = (h << 1) | (h >>> 63); // rotate left
    }
    // fold leftmost bits into right and add a constant to prevent
    // empty sets from returning 0, which is too common.
    return (int) ((h>>32) ^ h) + 0x98761234;
  }
}
| |
/**
* Copyright 2007-2016, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.gateway.service.http.proxy;
import static org.kaazing.gateway.transport.http.HttpHeaders.HEADER_CONNECTION;
import static org.kaazing.gateway.transport.http.HttpHeaders.HEADER_LOCATION;
import static org.kaazing.gateway.transport.http.HttpHeaders.HEADER_SET_COOKIE;
import static org.kaazing.gateway.transport.http.HttpHeaders.HEADER_UPGRADE;
import static org.kaazing.gateway.transport.http.HttpHeaders.HEADER_VIA;
import static org.kaazing.gateway.transport.http.HttpStatus.CLIENT_NOT_FOUND;
import static org.kaazing.gateway.transport.http.HttpStatus.INFO_SWITCHING_PROTOCOLS;
import static org.kaazing.gateway.transport.http.HttpHeaders.HEADER_FORWARDED;
import static org.kaazing.gateway.transport.http.HttpHeaders.HEADER_X_FORWARDED_FOR;
import static org.kaazing.gateway.transport.http.HttpHeaders.HEADER_X_FORWARDED_HOST;
import static org.kaazing.gateway.transport.http.HttpHeaders.HEADER_X_FORWARDED_PROTO;
import static org.kaazing.gateway.transport.http.HttpHeaders.HEADER_X_FORWARDED_SERVER;
import static java.lang.String.format;
import java.net.URI;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.UUID;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import org.apache.mina.core.future.CloseFuture;
import org.apache.mina.core.future.ConnectFuture;
import org.apache.mina.core.future.IoFutureListener;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.core.session.IoSessionInitializer;
import org.kaazing.gateway.resource.address.ResourceAddress;
import org.kaazing.gateway.resource.address.http.HttpResourceAddress;
import org.kaazing.gateway.resource.address.uri.URIUtils;
import org.kaazing.gateway.service.ServiceContext;
import org.kaazing.gateway.service.ServiceProperties;
import org.kaazing.gateway.service.proxy.AbstractProxyAcceptHandler;
import org.kaazing.gateway.service.proxy.AbstractProxyHandler;
import org.kaazing.gateway.transport.BridgeSession;
import org.kaazing.gateway.transport.IoHandlerAdapter;
import org.kaazing.gateway.transport.http.DefaultHttpSession;
import org.kaazing.gateway.transport.http.HttpAcceptSession;
import org.kaazing.gateway.transport.http.HttpConnectSession;
import org.kaazing.gateway.transport.http.HttpSession;
import org.kaazing.gateway.transport.http.HttpStatus;
import org.kaazing.mina.core.session.IoSessionEx;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
class HttpProxyServiceHandler extends AbstractProxyAcceptHandler {
private static final Logger LOGGER = LoggerFactory.getLogger("service.http.proxy");
private static final String VIA_HEADER_FORMATTER = "1.1 kaazing-%s";
private static final String FORWARDED_INJECT = "inject";
private static final String FORWARDED_EXCLUDE = "exclude";
private static final String FORWARDED_IGNORE = "ignore";
private static final String FORWARDED_FOR = "for";
private static final String FORWARDED_BY = "by";
private static final String FORWARDED_PROTO = "proto";
private static final String FORWARDED_HOST = "host";
private static final Set KNOWN_SIMPLE_PROPERTIES;
static {
Set<String> set = new HashSet<>();
set.add("rewrite-cookie-domain");
set.add("rewrite-cookie-path");
set.add("rewrite-location");
set.add("use-forwarded");
KNOWN_SIMPLE_PROPERTIES = Collections.unmodifiableSet(set);
}
private static final Set KNOWN_NESTED_PROPERTIES;
static {
Set<String> set = new HashSet<>();
set.add("cookie-domain-mapping");
set.add("cookie-path-mapping");
set.add("location-mapping");
KNOWN_NESTED_PROPERTIES = Collections.unmodifiableSet(set);
}
private final String viaHeader;
private static final Set<String> USE_FORWARDED_VALUES;
static {
Set<String> set = new HashSet<>();
set.add(FORWARDED_INJECT);
set.add(FORWARDED_EXCLUDE);
set.add(FORWARDED_IGNORE);
USE_FORWARDED_VALUES = Collections.unmodifiableSet(set);
}
private String connectURI;
private String useForwarded;
private String serviceName;
private int remoteClientPort;
private boolean rewriteCookieDomain;
private boolean rewriteCookiePath;
private boolean rewriteLocation;
private Map<String, String> cookieDomainMap;
private Map<String, String> cookiePathMap;
private Map<String, String> locationMap;
public HttpProxyServiceHandler() {
viaHeader = String.format(VIA_HEADER_FORMATTER, UUID.randomUUID());
}
void init() {
ServiceContext serviceContext = getServiceContext();
serviceName = serviceContext.getServiceName();
Collection<String> acceptURIs = serviceContext.getAccepts();
Collection<String> connectURIs = serviceContext.getConnects();
String acceptURI = acceptURIs.iterator().next();
connectURI = connectURIs.iterator().next();
validateProperties(serviceContext);
ServiceProperties properties = serviceContext.getProperties();
rewriteCookieDomain = "enabled".equals(properties.get("rewrite-cookie-domain"));
rewriteCookiePath = "enabled".equals(properties.get("rewrite-cookie-path"));
rewriteLocation = !"disabled".equals(properties.get("rewrite-location"));
cookieDomainMap = new HashMap<>();
if (rewriteCookieDomain) {
List<ServiceProperties> cookieDomainProperties = properties.getNested("cookie-domain-mapping");
for (ServiceProperties sp : cookieDomainProperties) {
cookieDomainMap.put(sp.get("from"), sp.get("to"));
}
}
cookiePathMap = new HashMap<>();
if (rewriteCookiePath) {
List<ServiceProperties> cookiePathProperties = properties.getNested("cookie-path-mapping");
for (ServiceProperties sp : cookiePathProperties) {
cookiePathMap.put(sp.get("from"), sp.get("to"));
}
}
locationMap = new HashMap<>();
if (rewriteLocation) {
List<ServiceProperties> locationProperties = properties.getNested("location-mapping");
for (ServiceProperties sp : locationProperties) {
locationMap.put(sp.get("from"), sp.get("to"));
}
locationMap.put(connectURI, acceptURI);
}
useForwarded = properties.get("use-forwarded");
if (useForwarded == null) {
useForwarded = FORWARDED_IGNORE;
}
if (!USE_FORWARDED_VALUES.contains(useForwarded)) {
throw new IllegalArgumentException(serviceContext.getServiceName()
+ " http.proxy service specifies unknown property value : " + useForwarded + " for use-forwarded");
}
}
private void validateProperties(ServiceContext serviceContext) {
ServiceProperties properties = serviceContext.getProperties();
// validate all properties: rewrite-cookie-domain, rewrite-cookie-path, rewrite-location
Iterable<String> simpleProperties = properties.simplePropertyNames();
Set<String> unknownProperties = StreamSupport.stream(simpleProperties.spliterator(), false)
.filter(p -> !KNOWN_SIMPLE_PROPERTIES.contains(p))
.collect(Collectors.toSet());
Iterable<String> nestedProperties = properties.nestedPropertyNames();
StreamSupport.stream(nestedProperties.spliterator(), false)
.filter(p -> !KNOWN_NESTED_PROPERTIES.contains(p))
.forEach(unknownProperties::add);
if (!unknownProperties.isEmpty()) {
throw new IllegalArgumentException(serviceContext.getServiceName() +
" http.proxy service specifies unknown properties : " + unknownProperties);
}
}
    @Override
    protected AbstractProxyHandler createConnectHandler() {
        // Factory hook overridden from AbstractProxyAcceptHandler: supplies the
        // handler for the outbound (gateway-to-origin) side of each proxied
        // connection. ConnectHandler is presumably declared later in this file.
        return new ConnectHandler();
    }
    /**
     * Handles a newly accepted client HTTP session: records the remote client
     * port, validates the request path and loop-detection headers, then opens
     * the outbound connection to the configured connect URI.
     */
    @Override
    public void sessionOpened(IoSession session) {
        // get the port number of the remote client
        // NOTE(review): remoteClientPort is an instance field on a handler shared
        // across sessions, so concurrent clients may overwrite each other's value
        // before it is consumed — confirm this is intended.
        BridgeSession bridgeSession = (BridgeSession) session;
        remoteClientPort = BridgeSession.REMOTE_ADDRESS.get(bridgeSession).getTransport().getResource().getPort();
        if (!session.isClosing()) {
            final DefaultHttpSession acceptSession = (DefaultHttpSession) session;
            // final Subject subject = ((IoSessionEx) acceptSession).getSubject();

            // log warning first time we see Http 1.0 request
            if (acceptSession.getVersion().toString().equals("HTTP/1.0")) {
                if (this.serviceName != null) {
                    LOGGER.warn(String.format(
                            "http.proxy service %s received an HTTP 1.0 request. HTTP 1.0 is not explicitly supported.",
                            this.serviceName));
                } else {
                    LOGGER.warn("http.proxy service received an HTTP 1.0 request. HTTP 1.0 is not explicitly supported.");
                }
            }
            // a request outside the service's accept path is answered 404 and dropped
            if (!validateRequestPath(acceptSession)) {
                acceptSession.setStatus(CLIENT_NOT_FOUND);
                acceptSession.close(false);
                return;
            }
            // a request whose Via header already carries our token is a proxy loop
            if (!validateNoLoopDetected(acceptSession)) {
                return;
            }
            // connect to the origin asynchronously; the listener wires the two
            // sessions together once the outbound connection completes
            ConnectSessionInitializer sessionInitializer = new ConnectSessionInitializer(acceptSession);
            String resolvedConnectURI = computeConnectURI(acceptSession, URI.create(connectURI)).toASCIIString();
            ConnectFuture future = getServiceContext().connect(resolvedConnectURI, getConnectHandler(), sessionInitializer);
            future.addListener(new ConnectListener(acceptSession));
            super.sessionOpened(acceptSession);
        }
    }
/**
 * Maps the client's request path onto the backend URI by appending the part of
 * the request URI that follows the service's accept path.
 */
private URI computeConnectURI(DefaultHttpSession acceptSession, URI connectURI) {
    String servicePath = acceptSession.getServicePath().getPath();
    String fullRequest = acceptSession.getRequestURI().toString();
    String trailing = fullRequest.substring(servicePath.length());
    return URI.create(connectURI + trailing);
}
/**
 * Returns true when the normalized request path stays under the service's
 * accept path (guards against "../" escapes).
 */
private boolean validateRequestPath(DefaultHttpSession acceptSession) {
    String normalizedRequestPath = acceptSession.getRequestURI().normalize().getPath();
    return normalizedRequestPath.startsWith(acceptSession.getServicePath().getPath());
}
/**
 * Detects proxy loops: if this gateway's own Via token already appears among
 * the request's Via headers, the request has passed through here before.
 *
 * @param acceptSession the accepted client session
 * @return true when no loop was detected; false after responding with
 *         SERVER_LOOP_DETECTED and closing the session
 */
private boolean validateNoLoopDetected(DefaultHttpSession acceptSession) {
    List<String> viaHeaders = acceptSession.getReadHeaders(HEADER_VIA);
    if (viaHeaders != null) {
        for (String via : viaHeaders) {
            if (via.equals(viaHeader)) {
                LOGGER.warn("Connection to " + getConnectURIs().iterator().next() +
                        " failed due to loop detection [" + acceptSession + "->]");
                acceptSession.setStatus(HttpStatus.SERVER_LOOP_DETECTED);
                acceptSession.close(true);
                return false;
            }
        }
    }
    return true;
}
/*
 * Initializer for connect session. It adds the processed accept session headers on the connect session,
 * and copies the HTTP version and method from the accepted request.
 */
private class ConnectSessionInitializer implements IoSessionInitializer<ConnectFuture> {
    // The client-side session whose request is being proxied.
    private final DefaultHttpSession acceptSession;

    ConnectSessionInitializer(DefaultHttpSession acceptSession) {
        this.acceptSession = acceptSession;
    }

    @Override
    public void initializeSession(IoSession session, ConnectFuture future) {
        HttpConnectSession connectSession = (HttpConnectSession) session;
        connectSession.setVersion(acceptSession.getVersion());
        connectSession.setMethod(acceptSession.getMethod());
        // NOTE(review): reads the connect session's own request URI and writes it
        // straight back — apparently a no-op; confirm whether a URI derived from
        // the accept session was intended here.
        URI connectURI = connectSession.getRequestURI();
        connectSession.setRequestURI(connectURI);
        processRequestHeaders(acceptSession, connectSession);
    }
}
/*
 * Listens for completion of the backend connect attempt: on success wires the
 * accept and connect sessions together; on failure answers the client with a
 * gateway-timeout status and closes it.
 */
private class ConnectListener implements IoFutureListener<ConnectFuture> {
    private final DefaultHttpSession acceptSession;

    ConnectListener(DefaultHttpSession acceptSession) {
        this.acceptSession = acceptSession;
    }

    @Override
    public void operationComplete(ConnectFuture future) {
        String connectURI = getConnectURIs().iterator().next();
        if (future.isConnected()) {
            DefaultHttpSession connectSession = (DefaultHttpSession) future.getSession();
            if (LOGGER.isTraceEnabled()) {
                LOGGER.trace("Connected to " + connectURI + " [" + acceptSession + "->" + connectSession + "]");
            }
            if (acceptSession == null || acceptSession.isClosing()) {
                // Client went away while we were connecting; drop the backend session.
                connectSession.close(true);
            } else {
                AttachedSessionManager attachedSessionManager = attachSessions(acceptSession, connectSession);
                // Upgraders hand the raw transports over to each other when either
                // HTTP session closes after a "switching protocols" response.
                connectSession.getCloseFuture().addListener(new Upgrader(connectSession, acceptSession));
                acceptSession.getCloseFuture().addListener(new Upgrader(acceptSession, connectSession));
                flushQueuedMessages(acceptSession, attachedSessionManager);
            }
        } else {
            LOGGER.warn("Connection to " + connectURI + " failed [" + acceptSession + "->]");
            acceptSession.setStatus(HttpStatus.SERVER_GATEWAY_TIMEOUT);
            acceptSession.close(true);
        }
    }
}
/*
 * Handler for the backend (connect) side of the proxy: relays response traffic
 * and rewrites response headers (Set-Cookie domain/path, Location) before they
 * are written to the client, according to the configured mappings.
 */
private class ConnectHandler extends AbstractProxyHandler {

    @Override
    public void messageReceived(IoSession session, Object message) {
        processResponseHeaders(session);
        super.messageReceived(session, message);
    }

    @Override
    public void sessionClosed(IoSession session) {
        processResponseHeaders(session);
        super.sessionClosed(session);
    }

    // Copies status/reason/version and the processed headers from the connect
    // session to the attached accept session — but only while the accept
    // response has not started (no bytes written, not committing, not closing).
    private void processResponseHeaders(IoSession session) {
        HttpConnectSession connectSession = (HttpConnectSession) session;
        AttachedSessionManager attachedSessionManager = getAttachedSessionManager(session);
        if (attachedSessionManager != null) {
            HttpAcceptSession acceptSession = (HttpAcceptSession) attachedSessionManager.getAttachedSession();
            if (acceptSession.getWrittenBytes() == 0L && !acceptSession.isCommitting()
                    && !acceptSession.isClosing()) {
                acceptSession.setStatus(connectSession.getStatus());
                acceptSession.setReason(connectSession.getReason());
                acceptSession.setVersion(connectSession.getVersion());
                processResponseHeaders(connectSession, acceptSession);
            }
        }
    }

    // Adds all non-hop-by-hop response headers to the accept session, applying
    // the configured Set-Cookie and Location rewrites where enabled.
    private void processResponseHeaders(HttpSession connectSession, HttpSession acceptSession) {
        Set<String> hopByHopHeaders = getHopByHopHeaders(connectSession);
        boolean upgrade = connectSession.getReadHeader(HEADER_UPGRADE) != null;
        if (upgrade) {
            // This proxy supports upgrades, so let the Upgrade header through.
            hopByHopHeaders.remove(HEADER_UPGRADE);
        }
        // Add processed connect session headers to accept session
        for (Map.Entry<String, List<String>> e : connectSession.getReadHeaders().entrySet()) {
            String name = e.getKey();
            // don't add hop-by-hop response headers
            if (hopByHopHeaders.contains(name)) {
                continue;
            }
            for (String value : e.getValue()) {
                if (name.equalsIgnoreCase(HEADER_SET_COOKIE)) {
                    if (rewriteCookieDomain) {
                        value = processCookieDomain(value, cookieDomainMap);
                    }
                    if (rewriteCookiePath) {
                        value = processCookiePath(value, cookiePathMap);
                    }
                    acceptSession.addWriteHeader(name, value);
                } else if (name.equalsIgnoreCase(HEADER_LOCATION)) {
                    if (rewriteLocation) {
                        value = processLocationHeader(value, locationMap);
                    }
                    acceptSession.addWriteHeader(name, value);
                } else {
                    acceptSession.addWriteHeader(name, value);
                }
            }
        }
        // Add Connection: upgrade to acceptSession
        if (upgrade) {
            acceptSession.setWriteHeader(HEADER_CONNECTION, HEADER_UPGRADE);
        }
    }

    // Rewrites the first configured "domain=<from>" occurrence in a Set-Cookie
    // value to "domain=<to>", preserving the rest of the cookie verbatim.
    // NOTE(review): matching is done against the lower-cased cookie, which
    // assumes configured 'from' values are lower-case — confirm in config docs.
    private String processCookieDomain(String cookie, Map<String, String> cookieDomainMap) {
        String lowerCookie = cookie.toLowerCase();
        if (lowerCookie.contains("domain=")) {
            return cookieDomainMap.entrySet().stream()
                    .filter(e -> lowerCookie.contains("domain="+e.getKey()))
                    .findFirst()
                    .map(e -> {
                        int index = lowerCookie.indexOf("domain="+e.getKey());
                        // 7 == "domain=".length(): splice the replacement in after it.
                        return cookie.substring(0, index+7)+e.getValue()+cookie.substring(index+7+e.getKey().length());
                    })
                    .orElse(cookie);
        }
        return cookie;
    }

    // Same as processCookieDomain, but for the "path=" cookie attribute.
    private String processCookiePath(String cookie, Map<String, String> cookiePathMap) {
        String lowerCookie = cookie.toLowerCase();
        if (lowerCookie.contains("path=")) {
            return cookiePathMap.entrySet().stream()
                    .filter(e -> lowerCookie.contains("path="+e.getKey()))
                    .findFirst()
                    .map(e -> {
                        int index = lowerCookie.indexOf("path="+e.getKey());
                        // 5 == "path=".length(): splice the replacement in after it.
                        return cookie.substring(0, index+5)+e.getValue()+cookie.substring(index+5+e.getKey().length());
                    })
                    .orElse(cookie);
        }
        return cookie;
    }

    // Replaces the first matching location-map prefix of a Location header value.
    private String processLocationHeader(String location, Map<String, String> locationMap) {
        return locationMap.entrySet().stream()
                .filter(e -> location.startsWith(e.getKey()))
                .findFirst()
                .map(e -> location.replaceFirst(Pattern.quote(e.getKey()), e.getValue()))
                .orElse(location);
    }
}
/*
 * Write all (except hop-by-hop) headers from source session to destination session.
 *
 * If the request is an upgrade, the Upgrade header is allowed through because
 * this service supports protocol upgrades.
 */
private static boolean processHopByHopHeaders(HttpSession src, HttpSession dest) {
    Set<String> hopByHopHeaders = getHopByHopHeaders(src);
    boolean upgrade = src.getReadHeader(HEADER_UPGRADE) != null;
    if (upgrade) {
        hopByHopHeaders.remove(HEADER_UPGRADE);
    }
    // Copy every remaining header (all of its values) to the destination.
    for (Map.Entry<String, List<String>> entry : src.getReadHeaders().entrySet()) {
        String name = entry.getKey();
        if (hopByHopHeaders.contains(name)) {
            continue;  // hop-by-hop: never forwarded
        }
        for (String value : entry.getValue()) {
            dest.addWriteHeader(name, value);
        }
    }
    return upgrade;
}
/*
 * Write all (except hop-by-hop) request headers from accept session to connect session. If the request is an
 * upgrade one, let the Upgrade header go through as this service supports upgrade.
 */
private void processRequestHeaders(HttpAcceptSession acceptSession, HttpConnectSession connectSession) {
    boolean upgrade = processHopByHopHeaders(acceptSession, connectSession);
    // Add Connection: upgrade or Connection: close header
    if (upgrade) {
        connectSession.setWriteHeader(HEADER_CONNECTION, HEADER_UPGRADE);
    } else {
        ResourceAddress address = connectSession.getRemoteAddress();
        // If keep-alive is disabled, add Connection: close header
        if (!address.getOption(HttpResourceAddress.KEEP_ALIVE)) {
            connectSession.setWriteHeader(HEADER_CONNECTION, "close");
        }
    }
    // Add Via: 1.1 kaazing + uuid header; this token is also what
    // validateNoLoopDetected looks for on the accept side.
    connectSession.addWriteHeader(HEADER_VIA, viaHeader);
    // Add forwarded headers
    setupForwardedHeaders(acceptSession, connectSession);
}
/**
 * Compose the Forwarded header and the X-Forwarded headers and add them to the
 * connect session write headers, honouring the service's 'use-forwarded'
 * property: "inject" adds the corresponding forwarding data, "ignore" (the
 * default) adds nothing so this proxy stays anonymous, and "exclude" deletes
 * any Forwarded/X-Forwarded headers that would otherwise be propagated.
 *
 * @param acceptSession  session accepted from the client
 * @param connectSession session connected to the backend server
 */
private void setupForwardedHeaders(HttpAcceptSession acceptSession, HttpConnectSession connectSession) {
    if (FORWARDED_EXCLUDE.equalsIgnoreCase(useForwarded)) {
        excludeForwardedHeaders(connectSession);
        return;
    }
    if (FORWARDED_INJECT.equalsIgnoreCase(useForwarded)) {
        // BUG FIX: String.format never returns null, so the previous null check
        // on the formatted value was dead code and a missing client address was
        // forwarded as the literal "null:<port>". Guard on the resolved IP
        // address itself instead.
        String remoteIpAddress = getResourceIpAddress(acceptSession, FORWARDED_FOR);
        String remoteIpWithPort = (remoteIpAddress == null)
                ? null
                : format("%s:%d", remoteIpAddress, remoteClientPort);
        if (remoteIpWithPort != null) {
            connectSession.addWriteHeader(HEADER_X_FORWARDED_FOR, remoteIpWithPort);
        }
        String serverIpAddress = getResourceIpAddress(acceptSession, FORWARDED_BY);
        if (serverIpAddress != null) {
            connectSession.addWriteHeader(HEADER_X_FORWARDED_SERVER, serverIpAddress);
        }
        String protocol = acceptSession.isSecure() ? "https" : "http";
        connectSession.addWriteHeader(HEADER_X_FORWARDED_PROTO, protocol);
        String externalURI = acceptSession.getLocalAddress().getExternalURI();
        String host = URIUtils.getHost(externalURI);
        String port = format("%d", URIUtils.getPort(externalURI));
        connectSession.addWriteHeader(HEADER_X_FORWARDED_HOST, format("%s:%s", host, port));
        // Composite RFC 7239-style Forwarded header mirroring the X-Forwarded-* data.
        connectSession.addWriteHeader(HEADER_FORWARDED,
                format("%s=%s;%s=%s;%s=%s;%s=%s:%s", FORWARDED_FOR, remoteIpWithPort, FORWARDED_BY, serverIpAddress,
                        FORWARDED_PROTO, protocol, FORWARDED_HOST, host, port));
    }
}
/**
 * Strips every forwarding-related header from the connect session; used when
 * the 'use-forwarded' property of the http.proxy service is set to 'exclude'.
 *
 * @param connectSession session connected to the backend server
 */
private static void excludeForwardedHeaders(HttpConnectSession connectSession) {
    String[] forwardingHeaders = {
            HEADER_FORWARDED,
            HEADER_X_FORWARDED_FOR,
            HEADER_X_FORWARDED_SERVER,
            HEADER_X_FORWARDED_HOST,
            HEADER_X_FORWARDED_PROTO,
    };
    for (String header : forwardingHeaders) {
        connectSession.clearWriteHeaders(header);
    }
}
/**
 * Get the IP address of the resource based on the parameter name.
 *
 * @param acceptSession the accepted client session
 * @param parameterName either 'for' (the address of the client making the
 *        request) or 'by' (the address of this proxy)
 * @return the IP address for the requested side, or null when it cannot be
 *         determined (no TCP transport, or unknown parameter name)
 */
private static String getResourceIpAddress(HttpAcceptSession acceptSession, String parameterName) {
    ResourceAddress resourceAddress;
    switch (parameterName) {
    case FORWARDED_FOR:
        resourceAddress = acceptSession.getRemoteAddress();
        break;
    case FORWARDED_BY:
        resourceAddress = acceptSession.getLocalAddress();
        break;
    default:
        // BUG FIX: previously an unknown parameter left resourceAddress null and
        // triggered a NullPointerException below; report "no address" instead.
        return null;
    }
    String resourceIpAddress = null;
    ResourceAddress tcpResourceAddress = resourceAddress.findTransport("tcp");
    if (tcpResourceAddress != null) {
        URI resource = tcpResourceAddress.getResource();
        resourceIpAddress = resource.getHost();
    }
    return resourceIpAddress;
}
/*
 * Collect the hop-by-hop header names listed in the Connection header value
 * (compared case-insensitively), plus the Connection header itself.
 */
private static Set<String> getHopByHopHeaders(HttpSession session) {
    Set<String> hopByHopHeaders = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
    List<String> connectionHeaders = session.getReadHeaders(HEADER_CONNECTION);
    if (connectionHeaders != null) {
        hopByHopHeaders.addAll(connectionHeaders);
    }
    hopByHopHeaders.add(HEADER_CONNECTION);
    return hopByHopHeaders;
}
/*
 * An upgrade handler that connects transport sessions of http accept and connect sessions.
 * After a protocol upgrade, raw transport traffic read on one side is relayed
 * verbatim to the peer's attached session; close or error on either side
 * closes the peer as well.
 */
private static class ProxyUpgradeHandler extends IoHandlerAdapter<IoSessionEx> {
    // Peer transport session that receives everything read from this side.
    final IoSession attachedSession;

    ProxyUpgradeHandler(IoSession attachedSession) {
        this.attachedSession = attachedSession;
    }

    @Override
    protected void doSessionOpened(final IoSessionEx session) throws Exception {
        // Reads were suspended before the upgrade (see Upgrader); resume now
        // that relaying can begin.
        session.resumeRead();
    }

    @Override
    protected void doMessageReceived(IoSessionEx session, Object message) throws Exception {
        attachedSession.write(message);
    }

    @Override
    protected void doExceptionCaught(IoSessionEx session, Throwable cause) throws Exception {
        attachedSession.close(false);
    }

    @Override
    protected void doSessionClosed(IoSessionEx session) throws Exception {
        attachedSession.close(false);
    }
}
/*
 * A close listener that upgrades the underlying transport connection at the end
 * of http session close — only when the session finished with a
 * "switching protocols" status (e.g. a WebSocket handshake).
 */
private static class Upgrader implements IoFutureListener<CloseFuture> {
    private final DefaultHttpSession session;
    private final DefaultHttpSession attachedSession;

    Upgrader(DefaultHttpSession session, DefaultHttpSession attachedSession) {
        this.session = session;
        this.attachedSession = attachedSession;
    }

    @Override
    public void operationComplete(CloseFuture future) {
        if (session.getStatus() == INFO_SWITCHING_PROTOCOLS) {
            // Relay raw traffic to the peer's parent (transport-level) session.
            ProxyUpgradeHandler handler = new ProxyUpgradeHandler(attachedSession.getParent());
            // Hold off reads until the upgraded handler is installed; the handler
            // resumes reads in doSessionOpened.
            session.suspendRead();
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug(String.format("http.proxy service is upgrading session %s", session));
            }
            session.upgrade(handler);
        }
    }
}
}
| |
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Copyright 2006 Google Inc. All rights reserved.
package com.google.devtools.build.lib.rules.cpp;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.util.AnalysisMock;
import com.google.devtools.build.lib.analysis.util.BuildViewTestCase;
import com.google.devtools.build.lib.packages.util.Crosstool.CcToolchainConfig;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** "White-box" unit test of cc_import rule. */
@RunWith(JUnit4.class)
public abstract class CcImportBaseConfiguredTargetTest extends BuildViewTestCase {
  protected String skylarkImplementationLoadStatement = "";

  @Before
  public void setSkylarkImplementationLoadStatement() throws Exception {
    setSkylarkSemanticsOptions(SkylarkCcCommonTestHelper.CC_SKYLARK_WHITELIST_FLAG);
    invalidatePackages();
    setIsSkylarkImplementation();
  }

  /** Subclasses select the cc_import implementation (native vs Starlark) under test. */
  protected abstract void setIsSkylarkImplementation();

  /** A fully-specified cc_import target should analyze without error. */
  @Test
  public void testCcImportRule() throws Exception {
    scratch.file(
        "third_party/BUILD",
        skylarkImplementationLoadStatement,
        "cc_import(",
        " name = 'a_import',",
        " static_library = 'A.a',",
        " shared_library = 'A.so',",
        " interface_library = 'A.ifso',",
        " hdrs = ['a.h'],",
        " alwayslink = 1,",
        " system_provided = 0,",
        ")");
    getConfiguredTarget("//third_party:a_import");
  }

  /** Invalid extensions and attribute combinations should produce clear analysis errors. */
  @Test
  public void testWrongCcImportDefinitions() throws Exception {
    checkError(
        "a",
        "foo",
        "does not produce any cc_import static_library files " + "(expected .a, .lib or .pic.a)",
        skylarkImplementationLoadStatement,
        "cc_import(",
        " name = 'foo',",
        " static_library = 'libfoo.so',",
        ")");
    checkError(
        "b",
        "foo",
        "does not produce any cc_import shared_library files (expected .so, .dylib or .dll)",
        skylarkImplementationLoadStatement,
        "cc_import(",
        " name = 'foo',",
        " shared_library = 'libfoo.a',",
        ")");
    checkError(
        "c",
        "foo",
        "does not produce any cc_import interface_library files "
            + "(expected .ifso, .tbd, .lib, .so or .dylib)",
        skylarkImplementationLoadStatement,
        "cc_import(",
        " name = 'foo',",
        " shared_library = 'libfoo.dll',",
        " interface_library = 'libfoo.a',",
        ")");
    checkError(
        "d",
        "foo",
        "'shared_library' shouldn't be specified when 'system_provided' is true",
        skylarkImplementationLoadStatement,
        "cc_import(",
        " name = 'foo',",
        " shared_library = 'libfoo.so',",
        " system_provided = 1,",
        ")");
    checkError(
        "e",
        "foo",
        "'shared_library' should be specified when 'system_provided' is false",
        skylarkImplementationLoadStatement,
        "cc_import(",
        " name = 'foo',",
        " interface_library = 'libfoo.ifso',",
        " system_provided = 0,",
        ")");
  }

  /** On Windows, a DLL without an interface library is runtime-only: nothing to link against. */
  @Test
  public void testRuntimeOnlyCcImportDefinitionsOnWindows() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCcToolchainConfig(
            mockToolsConfig,
            CcToolchainConfig.builder()
                .withFeatures(
                    CppRuleClasses.COPY_DYNAMIC_LIBRARIES_TO_BINARY,
                    CppRuleClasses.TARGETS_WINDOWS));
    useConfiguration();
    ConfiguredTarget target =
        scratchConfiguredTarget(
            "a",
            "foo",
            skylarkImplementationLoadStatement,
            "cc_import(name = 'foo', shared_library = 'libfoo.dll')");
    Artifact dynamicLibrary =
        Iterables.getOnlyElement(target.get(CcInfo.PROVIDER).getCcLinkingContext().getLibraries())
            .getResolvedSymlinkDynamicLibrary();
    Iterable<Artifact> dynamicLibrariesForRuntime =
        target
            .get(CcInfo.PROVIDER)
            .getCcLinkingContext()
            .getDynamicLibrariesForRuntime(/* linkingStatically= */ false);
    // Idiomatic Truth assertion (was assertThat(...).isEqualTo(null)).
    assertThat(dynamicLibrary).isNull();
    assertThat(artifactsToStrings(dynamicLibrariesForRuntime)).containsExactly("src a/libfoo.dll");
  }

  /** A static-only cc_import exposes the archive for linking. */
  @Test
  public void testCcImportWithStaticLibrary() throws Exception {
    ConfiguredTarget target =
        scratchConfiguredTarget(
            "a",
            "foo",
            skylarkImplementationLoadStatement,
            "cc_import(name = 'foo', static_library = 'libfoo.a')");
    Artifact library =
        Iterables.getOnlyElement(target.get(CcInfo.PROVIDER).getCcLinkingContext().getLibraries())
            .getStaticLibrary();
    assertThat(artifactsToStrings(ImmutableList.of(library))).containsExactly("src a/libfoo.a");
  }

  /** A shared library is exposed both for linking and as a runtime dependency. */
  @Test
  public void testCcImportWithSharedLibrary() throws Exception {
    useConfiguration("--cpu=k8");
    ConfiguredTarget target =
        scratchConfiguredTarget(
            "a",
            "foo",
            skylarkImplementationLoadStatement,
            "cc_import(name = 'foo', shared_library = 'libfoo.so')");
    Artifact dynamicLibrary =
        Iterables.getOnlyElement(target.get(CcInfo.PROVIDER).getCcLinkingContext().getLibraries())
            .getResolvedSymlinkDynamicLibrary();
    Iterable<Artifact> dynamicLibrariesForRuntime =
        target
            .get(CcInfo.PROVIDER)
            .getCcLinkingContext()
            .getDynamicLibrariesForRuntime(/* linkingStatically= */ false);
    assertThat(artifactsToStrings(ImmutableList.of(dynamicLibrary)))
        .containsExactly("src a/libfoo.so");
    assertThat(artifactsToStrings(dynamicLibrariesForRuntime))
        .containsExactly("bin _solib_k8/_U_S_Sa_Cfoo___Ua/libfoo.so");
  }

  /** When an interface library is given, it is what gets linked against. */
  @Test
  public void testCcImportWithInterfaceSharedLibrary() throws Exception {
    useConfiguration("--cpu=k8");
    ConfiguredTarget target =
        scratchConfiguredTarget(
            "b",
            "foo",
            skylarkImplementationLoadStatement,
            "cc_import(name = 'foo', shared_library = 'libfoo.so',"
                + " interface_library = 'libfoo.ifso')");
    Artifact library =
        Iterables.getOnlyElement(target.get(CcInfo.PROVIDER).getCcLinkingContext().getLibraries())
            .getResolvedSymlinkInterfaceLibrary();
    assertThat(artifactsToStrings(ImmutableList.of(library))).containsExactly("src b/libfoo.ifso");
    Iterable<Artifact> dynamicLibrariesForRuntime =
        target
            .get(CcInfo.PROVIDER)
            .getCcLinkingContext()
            .getDynamicLibrariesForRuntime(/* linkingStatically= */ false);
    assertThat(artifactsToStrings(dynamicLibrariesForRuntime))
        .containsExactly("bin _solib_k8/_U_S_Sb_Cfoo___Ub/libfoo.so");
  }

  /** Static and shared libraries may be supplied together. */
  @Test
  public void testCcImportWithBothStaticAndSharedLibraries() throws Exception {
    useConfiguration("--cpu=k8");
    ConfiguredTarget target =
        scratchConfiguredTarget(
            "a",
            "foo",
            skylarkImplementationLoadStatement,
            "cc_import(name = 'foo', static_library = 'libfoo.a', shared_library = 'libfoo.so')");
    Artifact library =
        Iterables.getOnlyElement(target.get(CcInfo.PROVIDER).getCcLinkingContext().getLibraries())
            .getStaticLibrary();
    assertThat(artifactsToStrings(ImmutableList.of(library))).containsExactly("src a/libfoo.a");
    Artifact dynamicLibrary =
        Iterables.getOnlyElement(target.get(CcInfo.PROVIDER).getCcLinkingContext().getLibraries())
            .getResolvedSymlinkDynamicLibrary();
    Iterable<Artifact> dynamicLibrariesForRuntime =
        target
            .get(CcInfo.PROVIDER)
            .getCcLinkingContext()
            .getDynamicLibrariesForRuntime(/* linkingStatically= */ false);
    assertThat(artifactsToStrings(ImmutableList.of(dynamicLibrary)))
        .containsExactly("src a/libfoo.so");
    assertThat(artifactsToStrings(dynamicLibrariesForRuntime))
        .containsExactly("bin _solib_k8/_U_S_Sa_Cfoo___Ua/libfoo.so");
  }

  /** The alwayslink attribute is propagated through the linking context. */
  @Test
  public void testCcImportWithAlwaysLinkStaticLibrary() throws Exception {
    ConfiguredTarget target =
        scratchConfiguredTarget(
            "a",
            "foo",
            skylarkImplementationLoadStatement,
            "cc_import(name = 'foo', static_library = 'libfoo.a', alwayslink = 1)");
    boolean alwayslink =
        Iterables.getOnlyElement(target.get(CcInfo.PROVIDER).getCcLinkingContext().getLibraries())
            .getAlwayslink();
    assertThat(alwayslink).isTrue();
  }

  /** A system-provided library is linked via its interface but never copied for runtime. */
  @Test
  public void testCcImportSystemProvidedIsTrue() throws Exception {
    ConfiguredTarget target =
        scratchConfiguredTarget(
            "a",
            "foo",
            skylarkImplementationLoadStatement,
            "cc_import(name = 'foo', interface_library = 'libfoo.ifso', system_provided = 1)");
    Artifact library =
        Iterables.getOnlyElement(target.get(CcInfo.PROVIDER).getCcLinkingContext().getLibraries())
            .getResolvedSymlinkInterfaceLibrary();
    assertThat(artifactsToStrings(ImmutableList.of(library))).containsExactly("src a/libfoo.ifso");
    Iterable<Artifact> dynamicLibrariesForRuntime =
        target
            .get(CcInfo.PROVIDER)
            .getCcLinkingContext()
            .getDynamicLibrariesForRuntime(/* linkingStatically= */ false);
    assertThat(artifactsToStrings(dynamicLibrariesForRuntime)).isEmpty();
  }

  /** Headers declared on cc_import appear in the compilation context. */
  @Test
  public void testCcImportProvideHeaderFiles() throws Exception {
    Iterable<Artifact> headers =
        scratchConfiguredTarget(
            "a",
            "foo",
            skylarkImplementationLoadStatement,
            "cc_import(name = 'foo', static_library = 'libfoo.a', hdrs = ['foo.h'])")
            .get(CcInfo.PROVIDER)
            .getCcCompilationContext()
            .getDeclaredIncludeSrcs();
    assertThat(artifactsToStrings(headers)).containsExactly("src a/foo.h");
  }

  @Test
  public void testCcImportLoadedThroughMacro() throws Exception {
    setupTestCcImportLoadedThroughMacro(/* loadMacro= */ true);
    assertThat(getConfiguredTarget("//a:a")).isNotNull();
    assertNoEvents();
  }

  @Test
  public void testCcImportNotLoadedThroughMacro() throws Exception {
    setupTestCcImportLoadedThroughMacro(/* loadMacro= */ false);
    reporter.removeHandler(failFastHandler);
    getConfiguredTarget("//a:a");
    assertContainsEvent("rules are deprecated");
  }

  /** Shared setup for the macro-loading tests: writes a one-target package. */
  private void setupTestCcImportLoadedThroughMacro(boolean loadMacro) throws Exception {
    useConfiguration("--incompatible_load_cc_rules_from_bzl");
    scratch.file(
        "a/BUILD",
        getAnalysisMock().ccSupport().getMacroLoadStatement(loadMacro, "cc_import"),
        "cc_import(name='a', static_library='a.a')");
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.lucene;
import static com.google.common.collect.ImmutableSet.of;
import static javax.jcr.PropertyType.TYPENAME_STRING;
import static org.apache.jackrabbit.oak.InitialContent.INITIAL_CONTENT;
import static org.apache.jackrabbit.oak.api.Type.STRINGS;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEFINITIONS_NAME;
import static org.apache.jackrabbit.oak.plugins.index.lucene.FieldNames.PATH;
import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.INCLUDE_PROPERTY_NAMES;
import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.VERSION;
import static org.apache.jackrabbit.oak.plugins.index.lucene.TestUtil.newLuceneIndexDefinitionV2;
import static org.apache.jackrabbit.oak.plugins.index.lucene.util.LuceneIndexHelper.newLuceneIndexDefinition;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static org.apache.jackrabbit.oak.plugins.memory.PropertyStates.createProperty;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeFalse;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import javax.annotation.Nonnull;
import com.google.common.collect.ImmutableList;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.commons.CIHelper;
import org.apache.jackrabbit.oak.plugins.blob.datastore.CachingFileDataStore;
import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreUtils;
import org.apache.jackrabbit.oak.plugins.index.CompositeIndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
import org.apache.jackrabbit.oak.plugins.index.IndexEditor;
import org.apache.jackrabbit.oak.plugins.index.IndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.IndexUpdateCallback;
import org.apache.jackrabbit.oak.plugins.index.IndexUpdateProvider;
import org.apache.jackrabbit.oak.plugins.index.IndexUtils;
import org.apache.jackrabbit.oak.plugins.index.lucene.directory.OakDirectory;
import org.apache.jackrabbit.oak.plugins.index.lucene.writer.MultiplexersLucene;
import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexEditorProvider;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.DefaultEditor;
import org.apache.jackrabbit.oak.spi.commit.Editor;
import org.apache.jackrabbit.oak.spi.commit.EditorHook;
import org.apache.jackrabbit.oak.spi.mount.Mount;
import org.apache.jackrabbit.oak.spi.mount.MountInfoProvider;
import org.apache.jackrabbit.oak.spi.mount.Mounts;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStateUtils;
import org.apache.jackrabbit.test.ISO8601;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class LuceneIndexEditorTest {
// Per-test fixtures: the commit hook under test and an in-memory content tree
// rooted at the initial repository content.
private EditorHook HOOK;
private NodeState root = INITIAL_CONTENT;
private NodeBuilder builder = root.builder();
// Tracks index definitions so lookups can find the updated index state.
private IndexTracker tracker = new IndexTracker();
private IndexNode indexNode;
@Rule
public final TemporaryFolder temporaryFolder = new TemporaryFolder(new File("target"));
// Parameterized flag: when true, index binaries are stored via a blob store
// instead of inline (see setup()).
@Parameterized.Parameter
public boolean useBlobStore;
/**
 * Runs every test twice: once with a DataStore-backed blob store, once without.
 */
@Parameterized.Parameters(name = "{index}: useBlobStore ({0})")
public static List<Boolean[]> fixtures() {
    return ImmutableList.of(new Boolean[] {true}, new Boolean[] {false});
}
@Before
public void setup() throws Exception {
    // When parameterized with useBlobStore, route index binaries through a
    // DataStore-backed blob store; otherwise use the editor provider's default
    // (inline) storage. HOOK drives index updates on each processCommit call.
    if (useBlobStore) {
        LuceneIndexEditorProvider provider = new LuceneIndexEditorProvider();
        CachingFileDataStore ds = DataStoreUtils
                .createCachingFDS(temporaryFolder.newFolder().getAbsolutePath(),
                        temporaryFolder.newFolder().getAbsolutePath());
        provider.setBlobStore(new DataStoreBlobStore(ds));
        HOOK = new EditorHook(new IndexUpdateProvider(provider));
    } else {
        HOOK = new EditorHook(new IndexUpdateProvider(new LuceneIndexEditorProvider()));
    }
}
/**
 * Full-text index: string properties are analyzed and searchable, while
 * non-string (numeric) properties are not indexed by default.
 */
@Test
public void testLuceneWithFullText() throws Exception {
    NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
    NodeBuilder idxnb = newLuceneIndexDefinitionV2(index, "lucene",
            of(TYPENAME_STRING));
    IndexDefinition defn = new IndexDefinition(root, idxnb.getNodeState(), "/foo");
    NodeState before = builder.getNodeState();
    builder.child("test").setProperty("foo", "fox is jumping");
    builder.child("test").setProperty("price", 100);
    NodeState after = builder.getNodeState();
    // Run the index editors over the before/after diff, then publish the
    // updated index state to the tracker so queries can see it.
    NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
    tracker.update(indexed);
    //system fields starts with ':' so need to be escaped
    assertEquals("/test", query(escape(FieldNames.createAnalyzedFieldName("foo"))+":fox", defn));
    assertNull("Non string properties not indexed by default",
            getPath(NumericRangeQuery.newLongRange("price", 100L, 100L, true, true)));
}
/**
 * Deleting a node that contributed nothing to the index must leave the
 * persisted index state untouched (no spurious index writes).
 */
@Test
public void noChangeIfNonIndexedDelete() throws Exception{
    NodeState before = builder.getNodeState();
    NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
    NodeBuilder nb = newLuceneIndexDefinitionV2(index, "lucene", of(TYPENAME_STRING));
    nb.setProperty(LuceneIndexConstants.FULL_TEXT_ENABLED, false);
    nb.setProperty(createProperty(INCLUDE_PROPERTY_NAMES, of("foo"), STRINGS));
    builder.child("test").setProperty("foo", "bar");
    // "/test/a" has no indexed properties — its later removal should be a no-op
    // for the index.
    builder.child("test").child("a");
    NodeState after = builder.getNodeState();
    NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
    tracker.update(indexed);
    assertEquals("/test", getPath(new TermQuery(new Term("foo", "bar"))));
    // Snapshot the index node, delete the non-indexed child, and verify the
    // index state after the second commit is identical to the snapshot.
    NodeState luceneIdxState1 = NodeStateUtils.getNode(indexed, "/oak:index/lucene");
    before = indexed;
    builder = indexed.builder();
    builder.getChildNode("test").getChildNode("a").remove();
    after = builder.getNodeState();
    indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
    NodeState luceneIdxState2 = NodeStateUtils.getNode(indexed, "/oak:index/lucene");
    assertEquals(luceneIdxState1, luceneIdxState2);
}
/**
 * Escapes ':' characters so a Lucene field name (e.g. a system field such as
 * {@code :fulltext}) can be used literally inside a query-parser expression.
 */
private String escape(String fieldName) {
    final String escaped = fieldName.replace(":", "\\:");
    return escaped;
}
/**
 * Property (non-fulltext) index: only the explicitly included properties are
 * indexed, each with its native type (string, long, double, boolean, date),
 * and analyzed full-text queries do not match.
 */
@Test
public void testLuceneWithNonFullText() throws Exception {
NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
NodeBuilder nb = newLuceneIndexDefinitionV2(index, "lucene",
of(TYPENAME_STRING));
nb.setProperty(LuceneIndexConstants.FULL_TEXT_ENABLED, false);
nb.setProperty(createProperty(INCLUDE_PROPERTY_NAMES, of("foo", "price", "weight", "bool", "creationTime"), STRINGS));
IndexDefinition defn = new IndexDefinition(root, nb.getNodeState(), "/foo");
NodeState before = builder.getNodeState();
builder.child("test").setProperty("foo", "fox is jumping");
// "bar" and "truth" are deliberately NOT in the include list.
builder.child("test").setProperty("bar", "kite is flying");
builder.child("test").setProperty("price", 100);
builder.child("test").setProperty("weight", 10.0);
builder.child("test").setProperty("bool", true);
builder.child("test").setProperty("truth", true);
builder.child("test").setProperty("creationTime", createCal("05/06/2014"));
NodeState after = builder.getNodeState();
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
tracker.update(indexed);
assertNull("Fulltext search should not work", query("foo:fox",defn));
assertEquals("/test", getPath(new TermQuery(new Term("foo", "fox is jumping"))));
assertNull("bar must NOT be indexed", getPath(new TermQuery(new Term("bar", "kite is flying"))));
//Double
assertEquals("/test", getPath(NumericRangeQuery.newDoubleRange("weight", 8D, 12D, true, true)));
//Long
assertEquals("/test", getPath(NumericRangeQuery.newLongRange("price", 100L, 100L, true, true)));
//Boolean
assertEquals("/test", getPath(new TermQuery(new Term("bool", "true"))));
assertNull("truth must NOT be indexed", getPath(new TermQuery(new Term("truth", "true"))));
//Date
assertEquals("/test", getPath(NumericRangeQuery.newLongRange("creationTime",
dateToTime("05/05/2014"), dateToTime("05/07/2014"), true, true)));
}
/**
 * Only nodes that actually carry an included property ("foo") get a Lucene
 * document: /test and /test3 are indexed, /test2 (only "bar") is not.
 */
@Test
public void noOfDocsIndexedNonFullText() throws Exception {
NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
NodeBuilder nb = newLuceneIndexDefinitionV2(index, "lucene",
of(TYPENAME_STRING));
nb.setProperty(LuceneIndexConstants.FULL_TEXT_ENABLED, false);
nb.setProperty(createProperty(INCLUDE_PROPERTY_NAMES, of("foo"), STRINGS));
NodeState before = builder.getNodeState();
builder.child("test").setProperty("foo", "fox is jumping");
builder.child("test2").setProperty("bar", "kite is flying");
builder.child("test3").setProperty("foo", "wind is blowing");
NodeState after = builder.getNodeState();
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
tracker.update(indexed);
// Two of the three nodes have "foo", so exactly two documents exist.
assertEquals(2, getSearcher().getIndexReader().numDocs());
}
/**
 * With SAVE_DIR_LISTING enabled the index's hidden :data node must record the
 * directory listing property after indexing.
 */
@Test
public void saveDirectoryListing() throws Exception {
NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
NodeBuilder nb = newLuceneIndexDefinitionV2(index, "lucene",
of(TYPENAME_STRING));
nb.setProperty(LuceneIndexConstants.SAVE_DIR_LISTING, true);
nb.setProperty(LuceneIndexConstants.FULL_TEXT_ENABLED, false);
nb.setProperty(createProperty(INCLUDE_PROPERTY_NAMES, of("foo"), STRINGS));
NodeState before = builder.getNodeState();
builder.child("test").setProperty("foo", "fox is jumping");
NodeState after = builder.getNodeState();
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
// The index data lives under oak:index/lucene/:data.
NodeState dir = indexed.getChildNode("oak:index").getChildNode("lucene").getChildNode(":data");
assertTrue(dir.hasProperty(OakDirectory.PROP_DIR_LISTING));
}
/**
 * 1. Index property foo in /test
 * 2. Then modify some other (non-included) property in /test
 *
 * This should not cause the index to be updated: numDocs stays the same and
 * no documents are deleted/re-added.
 */
@Test
public void nonIncludedPropertyChange() throws Exception {
NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
NodeBuilder nb = newLuceneIndexDefinitionV2(index, "lucene",
of(TYPENAME_STRING));
nb.setProperty(LuceneIndexConstants.FULL_TEXT_ENABLED, false);
nb.setProperty(createProperty(INCLUDE_PROPERTY_NAMES, of("foo"),
STRINGS));
NodeState before = builder.getNodeState();
builder.child("test").setProperty("foo", "fox is jumping");
builder.child("test2").setProperty("foo", "bird is chirping");
NodeState after = builder.getNodeState();
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
tracker.update(indexed);
assertEquals(2, getSearcher().getIndexReader().numDocs());
assertEquals("/test", getPath(new TermQuery(new Term("foo", "fox is jumping"))));
// Release the cached searcher so the next assertions see a fresh reader.
releaseIndexNode();
before = indexed;
builder = before.builder();
// "bar" is not in the include list — this change must be a no-op for the index.
builder.child("test").setProperty("bar", "kite is flying");
after = builder.getNodeState();
indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
tracker.update(indexed);
assertEquals(2, getSearcher().getIndexReader().numDocs());
assertEquals("change in non included property should not cause " +
"index update",0, getSearcher().getIndexReader().numDeletedDocs());
}
/**
 * Relative property inclusion (e.g. "jcr:content/mime"): properties are only
 * indexed on the parent node whose relative path matches, updates to a
 * relative property re-index the parent, and removing the intermediate node
 * removes the indexed values at every level.
 */
@Test
public void testLuceneWithRelativeProperty() throws Exception {
// OAK-6833 — known failure on Windows CI, so skipped there.
assumeFalse(CIHelper.windows());
NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
NodeBuilder nb = newLuceneIndexDefinitionV2(index, "lucene",
of(TYPENAME_STRING));
nb.setProperty(LuceneIndexConstants.FULL_TEXT_ENABLED, false);
nb.setProperty(createProperty(INCLUDE_PROPERTY_NAMES, of("foo", "jcr:content/mime",
"jcr:content/metadata/type"), STRINGS));
NodeState before = builder.getNodeState();
builder.child("test").setProperty("foo", "fox is jumping");
builder.child("test").child("jcr:content").setProperty("mime", "text");
builder.child("test").child("jcr:content").child("metadata").setProperty("type", "image");
// Decoys: same names but not matching the configured relative paths.
builder.child("jcr:content").setProperty("count", "text");
builder.child("jcr:content").child("boom").child("metadata").setProperty("type", "image");
NodeState after = builder.getNodeState();
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
tracker.update(indexed);
// Only /test produced a document; the relative values are fields on it.
assertEquals(1, getSearcher().getIndexReader().numDocs());
assertEquals("/test", getPath(new TermQuery(new Term("foo", "fox is jumping"))));
assertEquals("/test", getPath(new TermQuery(new Term("jcr:content/mime", "text"))));
assertEquals("/test", getPath(new TermQuery(new Term("jcr:content/metadata/type", "image"))));
assertNull("bar must NOT be indexed", getPath(new TermQuery(new Term("count", "text"))));
releaseIndexNode();
// Update the relative property and verify the parent is re-indexed.
before = indexed;
builder = before.builder();
builder.child("test").child("jcr:content").setProperty("mime", "pdf");
after = builder.getNodeState();
indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
tracker.update(indexed);
assertEquals("/test", getPath(new TermQuery(new Term("jcr:content/mime", "pdf"))));
releaseIndexNode();
// Remove jcr:content entirely and verify both levels disappear from the index.
before = indexed;
builder = before.builder();
builder.child("test").child("jcr:content").remove();
after = builder.getNodeState();
indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
tracker.update(indexed);
assertNull("removes must be persisted too, 1st level",
getPath(new TermQuery(new Term("jcr:content/mime", "pdf"))));
assertNull("removes must be persisted too, 2nd level",
getPath(new TermQuery(new Term("jcr:content/metadata/type",
"image"))));
}
/**
 * An existing index stays on its persisted format version (simulated V1
 * upgrade scenario) across normal commits, but switches to the current
 * default version once a reindex is requested.
 */
@Test
public void indexVersionSwitchOnReindex() throws Exception{
NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
NodeBuilder nb = newLuceneIndexDefinition(index, "lucene",
of(TYPENAME_STRING));
//1. Trigger a index so that next index step does not see it as a fresh index
NodeState indexed = HOOK.processCommit(EMPTY_NODE, builder.getNodeState(), CommitInfo.EMPTY);
builder = indexed.builder();
//By default logic would use current version. To simulate upgrade we forcefully set
//version to V1
builder.child(INDEX_DEFINITIONS_NAME).child("lucene").setProperty(IndexDefinition.INDEX_VERSION,
IndexFormatVersion.V1.getVersion());
NodeState before = builder.getNodeState();
builder.child("test").setProperty("foo", "fox is jumping");
NodeState after = builder.getNodeState();
indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
// 2. A normal incremental commit must keep the forced V1 version.
assertEquals(IndexFormatVersion.V1, new IndexDefinition(root,
indexed.getChildNode(INDEX_DEFINITIONS_NAME).getChildNode("lucene"), "/foo").getVersion());
//3. Trigger a reindex and version should switch to current
builder = indexed.builder();
before = indexed;
builder.child(INDEX_DEFINITIONS_NAME).child("lucene").setProperty(IndexConstants.REINDEX_PROPERTY_NAME, true);
after = builder.getNodeState();
indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
assertEquals(IndexFormatVersion.getDefault(), new IndexDefinition(root,
indexed.getChildNode(INDEX_DEFINITIONS_NAME).getChildNode("lucene"), "/foo").getVersion());
}
/**
 * A freshly created v2 index definition must not be reported as being in the
 * old (pre-v2) on-disk format after its first indexing cycle.
 */
@Test
public void autoFormatUpdate() throws Exception{
NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
NodeBuilder nb = newLuceneIndexDefinitionV2(index, "lucene",
of(TYPENAME_STRING));
//1. Trigger a index so that next index step does not see it as a fresh index
NodeState indexed = HOOK.processCommit(EMPTY_NODE, builder.getNodeState(), CommitInfo.EMPTY);
IndexDefinition defn = new IndexDefinition(root, indexed.getChildNode(INDEX_DEFINITIONS_NAME).getChildNode("lucene"), "/foo");
assertFalse(defn.isOfOldFormat());
}
/**
 * Copy-on-write index directories and write locks: when a sibling editor
 * fails the commit, the Lucene editor's writer must not be left holding the
 * lock — a later retry of the same commit must succeed.
 *
 * Fix: the executor shutdown now runs in a {@code finally} block so the
 * thread pool is not leaked when an assertion or commit fails mid-test
 * (the original only shut it down on the success path).
 */
@Test
public void copyOnWriteAndLocks() throws Exception {
    assumeFalse(CIHelper.windows());
    ExecutorService executorService = Executors.newFixedThreadPool(2);
    try {
        IndexCopier copier = new IndexCopier(executorService, temporaryFolder.getRoot());
        FailOnDemandEditorProvider failingProvider = new FailOnDemandEditorProvider();
        EditorHook hook = new EditorHook(
                new IndexUpdateProvider(
                        new CompositeIndexEditorProvider(
                                failingProvider,
                                new LuceneIndexEditorProvider(copier))));
        NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
        NodeBuilder nb = newLuceneIndexDefinitionV2(index, "lucene", of(TYPENAME_STRING));
        IndexUtils.createIndexDefinition(index, "failingIndex", false, false, of("foo"), null);
        //1. Get initial set indexed. So that next cycle is normal indexing
        NodeState indexed = hook.processCommit(EMPTY_NODE, builder.getNodeState(), CommitInfo.EMPTY);
        builder = indexed.builder();
        NodeState before = indexed;
        builder.child("test").setProperty("a", "fox is jumping");
        NodeState after = builder.getNodeState();
        //2. Ensure that Lucene gets triggered but close is not called
        failingProvider.setShouldFail(true);
        try {
            hook.processCommit(before, after, CommitInfo.EMPTY);
            fail();
        } catch (CommitFailedException ignore){
            // expected: the co-editor is configured to fail this commit
        }
        //3. Disable the troubling editor
        failingProvider.setShouldFail(false);
        //4. Now commit should process fine — the lock must have been released
        hook.processCommit(before, after, CommitInfo.EMPTY);
    } finally {
        // Always stop the copier's thread pool, even on assertion failure.
        executorService.shutdown();
    }
}
/**
 * With a multiplexing MountInfoProvider, content under a mounted path (/libs)
 * goes to the mount's own index directory while default-mount content goes to
 * the default directory — one document each here.
 */
@Test
public void multiplexingWriter() throws Exception{
newLucenePropertyIndex("lucene", "foo");
// Mount "foo" covers /libs and /apps; everything else is the default mount.
MountInfoProvider mip = Mounts.newBuilder()
.mount("foo", "/libs", "/apps").build();
EditorHook hook = new EditorHook(
new IndexUpdateProvider(
new LuceneIndexEditorProvider(null, new ExtractedTextCache(0, 0), null, mip)));
NodeState indexed = hook.processCommit(EMPTY_NODE, builder.getNodeState(), CommitInfo.EMPTY);
builder = indexed.builder();
NodeState before = indexed;
builder.child("content").child("en").setProperty("foo", "bar");
builder.child("libs").child("install").setProperty("foo", "bar");
NodeState after = builder.getNodeState();
indexed = hook.processCommit(before, after, CommitInfo.EMPTY);
builder = indexed.builder();
assertEquals(1, numDocs(mip.getMountByName("foo")));
assertEquals(1, numDocs(mip.getDefaultMount()));
}
/**
 * Counts the documents in the index directory belonging to the given mount.
 *
 * Fix: the {@link Directory} and {@link IndexReader} are now closed via
 * try-with-resources — the original leaked both on every call.
 *
 * @param m mount whose per-mount index directory should be inspected
 * @return number of live documents in that directory
 * @throws IOException if the directory cannot be opened or read
 */
private int numDocs(Mount m) throws IOException {
    String indexDirName = MultiplexersLucene.getIndexDirName(m);
    NodeBuilder defnBuilder = builder.child(INDEX_DEFINITIONS_NAME).child("lucene");
    try (Directory d = new OakDirectory(defnBuilder, indexDirName,
            new IndexDefinition(root, defnBuilder.getNodeState(), "/foo"), true);
         IndexReader r = DirectoryReader.open(d)) {
        return r.numDocs();
    }
}
// NOTE: deliberately disabled (//@Test) — this is a manual diagnostic that
// prints the Lucene index directory contents after each commit cycle.
//@Test
public void checkLuceneIndexFileUpdates() throws Exception{
NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
NodeBuilder nb = newLuceneIndexDefinition(index, "lucene",
of(TYPENAME_STRING));
nb.setProperty(LuceneIndexConstants.FULL_TEXT_ENABLED, false);
nb.setProperty(createProperty(INCLUDE_PROPERTY_NAMES, of("foo" , "bar", "baz"), STRINGS));
//nb.removeProperty(REINDEX_PROPERTY_NAME);
NodeState before = builder.getNodeState();
builder.child("test").setProperty("foo", "fox is jumping");
//InfoStream.setDefault(new PrintStreamInfoStream(System.out));
// Each commitAndDump indexes the change and prints the directory listing.
before = commitAndDump(before, builder.getNodeState());
builder = before.builder();
builder.child("test2").setProperty("bar", "ship is sinking");
before = commitAndDump(before, builder.getNodeState());
builder = before.builder();
builder.child("test3").setProperty("baz", "horn is blowing");
before = commitAndDump(before, builder.getNodeState());
builder = before.builder();
builder.child("test2").remove();
before = commitAndDump(before, builder.getNodeState());
builder = before.builder();
builder.child("test2").setProperty("bar", "ship is back again");
before = commitAndDump(before, builder.getNodeState());
}
/**
 * Releases the index node acquired by {@link #getSearcher()} (if any) and
 * clears the cached reference. Runs after every test.
 */
@After
public void releaseIndexNode(){
    if (indexNode == null) {
        return; // nothing was acquired by this test
    }
    indexNode.release();
    indexNode = null;
}
/**
 * Registers a non-fulltext Lucene property index for a single property name
 * and returns the resulting root state.
 *
 * @param indexName name of the index node under oak:index
 * @param propName  the one property to include in the index
 * @return the node state after the definition has been added
 */
private NodeState newLucenePropertyIndex(String indexName, String propName){
    NodeBuilder indexRoot = builder.child(INDEX_DEFINITIONS_NAME);
    NodeBuilder defnBuilder = newLuceneIndexDefinitionV2(indexRoot, indexName,
            of(TYPENAME_STRING));
    defnBuilder.setProperty(LuceneIndexConstants.FULL_TEXT_ENABLED, false);
    defnBuilder.setProperty(createProperty(INCLUDE_PROPERTY_NAMES, of(propName), STRINGS));
    return builder.getNodeState();
}
/**
 * Parses the query text with the index definition's analyzer and returns the
 * path of the single matching document (or null — see {@link #getPath}).
 */
private String query(String queryText, IndexDefinition defn) throws IOException, ParseException {
    QueryParser parser = new QueryParser(VERSION, "", defn.getAnalyzer());
    Query parsed = parser.parse(queryText);
    return getPath(parsed);
}
/**
 * Runs the query and returns the PATH field of the single hit, or null when
 * nothing matches. Fails the test if more than one document matches.
 */
private String getPath(Query query) throws IOException {
    IndexSearcher searcher = getSearcher();
    TopDocs hits = searcher.search(query, 100);
    if (hits.totalHits == 0) {
        return null;
    }
    if (hits.totalHits > 1) {
        fail("More than 1 result found for query " + query);
    }
    return searcher.getIndexReader().document(hits.scoreDocs[0].doc).get(PATH);
}
/**
 * Lazily acquires (and caches) the index node for /oak:index/lucene and
 * returns its searcher. The cached node is released in releaseIndexNode().
 */
private IndexSearcher getSearcher(){
    if (indexNode != null) {
        return indexNode.getSearcher();
    }
    indexNode = tracker.acquireIndexNode("/oak:index/lucene");
    return indexNode.getSearcher();
}
/**
 * Runs a commit through the index hook, publishes the result to the tracker,
 * prints the resulting index directory listing, and returns the indexed state.
 */
private NodeState commitAndDump(NodeState before, NodeState after) throws CommitFailedException, IOException {
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
tracker.update(indexed);
dumpIndexDir();
return indexed;
}
/**
 * Debug helper: prints the current index directory's file names (sorted) and
 * sizes to stdout, then releases the cached searcher.
 */
private void dumpIndexDir() throws IOException {
    DirectoryReader reader = (DirectoryReader) getSearcher().getIndexReader();
    Directory dir = reader.directory();
    System.out.println("================");
    String[] names = dir.listAll();
    Arrays.sort(names);
    for (String name : names) {
        System.out.printf("%s - %d %n", name, dir.fileLength(name));
    }
    releaseIndexNode();
}
/**
 * Parses a "dd/MM/yyyy" date string into a {@link Calendar} in the default
 * time zone.
 */
static Calendar createCal(String dt) throws java.text.ParseException {
    Calendar cal = Calendar.getInstance();
    cal.setTime(new SimpleDateFormat("dd/MM/yyyy").parse(dt));
    return cal;
}
/**
 * Converts a "dd/MM/yyyy" date string to the long value the index stores for
 * date properties (via ISO8601 formatting and FieldFactory.dateToLong).
 */
static long dateToTime(String dt) throws java.text.ParseException {
    Calendar cal = createCal(dt);
    String iso = ISO8601.format(cal);
    return FieldFactory.dateToLong(iso);
}
/**
 * Test-only editor provider that can be switched to fail commits on demand.
 * It registers itself for the property index type and throws from leave()
 * when {@link #setShouldFail(boolean)} has armed it — used to verify that a
 * failing sibling editor does not leave the Lucene writer locked.
 */
private static class FailOnDemandEditorProvider implements IndexEditorProvider {
// When true, the editor throws from leave(), failing the whole commit.
private boolean shouldFail;
@Override
public Editor getIndexEditor(@Nonnull String type, @Nonnull NodeBuilder definition,
@Nonnull NodeState root,
@Nonnull IndexUpdateCallback callback) throws CommitFailedException {
// Only hook into the property index; other index types are untouched.
if (PropertyIndexEditorProvider.TYPE.equals(type)) {
return new FailOnDemandEditor();
}
return null;
}
public void setShouldFail(boolean shouldFail) {
this.shouldFail = shouldFail;
}
private class FailOnDemandEditor extends DefaultEditor implements IndexEditor {
@Override
public void leave(NodeState before, NodeState after)
throws CommitFailedException {
throwExceptionIfTold();
super.leave(before, after);
}
// Fails the commit iff the enclosing provider is currently armed.
void throwExceptionIfTold() throws CommitFailedException {
if (shouldFail) {
throw new CommitFailedException("commit",1 , null);
}
}
}
}
}
| |
package com.badlogic.gdx.tests;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input.Keys;
import com.badlogic.gdx.InputAdapter;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.GL10;
import com.badlogic.gdx.graphics.Mesh;
import com.badlogic.gdx.graphics.VertexAttribute;
import com.badlogic.gdx.graphics.VertexAttributes;
import com.badlogic.gdx.graphics.VertexAttributes.Usage;
import com.badlogic.gdx.graphics.glutils.ShaderProgram;
import com.badlogic.gdx.math.EarClippingTriangulator;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.tests.utils.GdxTest;
import com.badlogic.gdx.utils.Disposable;
public class EarClippingTriangulatorTest extends GdxTest {
private List<TestCase> testCases = new ArrayList<TestCase>();
private int casesX;
private int casesY;
@Override
public void create() {
// An empty "polygon"
testCases.add(new TestCase(new float[] {}, true));
// A point
testCases.add(new TestCase(new float[] {0, 0}, true));
// A line segment
testCases.add(new TestCase(new float[] {0, 0, 1, 1}, true));
// A counterclockwise triangle
testCases.add(new TestCase(new float[] {
0, 0,
0, 1,
1, 0,
}));
// A counterclockwise square
testCases.add(new TestCase(new float[] {
0, 0,
0, 1,
1, 1,
1, 0,
}));
// A clockwise square
testCases.add(new TestCase(new float[] {
0, 0,
1, 0,
1, 1,
0, 1,
}));
// Starfleet insigna
testCases.add(new TestCase(new float[] {
0, 0,
0.6f, 0.4f,
1, 0,
0.5f, 1,
}));
// Starfleet insigna with repeated point
testCases.add(new TestCase(new float[] {
0, 0,
0.6f, 0.4f,
0.6f, 0.4f,
1, 0,
0.5f, 1,
}));
// Three collinear points
testCases.add(new TestCase(new float[] {
0, 0,
1, 0,
2, 0,
}));
// Four collinear points
testCases.add(new TestCase(new float[] {
0, 0,
1, 0,
2, 0,
3, 0,
}));
// Non-consecutive collinear points
testCases.add(new TestCase(new float[] {
0, 0,
1, 1,
2, 0,
3, 1,
4, 0,
}, true));
// Plus shape
testCases.add(new TestCase(new float[] {
1, 0,
2, 0,
2, 1,
3, 1,
3, 2,
2, 2,
2, 3,
1, 3,
1, 2,
0, 2,
0, 1,
1, 1,
}));
// Star shape
testCases.add(new TestCase(new float[] {
4, 0,
5, 3,
8, 4,
5, 5,
4, 8,
3, 5,
0, 4,
3, 3,
}));
// U shape
testCases.add(new TestCase(new float[] {
1, 0,
2, 0,
3, 1,
3, 3,
2, 3,
2, 1,
1, 1,
1, 3,
0, 3,
0, 1,
}));
// Spiral
testCases.add(new TestCase(new float[] {
1, 0,
4, 0,
5, 1,
5, 4,
4, 5,
1, 5,
0, 4,
0, 3,
1, 2,
2, 2,
3, 3,
1, 3,
1, 4,
4, 4,
4, 1,
0, 1,
}));
// Test case from http://www.flipcode.com/archives/Efficient_Polygon_Triangulation.shtml
testCases.add(new TestCase(new float[] {
0, 6,
0, 0,
3, 0,
4, 1,
6, 1,
8, 0,
12, 0,
13, 2,
8, 2,
8, 4,
11, 4,
11, 6,
6, 6,
4, 3,
2, 6,
}));
// Self-intersection
testCases.add(new TestCase(new float[] {
0, 0,
1, 1,
2, -1,
3, 1,
4, 0,
}, true));
// Self-touching
testCases.add(new TestCase(new float[] {
0, 0,
4, 0,
4, 4,
2, 4,
2, 3,
3, 3,
3, 1,
1, 1,
1, 3,
2, 3,
2, 4,
0, 4,
}, true));
// Self-overlapping
testCases.add(new TestCase(new float[] {
0, 0,
4, 0,
4, 4,
1, 4,
1, 3,
3, 3,
3, 1,
1, 1,
1, 3,
3, 3,
3, 4,
0, 4,
}, true));
// Test case from http://www.davdata.nl/math/polygons.html
testCases.add(new TestCase(new float[] {
190, 480,
140, 180,
310, 100,
330, 390,
290, 390,
280, 260,
220, 260,
220, 430,
370, 430,
350, 30,
50, 30,
160, 560,
730, 510,
710, 20,
410, 30,
470, 440,
640, 410,
630, 140,
590, 140,
580, 360,
510, 370,
510, 60,
650, 70,
660, 450,
190, 480,
}));
// Issue 815, http://code.google.com/p/libgdx/issues/detail?id=815
testCases.add(new TestCase(new float[] {
-2.0f, 0.0f,
-2.0f, 0.5f,
0.0f, 1.0f,
0.5f, 2.875f,
1.0f, 0.5f,
1.5f, 1.0f,
2.0f, 1.0f,
2.0f, 0.0f,
}));
// Issue 207, comment #1, http://code.google.com/p/libgdx/issues/detail?id=207#c1
testCases.add(new TestCase(new float[] {
72.42465f, 197.07095f,
78.485535f, 189.92776f,
86.12059f, 180.92929f,
99.68253f, 164.94557f,
105.24325f, 165.79604f,
107.21862f, 166.09814f,
112.41958f, 162.78253f,
113.73238f, 161.94562f,
123.29477f, 167.93805f,
126.70667f, 170.07617f,
73.22717f, 199.51062f,
}));
// Issue 207, comment #11, http://code.google.com/p/libgdx/issues/detail?id=207#c11
// Also on issue 1081, http://code.google.com/p/libgdx/issues/detail?id=1081
testCases.add(new TestCase(new float[] {
2400.0f, 480.0f,
2400.0f, 176.0f,
1920.0f, 480.0f,
1920.0459f, 484.22314f,
1920.1797f, 487.91016f,
1920.3955f, 491.0874f,
1920.6875f, 493.78125f,
1921.0498f, 496.01807f,
1921.4766f, 497.82422f,
1921.9619f, 499.22607f,
1922.5f, 500.25f,
1923.085f, 500.92236f,
1923.7109f, 501.26953f,
1924.3721f, 501.31787f,
1925.0625f, 501.09375f,
1925.7764f, 500.62354f,
1926.5078f, 499.9336f,
1927.251f, 499.0503f,
1928.0f, 498.0f,
1928.749f, 496.80908f,
1929.4922f, 495.5039f,
1930.2236f, 494.11084f,
1930.9375f, 492.65625f,
1931.6279f, 491.1665f,
1932.2891f, 489.66797f,
1932.915f, 488.187f,
1933.5f, 486.75f,
1934.0381f, 485.3833f,
1934.5234f, 484.11328f,
1934.9502f, 482.9663f,
1935.3125f, 481.96875f,
1935.6045f, 481.14697f,
1935.8203f, 480.52734f,
1935.9541f, 480.13623f,
1936.0f, 480.0f,
}));
// Issue 1407, http://code.google.com/p/libgdx/issues/detail?id=1407
testCases.add(new TestCase(new float[] {
3.914329f, 1.9008259f,
4.414321f, 1.903619f,
4.8973203f, 1.9063174f,
5.4979978f, 1.9096732f,
}, true));
// Issue 1407, http://code.google.com/p/libgdx/issues/detail?id=1407,
// with an additional point to show what is happening.
testCases.add(new TestCase(new float[] {
3.914329f, 1.9008259f,
4.414321f, 1.903619f,
4.8973203f, 1.9063174f,
5.4979978f, 1.9096732f,
4, 4,
}));
casesX = (int) Math.ceil(Math.sqrt(testCases.size()));
casesY = (int) Math.ceil((float) testCases.size() / casesX);
Gdx.input.setInputProcessor(new InputAdapter() {
@Override
public boolean keyDown (int keycode) {
switch (keycode) {
case Keys.RIGHT:
cycle(1);
break;
case Keys.LEFT:
cycle(-1);
break;
case Keys.SPACE:
reverse();
break;
default:
return super.keyDown(keycode);
}
return true;
}
});
}
@Override
public void render() {
Gdx.gl10.glClearColor(1, 1, 1, 1);
Gdx.gl10.glClear(GL10.GL_COLOR_BUFFER_BIT);
int w = Gdx.graphics.getWidth();
int h = Gdx.graphics.getHeight();
Gdx.gl10.glViewport(0, 0, w, h);
final float M = 0.1f;
Gdx.gl10.glMatrixMode(GL10.GL_PROJECTION);
Gdx.gl10.glLoadIdentity();
Gdx.gl10.glOrthof(-M, casesX * (1 + M), -M, casesY * (1 + M), -1, 1);
Gdx.gl10.glMatrixMode(GL10.GL_MODELVIEW);
Gdx.gl10.glLoadIdentity();
int x = 0;
int y = 0;
for (TestCase testCase : testCases) {
Gdx.gl10.glPushMatrix();
Gdx.gl10.glTranslatef(x * (1 + M), y * (1 + M), 0);
testCase.render();
Gdx.gl10.glPopMatrix();
x++;
if (x >= casesX) {
x = 0;
y++;
}
}
}
@Override
public void dispose() {
for (TestCase testCase : testCases) {
testCase.dispose();
}
}
void cycle(int step) {
for (TestCase testCase : testCases) {
testCase.cycle(step);
}
}
void reverse() {
for (TestCase testCase : testCases) {
testCase.reverse();
}
}
static final Color VALID_COLOR = new Color(0.8f, 1.0f, 0.8f, 1.0f);
static final Color INVALID_COLOR = new Color(1.0f, 0.8f, 0.8f, 1.0f);
private class TestCase implements Disposable {
final List<Vector2> polygon;
final boolean invalid;
final Mesh polygonMesh;
final Mesh interiorMesh;
final Mesh triangleOutlineMesh;
final Rectangle boundingRect;
public TestCase(float[] p) {
this(p, false);
}
public TestCase(float[] p, boolean invalid) {
this.invalid = invalid;
polygon = vertexArrayToList(p);
int numPolygonVertices = polygon.size();
Vector2 min = new Vector2(Float.MAX_VALUE, Float.MAX_VALUE);
Vector2 max = new Vector2(-Float.MAX_VALUE, -Float.MAX_VALUE);
for (int i = 0; i < numPolygonVertices; i++) {
Vector2 v = polygon.get(i);
min.x = Math.min(min.x, v.x);
min.y = Math.min(min.y, v.y);
max.x = Math.max(max.x, v.x);
max.y = Math.max(max.y, v.y);
}
boundingRect = new Rectangle(min.x, min.y, Math.max(0.001f, max.x - min.x), Math.max(0.001f, max.y - min.y));
int numTriangles = Math.max(0, polygon.size() - 2);
VertexAttributes position = new VertexAttributes(
new VertexAttribute(Usage.Position, 2, ShaderProgram.POSITION_ATTRIBUTE));
VertexAttributes positionAndColor = new VertexAttributes(
new VertexAttribute(Usage.Position, 2, ShaderProgram.POSITION_ATTRIBUTE),
new VertexAttribute(Usage.Color, 4, ShaderProgram.COLOR_ATTRIBUTE));
polygonMesh = new Mesh(true, polygon.size(), 0, position);
interiorMesh = new Mesh(true, 3 * numTriangles, 0, positionAndColor);
triangleOutlineMesh = new Mesh(true, 6 * numTriangles, 0, position);
triangulate();
}
private void triangulate() {
List<Vector2> triangles = new EarClippingTriangulator().computeTriangles(polygon);
int numTriangleVertices = triangles.size();
ArrayList<Vector2> triangleOutlines = new ArrayList<Vector2>(2 * numTriangleVertices);
for (int i = 0, j = 0; i < numTriangleVertices;) {
Vector2 a = triangles.get(i++);
Vector2 b = triangles.get(i++);
Vector2 c = triangles.get(i++);
triangleOutlines.add(a);
triangleOutlines.add(b);
triangleOutlines.add(b);
triangleOutlines.add(c);
triangleOutlines.add(c);
triangleOutlines.add(a);
}
polygonMesh.setVertices(listToVertexArray(polygon));
interiorMesh.setVertices(listToColoredVertexArray(triangles, getColor()));
triangleOutlineMesh.setVertices(listToVertexArray(triangleOutlines));
}
public void cycle(int step) {
if (polygon.isEmpty()) {
return;
}
while (step > 0) {
polygon.add(0, polygon.remove(polygon.size() - 1));
--step;
}
while (step < 0) {
polygon.add(polygon.remove(0));
++step;
}
triangulate();
}
public void reverse() {
Collections.reverse(polygon);
}
private Color getColor() {
if (invalid) {
return INVALID_COLOR;
} else {
return VALID_COLOR;
}
}
public void render() {
Gdx.gl10.glScalef(1 / boundingRect.width, 1 / boundingRect.height, 1);
Gdx.gl10.glTranslatef(-boundingRect.x, -boundingRect.y, 0);
interiorMesh.render(GL10.GL_TRIANGLES);
Gdx.gl10.glColor4f(0.4f, 0.4f, 0.4f, 1.0f);
Gdx.gl10.glLineWidth(1.0f);
triangleOutlineMesh.render(GL10.GL_LINES);
Gdx.gl10.glColor4f(0.3f, 0.0f, 0.0f, 1.0f);
Gdx.gl10.glLineWidth(2.0f);
polygonMesh.render(GL10.GL_LINE_LOOP);
}
@Override
public void dispose() {
polygonMesh.dispose();
interiorMesh.dispose();
triangleOutlineMesh.dispose();
}
}
static List<Vector2> vertexArrayToList(float[] array) {
int n = array.length;
List<Vector2> list = new ArrayList<Vector2>(n / 2);
for (int i = 0; i < n; i += 2) {
list.add(new Vector2(array[i], array[i+1]));
}
return list;
}
static float[] listToVertexArray(List<Vector2> list) {
int n = list.size();
float[] array = new float[n * 2];
int i = 0;
for (Vector2 v : list) {
array[i++] = v.x;
array[i++] = v.y;
}
return array;
}
static float[] listToColoredVertexArray(List<Vector2> list, Color color) {
int n = list.size();
float[] array = new float[n * 6];
int i = 0;
int j = 0;
for (Vector2 v : list) {
array[i++] = v.x;
array[i++] = v.y;
float brightness = 0.3f + 0.6f * (j / 3) * 3 / list.size();
array[i++] = color.r * brightness;
array[i++] = color.g * brightness;
array[i++] = color.b * brightness;
array[i++] = 1;
j++;
}
return array;
}
}
| |
/**
* Copyright 2013 Vecna Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.vecna.maven.jshint.mojo;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.codehaus.plexus.util.DirectoryScanner;
import org.codehaus.plexus.util.StringUtils;
import org.mozilla.javascript.Function;
import org.mozilla.javascript.NativeArray;
import org.mozilla.javascript.NativeObject;
import org.mozilla.javascript.RhinoException;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.vecna.maven.jshint.report.JsHintCheckstyleReporter;
import com.vecna.maven.jshint.report.JsHintReporter;
import com.vecna.maven.jshint.rhino.JsEngine;
/**
* JSHint plugin.
* @author ogolberg@vecna.com
*/
@Mojo(name = "check",
defaultPhase = LifecyclePhase.PROCESS_SOURCES,
threadSafe = true)
public class JsHintMojo extends AbstractMojo {
/**
 * Location of the JSHint source on the classpath. Only needs to be set if a custom version of JSHint is needed.
 */
@Parameter(defaultValue = "jshint.js")
private String jsHintJS;
/**
 * Directory with the javascript files to be checked.
 */
@Parameter(defaultValue = "${basedir}/src/main/javascript")
private File srcDirectory;
/**
 * List of file name patterns to include (Ant-style, relative to {@link #srcDirectory}).
 */
@Parameter
private String[] includes;
/**
 * List of file name patterns to exclude (Ant-style, relative to {@link #srcDirectory}).
 */
@Parameter
private String[] excludes;
/**
 * Location of the JSHint options file on the classpath or filesystem
 * (filesystem is tried first, then the classpath).
 */
@Parameter(defaultValue = "jshintrc")
private String optionsFile;
/**
 * JSHint options (these take priority over the options file).
 */
@Parameter
private Map<String, String> options;
/**
 * Allowed globals (these take priority over the options file).
 * Values are parsed as booleans (writable true/false).
 */
@Parameter
private Map<String, String> globals;
/**
 * Maximum number of JSHint violations. Exceeding this will fail the build.
 */
@Parameter(defaultValue = "0")
private int maxErrorsAllowed;
/**
 * Whether to skip execution.
 */
@Parameter
private boolean skip;
/**
 * Location of the violation report.
 */
@Parameter(defaultValue = "${project.build.directory}/jshint.xml")
private File reportOutput;
// Report writer implementation; currently fixed to the checkstyle XML format.
private final JsHintReporter reporter = new JsHintCheckstyleReporter();
/**
 * Open a classpath resource for reading.
 * @param path the path to the resource
 * @return an {@link InputStream} or <code>null</code> if the resource is not found
 */
private InputStream openClasspathResource(String path) {
    ClassLoader contextLoader = Thread.currentThread().getContextClassLoader();
    return contextLoader.getResourceAsStream(path);
}
/**
 * Open a file or classpath resource for reading.
 * @param path the path (first looked up on the file system, then on the classpath)
 * @return an {@link InputStream} or <code>null</code> if the resource is not found
 */
private InputStream openFileOrClasspathResource(String path) {
    InputStream stream;
    try {
        stream = new FileInputStream(path);
    } catch (FileNotFoundException e) {
        // Not a readable file — fall back to the classpath lookup.
        stream = openClasspathResource(path);
    }
    return stream;
}
/**
 * Read JSHint options from a JSON file specified in the configuration.
 *
 * Fix: the options {@link InputStream} is now closed in a {@code finally}
 * block (via commons-io {@code closeQuietly}) — the original leaked the
 * stream on every invocation.
 *
 * @param engine JS engine
 * @return JS object with the options or an empty object if the options file doesn't exist
 * @throws MojoExecutionException if an error occurred while reading or parsing the file
 */
private NativeObject readOptionsFromFile(JsEngine engine) throws MojoExecutionException {
    if (StringUtils.isNotEmpty(optionsFile)) {
        InputStream optionsSrc = openFileOrClasspathResource(optionsFile);
        if (optionsSrc != null) {
            String json;
            try {
                json = IOUtils.toString(optionsSrc);
            } catch (IOException e) {
                throw new MojoExecutionException("failed to read " + optionsFile, e);
            } finally {
                IOUtils.closeQuietly(optionsSrc);
            }
            try {
                return engine.parseJSON(json);
            } catch (RhinoException e) {
                throw new MojoExecutionException("failed to parse " + optionsFile, e);
            }
        }
    }
    return new NativeObject();
}
/**
 * Extract (and remove) the "globals" entry from the jshintrc options object.
 * @return the globals as a {@link NativeObject}, or {@code null} when absent
 * @throws MojoExecutionException when "globals" is present but not an object
 */
private NativeObject extractGlobals(NativeObject optionsFromFile) throws MojoExecutionException {
    Object rawGlobals = optionsFromFile.remove("globals");
    if (rawGlobals == null || rawGlobals instanceof NativeObject) {
        return (NativeObject) rawGlobals;
    }
    throw new MojoExecutionException("bad globals definition");
}
/**
 * Apply the globals from the plugin configuration on top of the globals
 * extracted from the options file.
 *
 * @param globalsFromFile globals from the options file; may be <code>null</code>
 * @return the merged globals object (the file object mutated in place, or a new
 *         object when none came from the file), or <code>null</code> when the
 *         plugin configures no globals and none came from the file
 */
private NativeObject addGlobals(NativeObject globalsFromFile) {
    if (globals == null) {
        // nothing configured on the plugin side: pass the file globals through untouched
        return globalsFromFile;
    }
    NativeObject merged = (globalsFromFile != null) ? globalsFromFile : new NativeObject();
    for (Entry<String, String> configured : globals.entrySet()) {
        merged.put(configured.getKey(), merged, Boolean.valueOf(configured.getValue()));
    }
    return merged;
}
/**
 * Scan the configured source directory for matching files.
 *
 * @return source files (relative to the source directory)
 */
private String[] getSourceFiles() {
    DirectoryScanner dirScanner = new DirectoryScanner();
    dirScanner.setBasedir(srcDirectory);
    dirScanner.setIncludes(includes);
    dirScanner.setExcludes(excludes);
    dirScanner.scan();
    return dirScanner.getIncludedFiles();
}
/**
 * Coerce a configured option string into the most specific value JSHint understands.
 *
 * @param value the raw configuration value
 * @return true/false if the value is "true"/"false", an Integer if the value is
 *         numeric, or the string value itself otherwise
 */
private Object toOptionValue(String value) {
    // exact lowercase matches only, mirroring JSHint's boolean option syntax
    if ("true".equals(value) || "false".equals(value)) {
        return Boolean.valueOf(value);
    }
    return StringUtils.isNumeric(value) ? Integer.valueOf(value) : value;
}
/**
 * Fold the options from the plugin configuration into the options read from the file.
 *
 * @param optionsObject the options object to augment in place
 */
private void addOptions(NativeObject optionsObject) {
    if (options == null) {
        return;
    }
    for (Entry<String, String> configured : options.entrySet()) {
        optionsObject.put(configured.getKey(), optionsObject, toOptionValue(configured.getValue()));
    }
}
/**
 * Create the error report on disk.
 *
 * @param errors errors keyed by source file
 * @throws IOException if the report file cannot be written to
 * @throws MojoExecutionException if the report directory cannot be created
 */
private void writeReport(Multimap<String, JsHintError> errors) throws IOException, MojoExecutionException {
    try {
        org.codehaus.plexus.util.FileUtils.forceMkdir(reportOutput.getParentFile());
    } catch (IOException e) {
        // FIX: preserve the cause so the underlying I/O failure is not lost
        throw new MojoExecutionException("cannot create directory " + reportOutput.getParentFile(), e);
    }
    OutputStream reportOut = new FileOutputStream(reportOutput);
    try {
        reporter.report(errors, reportOut);
    } finally {
        reportOut.close();
    }
}
/**
 * {@inheritDoc}
 *
 * Runs JSHint over every configured source file, collects the violations,
 * writes the Checkstyle-format report, and fails the build when the violation
 * count exceeds {@code maxErrorsAllowed}.
 */
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    if (skip) {
        getLog().info("skipping execution");
    } else {
        String[] sourceFiles = getSourceFiles();
        if (sourceFiles.length == 0) {
            getLog().info("no source files found");
        } else {
            // Bootstrap JSHint itself inside the embedded JS engine.
            final InputStream jsHintSrc = openClasspathResource(jsHintJS);
            JsEngine engine;
            try {
                engine = new JsEngine().browserEnv().eval(jsHintSrc);
            } catch (IOException e) {
                throw new MojoExecutionException("failed to bootstrap JSHint", e);
            } finally {
                // FIX: the JSHint script stream was previously leaked
                IOUtils.closeQuietly(jsHintSrc);
            }
            // Merge options and globals from the options file with the plugin configuration.
            NativeObject combinedOpts = readOptionsFromFile(engine);
            NativeObject combinedGlobals = extractGlobals(combinedOpts);
            addOptions(combinedOpts);
            // FIX: the result of addGlobals() was previously discarded, so globals
            // configured on the plugin were silently dropped whenever the options
            // file contained no "globals" entry (combinedGlobals == null).
            combinedGlobals = addGlobals(combinedGlobals);
            Function jsHint = (Function) engine.get("JSHINT");
            Multimap<String, JsHintError> errors = HashMultimap.create();
            // Run JSHint over each source file and collect its violations.
            for (String srcFile : sourceFiles) {
                List<String> source;
                try {
                    source = FileUtils.readLines(new File(srcDirectory, srcFile));
                } catch (IOException e) {
                    throw new MojoExecutionException("failed to read " + srcFile, e);
                }
                NativeArray array = new NativeArray(source.toArray());
                engine.call(jsHint, array, combinedOpts, combinedGlobals);
                NativeArray nativeErrors = (NativeArray) engine.get(jsHint, "errors");
                for (int i = 0; i < nativeErrors.size(); i++) {
                    NativeObject nativeError = (NativeObject) nativeErrors.get(i);
                    JsHintError error = new JsHintError(srcFile, nativeError);
                    // handling the built-in JsHint error limit
                    if (error.getReason().startsWith("Too many errors")) {
                        break;
                    }
                    getLog().error(error.toString());
                    errors.put(error.getSource(), error);
                }
            }
            try {
                writeReport(errors);
            } catch (IOException e) {
                throw new MojoExecutionException("failed to write the report", e);
            }
            // Fail the build when the violation count exceeds the configured ceiling.
            if (errors.size() > maxErrorsAllowed) {
                throw new MojoFailureException("JSHint violations: " + errors.size() + ". Allowed violations: " + maxErrorsAllowed);
            }
        }
    }
}
}
| |
/*******************************************************************************
* Copyright (c) 2015, 2016 Pivotal Software, Inc. and others
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
*
* http://www.eclipse.org/legal/epl-v10.html
*
* and the Apache License v2.0 is available at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* You may elect to redistribute this code under either of these licenses.
*
* Contributors:
* Pivotal Software, Inc. - initial API and implementation
********************************************************************************/
package org.eclipse.cft.server.ui.internal;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.eclipse.cft.server.core.internal.CloudFoundryPlugin;
import org.eclipse.cft.server.core.internal.CloudFoundryProjectUtil;
import org.eclipse.cft.server.core.internal.CloudServerEvent;
import org.eclipse.cft.server.core.internal.ServerEventHandler;
import org.eclipse.cft.server.core.internal.jrebel.CFRebelServerIntegration;
import org.eclipse.cft.server.core.internal.jrebel.JRebelIntegrationUtility;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.jdt.core.IClasspathEntry;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.osgi.util.NLS;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.wst.server.core.IModule;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
 * UI-side JRebel server integration for Cloud Foundry. On a manual JRebel
 * remoting update of a Spring-Boot-configured project it offers to rewrite the
 * project's rebel.xml so that jar-only folders ("lib" and ".java-buildpack",
 * which exist in the Spring Boot jar but have no workspace equivalent) are
 * excluded from the project's output directory entry.
 */
public class CloudRebelUIServerIntegration extends CFRebelServerIntegration {

    // Holds the single registered listener instance and guards against double
    // registration.
    // NOTE(review): plain static field without synchronisation - presumably
    // register() is only invoked from one thread; confirm with callers.
    private static CFRebelServerIntegration integration;

    public CloudRebelUIServerIntegration() {
    }

    /**
     * Register this instance as a server listener, once, and only when the
     * JRebel IDE plugin is installed.
     */
    public void register() {
        if (integration == null && JRebelIntegrationUtility.isJRebelIDEInstalled()) {
            integration = this;
            ServerEventHandler.getDefault().addServerListener(integration);
        }
    }

    /**
     * Rewrites the rebel.xml before delegating to the superclass, but only for
     * a manual Remoting update on a Spring-Boot-configured Cloud Foundry
     * project.
     */
    @Override
    protected void handleRebelProject(CloudServerEvent event, IModule module, String consoleMessage,
            IProgressMonitor monitor) throws CoreException {
        IProject project = module.getProject();
        // Only replace rebel xml file if a manual Remoting
        // update is performed on Spring boot applications
        if (event.getType() == CloudServerEvent.EVENT_JREBEL_REMOTING_UPDATE
                && CloudFoundryProjectUtil.isSpringBootCloudFoundryConfigured(project)) {
            updateRebelXML(project, monitor);
        }
        super.handleRebelProject(event, module, consoleMessage, monitor);
    }

    /**
     * Rewrite the project's rebel.xml by appending missing "exclude" entries
     * under the "dir" element that matches one of the project's resolved source
     * output paths. The user is asked for confirmation on the UI thread before
     * the file is replaced; every checked exception is logged and swallowed
     * (best effort).
     *
     * @param project the project whose rebel.xml should be updated
     * @param monitor progress monitor used for the final workspace refresh
     */
    protected void updateRebelXML(IProject project, IProgressMonitor monitor) {
        // rebel.xml is overwritten for Spring Boot Jar apps to skip the /lib
        // folder which
        // is only generated in the Spring Boot Jar but has no workspace
        // equivalent
        try {
            IFile file = getRebelXMLFile(project);
            if (file != null && file.isAccessible()) {
                String path = file.getRawLocation() != null ? file.getRawLocation().toString() : null;
                if (path != null) {
                    List<String> outputPaths = getClasspathSourceOutputPaths(project);
                    // NOTE(review): parser created with default settings (no
                    // DTD/XXE hardening). rebel.xml is workspace-local, so the
                    // risk is low, but consider disabling external entities.
                    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
                    DocumentBuilder db = factory.newDocumentBuilder();
                    Document doc = db.parse(new File(path));
                    Element javaProjectOutputElement = null;
                    NodeList nodeList = doc.getElementsByTagName("*"); //$NON-NLS-1$
                    if (nodeList != null) {
                        String libFolderName = "lib/**"; //$NON-NLS-1$
                        String javaBuildpackFolderName = ".java-buildpack/**"; //$NON-NLS-1$
                        // Find the node element that corresponds to the
                        // project's source output. Check all the resolved
                        // output paths to find
                        // the one that matches the "dir" element in the
                        // rebel.xml
                        for (int i = 0; i < nodeList.getLength(); i++) {
                            Node node = nodeList.item(i);
                            if ((node instanceof Element) && node.getNodeName().equals("dir")) { //$NON-NLS-1$
                                Element element = (Element) node;
                                String att = element.getAttribute("name"); //$NON-NLS-1$
                                if (att != null) {
                                    for (String outpath : outputPaths) {
                                        if (att.contains(outpath)) {
                                            javaProjectOutputElement = element;
                                            break;
                                        }
                                    }
                                    if (javaProjectOutputElement != null) {
                                        break;
                                    }
                                }
                            }
                        }
                        if (javaProjectOutputElement != null) {
                            // Collect the exclude entries that already exist so
                            // they are not duplicated below.
                            NodeList binChildren = javaProjectOutputElement.getChildNodes();
                            Element existingExcludeLib = null;
                            Element existingExcludeJavabuildpack = null;
                            if (binChildren != null) {
                                for (int i = 0; i < binChildren.getLength(); i++) {
                                    Node node = binChildren.item(i);
                                    if (node instanceof Element && node.getNodeName().equals("exclude")) { //$NON-NLS-1$
                                        Element excludeElement = (Element) node;
                                        Attr attr = excludeElement.getAttributeNode("name"); //$NON-NLS-1$
                                        if (attr != null && attr.getNodeValue() != null) {
                                            if (attr.getNodeValue().equals(libFolderName)) {
                                                existingExcludeLib = excludeElement;
                                            }
                                            else if (attr.getNodeValue().equals(javaBuildpackFolderName)) {
                                                existingExcludeJavabuildpack = excludeElement;
                                            }
                                        }
                                    }
                                }
                            }
                            // Append whichever exclude elements are missing.
                            Element updatedExcludeLib = null;
                            if (existingExcludeLib == null) {
                                updatedExcludeLib = doc.createElement("exclude"); //$NON-NLS-1$
                                updatedExcludeLib.setAttribute("name", libFolderName); //$NON-NLS-1$
                                javaProjectOutputElement.appendChild(updatedExcludeLib);
                            }
                            Element updatedExcludeJavabuildpack = null;
                            if (existingExcludeJavabuildpack == null) {
                                updatedExcludeJavabuildpack = doc.createElement("exclude"); //$NON-NLS-1$
                                updatedExcludeJavabuildpack.setAttribute("name", javaBuildpackFolderName); //$NON-NLS-1$
                                javaProjectOutputElement.appendChild(updatedExcludeJavabuildpack);
                            }
                            if (updatedExcludeLib != null || updatedExcludeJavabuildpack != null) {
                                // Something was added: ask the user, on the UI
                                // thread, before replacing the file on disk.
                                final boolean[] proceed = { false };
                                final List<String> toexclude = new ArrayList<String>();
                                if (updatedExcludeLib != null) {
                                    toexclude.add(libFolderName);
                                }
                                if (updatedExcludeJavabuildpack != null) {
                                    toexclude.add(javaBuildpackFolderName);
                                }
                                Display.getDefault().syncExec(new Runnable() {
                                    public void run() {
                                        Shell shell = CFUiUtil.getShell();
                                        proceed[0] = shell != null && !shell.isDisposed()
                                                && MessageDialog.openQuestion(shell,
                                                        Messages.CloudRebelUIHandler_TEXT_REPLACE_REBEL_XML_TITLE,
                                                        NLS.bind(
                                                                Messages.CloudRebelUIHandler_TEXT_REPLACE_REBEL_XML_BODY,
                                                                toexclude));
                                    }
                                });
                                if (proceed[0]) {
                                    // If replacing the exist rebel.xml file, be
                                    // sure to switch off automatic rebel.xml
                                    // generation
                                    JRebelIntegrationUtility.setAutoGeneratedXMLDisabledProperty(project);
                                    Transformer transformer = TransformerFactory.newInstance().newTransformer();
                                    transformer.setOutputProperty(OutputKeys.INDENT, "yes"); //$NON-NLS-1$
                                    DOMSource source = new DOMSource(doc);
                                    StreamResult console = new StreamResult(new File(path));
                                    transformer.transform(source, console);
                                    project.refreshLocal(IResource.DEPTH_INFINITE, monitor);
                                }
                            }
                        }
                    }
                }
            }
        }
        catch (ParserConfigurationException e) {
            CloudFoundryPlugin.logError(e);
        }
        catch (SAXException e) {
            CloudFoundryPlugin.logError(e);
        }
        catch (IOException e) {
            CloudFoundryPlugin.logError(e);
        }
        catch (TransformerException e) {
            CloudFoundryPlugin.logError(e);
        }
        catch (CoreException e) {
            CloudFoundryPlugin.logError(e);
        }
    }

    /**
     * Resolve the output locations of the project's source classpath entries
     * plus the project default output location. Test output
     * ("target/test-classes") and duplicates are skipped.
     *
     * @param project the project to inspect
     * @return output paths; empty when the project is not a Java project or the
     *         classpath cannot be resolved
     */
    protected List<String> getClasspathSourceOutputPaths(IProject project) {
        IJavaProject javaProject = CloudFoundryProjectUtil.getJavaProject(project);
        List<String> outputPaths = new ArrayList<String>();
        if (javaProject != null) {
            try {
                IClasspathEntry[] classpath = javaProject.getResolvedClasspath(true);
                if (classpath != null) {
                    for (IClasspathEntry entry : classpath) {
                        if (entry != null && entry.getEntryKind() == IClasspathEntry.CPE_SOURCE) {
                            String outputPath = entry.getOutputLocation() != null ? entry.getOutputLocation().toString()
                                    : null;
                            if (outputPath != null && !outputPaths.contains(outputPath)
                                    && !outputPath.contains("target/test-classes")) {//$NON-NLS-1$
                                outputPaths.add(outputPath);
                            }
                        }
                    }
                }
                String outputPath = javaProject.getOutputLocation() != null ? javaProject.getOutputLocation().toString()
                        : null;
                if (outputPath != null && !outputPaths.contains(outputPath)) {
                    outputPaths.add(outputPath);
                }
            }
            catch (JavaModelException e) {
                CloudFoundryPlugin.logError(e);
            }
        }
        return outputPaths;
    }

    /**
     * Locate the rebel.xml file anywhere inside the given project.
     *
     * @param project the project to search
     * @return the file, or <code>null</code> when absent, when the project is
     *         not accessible, or when the search fails
     */
    protected IFile getRebelXMLFile(IProject project) {
        if (project != null && project.isAccessible()) {
            try {
                return getFile(project, "rebel.xml"); //$NON-NLS-1$
            }
            catch (CoreException e) {
                CloudFoundryPlugin.logError(e);
            }
        }
        return null;
    }

    /**
     * Depth-first search for a file with the given name below the resource.
     *
     * @param resource the resource (file or container) to search
     * @param fileName the file name to match exactly
     * @return the first matching file, or <code>null</code>
     * @throws CoreException if container members cannot be listed
     */
    protected IFile getFile(IResource resource, String fileName) throws CoreException {
        if (resource instanceof IFile && resource.getName().equals(fileName)) {
            return (IFile) resource;
        }
        else if (resource instanceof IContainer) {
            IContainer container = (IContainer) resource;
            IResource[] children = container.members();
            if (children != null) {
                for (IResource child : children) {
                    IFile file = getFile(child, fileName);
                    if (file != null) {
                        return file;
                    }
                }
            }
        }
        return null;
    }
}
| |
/*
* The MIT License
*
* Copyright 2017 Thomas Gilon.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package aptgraph.infection;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
/**
* Main class for Infection.
*
* @author Thomas Gilon
*/
public final class Main {

    // Defaults for the optional command line arguments.
    private static final String DEFAULT_FORMAT = "squid";
    private static final double DEFAULT_PROPORTION = 1.0;
    // Integer.MAX_VALUE encodes "no daily injection limit".
    private static final int DEFAULT_INJECTION = Integer.MAX_VALUE;
    // 0 encodes "no minimal time distance between two injections".
    private static final long DEFAULT_DISTANCE = 0L;

    /**
     * Main method of Infection.
     *
     * @param args Arguments from the command line
     * @throws ParseException If we cannot parse command line args
     * @throws FileNotFoundException If the input file does not exist
     * @throws IOException If we cannot read the input file
     * @throws IllegalArgumentException If wrong arguments are given
     * @throws java.text.ParseException If text can't be parsed
     */
    public static void main(final String[] args)
            throws ParseException, FileNotFoundException, IOException,
            IllegalArgumentException, java.text.ParseException {
        // Default value of arguments
        String format = DEFAULT_FORMAT;
        double proportion = DEFAULT_PROPORTION;
        int injection_day = DEFAULT_INJECTION;
        long distance_time = DEFAULT_DISTANCE;
        long delay = 0L;
        // Parse command line arguments
        Options options = new Options();
        options.addOption("i", true, "Input log file (required)");
        options.addOption("o", true, "Output log file (required)");
        options.addOption("d", true, "APT domain name (required)");
        options.addOption("t", true, "Type (periodic or traffic) (required)");
        options.addOption("u", true, "Targeted user or subnet (required)");
        Option arg_time_step = Option.builder("step")
                .optionalArg(true)
                .desc("Specify time step between periodic"
                        + " injections in milliseconds (required for"
                        + " periodic APT)")
                .hasArg(true)
                .numberOfArgs(1)
                .build();
        options.addOption(arg_time_step);
        Option arg_delta_time = Option.builder("delta")
                .optionalArg(true)
                .desc("Maximal duration between two requests of the same burst"
                        + " (required for traffic APT)")
                .hasArg(true)
                .numberOfArgs(1)
                .build();
        options.addOption(arg_delta_time);
        Option arg_duration = Option.builder("duration")
                .optionalArg(true)
                .desc("Minimal duration of a burst to allow APT injection"
                        + " (required for traffic APT)")
                .hasArg(true)
                .numberOfArgs(1)
                .build();
        options.addOption(arg_duration);
        Option arg_injection_day = Option.builder("injection")
                .optionalArg(true)
                .desc("Maximal daily number of injections (option for traffic"
                        + " APT, default : no limitation)")
                .hasArg(true)
                .numberOfArgs(1)
                .build();
        options.addOption(arg_injection_day);
        Option arg_proportion = Option.builder("proportion")
                .optionalArg(true)
                .desc("Injection rate in the possible bursts (1 = inject"
                        + " in all possible bursts) (option for traffic APT,"
                        + " default : 1)")
                .hasArg(true)
                .numberOfArgs(1)
                .build();
        options.addOption(arg_proportion);
        Option arg_distance = Option.builder("distance")
                .optionalArg(true)
                .desc("Minimal time distance between two injections"
                        + " (option for traffic APT, default : no limitation)")
                .hasArg(true)
                .numberOfArgs(1)
                .build();
        options.addOption(arg_distance);
        Option arg_delay = Option.builder("delay")
                .optionalArg(true)
                .desc("Delay between start of the burst and injection of APT "
                        + "(option for traffic APT, default :"
                        + " middle of the burst)")
                .hasArg(true)
                .numberOfArgs(1)
                .build();
        options.addOption(arg_delay);
        Option arg_format = Option.builder("f")
                .optionalArg(true)
                .desc("Specify format of input file (squid or json) "
                        + "(option, default : squid)")
                .hasArg(true)
                .numberOfArgs(1)
                .build();
        options.addOption(arg_format);
        options.addOption("h", false, "Show this help");
        CommandLineParser parser = new DefaultParser();
        CommandLine cmd = parser.parse(options, args);
        // Show usage and stop when help is requested or a required option is missing.
        if (cmd.hasOption("h")
                || !cmd.hasOption("i")
                || !cmd.hasOption("o")
                || !cmd.hasOption("d")
                || !cmd.hasOption("t")
                || !cmd.hasOption("u")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("java -jar infection-<version>.jar", options);
            return;
        }
        String type = "";
        try {
            type = cmd.getOptionValue("t");
            if (!type.equals("periodic") && !type.equals("traffic")) {
                throw new IllegalArgumentException("Wrong type option");
            }
        } catch (IllegalArgumentException ex) {
            // NOTE(review): the error is only printed and execution continues;
            // with an invalid -t neither injection branch below runs and an
            // empty output file is still created - confirm this best-effort
            // behaviour is intended.
            System.err.println(ex);
        }
        long time_step = 0L;
        long delta_time = Long.MAX_VALUE;
        long duration = 0L;
        try {
            if (type.equals("periodic")) {
                if (!cmd.hasOption("step")) {
                    throw new IllegalArgumentException(
                            "Time Step is not given");
                } else {
                    time_step = Long.parseLong(cmd.getOptionValue("step"));
                }
            } else if (type.equals("traffic")) {
                if (!cmd.hasOption("delta")) {
                    throw new IllegalArgumentException(
                            "Delta Time is not given");
                } else {
                    delta_time = Long.parseLong(cmd.getOptionValue("delta"));
                }
                if (!cmd.hasOption("duration")) {
                    throw new IllegalArgumentException(
                            "Duration is not given");
                } else {
                    duration = Long.parseLong(cmd.getOptionValue("duration"));
                    delay = duration / 2; // Default value
                    if (cmd.hasOption("delay")) {
                        delay = Long.parseLong(
                                cmd.getOptionValue("delay"));
                    }
                }
                if (cmd.hasOption("injection")) {
                    injection_day = Integer.parseInt(
                            cmd.getOptionValue("injection"));
                }
                if (cmd.hasOption("proportion")) {
                    proportion = Double.parseDouble(
                            cmd.getOptionValue("proportion"));
                }
                if (cmd.hasOption("distance")) {
                    distance_time = Long.parseLong(
                            cmd.getOptionValue("distance"));
                }
            }
        } catch (IllegalArgumentException ex) {
            // NOTE(review): missing required sub-options are reported but the
            // run proceeds with the default values (e.g. time_step == 0) -
            // confirm this is intended.
            System.err.println(ex);
        }
        try {
            if (cmd.hasOption("f")) {
                format = cmd.getOptionValue("f");
                if (!format.equals("squid") && !format.equals("json")) {
                    throw new IllegalArgumentException("Wrong format option");
                }
            }
        } catch (IllegalArgumentException ex) {
            // NOTE(review): as above, the invalid format value is kept after
            // printing the error.
            System.err.println(ex);
        }
        FileOutputStream output_stream
                = new FileOutputStream(cmd.getOptionValue("o"));
        try {
            // Dispatch to the selected injection strategy; the output stream
            // is always closed, even when injection fails.
            ApplyAPT apt = new ApplyAPT();
            if (type.equals("periodic")) {
                apt.periodicAPT(
                        new FileInputStream(cmd.getOptionValue("i")),
                        output_stream,
                        cmd.getOptionValue("d"),
                        cmd.getOptionValue("u"),
                        format,
                        time_step);
            } else if (type.equals("traffic")) {
                apt.trafficAPT(
                        new FileInputStream(cmd.getOptionValue("i")),
                        output_stream,
                        cmd.getOptionValue("d"),
                        cmd.getOptionValue("u"),
                        format,
                        delta_time,
                        duration,
                        injection_day,
                        proportion,
                        distance_time,
                        delay);
            }
        } finally {
            output_stream.close();
        }
    }

    // Utility class: not instantiable.
    private Main() {
    }
}
| |
package org.hisp.dhis.attribute;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.api.client.util.Maps;
import org.apache.commons.lang3.StringUtils;
import org.hisp.dhis.attribute.exception.MissingMandatoryAttributeValueException;
import org.hisp.dhis.attribute.exception.NonUniqueAttributeValueException;
import org.hisp.dhis.common.IdentifiableObject;
import org.hisp.dhis.common.IdentifiableObjectManager;
import org.hisp.dhis.common.ValueType;
import org.hisp.dhis.feedback.ErrorCode;
import org.hisp.dhis.feedback.ErrorReport;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
/**
* @author Morten Olav Hansen <mortenoh@gmail.com>
*/
@Transactional
public class DefaultAttributeService
implements AttributeService
{
// Marks attribute values that must be deleted rather than updated on save:
// a TRUE_ONLY attribute whose value is null has effectively been unset.
private static final Predicate<AttributeValue> SHOULD_DELETE_ON_UPDATE =
    ( attributeValue ) ->
        attributeValue.getValue() == null && attributeValue.getAttribute().getValueType() == ValueType.TRUE_ONLY;
// -------------------------------------------------------------------------
// Dependencies
// -------------------------------------------------------------------------
// Store for Attribute metadata (setter-injected).
private AttributeStore attributeStore;

public void setAttributeStore( AttributeStore attributeStore )
{
    this.attributeStore = attributeStore;
}
// Store for AttributeValue instances (setter-injected).
private AttributeValueStore attributeValueStore;

public void setAttributeValueStore( AttributeValueStore attributeValueStore )
{
    this.attributeValueStore = attributeValueStore;
}
// Generic object manager used for uniqueness lookups across identifiable objects.
@Autowired
private IdentifiableObjectManager manager;
// -------------------------------------------------------------------------
// Attribute implementation
// -------------------------------------------------------------------------
/**
 * Persist a new attribute.
 *
 * @param attribute the attribute to save
 */
@Override
public void addAttribute( Attribute attribute )
{
    attributeStore.save( attribute );
}
/**
 * Update an existing attribute.
 *
 * @param attribute the attribute to update
 */
@Override
public void updateAttribute( Attribute attribute )
{
    attributeStore.update( attribute );
}
/**
 * Delete an attribute.
 *
 * @param attribute the attribute to delete
 */
@Override
public void deleteAttribute( Attribute attribute )
{
    attributeStore.delete( attribute );
}
/**
 * Look up an attribute by its database identifier.
 *
 * @param id the database id
 * @return the attribute, or null when not found
 */
@Override
public Attribute getAttribute( int id )
{
    return attributeStore.get( id );
}
/**
 * Look up an attribute by its UID.
 *
 * @param uid the stable identifier
 * @return the attribute, or null when not found
 */
@Override
public Attribute getAttribute( String uid )
{
    return attributeStore.getByUid( uid );
}
/**
 * Look up an attribute by name.
 *
 * @param name the attribute name
 * @return the attribute, or null when not found
 */
@Override
public Attribute getAttributeByName( String name )
{
    return attributeStore.getByName( name );
}
/**
 * Look up an attribute by code.
 *
 * @param code the attribute code
 * @return the attribute, or null when not found
 */
@Override
public Attribute getAttributeByCode( String code )
{
    return attributeStore.getByCode( code );
}
/**
 * @return a new mutable list of all attributes
 */
@Override
public List<Attribute> getAllAttributes()
{
    return new ArrayList<>( attributeStore.getAll() );
}
/**
 * @param klass the object class the attributes must support
 * @return a new mutable list of the attributes supporting that class
 */
@Override
public List<Attribute> getAttributes( Class<?> klass )
{
    return new ArrayList<>( attributeStore.getAttributes( klass ) );
}
/**
 * @param klass the object class the attributes must support
 * @return a new mutable list of the mandatory attributes for that class
 */
@Override
public List<Attribute> getMandatoryAttributes( Class<?> klass )
{
    return new ArrayList<>( attributeStore.getMandatoryAttributes( klass ) );
}
/**
 * @param klass the object class the attributes must support
 * @return a new mutable list of the unique attributes for that class
 */
@Override
public List<Attribute> getUniqueAttributes( Class<?> klass )
{
    return new ArrayList<>( attributeStore.getUniqueAttributes( klass ) );
}
// -------------------------------------------------------------------------
// AttributeValue implementation
// -------------------------------------------------------------------------
/**
 * Save a new attribute value and attach it to the given object. Silently does
 * nothing when the arguments are incomplete or when the attribute does not
 * support the object's class.
 *
 * @param object the owning object
 * @param attributeValue the value to save
 * @throws NonUniqueAttributeValueException when the attribute is unique and
 *         another object already holds this value
 */
@Override
public <T extends IdentifiableObject> void addAttributeValue( T object, AttributeValue attributeValue ) throws NonUniqueAttributeValueException
{
    if ( object == null || attributeValue == null )
    {
        return;
    }
    Attribute attribute = attributeValue.getAttribute();
    if ( attribute == null || !attribute.getSupportedClasses().contains( object.getClass() ) )
    {
        return;
    }
    if ( attribute.isUnique() )
    {
        // Any existing holder of this value means the uniqueness constraint is violated.
        List<AttributeValue> existing = manager.getAttributeValueByAttributeAndValue(
            object.getClass(), attribute, attributeValue.getValue() );
        if ( !existing.isEmpty() )
        {
            throw new NonUniqueAttributeValueException( attributeValue );
        }
    }
    attributeValue.setAutoFields();
    attributeValueStore.save( attributeValue );
    object.getAttributeValues().add( attributeValue );
}
/**
 * Update an attribute value and attach it to the given object. Silently does
 * nothing when the arguments are incomplete or when the attribute does not
 * support the object's class.
 *
 * @param object the owning object
 * @param attributeValue the value to update
 * @throws NonUniqueAttributeValueException when the attribute is unique and
 *         the value is already held by a different object
 */
@Override
public <T extends IdentifiableObject> void updateAttributeValue( T object, AttributeValue attributeValue ) throws NonUniqueAttributeValueException
{
    if ( object == null || attributeValue == null )
    {
        return;
    }
    Attribute attribute = attributeValue.getAttribute();
    if ( attribute == null || !attribute.getSupportedClasses().contains( object.getClass() ) )
    {
        return;
    }
    if ( attribute.isUnique() )
    {
        List<AttributeValue> matches = manager.getAttributeValueByAttributeAndValue(
            object.getClass(), attribute, attributeValue.getValue() );
        // The value may only exist once, and only on this very object.
        boolean ownedByThisObject = matches.size() == 1 && object.getAttributeValues().contains( matches.get( 0 ) );
        if ( !matches.isEmpty() && !ownedByThisObject )
        {
            throw new NonUniqueAttributeValueException( attributeValue );
        }
    }
    attributeValue.setAutoFields();
    attributeValueStore.update( attributeValue );
    object.getAttributeValues().add( attributeValue );
}
/**
 * Delete an attribute value.
 *
 * @param attributeValue the value to delete
 */
@Override
public void deleteAttributeValue( AttributeValue attributeValue )
{
    attributeValueStore.delete( attributeValue );
}
/**
 * Look up an attribute value by its database identifier.
 *
 * @param id the database id
 * @return the attribute value, or null when not found
 */
@Override
public AttributeValue getAttributeValue( int id )
{
    return attributeValueStore.get( id );
}
/**
 * @return a new mutable list of all attribute values
 */
@Override
public List<AttributeValue> getAllAttributeValues()
{
    return new ArrayList<>( attributeValueStore.getAll() );
}
/**
 * @param attributes the attributes to match
 * @return all values belonging to any of the given attributes
 */
@Override
public List<AttributeValue> getAllAttributeValuesByAttributes( List<Attribute> attributes )
{
    return attributeValueStore.getAllByAttributes( attributes );
}
/**
 * @param attribute the attribute to match
 * @return all values belonging to the given attribute
 */
@Override
public List<AttributeValue> getAllAttributeValuesByAttribute( Attribute attribute )
{
    return attributeValueStore.getAllByAttribute( attribute );
}
/**
 * @param attribute the attribute to match
 * @param value the value to match
 * @return all attribute values with the given attribute and value
 */
@Override
public List<AttributeValue> getAllAttributeValuesByAttributeAndValue( Attribute attribute, String value )
{
    return attributeValueStore.getAllByAttributeAndValue( attribute, value );
}
/**
 * Delegate the uniqueness check for an attribute value to the store.
 *
 * @param object the owning object
 * @param attributeValue the value to check
 * @return true when the value is unique as reported by the store
 */
@Override
public <T extends IdentifiableObject> boolean isAttributeValueUnique( T object, AttributeValue attributeValue )
{
    return attributeValueStore.isAttributeValueUnique( object, attributeValue );
}
/**
 * @return total number of stored attribute values
 */
@Override
public int getAttributeValueCount()
{
    return attributeValueStore.getCount();
}
/**
 * Validate candidate attribute values against the object's current values and
 * the mandatory/unique attribute constraints of the object's class.
 *
 * @param object the object owning the attribute values
 * @param attributeValues candidate values, keyed internally by attribute UID
 * @return error reports: E4009 for uniqueness violations, E4010 for attributes
 *         that do not support the object's class, E4011 for missing mandatory
 *         attributes; empty when the input set is empty
 */
@Override
public <T extends IdentifiableObject> List<ErrorReport> validateAttributeValues( T object, Set<AttributeValue> attributeValues )
{
    List<ErrorReport> errorReports = new ArrayList<>();
    if ( attributeValues.isEmpty() )
    {
        return errorReports;
    }
    // Index the candidates by attribute UID for constant-time lookup below.
    Map<String, AttributeValue> attributeValueMap = attributeValues.stream()
        .collect( Collectors.toMap( av -> av.getAttribute().getUid(), av -> av ) );
    Iterator<AttributeValue> iterator = object.getAttributeValues().iterator();
    List<Attribute> mandatoryAttributes = getMandatoryAttributes( object.getClass() );
    // First pass: candidates whose attribute is already present on the object.
    while ( iterator.hasNext() )
    {
        AttributeValue attributeValue = iterator.next();
        if ( attributeValueMap.containsKey( attributeValue.getAttribute().getUid() ) )
        {
            AttributeValue av = attributeValueMap.get( attributeValue.getAttribute().getUid() );
            if ( attributeValue.isUnique() )
            {
                if ( !manager.isAttributeValueUnique( object.getClass(), object, attributeValue.getAttribute(), av.getValue() ) )
                {
                    errorReports.add( new ErrorReport( Attribute.class, ErrorCode.E4009, attributeValue.getAttribute().getUid(), av.getValue() ) );
                }
            }
            // Handled: no longer a new candidate, and its mandatory constraint is satisfied.
            attributeValueMap.remove( attributeValue.getAttribute().getUid() );
            mandatoryAttributes.remove( attributeValue.getAttribute() );
        }
    }
    // Second pass: remaining candidates are new to the object; verify class support.
    for ( String uid : attributeValueMap.keySet() )
    {
        AttributeValue attributeValue = attributeValueMap.get( uid );
        if ( !attributeValue.getAttribute().getSupportedClasses().contains( object.getClass() ) )
        {
            errorReports.add( new ErrorReport( Attribute.class, ErrorCode.E4010, attributeValue.getAttribute().getUid(), object.getClass().getSimpleName() ) );
        }
        else
        {
            mandatoryAttributes.remove( attributeValue.getAttribute() );
        }
    }
    // Any mandatory attribute still unaccounted for is missing a value.
    mandatoryAttributes.stream().forEach( att -> errorReports.add( new ErrorReport( Attribute.class, ErrorCode.E4011, att.getUid() ) ) );
    return errorReports;
}
/**
 * Parse the given JSON attribute value payloads and apply them to the object.
 *
 * @param object the object to update
 * @param jsonAttributeValues JSON representations of the attribute values
 * @throws Exception if parsing fails or the update violates a constraint
 */
@Override
public <T extends IdentifiableObject> void updateAttributeValues( T object, List<String> jsonAttributeValues ) throws Exception
{
    updateAttributeValues( object, getJsonAttributeValues( jsonAttributeValues ) );
}
/**
 * Reconcile the object's attribute values with the given set: values present
 * in both are updated in place, values only in the set are added, values
 * missing from the set are detached from the object, and TRUE_ONLY values with
 * a null value are deleted.
 *
 * @param object the object to update
 * @param attributeValues the desired attribute values; no-op when empty
 * @throws NonUniqueAttributeValueException when a unique attribute's value is
 *         already held by another object
 * @throws MissingMandatoryAttributeValueException when a mandatory attribute
 *         ends up without a value
 * @throws Exception propagated from {@link #addAttributeValue}
 */
@Override
public <T extends IdentifiableObject> void updateAttributeValues( T object, Set<AttributeValue> attributeValues ) throws Exception
{
    if ( attributeValues.isEmpty() )
    {
        return;
    }
    // Unset TRUE_ONLY values are deletions, not updates.
    Set<AttributeValue> toBeDeleted = attributeValues.stream()
        .filter( SHOULD_DELETE_ON_UPDATE )
        .collect( Collectors.toSet() );
    // Remaining candidates, indexed by attribute UID.
    Map<String, AttributeValue> attributeValueMap = attributeValues.stream()
        .filter( SHOULD_DELETE_ON_UPDATE.negate() )
        .collect( Collectors.toMap( av -> av.getAttribute().getUid(), av -> av ) );
    Iterator<AttributeValue> iterator = object.getAttributeValues().iterator();
    List<Attribute> mandatoryAttributes = getMandatoryAttributes( object.getClass() );
    // Pass 1: update values the object already has; detach the ones not in the set.
    while ( iterator.hasNext() )
    {
        AttributeValue attributeValue = iterator.next();
        if ( attributeValueMap.containsKey( attributeValue.getAttribute().getUid() ) )
        {
            AttributeValue av = attributeValueMap.get( attributeValue.getAttribute().getUid() );
            if ( attributeValue.isUnique() )
            {
                if ( manager.isAttributeValueUnique( object.getClass(), object, attributeValue.getAttribute(), av.getValue() ) )
                {
                    attributeValue.setValue( av.getValue() );
                }
                else
                {
                    throw new NonUniqueAttributeValueException( attributeValue, av.getValue() );
                }
            }
            else
            {
                attributeValue.setValue( av.getValue() );
            }
            attributeValueMap.remove( attributeValue.getAttribute().getUid() );
            mandatoryAttributes.remove( attributeValue.getAttribute() );
        }
        else
        {
            // Not in the desired set: detach from the object.
            iterator.remove();
        }
    }
    // Pass 2: everything left in the map is new to the object.
    for ( String uid : attributeValueMap.keySet() )
    {
        AttributeValue attributeValue = attributeValueMap.get( uid );
        addAttributeValue( object, attributeValue );
        mandatoryAttributes.remove( attributeValue.getAttribute() );
    }
    // Pass 3: delete the unset TRUE_ONLY values.
    for ( AttributeValue attributeValue : toBeDeleted )
    {
        mandatoryAttributes.remove( attributeValue.getAttribute() );
        deleteAttributeValue( attributeValue );
    }
    if ( !mandatoryAttributes.isEmpty() )
    {
        throw new MissingMandatoryAttributeValueException( mandatoryAttributes );
    }
}
//--------------------------------------------------------------------------------------------------
// Helpers
//--------------------------------------------------------------------------------------------------
/**
 * Converts raw JSON attribute-value payloads into AttributeValue instances.
 * Entries with an unknown attribute id or an unusable value are skipped.
 */
private Set<AttributeValue> getJsonAttributeValues( List<String> jsonAttributeValues )
    throws IOException
{
    Set<AttributeValue> attributeValues = new HashSet<>();

    for ( Map.Entry<Integer, String> entry : jsonToMap( jsonAttributeValues ).entrySet() )
    {
        Attribute attribute = getAttribute( entry.getKey() );

        if ( attribute == null )
        {
            continue;
        }

        AttributeValue attributeValue = parseAttributeValue( attribute, entry.getValue() );

        if ( attributeValue != null )
        {
            attributeValues.add( attributeValue );
        }
    }

    return attributeValues;
}
/**
 * Parse and create AttributeValue from attribute, id and string value.
 * Sets null for all non-"true" TRUE_ONLY AttributeValues.
 */
private AttributeValue parseAttributeValue( Attribute attribute, String value )
{
    if ( attribute.getValueType() == ValueType.TRUE_ONLY )
    {
        // TRUE_ONLY is normalised: anything other than "true" becomes null.
        // equalsIgnoreCase already returns false for null/empty input.
        String normalised = "true".equalsIgnoreCase( value ) ? "true" : null;
        return new AttributeValue( normalised, attribute );
    }

    return StringUtils.isEmpty( value ) ? null : new AttributeValue( value, attribute );
}
/**
 * Parses raw JSON into a map of ID -> Value.
 * Allows null and empty values (must be handled later).
 */
private Map<Integer, String> jsonToMap( List<String> jsonAttributeValues )
    throws IOException
{
    Map<Integer, String> parsed = Maps.newHashMap();
    ObjectMapper mapper = new ObjectMapper();

    for ( String jsonString : jsonAttributeValues )
    {
        JsonNode node = mapper.readValue( jsonString, JsonNode.class );
        JsonNode nId = node.get( "id" );
        JsonNode nValue = node.get( "value" );

        // Entries without an id cannot be matched to an attribute -> skip.
        if ( nId == null || nId.isNull() )
        {
            continue;
        }

        // FIX: a missing "value" property previously threw NullPointerException,
        // and a JSON null became the literal string "null" via asText(). Map both
        // cases to null, consistent with the documented "allows null values".
        String value = ( nValue == null || nValue.isNull() ) ? null : nValue.asText();

        parsed.put( nId.asInt(), value );
    }

    return parsed;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Code source of this file:
// http://grepcode.com/file/repo1.maven.org/maven2/
// org.apache.maven/maven-artifact/3.1.1/
// org/apache/maven/artifact/versioning/ComparableVersion.java/
//
// Modifications made on top of the source:
// 1. Changed
// package org.apache.maven.artifact.versioning;
// to
// package org.apache.hadoop.util;
// 2. Removed author tags to clear hadoop author tag warning
//
package org.apache.hadoop.util;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Properties;
import java.util.Stack;
/**
* Generic implementation of version comparison.
*
* <p>Features:
* <ul>
* <li>mixing of '<code>-</code>' (dash) and '<code>.</code>' (dot) separators,</li>
* <li>transition between characters and digits also constitutes a separator:
* <code>1.0alpha1 => [1, 0, alpha, 1]</code></li>
* <li>unlimited number of version components,</li>
* <li>version components in the text can be digits or strings,</li>
* <li>strings are checked for well-known qualifiers and the qualifier ordering is used for version ordering.
* Well-known qualifiers (case insensitive) are:<ul>
* <li><code>alpha</code> or <code>a</code></li>
* <li><code>beta</code> or <code>b</code></li>
* <li><code>milestone</code> or <code>m</code></li>
* <li><code>rc</code> or <code>cr</code></li>
* <li><code>snapshot</code></li>
* <li><code>(the empty string)</code> or <code>ga</code> or <code>final</code></li>
* <li><code>sp</code></li>
* </ul>
* Unknown qualifiers are considered after known qualifiers, with lexical order (always case insensitive),
* </li>
* <li>a dash usually precedes a qualifier, and is always less important than something preceded with a dot.</li>
* </ul></p>
*
* @see <a href="https://cwiki.apache.org/confluence/display/MAVENOLD/Versioning">"Versioning" on Maven Wiki</a>
*/
public class ComparableVersion
implements Comparable<ComparableVersion>
{
// Raw version string exactly as passed to the constructor (returned by toString()).
private String value;
// Canonical rendering of the parsed item tree; basis for equals() and hashCode().
private String canonical;
// Root list of parsed version components.
private ListItem items;
// Common contract for version components: integers, qualifier strings and sub-lists.
private interface Item
{
int INTEGER_ITEM = 0;
int STRING_ITEM = 1;
int LIST_ITEM = 2;
int compareTo( Item item );
int getType();
// True when the item is equivalent to "no component": 0, release qualifier, empty list.
boolean isNull();
}
/**
* Represents a numeric item in the version item list.
*/
private static class IntegerItem
implements Item
{
private static final BigInteger BIG_INTEGER_ZERO = new BigInteger( "0" );
private final BigInteger value;
// Shared zero instance used for empty numeric segments (e.g. the gap in "1..2").
public static final IntegerItem ZERO = new IntegerItem();
private IntegerItem()
{
this.value = BIG_INTEGER_ZERO;
}
public IntegerItem( String str )
{
this.value = new BigInteger( str );
}
public int getType()
{
return INTEGER_ITEM;
}
public boolean isNull()
{
return BIG_INTEGER_ZERO.equals( value );
}
public int compareTo( Item item )
{
// A null peer means the other version ran out of components.
if ( item == null )
{
return BIG_INTEGER_ZERO.equals( value ) ? 0 : 1; // 1.0 == 1, 1.1 > 1
}
switch ( item.getType() )
{
case INTEGER_ITEM:
return value.compareTo( ( (IntegerItem) item ).value );
case STRING_ITEM:
return 1; // 1.1 > 1-sp
case LIST_ITEM:
return 1; // 1.1 > 1-1
default:
throw new RuntimeException( "invalid item: " + item.getClass() );
}
}
public String toString()
{
return value.toString();
}
}
/**
* Represents a string in the version item list, usually a qualifier.
*/
private static class StringItem
implements Item
{
// Known qualifiers in ascending precedence; "" is the release itself (see class Javadoc).
private static final String[] QUALIFIERS = { "alpha", "beta", "milestone", "rc", "snapshot", "", "sp" };
private static final List<String> _QUALIFIERS = Arrays.asList( QUALIFIERS );
// Aliases normalised to their canonical qualifier in the constructor.
private static final Properties ALIASES = new Properties();
static
{
ALIASES.put( "ga", "" );
ALIASES.put( "final", "" );
ALIASES.put( "cr", "rc" );
}
/**
* A comparable value for the empty-string qualifier. This one is used to determine if a given qualifier makes
* the version older than one without a qualifier, or more recent.
*/
private static final String RELEASE_VERSION_INDEX = String.valueOf( _QUALIFIERS.indexOf( "" ) );
private String value;
public StringItem( String value, boolean followedByDigit )
{
if ( followedByDigit && value.length() == 1 )
{
// a1 = alpha-1, b1 = beta-1, m1 = milestone-1
switch ( value.charAt( 0 ) )
{
case 'a':
value = "alpha";
break;
case 'b':
value = "beta";
break;
case 'm':
value = "milestone";
break;
default:
break;
}
}
this.value = ALIASES.getProperty( value , value );
}
public int getType()
{
return STRING_ITEM;
}
public boolean isNull()
{
// The release qualifier ("", "ga", "final") is equivalent to no qualifier at all.
return ( comparableQualifier( value ).compareTo( RELEASE_VERSION_INDEX ) == 0 );
}
/**
* Returns a comparable value for a qualifier.
*
* This method takes into account the ordering of known qualifiers then unknown qualifiers with lexical ordering.
*
* just returning an Integer with the index here is faster, but requires a lot of if/then/else to check for -1
* or QUALIFIERS.size and then resort to lexical ordering. Most comparisons are decided by the first character,
* so this is still fast. If more characters are needed then it requires a lexical sort anyway.
*
* @param qualifier
* @return an equivalent value that can be used with lexical comparison
*/
public static String comparableQualifier( String qualifier )
{
int i = _QUALIFIERS.indexOf( qualifier );
return i == -1 ? ( _QUALIFIERS.size() + "-" + qualifier ) : String.valueOf( i );
}
public int compareTo( Item item )
{
if ( item == null )
{
// 1-rc < 1, 1-ga > 1
return comparableQualifier( value ).compareTo( RELEASE_VERSION_INDEX );
}
switch ( item.getType() )
{
case INTEGER_ITEM:
return -1; // 1.any < 1.1 ?
case STRING_ITEM:
return comparableQualifier( value ).compareTo( comparableQualifier( ( (StringItem) item ).value ) );
case LIST_ITEM:
return -1; // 1.any < 1-1
default:
throw new RuntimeException( "invalid item: " + item.getClass() );
}
}
public String toString()
{
return value;
}
}
/**
* Represents a version list item. This class is used both for the global item list and for sub-lists (which start
* with '-(number)' in the version specification).
*/
private static class ListItem
extends ArrayList<Item>
implements Item
{
public int getType()
{
return LIST_ITEM;
}
public boolean isNull()
{
return ( size() == 0 );
}
// Strips trailing "null" items so that, e.g., 1.0.0 and 1 produce the same canonical form.
void normalize()
{
for ( ListIterator<Item> iterator = listIterator( size() ); iterator.hasPrevious(); )
{
Item item = iterator.previous();
if ( item.isNull() )
{
iterator.remove(); // remove null trailing items: 0, "", empty list
}
else
{
break;
}
}
}
public int compareTo( Item item )
{
if ( item == null )
{
if ( size() == 0 )
{
return 0; // 1-0 = 1- (normalize) = 1
}
// Only the first element decides against a missing peer.
Item first = get( 0 );
return first.compareTo( null );
}
switch ( item.getType() )
{
case INTEGER_ITEM:
return -1; // 1-1 < 1.0.x
case STRING_ITEM:
return 1; // 1-1 > 1-sp
case LIST_ITEM:
// Element-wise comparison; a missing element compares against null.
Iterator<Item> left = iterator();
Iterator<Item> right = ( (ListItem) item ).iterator();
while ( left.hasNext() || right.hasNext() )
{
Item l = left.hasNext() ? left.next() : null;
Item r = right.hasNext() ? right.next() : null;
// if this is shorter, then invert the compare and mul with -1
int result = l == null ? -1 * r.compareTo( l ) : l.compareTo( r );
if ( result != 0 )
{
return result;
}
}
return 0;
default:
throw new RuntimeException( "invalid item: " + item.getClass() );
}
}
public String toString()
{
StringBuilder buffer = new StringBuilder( "(" );
for ( Iterator<Item> iter = iterator(); iter.hasNext(); )
{
buffer.append( iter.next() );
if ( iter.hasNext() )
{
buffer.append( ',' );
}
}
buffer.append( ')' );
return buffer.toString();
}
}
public ComparableVersion( String version )
{
parseVersion( version );
}
// Tokenises the version string into the item tree. Separators are '.', '-' and
// letter/digit transitions; '-' before a digit opens a nested sub-list.
public final void parseVersion( String version )
{
this.value = version;
items = new ListItem();
version = StringUtils.toLowerCase(version);
ListItem list = items;
// Stack of open (sub-)lists; all of them are normalized at the end.
Stack<Item> stack = new Stack<Item>();
stack.push( list );
boolean isDigit = false;
// Start of the token currently being scanned.
int startIndex = 0;
for ( int i = 0; i < version.length(); i++ )
{
char c = version.charAt( i );
if ( c == '.' )
{
if ( i == startIndex )
{
// Empty segment (e.g. "1..2") counts as zero.
list.add( IntegerItem.ZERO );
}
else
{
list.add( parseItem( isDigit, version.substring( startIndex, i ) ) );
}
startIndex = i + 1;
}
else if ( c == '-' )
{
if ( i == startIndex )
{
list.add( IntegerItem.ZERO );
}
else
{
list.add( parseItem( isDigit, version.substring( startIndex, i ) ) );
}
startIndex = i + 1;
if ( isDigit )
{
list.normalize(); // 1.0-* = 1-*
if ( ( i + 1 < version.length() ) && Character.isDigit( version.charAt( i + 1 ) ) )
{
// new ListItem only if previous were digits and new char is a digit,
// ie need to differentiate only 1.1 from 1-1
list.add( list = new ListItem() );
stack.push( list );
}
}
}
else if ( Character.isDigit( c ) )
{
if ( !isDigit && i > startIndex )
{
// Letter->digit transition is an implicit separator: "alpha1" -> [alpha, 1].
list.add( new StringItem( version.substring( startIndex, i ), true ) );
startIndex = i;
}
isDigit = true;
}
else
{
if ( isDigit && i > startIndex )
{
// Digit->letter transition is an implicit separator: "1a" -> [1, a].
list.add( parseItem( true, version.substring( startIndex, i ) ) );
startIndex = i;
}
isDigit = false;
}
}
// Flush the trailing token, if any.
if ( version.length() > startIndex )
{
list.add( parseItem( isDigit, version.substring( startIndex ) ) );
}
while ( !stack.isEmpty() )
{
list = (ListItem) stack.pop();
list.normalize();
}
canonical = items.toString();
}
private static Item parseItem( boolean isDigit, String buf )
{
return isDigit ? new IntegerItem( buf ) : new StringItem( buf, false );
}
public int compareTo( ComparableVersion o )
{
return items.compareTo( o.items );
}
public String toString()
{
return value;
}
// Equality is based on the canonical (normalized) form, not the raw string.
public boolean equals( Object o )
{
return ( o instanceof ComparableVersion ) && canonical.equals( ( (ComparableVersion) o ).canonical );
}
public int hashCode()
{
return canonical.hashCode();
}
}
| |
/*
* Copyright 2010 Daniel Weisser
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.danielweisser.android.ldapsync.platform;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map.Entry;
import android.content.ContentProviderClient;
import android.content.ContentProviderOperation;
import android.content.ContentProviderResult;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.OperationApplicationException;
import android.content.SyncResult;
import android.database.Cursor;
import android.database.sqlite.SQLiteException;
import android.net.Uri;
import android.os.RemoteException;
import android.provider.ContactsContract;
import android.provider.ContactsContract.Data;
import android.provider.ContactsContract.Groups;
import android.provider.ContactsContract.RawContacts;
import android.provider.ContactsContract.Settings;
import android.provider.ContactsContract.CommonDataKinds.Email;
import android.provider.ContactsContract.CommonDataKinds.Phone;
import android.provider.ContactsContract.CommonDataKinds.Photo;
import android.provider.ContactsContract.CommonDataKinds.StructuredName;
import android.provider.ContactsContract.CommonDataKinds.StructuredPostal;
import android.util.Log;
import de.danielweisser.android.ldapsync.Constants;
import de.danielweisser.android.ldapsync.client.Address;
import de.danielweisser.android.ldapsync.client.Contact;
import de.danielweisser.android.ldapsync.syncadapter.Logger;
/**
* Class for managing contacts sync related operations
*
* @author <a href="mailto:daniel.weisser@gmx.de">Daniel Weisser</a>
*/
public class ContactManager {

    private static final String TAG = "ContactManager";

    /** Sync logger used to mirror sync events to the app's own log. */
    private Logger l;

    public ContactManager(Logger l) {
        this.l = l;
    }

    /**
     * Synchronize raw contacts
     *
     * @param context
     *            The context of Authenticator Activity
     * @param accountName
     *            The account name
     * @param contacts
     *            The list of retrieved LDAP contacts
     * @param syncResult
     *            SyncResults for tracking the sync
     */
    public synchronized void syncContacts(Context context, String accountName, List<Contact> contacts, SyncResult syncResult) {
        final ContentResolver resolver = context.getContentResolver();
        // Map of LDAP DN -> raw contact id for everything currently on the phone
        // for this account.
        HashMap<String, Long> contactsOnPhone = getAllContactsOnPhone(resolver, accountName);
        // Update contacts that already exist on the phone; add the rest.
        for (final Contact contact : contacts) {
            if (contactsOnPhone.containsKey(contact.getDn())) {
                Long contactId = contactsOnPhone.get(contact.getDn());
                Log.d(TAG, "Update contact: " + contact.getDn());
                l.d("Update contact: " + contact.getDn() + " " + contact.getFirstName() + " " + contact.getLastName() + " (" + contactId + ")");
                updateContact(resolver, contactId, contact);
                syncResult.stats.numUpdates++;
                // Remove handled entries so only stale contacts remain below.
                contactsOnPhone.remove(contact.getDn());
            } else {
                Log.d(TAG, "Add contact: " + contact.getFirstName() + " " + contact.getLastName());
                l.d("Add contact: " + contact.getFirstName() + " " + contact.getLastName());
                addContact(resolver, accountName, contact);
                syncResult.stats.numInserts++;
            }
        }
        // Delete contacts that are on the phone but were not returned by LDAP.
        for (Entry<String, Long> contact : contactsOnPhone.entrySet()) {
            Log.d(TAG, "Delete contact: " + contact.getKey());
            deleteContact(resolver, contact.getValue());
            l.d("Delete contact: " + contact.getKey() + "(" + contact.getValue() + ")");
            syncResult.stats.numDeletes++;
        }
    }

    /**
     * Updates a single raw contact: reads the data rows currently stored for it,
     * diffs them against the new LDAP data via prepareFields(), and applies any
     * resulting operations in one batch.
     */
    private void updateContact(ContentResolver resolver, long rawContactId, Contact contact) {
        ArrayList<ContentProviderOperation> ops = new ArrayList<ContentProviderOperation>();
        Contact existingContact = new Contact();
        final String selection = Data.RAW_CONTACT_ID + "=?";
        final String[] projection = new String[] { Data.MIMETYPE, Data.DATA1, Data.DATA2, Data.DATA3, Data.DATA4, Data.DATA7, Data.DATA8, Data.DATA9,
                Data.DATA10, Data.DATA15 };
        try {
            final Cursor c = resolver.query(Data.CONTENT_URI, projection, selection, new String[] { rawContactId + "" }, null);
            if (c != null) {
                // FIX: the cursor was never closed before, leaking it on every update.
                try {
                    while (c.moveToNext()) {
                        // Each data row is dispatched by MIME type into the
                        // corresponding field of existingContact.
                        String mimetype = c.getString(c.getColumnIndex(Data.MIMETYPE));
                        if (mimetype.equals(StructuredName.CONTENT_ITEM_TYPE)) {
                            existingContact.setFirstName(c.getString(c.getColumnIndex(Data.DATA2)));
                            existingContact.setLastName(c.getString(c.getColumnIndex(Data.DATA3)));
                        } else if (mimetype.equals(Email.CONTENT_ITEM_TYPE)) {
                            int type = c.getInt(c.getColumnIndex(Data.DATA2));
                            if (type == Email.TYPE_WORK) {
                                String[] mails = new String[] { c.getString(c.getColumnIndex(Data.DATA1)) };
                                existingContact.setEmails(mails);
                            }
                        } else if (mimetype.equals(Phone.CONTENT_ITEM_TYPE)) {
                            int type = c.getInt(c.getColumnIndex(Data.DATA2));
                            if (type == Phone.TYPE_WORK_MOBILE) {
                                existingContact.setCellWorkPhone(c.getString(c.getColumnIndex(Data.DATA1)));
                            } else if (type == Phone.TYPE_WORK) {
                                existingContact.setWorkPhone(c.getString(c.getColumnIndex(Data.DATA1)));
                            } else if (type == Phone.TYPE_HOME) {
                                existingContact.setHomePhone(c.getString(c.getColumnIndex(Data.DATA1)));
                            }
                        } else if (mimetype.equals(Photo.CONTENT_ITEM_TYPE)) {
                            existingContact.setImage(c.getBlob(c.getColumnIndex(Photo.PHOTO)));
                        } else if (mimetype.equals(StructuredPostal.CONTENT_ITEM_TYPE)) {
                            int type = c.getInt(c.getColumnIndex(Data.DATA2));
                            Address address = new Address();
                            address.setStreet(c.getString(c.getColumnIndex(Data.DATA4)));
                            address.setCity(c.getString(c.getColumnIndex(Data.DATA7)));
                            address.setCountry(c.getString(c.getColumnIndex(Data.DATA10)));
                            address.setZip(c.getString(c.getColumnIndex(Data.DATA9)));
                            address.setState(c.getString(c.getColumnIndex(Data.DATA8)));
                            if (type == StructuredPostal.TYPE_WORK) {
                                existingContact.setAddress(address);
                            }
                        }
                    }
                } finally {
                    c.close();
                }
            }
            prepareFields(rawContactId, contact, existingContact, ops, false);
            if (ops.size() > 0) {
                resolver.applyBatch(ContactsContract.AUTHORITY, ops);
            }
        } catch (RemoteException e) {
            Log.e(TAG, e.getMessage(), e);
        } catch (OperationApplicationException e) {
            Log.e(TAG, e.getMessage(), e);
        } catch (SQLiteException e) {
            Log.e(TAG, e.getMessage(), e);
        } catch (IllegalStateException e) {
            Log.e(TAG, e.getMessage(), e);
        }
    }

    /** Deletes a raw contact; provider errors are logged and swallowed. */
    private void deleteContact(ContentResolver resolver, Long rawContactId) {
        try {
            // NOTE(review): the id passed in is a raw-contact _ID, but the selection
            // filters on RawContacts.CONTACT_ID — verify RawContacts._ID was not intended.
            resolver.delete(RawContacts.CONTENT_URI, RawContacts.CONTACT_ID + "=?", new String[] { "" + rawContactId });
        } catch (SQLiteException e) {
            Log.e(TAG, e.getMessage(), e);
        } catch (IllegalStateException e) {
            Log.e(TAG, e.getMessage(), e);
        }
    }

    /**
     * Retrieves all contacts that are on the phone for this account.
     *
     * @return map of SOURCE_ID (the LDAP DN) to raw contact row id
     */
    private static HashMap<String, Long> getAllContactsOnPhone(ContentResolver resolver, String accountName) {
        final String[] projection = new String[] { RawContacts._ID, RawContacts.SYNC1, RawContacts.SOURCE_ID };
        final String selection = RawContacts.ACCOUNT_NAME + "=?";
        final Cursor c = resolver.query(RawContacts.CONTENT_URI, projection, selection, new String[] { accountName }, null);
        HashMap<String, Long> contactsOnPhone = new HashMap<String, Long>();
        if (c != null) {
            while (c.moveToNext()) {
                // NOTE(review): reads Data._ID from a RawContacts cursor; both
                // constants resolve to the "_id" column, but RawContacts._ID would
                // be the clearer choice here.
                contactsOnPhone.put(c.getString(c.getColumnIndex(RawContacts.SOURCE_ID)), c.getLong(c.getColumnIndex(Data._ID)));
            }
            c.close();
        }
        return contactsOnPhone;
    }

    /** Marks a URI so the provider treats the operation as coming from a sync adapter. */
    private Uri addCallerIsSyncAdapterFlag(Uri uri) {
        Uri.Builder b = uri.buildUpon();
        b.appendQueryParameter(ContactsContract.CALLER_IS_SYNCADAPTER, "true");
        return b.build();
    }

    /**
     * Add a new contact to the RawContacts table.
     *
     * @param resolver
     * @param accountName
     * @param contact
     */
    private void addContact(ContentResolver resolver, String accountName, Contact contact) {
        ArrayList<ContentProviderOperation> ops = new ArrayList<ContentProviderOperation>();
        Uri uri = addCallerIsSyncAdapterFlag(RawContacts.CONTENT_URI);
        ContentValues cv = new ContentValues();
        cv.put(RawContacts.ACCOUNT_TYPE, Constants.ACCOUNT_TYPE);
        cv.put(RawContacts.ACCOUNT_NAME, accountName);
        cv.put(RawContacts.SOURCE_ID, contact.getDn());
        // This is the first insert into the raw contacts table
        ContentProviderOperation i1 = ContentProviderOperation.newInsert(uri).withValues(cv).build();
        ops.add(i1);
        prepareFields(-1, contact, new Contact(), ops, true);
        // Now create the contact with a single batch operation
        try {
            ContentProviderResult[] res = resolver.applyBatch(ContactsContract.AUTHORITY, ops);
            // The first insert is the one generating the ID for this contact
            long id = ContentUris.parseId(res[0].uri);
            l.d("The new contact has id: " + id);
        } catch (Exception e) {
            Log.e(TAG, "Cannot create contact ", e);
        }
    }

    /**
     * Builds the list of provider operations that reconcile the new LDAP data
     * with what is already stored; one ContactMerger call per data kind.
     */
    private void prepareFields(long rawContactId, Contact newC, Contact existingC, ArrayList<ContentProviderOperation> ops, boolean isNew) {
        ContactMerger contactMerger = new ContactMerger(rawContactId, newC, existingC, ops, l);
        contactMerger.updateName();
        contactMerger.updateMail(Email.TYPE_WORK);
        contactMerger.updatePhone(Phone.TYPE_WORK_MOBILE);
        contactMerger.updatePhone(Phone.TYPE_WORK);
        contactMerger.updatePhone(Phone.TYPE_HOME);
        contactMerger.updateAddress(StructuredPostal.TYPE_WORK);
        contactMerger.updatePicture();
    }

    /** Makes contacts of this account visible even when they belong to no group. */
    public static void makeGroupVisible(String accountName, ContentResolver resolver) {
        try {
            ContentProviderClient client = resolver.acquireContentProviderClient(ContactsContract.AUTHORITY_URI);
            ContentValues cv = new ContentValues();
            cv.put(Groups.ACCOUNT_NAME, accountName);
            cv.put(Groups.ACCOUNT_TYPE, Constants.ACCOUNT_TYPE);
            cv.put(Settings.UNGROUPED_VISIBLE, true);
            client.insert(Settings.CONTENT_URI.buildUpon().appendQueryParameter(ContactsContract.CALLER_IS_SYNCADAPTER, "true").build(), cv);
        } catch (RemoteException e) {
            Log.d(TAG, "Cannot make the Group Visible");
        }
    }
}
| |
/*
* IntoTheHeavensDesktopView.java
*/
package intotheheavensdesktop;
import intotheheavensdesktop.sound.AudioPlayer;
import java.io.IOException;
import net.darkkilauea.intotheheavens.GameMode.State;
import org.jdesktop.application.Action;
import org.jdesktop.application.ResourceMap;
import org.jdesktop.application.SingleFrameApplication;
import org.jdesktop.application.FrameView;
import org.jdesktop.application.TaskMonitor;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.util.HashMap;
import java.util.Map;
import javax.swing.Timer;
import javax.swing.Icon;
import javax.swing.JDialog;
import java.awt.FileDialog;
import java.io.FilenameFilter;
import javax.swing.JFrame;
import net.darkkilauea.intotheheavens.GameModeManager;
import net.darkkilauea.intotheheavens.IGameModeListener;
import net.darkkilauea.intotheheavens.ITHScript.Location;
import net.darkkilauea.intotheheavens.MainGameMode;
import net.darkkilauea.intotheheavens.WorldState;
/**
* The application's main frame.
*/
public class IntoTheHeavensDesktopView extends FrameView implements IGameModeListener
{
private GameModeManager _manager = new GameModeManager();
private MainGameMode _mainMode = null;
private Timer _audioUpdateTimer = null;
private Map<Integer, AudioPlayer> _audioSources = new HashMap<Integer, AudioPlayer>();
private int _lastAudioId;
private float _masterVolume = 1.0f;
private float _musicVolume = 1.0f;
private float _soundEffectVolume = 1.0f;
// Builds the main window: initialises generated components, wires the status
// bar, starts the audio-update timer and registers the "Main" game mode.
public IntoTheHeavensDesktopView(SingleFrameApplication app)
{
super(app);
initComponents();
setupStatusBar();
// Poll all registered audio sources every 200 ms and service the playing ones.
_audioUpdateTimer = new Timer(200, new ActionListener()
{
public void actionPerformed(ActionEvent e)
{
for (AudioPlayer source : _audioSources.values())
{
try
{
if (source.getState() == AudioPlayer.State.PLAYING)
{
source.update();
}
}
catch (IOException ex)
{
// Update failed: best-effort stop; errors while stopping are deliberately ignored.
try { source.stop(); }
catch (Exception ex2) { }
}
}
}
});
_audioUpdateTimer.setRepeats(true);
_audioUpdateTimer.start();
// Register the main game mode and listen for its state changes (IGameModeListener).
_mainMode = new MainGameMode();
_mainMode.setListener(this);
_manager.registerGameMode("Main", _mainMode);
consoleTextArea.setText(this.getResourceMap().getString("welcomeMessage") + "\n");
// Saving/archiving is only enabled once a game is actually in progress.
saveGameMenuItem.setEnabled(false);
archiveMenuItem.setEnabled(false);
}
// Wires the status bar: a single-shot timer that clears transient messages, a
// repeating timer that animates the busy icon, and a TaskMonitor that maps
// background-task property changes onto the progress bar and labels.
private void setupStatusBar()
{
// status bar initialization - message timeout, idle icon and busy animation, etc
ResourceMap resourceMap = getResourceMap();
int messageTimeout = resourceMap.getInteger("StatusBar.messageTimeout");
// Clears the status message after the configured timeout (fires once per restart()).
messageTimer = new Timer(messageTimeout, new ActionListener() {
public void actionPerformed(ActionEvent e)
{
statusMessageLabel.setText("");
}
});
messageTimer.setRepeats(false);
int busyAnimationRate = resourceMap.getInteger("StatusBar.busyAnimationRate");
for (int i = 0; i < busyIcons.length; i++)
{
busyIcons[i] = resourceMap.getIcon("StatusBar.busyIcons[" + i + "]");
}
// Cycles through the busy icons while a background task is running.
busyIconTimer = new Timer(busyAnimationRate, new ActionListener() {
public void actionPerformed(ActionEvent e)
{
busyIconIndex = (busyIconIndex + 1) % busyIcons.length;
statusAnimationLabel.setIcon(busyIcons[busyIconIndex]);
}
});
idleIcon = resourceMap.getIcon("StatusBar.idleIcon");
statusAnimationLabel.setIcon(idleIcon);
progressBar.setVisible(false);
// connecting action tasks to status bar via TaskMonitor
TaskMonitor taskMonitor = new TaskMonitor(getApplication().getContext());
taskMonitor.addPropertyChangeListener(new java.beans.PropertyChangeListener() {
public void propertyChange(java.beans.PropertyChangeEvent evt)
{
String propertyName = evt.getPropertyName();
if ("started".equals(propertyName))
{
// Task started: begin the busy animation and show an indeterminate bar.
if (!busyIconTimer.isRunning())
{
statusAnimationLabel.setIcon(busyIcons[0]);
busyIconIndex = 0;
busyIconTimer.start();
}
progressBar.setVisible(true);
progressBar.setIndeterminate(true);
}
else if ("done".equals(propertyName))
{
// Task finished: restore the idle icon and hide/reset the bar.
busyIconTimer.stop();
statusAnimationLabel.setIcon(idleIcon);
progressBar.setVisible(false);
progressBar.setValue(0);
}
else if ("message".equals(propertyName))
{
// Show the task's message and schedule its automatic clearing.
String text = (String)(evt.getNewValue());
statusMessageLabel.setText((text == null) ? "" : text);
messageTimer.restart();
}
else if ("progress".equals(propertyName))
{
// Switch to determinate mode and reflect the reported progress value.
int value = (Integer)(evt.getNewValue());
progressBar.setVisible(true);
progressBar.setIndeterminate(false);
progressBar.setValue(value);
}
}
});
}
/** This method is called from within the constructor to
* initialize the form.
* WARNING: Do NOT modify this code. The content of this method is
* always regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
// NOTE(review): generated by the NetBeans form editor (see GEN markers); hand
// edits are overwritten on regeneration -- change the .form file instead.
// --- component construction: every widget is created before configuration ---
mainPanel = new javax.swing.JPanel();
jScrollPane1 = new javax.swing.JScrollPane();
consoleTextArea = new javax.swing.JTextArea();
commandTextField = new javax.swing.JTextField();
menuBar = new javax.swing.JMenuBar();
javax.swing.JMenu fileMenu = new javax.swing.JMenu();
loadStoryMenuItem = new javax.swing.JMenuItem();
jSeparator2 = new javax.swing.JPopupMenu.Separator();
newGameMenuItem = new javax.swing.JMenuItem();
loadGameMenuItem = new javax.swing.JMenuItem();
saveGameMenuItem = new javax.swing.JMenuItem();
jSeparator1 = new javax.swing.JPopupMenu.Separator();
exitMenuItem = new javax.swing.JMenuItem();
toolsMenu = new javax.swing.JMenu();
archiveMenuItem = new javax.swing.JMenuItem();
javax.swing.JMenu helpMenu = new javax.swing.JMenu();
javax.swing.JMenuItem aboutMenuItem = new javax.swing.JMenuItem();
statusPanel = new javax.swing.JPanel();
javax.swing.JSeparator statusPanelSeparator = new javax.swing.JSeparator();
statusMessageLabel = new javax.swing.JLabel();
statusAnimationLabel = new javax.swing.JLabel();
progressBar = new javax.swing.JProgressBar();
mainPanel.setName("mainPanel"); // NOI18N
jScrollPane1.setName("jScrollPane1"); // NOI18N
// --- console output: read-only, word-wrapped text area inside a scroll pane ---
org.jdesktop.application.ResourceMap resourceMap = org.jdesktop.application.Application.getInstance(intotheheavensdesktop.IntoTheHeavensDesktopApp.class).getContext().getResourceMap(IntoTheHeavensDesktopView.class);
consoleTextArea.setBackground(resourceMap.getColor("consoleTextArea.background")); // NOI18N
consoleTextArea.setColumns(20);
consoleTextArea.setEditable(false);
consoleTextArea.setForeground(resourceMap.getColor("consoleTextArea.foreground")); // NOI18N
consoleTextArea.setLineWrap(true);
consoleTextArea.setRows(5);
consoleTextArea.setWrapStyleWord(true);
consoleTextArea.setName("consoleTextArea"); // NOI18N
jScrollPane1.setViewportView(consoleTextArea);
// --- command input: Enter triggers the "submitCommand" application action ---
commandTextField.setText(resourceMap.getString("commandTextField.text")); // NOI18N
javax.swing.ActionMap actionMap = org.jdesktop.application.Application.getInstance(intotheheavensdesktop.IntoTheHeavensDesktopApp.class).getContext().getActionMap(IntoTheHeavensDesktopView.class, this);
commandTextField.setAction(actionMap.get("submitCommand")); // NOI18N
commandTextField.setName("commandTextField"); // NOI18N
// --- main panel layout: console above, command field below ---
javax.swing.GroupLayout mainPanelLayout = new javax.swing.GroupLayout(mainPanel);
mainPanel.setLayout(mainPanelLayout);
mainPanelLayout.setHorizontalGroup(
mainPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(commandTextField, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, 690, Short.MAX_VALUE)
.addComponent(jScrollPane1, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, 690, Short.MAX_VALUE)
);
mainPanelLayout.setVerticalGroup(
mainPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, mainPanelLayout.createSequentialGroup()
.addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 308, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(commandTextField, javax.swing.GroupLayout.PREFERRED_SIZE, 20, javax.swing.GroupLayout.PREFERRED_SIZE))
);
// --- menu bar: File, Tools and Help menus wired to application actions ---
menuBar.setName("menuBar"); // NOI18N
fileMenu.setText(resourceMap.getString("fileMenu.text")); // NOI18N
fileMenu.setName("fileMenu"); // NOI18N
loadStoryMenuItem.setAction(actionMap.get("loadStoryAction")); // NOI18N
loadStoryMenuItem.setText(resourceMap.getString("loadStoryMenuItem.text")); // NOI18N
loadStoryMenuItem.setName("loadStoryMenuItem"); // NOI18N
fileMenu.add(loadStoryMenuItem);
jSeparator2.setName("jSeparator2"); // NOI18N
fileMenu.add(jSeparator2);
newGameMenuItem.setAction(actionMap.get("newGameAction")); // NOI18N
newGameMenuItem.setName("newGameMenuItem"); // NOI18N
fileMenu.add(newGameMenuItem);
loadGameMenuItem.setAction(actionMap.get("loadGameAction")); // NOI18N
loadGameMenuItem.setText(resourceMap.getString("loadGameMenuItem.text")); // NOI18N
loadGameMenuItem.setName("loadGameMenuItem"); // NOI18N
fileMenu.add(loadGameMenuItem);
saveGameMenuItem.setAction(actionMap.get("saveGameAction")); // NOI18N
saveGameMenuItem.setText(resourceMap.getString("saveGameMenuItem.text")); // NOI18N
saveGameMenuItem.setName("saveGameMenuItem"); // NOI18N
fileMenu.add(saveGameMenuItem);
fileMenu.add(jSeparator1);
exitMenuItem.setAction(actionMap.get("quit")); // NOI18N
exitMenuItem.setName("exitMenuItem"); // NOI18N
fileMenu.add(exitMenuItem);
menuBar.add(fileMenu);
toolsMenu.setText(resourceMap.getString("toolsMenu.text")); // NOI18N
toolsMenu.setName("toolsMenu"); // NOI18N
archiveMenuItem.setAction(actionMap.get("archiveScripts")); // NOI18N
archiveMenuItem.setText(resourceMap.getString("archiveMenuItem.text")); // NOI18N
archiveMenuItem.setName("archiveMenuItem"); // NOI18N
toolsMenu.add(archiveMenuItem);
menuBar.add(toolsMenu);
helpMenu.setText(resourceMap.getString("helpMenu.text")); // NOI18N
helpMenu.setName("helpMenu"); // NOI18N
aboutMenuItem.setAction(actionMap.get("showAboutBox")); // NOI18N
aboutMenuItem.setName("aboutMenuItem"); // NOI18N
helpMenu.add(aboutMenuItem);
menuBar.add(helpMenu);
// --- status panel: message label, progress bar and busy-animation label ---
statusPanel.setName("statusPanel"); // NOI18N
statusPanelSeparator.setName("statusPanelSeparator"); // NOI18N
statusMessageLabel.setName("statusMessageLabel"); // NOI18N
statusAnimationLabel.setHorizontalAlignment(javax.swing.SwingConstants.LEFT);
statusAnimationLabel.setName("statusAnimationLabel"); // NOI18N
progressBar.setName("progressBar"); // NOI18N
javax.swing.GroupLayout statusPanelLayout = new javax.swing.GroupLayout(statusPanel);
statusPanel.setLayout(statusPanelLayout);
statusPanelLayout.setHorizontalGroup(
statusPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(statusPanelSeparator, javax.swing.GroupLayout.DEFAULT_SIZE, 690, Short.MAX_VALUE)
.addGroup(statusPanelLayout.createSequentialGroup()
.addContainerGap()
.addComponent(statusMessageLabel)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 520, Short.MAX_VALUE)
.addComponent(progressBar, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(statusAnimationLabel)
.addContainerGap())
);
statusPanelLayout.setVerticalGroup(
statusPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(statusPanelLayout.createSequentialGroup()
.addComponent(statusPanelSeparator, javax.swing.GroupLayout.PREFERRED_SIZE, 2, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addGroup(statusPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(statusMessageLabel)
.addComponent(statusAnimationLabel)
.addComponent(progressBar, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGap(3, 3, 3))
);
// --- install the built panels into the FrameView ---
setComponent(mainPanel);
setMenuBar(menuBar);
setStatusBar(statusPanel);
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JMenuItem archiveMenuItem;
private javax.swing.JTextField commandTextField;
private javax.swing.JTextArea consoleTextArea;
private javax.swing.JMenuItem exitMenuItem;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JPopupMenu.Separator jSeparator1;
private javax.swing.JPopupMenu.Separator jSeparator2;
private javax.swing.JMenuItem loadGameMenuItem;
private javax.swing.JMenuItem loadStoryMenuItem;
private javax.swing.JPanel mainPanel;
private javax.swing.JMenuBar menuBar;
private javax.swing.JMenuItem newGameMenuItem;
private javax.swing.JProgressBar progressBar;
private javax.swing.JMenuItem saveGameMenuItem;
private javax.swing.JLabel statusAnimationLabel;
private javax.swing.JLabel statusMessageLabel;
private javax.swing.JPanel statusPanel;
private javax.swing.JMenu toolsMenu;
// End of variables declaration//GEN-END:variables
// Clears the status message label after a delay (single-shot, see setup code).
private Timer messageTimer;
// Drives the busy-icon animation while a background task is running.
private Timer busyIconTimer;
// Icon shown in the status bar when no task is running.
private Icon idleIcon;
// Animation frames for the busy indicator, loaded from the resource map.
private Icon[] busyIcons = new Icon[15];
// Index of the busy-icon frame currently displayed.
private int busyIconIndex = 0;
// Lazily created About dialog; see showAboutBox().
private JDialog aboutBox;
/**
 * Game-state change callback. Intentionally a no-op: this view does not react
 * to state transitions (output and audio callbacks below do the work).
 */
public void onStateChange(State state)
{
}
/**
 * Appends one line of game output to the console area.
 *
 * @param output text to show; a trailing newline is added automatically
 */
public void onTextOutput(String output)
{
    // JTextArea.append() only inserts at the end of the document; the previous
    // setText(getText() + ...) re-copied the entire console contents on every
    // line (O(n^2) over a session) and reset the caret/selection each time.
    consoleTextArea.append(output + "\n");
}
/**
 * Clears the console output area (used when a new game starts or a save is loaded).
 */
public void onClearOutput()
{
consoleTextArea.setText("");
}
/**
 * Auto-saves the world state to {@code autosave.sav} whenever the player moves
 * to a new location. Failures are reported to the console instead of thrown.
 */
public void onLocationChange()
{
    IntoTheHeavensDesktopApp app = IntoTheHeavensDesktopApp.getApplication();
    String saveFile = new File(app.getSaveGameDirectory()).getAbsolutePath() + File.separator + "autosave.sav";
    // try-with-resources: the original leaked the stream when saveState() threw,
    // because close() was only reached on the success path.
    try (FileOutputStream stream = new FileOutputStream(saveFile))
    {
        MainGameMode mode = (MainGameMode)_manager.getMode("Main");
        mode.getWorldState().saveState(stream);
    }
    catch (Exception ex)
    {
        onTextOutput("Failed to auto save game! \nException caught: " + ex.toString());
    }
}
/**
 * Starts playback of the named audio file, looking first in the sound-effect
 * directory and then in the music directory, and registers the player under a
 * freshly allocated id.
 *
 * @param filename file name to resolve against the sound/music directories
 * @return the new audio id (handles start at 1), or 0 if the file was not
 *         found or playback failed to start
 */
public int onStartAudio(String filename)
{
try
{
IntoTheHeavensDesktopApp app = IntoTheHeavensDesktopApp.getApplication();
// assumes getSoundDirectory()/getMusicDirectory() return paths ending in a
// separator -- TODO confirm; otherwise the concatenation yields a wrong path
File soundFile = new File(app.getSoundDirectory() + filename);
File musicFile = new File(app.getMusicDirectory() + filename);
AudioPlayer source = null;
if (soundFile.exists())
{
source = new AudioPlayer(soundFile);
source.setVolume(_masterVolume * _soundEffectVolume);
}
else if (musicFile.exists())
{
source = new AudioPlayer(musicFile);
source.setVolume(_masterVolume * _musicVolume);
}
if (source != null)
{
source.play();
// id 0 is reserved as the failure sentinel, so pre-increment before use
_audioSources.put(++_lastAudioId, source);
}
else return 0;
}
catch (Exception ex)
{
onTextOutput("Failed to start audio! \nException caught: " + ex.toString());
return 0;
}
return _lastAudioId;
}
/**
 * Resumes playback of a previously started audio source. Unknown ids (or any
 * playback failure) are reported to the console rather than propagated.
 */
public void onResumeAudio(int audioId)
{
try
{
// throws NPE if audioId is not registered; caught and reported below
_audioSources.get(audioId).play();
}
catch (Exception ex)
{
onTextOutput("Failed to resume audio! \nException caught: " + ex.toString());
}
}
/**
 * Pauses playback of a previously started audio source.
 *
 * <p>Unknown ids are ignored: the original dereferenced the map result directly
 * and threw a {@code NullPointerException} for invalid ids, unlike
 * {@code onResumeAudio}/{@code onStopAudio} which tolerate them.
 */
public void onPauseAudio(int audioId)
{
    AudioPlayer source = _audioSources.get(audioId);
    if (source != null)
    {
        source.pause();
    }
}
/**
 * Stops the audio source registered under the given id (best effort) and
 * removes it from the registry.
 */
public void onStopAudio(int audioId)
{
    AudioPlayer player = _audioSources.get(audioId);
    try
    {
        player.stop();
    }
    catch (Exception ignored)
    {
        // best-effort stop: a missing id or a failing player is deliberately ignored
    }
    _audioSources.remove(audioId);
}
/**
 * Stops every registered audio source (best effort) and empties the registry.
 * Called before starting or loading a game so stale audio does not keep playing.
 */
private void stopAllPlayingAudio()
{
    for (AudioPlayer player : _audioSources.values())
    {
        try
        {
            player.stop();
        }
        catch (Exception ignored)
        {
            // best-effort stop: keep going even if one player fails
        }
    }
    _audioSources.clear();
}
/**
 * Shows the About dialog, creating it lazily on first use and centering it
 * over the main frame.
 */
@Action
public void showAboutBox()
{
if (aboutBox == null)
{
JFrame mainFrame = IntoTheHeavensDesktopApp.getApplication().getMainFrame();
aboutBox = new IntoTheHeavensDesktopAboutBox(mainFrame);
aboutBox.setLocationRelativeTo(mainFrame);
}
IntoTheHeavensDesktopApp.getApplication().show(aboutBox);
}
/**
 * Starts a new game: stops all audio, rebuilds the world from the location
 * directory, moves the player to the "Start" location, clears the console and
 * activates the "Main" game mode. Failures are reported to the console.
 */
@Action
public void newGameAction()
{
try
{
IntoTheHeavensDesktopApp app = IntoTheHeavensDesktopApp.getApplication();
stopAllPlayingAudio();
WorldState world = new WorldState();
File locDir = new File(app.getLocationDirectory());
if (!locDir.exists()) throw new Exception("Could not find location directory!");
world.loadLocations(locDir);
Location startLocation = world.findLocation("Start");
if(startLocation != null) world.setCurrentLocation(startLocation);
else throw new Exception("Could not find initial location!");
onClearOutput();
MainGameMode mode = (MainGameMode)_manager.getMode("Main");
mode.loadFromWorldState(world);
_manager.setActiveMode("Main");
// saving/archiving only makes sense once a game is actually running
saveGameMenuItem.setEnabled(true);
archiveMenuItem.setEnabled(true);
}
catch (Exception ex)
{
onTextOutput("Failed to start new game! \nException caught: " + ex.toString());
}
}
/**
 * Prompts for a {@code .sav} file and loads it: stops all audio, rebuilds the
 * world from the location directory, restores the saved state, clears the
 * console and activates the "Main" game mode. Failures are reported to the
 * console.
 */
@Action
public void loadGameAction()
{
    FileDialog dialog = getSaveFileChooser();
    dialog.setMode(FileDialog.LOAD);
    dialog.setTitle("Load Game...");
    dialog.setVisible(true);
    if(dialog.getFile() != null && !dialog.getFile().isEmpty())
    {
        File loadFile = new File(dialog.getDirectory() + dialog.getFile());
        if(!loadFile.getName().endsWith(".sav"))
        {
            loadFile = new File(loadFile.getPath() + ".sav");
        }
        try
        {
            IntoTheHeavensDesktopApp app = IntoTheHeavensDesktopApp.getApplication();
            stopAllPlayingAudio();
            WorldState world = new WorldState();
            File locDir = new File(app.getLocationDirectory());
            if (!locDir.exists()) throw new Exception("Could not find location directory!");
            world.loadLocations(locDir);
            // try-with-resources: the original leaked the stream whenever
            // loadState() returned false, because the throw skipped close().
            try (FileInputStream stream = new FileInputStream(loadFile))
            {
                if(!world.loadState(stream))
                {
                    throw new Exception("Could not parse save file (Not a proper save or corrupted)");
                }
            }
            onClearOutput();
            MainGameMode mode = (MainGameMode)_manager.getMode("Main");
            mode.loadFromWorldState(world);
            _manager.setActiveMode("Main");
            saveGameMenuItem.setEnabled(true);
            archiveMenuItem.setEnabled(true);
        }
        catch (Exception ex)
        {
            onTextOutput("Failed to load game! \nException caught: " + ex.toString());
        }
    }
}
/**
 * Prompts for a {@code .sav} file and writes the current world state to it.
 * The extension is appended automatically when missing; failures are reported
 * to the console.
 */
@Action
public void saveGameAction()
{
    FileDialog dialog = getSaveFileChooser();
    dialog.setVisible(true);
    if(dialog.getFile() != null && !dialog.getFile().isEmpty())
    {
        File saveFile = new File(dialog.getDirectory() + dialog.getFile());
        if(!saveFile.getName().endsWith(".sav"))
        {
            saveFile = new File(saveFile.getPath() + ".sav");
        }
        // try-with-resources: the original leaked the stream when saveState()
        // threw, because close() was only reached on the success path.
        try (FileOutputStream stream = new FileOutputStream(saveFile))
        {
            MainGameMode mode = (MainGameMode)_manager.getMode("Main");
            mode.getWorldState().saveState(stream);
        }
        catch (Exception ex)
        {
            onTextOutput("Failed to save game! \nException caught: " + ex.toString());
        }
    }
}
/**
 * Fired when the player submits the command field: echoes the command to the
 * console (minus an optional leading '/' or '!'), clears the field and hands
 * the text to the active game mode. With no active mode the field is simply
 * cleared.
 */
@Action
public void submitCommand()
{
    if(_manager.getActiveMode() == null)
    {
        commandTextField.setText(null);
        return;
    }
    String command = commandTextField.getText();
    if(command.startsWith("/") || command.startsWith("!"))
    {
        command = command.substring(1);
    }
    consoleTextArea.setText(consoleTextArea.getText() + "> " + command + "\n");
    commandTextField.setText(null);
    _manager.getActiveMode().injectTextInput(command);
}
/**
 * Builds a native file dialog rooted at the save-game directory that accepts
 * visible directories and {@code .sav} files.
 *
 * @return a SAVE-mode dialog; callers may switch it to LOAD mode
 */
private FileDialog getSaveFileChooser()
{
    IntoTheHeavensDesktopApp app = IntoTheHeavensDesktopApp.getApplication();
    JFrame mainFrame = IntoTheHeavensDesktopApp.getApplication().getMainFrame();
    FileDialog dialog = new FileDialog(mainFrame, "Save Game...", FileDialog.SAVE);
    dialog.setDirectory(new File(app.getSaveGameDirectory()).getAbsolutePath());
    dialog.setFilenameFilter(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name)
        {
            // Fix: the original built the path as dir.getAbsolutePath() + name
            // with no separator, so the resulting File (almost) never existed
            // and the directory branch could not match. File(parent, child)
            // inserts the separator correctly.
            File file = new File(dir, name);
            return (file.isDirectory() && !file.isHidden()) || name.endsWith(".sav");
        }
    });
    return dialog;
}
/**
 * Prompts for an {@code .arc} file and writes an archive of the current
 * world's locations to it. The extension is appended automatically when
 * missing; failures are reported to the console.
 */
@Action
public void archiveScripts()
{
    IntoTheHeavensDesktopApp app = IntoTheHeavensDesktopApp.getApplication();
    JFrame mainFrame = IntoTheHeavensDesktopApp.getApplication().getMainFrame();
    FileDialog dialog = new FileDialog(mainFrame, "Save Archive...", FileDialog.SAVE);
    dialog.setDirectory(new File(app.getLocationDirectory()).getAbsolutePath());
    dialog.setFilenameFilter(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name)
        {
            // Fix: File(parent, child) inserts the path separator; the original
            // concatenation produced a non-existent path so the directory
            // branch never matched (same bug as in getSaveFileChooser).
            File file = new File(dir, name);
            return (file.isDirectory() && !file.isHidden()) || name.endsWith(".arc");
        }
    });
    dialog.setVisible(true);
    if(dialog.getFile() != null && !dialog.getFile().isEmpty())
    {
        File archiveFile = new File(dialog.getDirectory() + dialog.getFile());
        if(!archiveFile.getName().endsWith(".arc"))
        {
            archiveFile = new File(archiveFile.getPath() + ".arc");
        }
        // try-with-resources: the original leaked the stream when
        // archiveLocations() threw before close() was reached.
        try (FileOutputStream stream = new FileOutputStream(archiveFile))
        {
            MainGameMode mode = (MainGameMode)_manager.getMode("Main");
            mode.getWorldState().archiveLocations(stream);
        }
        catch (Exception ex)
        {
            onTextOutput("Failed to archive locations! \nException caught: " + ex.toString());
        }
    }
}
/**
 * Opens the modal story-selection dialog over the main frame.
 */
@Action
public void loadStoryAction()
{
JFrame mainFrame = IntoTheHeavensDesktopApp.getApplication().getMainFrame();
StorySelectionDialog dialog = new StorySelectionDialog(mainFrame, true);
dialog.setVisible(true);
}
}
| |
/*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.nacos.config.server.utils;
import com.alibaba.nacos.common.utils.IoUtils;
import com.alibaba.nacos.common.utils.MD5Utils;
import com.alibaba.nacos.config.server.constant.Constants;
import com.alibaba.nacos.sys.env.EnvUtil;
import org.apache.commons.io.FileUtils;
import com.alibaba.nacos.common.utils.StringUtils;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
/**
 * Disk util.
 *
 * <p>Persists configuration payloads as plain files under the Nacos home
 * directory, partitioned by kind (formal / beta / tag) and, when a tenant is
 * given, by tenant. File layout: {@code <kind-dir>[/<tenant>]/<group>/<dataId>[/<tag>]}.
 *
 * @author jiuRen
 */
public class DiskUtil {

    static final String BASE_DIR = File.separator + "data" + File.separator + "config-data";

    static final String TENANT_BASE_DIR = File.separator + "data" + File.separator + "tenant-config-data";

    static final String BETA_DIR = File.separator + "data" + File.separator + "beta-data";

    static final String TENANT_BETA_DIR = File.separator + "data" + File.separator + "tenant-beta-data";

    static final String TAG_DIR = File.separator + "data" + File.separator + "tag-data";

    // NOTE(review): the value is "tag-beta-data" although the name suggests a
    // tenant-tag directory; kept byte-identical because existing deployments
    // already store files under this path -- confirm before renaming.
    static final String TENANT_TAG_DIR = File.separator + "data" + File.separator + "tag-beta-data";

    /**
     * Persists the last heart-beat timestamp to {@code status/heartBeat.txt}.
     */
    public static void saveHeartBeatToDisk(String heartBeatTime) throws IOException {
        FileUtils.writeStringToFile(heartBeatFile(), heartBeatTime, Constants.ENCODE);
    }

    /**
     * Save configuration information to disk.
     */
    public static void saveToDisk(String dataId, String group, String tenant, String content) throws IOException {
        FileUtils.writeStringToFile(targetFile(dataId, group, tenant), content, Constants.ENCODE);
    }

    /**
     * Save beta information to disk.
     */
    public static void saveBetaToDisk(String dataId, String group, String tenant, String content) throws IOException {
        FileUtils.writeStringToFile(targetBetaFile(dataId, group, tenant), content, Constants.ENCODE);
    }

    /**
     * Save tag information to disk.
     */
    public static void saveTagToDisk(String dataId, String group, String tenant, String tag, String content)
            throws IOException {
        FileUtils.writeStringToFile(targetTagFile(dataId, group, tenant, tag), content, Constants.ENCODE);
    }

    /**
     * Deletes configuration files on disk.
     */
    public static void removeConfigInfo(String dataId, String group, String tenant) {
        FileUtils.deleteQuietly(targetFile(dataId, group, tenant));
    }

    /**
     * Deletes beta configuration files on disk.
     */
    public static void removeConfigInfo4Beta(String dataId, String group, String tenant) {
        FileUtils.deleteQuietly(targetBetaFile(dataId, group, tenant));
    }

    /**
     * Deletes tag configuration files on disk.
     */
    public static void removeConfigInfo4Tag(String dataId, String group, String tenant, String tag) {
        FileUtils.deleteQuietly(targetTagFile(dataId, group, tenant, tag));
    }

    /**
     * Deletes the heart-beat file on disk.
     */
    public static void removeHeartHeat() {
        FileUtils.deleteQuietly(heartBeatFile());
    }

    /**
     * Resolves the kind-specific base directory, descending into the tenant
     * sub-directory when a tenant is given. Shared by all target*File methods.
     */
    private static File tenantAwareBase(String tenant, String plainDir, String tenantDir) {
        if (StringUtils.isBlank(tenant)) {
            return new File(EnvUtil.getNacosHome(), plainDir);
        }
        return new File(new File(EnvUtil.getNacosHome(), tenantDir), tenant);
    }

    /**
     * Returns the path of the server cache file.
     */
    public static File targetFile(String dataId, String group, String tenant) {
        File file = tenantAwareBase(tenant, BASE_DIR, TENANT_BASE_DIR);
        file = new File(file, group);
        return new File(file, dataId);
    }

    /**
     * Returns the path of the beta cache file in server.
     */
    public static File targetBetaFile(String dataId, String group, String tenant) {
        File file = tenantAwareBase(tenant, BETA_DIR, TENANT_BETA_DIR);
        file = new File(file, group);
        return new File(file, dataId);
    }

    /**
     * Returns the path of the tag cache file in server.
     */
    public static File targetTagFile(String dataId, String group, String tenant, String tag) {
        File file = tenantAwareBase(tenant, TAG_DIR, TENANT_TAG_DIR);
        file = new File(file, group);
        file = new File(file, dataId);
        return new File(file, tag);
    }

    /**
     * Reads the cached configuration content, or the empty string when the
     * file does not exist.
     */
    public static String getConfig(String dataId, String group, String tenant) throws IOException {
        File file = targetFile(dataId, group, tenant);
        if (!file.exists()) {
            return StringUtils.EMPTY;
        }
        try (FileInputStream fis = new FileInputStream(file)) {
            return IoUtils.toString(fis, Constants.ENCODE);
        } catch (FileNotFoundException e) {
            // file vanished between the exists() check and the open: treat as absent
            return StringUtils.EMPTY;
        }
    }

    /**
     * Returns the MD5 of the locally cached configuration content.
     */
    public static String getLocalConfigMd5(String dataId, String group, String tenant) throws IOException {
        return MD5Utils.md5Hex(getConfig(dataId, group, tenant), Constants.ENCODE);
    }

    /**
     * Returns the heart-beat file location under the Nacos home directory.
     */
    public static File heartBeatFile() {
        return new File(EnvUtil.getNacosHome(), "status" + File.separator + "heartBeat.txt");
    }

    /**
     * Returns a '/'-joined relative path for the formal config cache.
     * NOTE(review): order is dataId then group, the reverse of the on-disk
     * layout, and '/' is used rather than File.separator -- kept as-is since
     * callers may depend on the exact string.
     */
    public static String relativePath(String dataId, String group) {
        return BASE_DIR + "/" + dataId + "/" + group;
    }

    /**
     * Deletes one cache directory tree quietly and logs the outcome.
     * Extracted from the three clearAll* methods, which were copy-pasted.
     */
    private static void clearDir(String dir, String label) {
        File file = new File(EnvUtil.getNacosHome(), dir);
        if (FileUtils.deleteQuietly(file)) {
            LogUtil.DEFAULT_LOG.info("clear all " + label + " success.");
        } else {
            LogUtil.DEFAULT_LOG.warn("clear all " + label + " failed.");
        }
    }

    /**
     * Clear all config file.
     */
    public static void clearAll() {
        clearDir(BASE_DIR, "config-info");
        clearDir(TENANT_BASE_DIR, "config-info-tenant");
    }

    /**
     * Clear all beta config file.
     */
    public static void clearAllBeta() {
        clearDir(BETA_DIR, "config-info-beta");
        clearDir(TENANT_BETA_DIR, "config-info-beta-tenant");
    }

    /**
     * Clear all tag config file.
     */
    public static void clearAllTag() {
        clearDir(TAG_DIR, "config-info-tag");
        clearDir(TENANT_TAG_DIR, "config-info-tag-tenant");
    }
}
| |
package org.uberfire.io.impl.cluster.helix;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.helix.Criteria;
import org.apache.helix.HelixManager;
import org.apache.helix.InstanceType;
import org.apache.helix.NotificationContext;
import org.apache.helix.messaging.handling.HelixTaskResult;
import org.apache.helix.messaging.handling.MessageHandler;
import org.apache.helix.messaging.handling.MessageHandlerFactory;
import org.apache.helix.model.ExternalView;
import org.apache.helix.model.Message;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.uberfire.commons.cluster.ClusterService;
import org.uberfire.commons.data.Pair;
import org.uberfire.commons.message.AsyncCallback;
import org.uberfire.commons.message.MessageHandlerResolver;
import org.uberfire.commons.message.MessageType;
import org.uberfire.io.impl.cluster.ClusterMessageType;
import static java.util.Arrays.*;
import static java.util.UUID.*;
import static org.apache.helix.HelixManagerFactory.*;
public class ClusterServiceHelix implements ClusterService {
// NOTE(review): declared but not referenced in the visible portion of this class.
private static final AtomicInteger counter = new AtomicInteger( 0 );
private static final Logger logger = LoggerFactory.getLogger( ClusterServiceHelix.class );
private final String clusterName;
private final String instanceName;
private final HelixManager participantManager;
// Helix resource whose partition 0 is enabled/disabled to model the cluster lock.
private final String resourceName;
// Resolvers indexed by service id; consulted when a cluster message arrives.
private final Map<String, MessageHandlerResolver> messageHandlerResolver = new ConcurrentHashMap<String, MessageHandlerResolver>();
// Fair lock backing lock()/unlock(); fairness avoids starving waiting threads.
private final ReentrantLock lock = new ReentrantLock( true );
/**
 * Creates the service, registers the initial message-handler resolver and
 * immediately connects this node to the Helix cluster via start().
 *
 * @param clusterName            Helix cluster to join
 * @param zkAddress              ZooKeeper connection string
 * @param instanceName           this node's instance name in the cluster
 * @param resourceName           resource whose partition models the lock
 * @param messageHandlerResolver initial resolver (may be null; see addMessageHandlerResolver)
 */
public ClusterServiceHelix( final String clusterName,
final String zkAddress,
final String instanceName,
final String resourceName,
final MessageHandlerResolver messageHandlerResolver ) {
this.clusterName = clusterName;
this.instanceName = instanceName;
this.resourceName = resourceName;
addMessageHandlerResolver( messageHandlerResolver );
this.participantManager = getZKHelixManager( clusterName, instanceName, InstanceType.PARTICIPANT, zkAddress );
start();
}
//TODO {porcelli} quick hack for now, the real solution would have a cluster per repo
/**
 * Registers a resolver under its service id so incoming cluster messages for
 * that service can be dispatched. Null resolvers are silently ignored.
 */
@Override
public void addMessageHandlerResolver( final MessageHandlerResolver resolver ) {
    if ( resolver == null ) {
        return;
    }
    messageHandlerResolver.put( resolver.getServiceId(), resolver );
}
/**
 * Registers the user-message handler factory and the LeaderStandby state model,
 * connects to the cluster, then forces this node's partition OFFLINE so it
 * starts unlocked. Any failure aborts construction via RuntimeException.
 */
private void start() {
try {
participantManager.getMessagingService().registerMessageHandlerFactory( Message.MessageType.USER_DEFINE_MSG.toString(), new MessageHandlerResolverWrapper().convert() );
participantManager.getStateMachineEngine().registerStateModelFactory( "LeaderStandby", new LockTransitionalFactory() );
participantManager.connect();
offlinePartition();
} catch ( final Exception ex ) {
throw new RuntimeException( ex );
}
}
/**
 * Returns this instance's state ("LEADER"/"STANDBY"/"OFFLINE") for partition
 * {@code <resourceName>_0} from the cluster's external view.
 */
private String getNodeStatus() {
final String partition = resourceName + "_0";
final ExternalView view = participantManager.getClusterManagmentTool().getResourceExternalView( clusterName, resourceName );
return view.getStateMap( partition ).get( instanceName );
}
/**
 * Disconnects from the Helix cluster, if a connection was ever established.
 */
@Override
public void dispose() {
    if ( participantManager == null ) {
        return;
    }
    if ( participantManager.isConnected() ) {
        participantManager.disconnect();
    }
}
/**
 * Runs the startup hook synchronously on the calling thread; no deferral is
 * needed because the constructor already connected to the cluster.
 */
@Override
public void onStart( final Runnable runnable ) {
runnable.run();
}
/**
 * Returns how many times the current thread holds the cluster lock
 * (delegates to the reentrant lock's hold count).
 */
@Override
public int getHoldCount() {
return lock.getHoldCount();
}
/**
 * Disables this node's partition and busy-waits (10ms polls) until the
 * external view reports OFFLINE. No-op when already OFFLINE.
 */
private void offlinePartition() {
    if ( "OFFLINE".equals( getNodeStatus() ) ) {
        return;
    }
    participantManager.getClusterManagmentTool().enablePartition( false, clusterName, instanceName, resourceName, asList( resourceName + "_0" ) );
    boolean interrupted = false;
    while ( !"OFFLINE".equals( getNodeStatus() ) ) {
        try {
            Thread.sleep( 10 );
        } catch ( InterruptedException e ) {
            // Fix: the original swallowed the interrupt; remember it and restore
            // the thread's interrupt status once the state transition completes.
            interrupted = true;
        }
    }
    if ( interrupted ) {
        Thread.currentThread().interrupt();
    }
}
/**
 * Enables this node's partition and busy-waits (10ms polls) until the
 * external view reports LEADER. No-op when already LEADER.
 */
private void enablePartition() {
    if ( "LEADER".equals( getNodeStatus() ) ) {
        return;
    }
    participantManager.getClusterManagmentTool().enablePartition( true, clusterName, instanceName, resourceName, asList( resourceName + "_0" ) );
    boolean interrupted = false;
    while ( !"LEADER".equals( getNodeStatus() ) ) {
        try {
            Thread.sleep( 10 );
        } catch ( InterruptedException e ) {
            // Fix: the original swallowed the interrupt; remember it and restore
            // the thread's interrupt status once the state transition completes.
            interrupted = true;
        }
    }
    if ( interrupted ) {
        Thread.currentThread().interrupt();
    }
}
/**
 * Disables this node's partition and busy-waits (10ms polls) until the
 * external view reports STANDBY or OFFLINE. No-op when already in either state.
 */
private void disablePartition() {
    String nodeStatus = getNodeStatus();
    if ( "STANDBY".equals( nodeStatus ) || "OFFLINE".equals( nodeStatus ) ) {
        return;
    }
    participantManager.getClusterManagmentTool().enablePartition( false, clusterName, instanceName, resourceName, asList( resourceName + "_0" ) );
    boolean interrupted = false;
    while ( !( "STANDBY".equals( nodeStatus ) || "OFFLINE".equals( nodeStatus ) ) ) {
        try {
            Thread.sleep( 10 );
        } catch ( InterruptedException e ) {
            // Fix: the original swallowed the interrupt; remember it and restore
            // the thread's interrupt status once the state transition completes.
            interrupted = true;
        }
        // Fix: refresh on every iteration -- the original only refreshed when
        // the sleep completed, so an interrupted sleep skipped the update.
        nodeStatus = getNodeStatus();
    }
    if ( interrupted ) {
        Thread.currentThread().interrupt();
    }
}
/**
 * Acquires the local lock first, then promotes this node's partition to
 * LEADER (blocking until the cluster confirms the transition).
 */
@Override
public void lock() {
lock.lock();
enablePartition();
}
/**
 * Demotes this node's partition first, then releases the local lock --
 * the mirror order of lock().
 */
@Override
public void unlock() {
disablePartition();
lock.unlock();
}
/**
 * Broadcasts a message to all other participants and blocks up to
 * {@code timeOut}; replies and timeouts are deliberately ignored (the empty
 * callback only exists because sendAndWait requires one).
 */
@Override
public void broadcastAndWait( final String serviceId,
final MessageType type,
final Map<String, String> content,
int timeOut ) {
participantManager.getMessagingService().sendAndWait( buildCriteria(), buildMessage( serviceId, type, content ), new org.apache.helix.messaging.AsyncCallback( timeOut ) {
@Override
public void onTimeOut() {
}
@Override
public void onReplyMessage( final Message message ) {
}
}, timeOut );
}
/**
 * Broadcasts a message to all other participants, forwarding each reply (and
 * any timeout) to the supplied callback. sendAndWait returns the number of
 * messages sent; zero recipients is treated as a timeout so the caller is
 * always notified.
 */
@Override
public void broadcastAndWait( final String serviceId,
final MessageType type,
final Map<String, String> content,
final int timeOut,
final AsyncCallback callback ) {
int msg = participantManager.getMessagingService().sendAndWait( buildCriteria(), buildMessage( serviceId, type, content ), new org.apache.helix.messaging.AsyncCallback() {
@Override
public void onTimeOut() {
callback.onTimeOut();
}
@Override
public void onReplyMessage( final Message message ) {
// NOTE(review): this overload decodes replies via getMessageContentFromReply
// while the broadcast(...) overload uses getMessageContent -- confirm the
// asymmetry is intentional (neither helper is visible here).
final MessageType type = buildMessageTypeFromReply( message );
final Map<String, String> map = getMessageContentFromReply( message );
callback.onReply( type, map );
}
}, timeOut );
if ( msg == 0 ) {
callback.onTimeOut();
}
}
/**
 * Fire-and-forget broadcast to all other participants; no reply handling.
 */
@Override
public void broadcast( final String serviceId,
final MessageType type,
final Map<String, String> content ) {
participantManager.getMessagingService().send( buildCriteria(), buildMessage( serviceId, type, content ) );
}
/**
 * Asynchronous broadcast: returns immediately and forwards replies (or a
 * timeout) to the supplied callback.
 */
@Override
public void broadcast( final String serviceId,
final MessageType type,
final Map<String, String> content,
final int timeOut,
final AsyncCallback callback ) {
participantManager.getMessagingService().send( buildCriteria(), buildMessage( serviceId, type, content ), new org.apache.helix.messaging.AsyncCallback() {
@Override
public void onTimeOut() {
callback.onTimeOut();
}
@Override
public void onReplyMessage( final Message message ) {
final MessageType type = buildMessageTypeFromReply( message );
// NOTE(review): decodes the reply with getMessageContent while the
// broadcastAndWait overload uses getMessageContentFromReply -- verify.
final Map<String, String> map = getMessageContent( message );
callback.onReply( type, map );
}
}, timeOut );
}
/**
 * Sends a message to a single named instance (fire-and-forget).
 *
 * @param resourceId target instance name used in the recipient criteria
 */
@Override
public void sendTo( final String serviceId,
final String resourceId,
final MessageType type,
final Map<String, String> content ) {
participantManager.getMessagingService().send( buildCriteria( resourceId ), buildMessage( serviceId, type, content ) );
}
/**
 * Builds the recipient criteria for cluster messaging: session-specific
 * participants on this resource, excluding this node itself.
 *
 * <p>Fix: the original used double-brace initialization, which creates an
 * anonymous {@code Criteria} subclass per call and pins a reference to the
 * enclosing {@code ClusterServiceHelix} instance; plain setters avoid both.
 *
 * @param resourceId target instance name, or "%" for all instances
 */
private Criteria buildCriteria( final String resourceId ) {
    final Criteria criteria = new Criteria();
    criteria.setInstanceName( resourceId );
    criteria.setRecipientInstanceType( InstanceType.PARTICIPANT );
    criteria.setResource( resourceName );
    criteria.setSelfExcluded( true );
    criteria.setSessionSpecific( true );
    return criteria;
}
/**
 * Criteria matching every other participant ("%" is the Helix wildcard).
 */
private Criteria buildCriteria() {
return buildCriteria( "%" );
}
/**
 * Builds a USER_DEFINE_MSG carrying the payload map plus the service id,
 * message type and originating instance in the record's simple fields.
 *
 * <p>Fix: the original used double-brace initialization, creating an anonymous
 * {@code Message} subclass that pins a reference to the enclosing instance;
 * a plain local with setters is equivalent and avoids that.
 */
private Message buildMessage( final String serviceId,
                              final MessageType type,
                              final Map<String, String> content ) {
    final Message message = new Message( Message.MessageType.USER_DEFINE_MSG, randomUUID().toString() );
    message.setMsgState( Message.MessageState.NEW );
    message.getRecord().setMapField( "content", content );
    message.getRecord().setSimpleField( "serviceId", serviceId );
    message.getRecord().setSimpleField( "type", type.toString() );
    message.getRecord().setSimpleField( "origin", instanceName );
    return message;
}
/**
 * Very low priority (close to Integer.MIN_VALUE) so this service initializes
 * before higher-priority services that may depend on cluster messaging.
 */
@Override
public int priority() {
return Integer.MIN_VALUE + 200;
}
/**
 * Adapts this service's {@link MessageHandlerResolver} registry to Helix's
 * {@link MessageHandlerFactory} SPI: each incoming USER_DEFINE_MSG is decoded,
 * routed to the resolver registered under its "serviceId" field, and the
 * handler's reply (if any) is copied into the Helix task result map.
 */
class MessageHandlerResolverWrapper {
MessageHandlerFactory convert() {
return new MessageHandlerFactory() {
@Override
public MessageHandler createHandler( final Message message,
final NotificationContext context ) {
return new MessageHandler( message, context ) {
@Override
public HelixTaskResult handleMessage() throws InterruptedException {
try {
// Decode the routing metadata and payload set by buildMessage().
final String serviceId = _message.getRecord().getSimpleField( "serviceId" );
final MessageType type = buildMessageType( _message.getRecord().getSimpleField( "type" ) );
final Map<String, String> map = getMessageContent( _message );
final MessageHandlerResolver resolver = messageHandlerResolver.get( serviceId );
// No resolver registered for this service: report failure to Helix.
if ( resolver == null ) {
System.err.println( "serviceId not found '" + serviceId + "'" );
return new HelixTaskResult() {{
setSuccess( false );
setMessage( "Can't find resolver" );
}};
}
// Resolver known but it has no handler for this message type.
final org.uberfire.commons.message.MessageHandler handler = resolver.resolveHandler( serviceId, type );
if ( handler == null ) {
System.err.println( "handler not found for '" + serviceId + "' and type '" + type.toString() + "'" );
return new HelixTaskResult() {{
setSuccess( false );
setMessage( "Can't find handler." );
}};
}
final Pair<MessageType, Map<String, String>> result = handler.handleMessage( type, map );
// A null result means "handled, nothing to reply".
if ( result == null ) {
return new HelixTaskResult() {{
setSuccess( true );
}};
}
// Success with a reply: flatten the reply type and payload into
// the task result map for the sender's AsyncCallback to decode.
return new HelixTaskResult() {{
setSuccess( true );
getTaskResultMap().put( "serviceId", serviceId );
getTaskResultMap().put( "type", result.getK1().toString() );
getTaskResultMap().put( "origin", instanceName );
for ( Map.Entry<String, String> entry : result.getK2().entrySet() ) {
getTaskResultMap().put( entry.getKey(), entry.getValue() );
}
}};
} catch ( final Throwable e ) {
// Any handler failure is reported back to Helix rather than rethrown.
logger.error( "Error while processing cluster message", e );
return new HelixTaskResult() {{
setSuccess( false );
setMessage( e.getMessage() );
setException( new RuntimeException( e ) );
}};
}
}
@Override
public void onError( final Exception e,
final ErrorCode code,
final ErrorType type ) {
// Errors are already surfaced via the HelixTaskResult above; nothing to do.
}
};
}
@Override
public String getMessageType() {
return Message.MessageType.USER_DEFINE_MSG.toString();
}
@Override
public void reset() {
}
};
}
}
/**
 * Resolves a raw type string back into a {@code MessageType}. Strings that
 * name a {@code ClusterMessageType} constant resolve to that enum value;
 * anything else is wrapped in an ad-hoc MessageType whose toString() returns
 * the raw string.
 *
 * @param _type raw type string from the message record; may be null
 * @return the resolved type, or null when {@code _type} is null
 */
private MessageType buildMessageType( final String _type ) {
    if ( _type == null ) {
        return null;
    }
    try {
        // Narrowed from catch(Exception): valueOf only throws
        // IllegalArgumentException here (null is excluded by the guard above).
        return ClusterMessageType.valueOf( _type );
    } catch ( final IllegalArgumentException ex ) {
        return new MessageType() {
            @Override
            public String toString() {
                return _type;
            }

            @Override
            public int hashCode() {
                return _type.hashCode();
            }

            // equals defined alongside hashCode (the original overrode only
            // hashCode): wrapped types compare by their string value.
            @Override
            public boolean equals( final Object obj ) {
                return obj instanceof MessageType && _type.equals( obj.toString() );
            }
        };
    }
}
/**
 * Extracts the application-level message type from a reply message's
 * MESSAGE_RESULT map (where handleMessage stores it under "type").
 *
 * @return the resolved type, or null when the reply carries no result map
 *         or no "type" entry
 */
private MessageType buildMessageTypeFromReply( final Message message ) {
    final Map<String, String> result = message.getRecord().getMapField( Message.Attributes.MESSAGE_RESULT.toString() );
    if ( result == null ) {
        // Guard added: a reply without a result map (e.g. a failed handler)
        // previously caused a NullPointerException here.
        return null;
    }
    return buildMessageType( result.get( "type" ) );
}
// Returns the "content" map field of the message record — the payload written
// by buildMessage(). May be null if the field was never set on the message.
private Map<String, String> getMessageContent( final Message message ) {
    return message.getRecord().getMapField( "content" );
}
/**
 * Extracts the application payload from a reply message, dropping the
 * reserved routing fields ("serviceId", "origin", "type") that
 * handleMessage() adds to every reply's MESSAGE_RESULT map.
 */
private Map<String, String> getMessageContentFromReply( final Message message ) {
    // Plain map population instead of double-brace initialization with a loop
    // inside an instance initializer.
    final Map<String, String> content = new HashMap<String, String>();
    final Map<String, String> resultMap = message.getRecord().getMapField( Message.Attributes.MESSAGE_RESULT.toString() );
    for ( final Map.Entry<String, String> field : resultMap.entrySet() ) {
        final String key = field.getKey();
        if ( !key.equals( "serviceId" ) && !key.equals( "origin" ) && !key.equals( "type" ) ) {
            content.put( key, field.getValue() );
        }
    }
    return content;
}
}
| |
/*L
* Copyright Ekagra Software Technologies Ltd.
* Copyright SAIC, SAIC-Frederick
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cacore-sdk/LICENSE.txt for details.
*/
package test.gov.nih.nci.cacoresdk.domain.operations;
import gov.nih.nci.cacoresdk.domain.operations.Site;
import javax.ws.rs.core.Response;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.InputStream;
import java.io.FileWriter;
import java.io.File;
import java.util.List;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.entity.FileEntity;
import org.apache.cxf.jaxrs.client.WebClient;
import org.apache.cxf.common.util.Base64Utility;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;
import java.lang.reflect.Modifier;
import java.util.Collection;
import java.util.Iterator;
import test.gov.nih.nci.cacoresdk.SDKRESTfulTestBase;
import gov.nih.nci.system.applicationservice.ApplicationException;
/**
 * RESTful resource tests for the Site domain object: exercises GET, search,
 * DELETE, POST and PUT against {@code baseURL + "/rest/Site"}.
 *
 * Refactored to share three private helpers (id lookup, status validation,
 * entity-to-file streaming); readers/writers and the HTTP client are now
 * released in finally blocks (the originals leaked on exception), and
 * testSearch rethrows failures like every other test here.
 */
public class SiteResourceTest extends SDKRESTfulTestBase
{
    public static String getTestCaseName()
    {
        return "Site RESTful Resource Test Case";
    }

    /**
     * Looks up the first Site via the application service and returns its id,
     * or the empty string when no Site exists.
     */
    private String getFirstSiteId() throws Exception
    {
        Site searchObject = new Site();
        Collection results = getApplicationService().search("gov.nih.nci.cacoresdk.domain.operations.Site", searchObject);
        if (results == null || results.size() == 0)
        {
            return "";
        }
        Site obj = (Site) ((List) results).get(0);
        // Long.toString replaces the deprecated new Long(...).toString().
        return Long.toString(obj.getId());
    }

    /**
     * Validates the HTTP status of a RESTful response. For 406/404 the error
     * entity must be an XML document rooted at "response"; any other non-200
     * status fails the test.
     */
    private void checkResponse(Response response) throws Exception
    {
        int status = response.getStatus();
        if (status == Status.NOT_ACCEPTABLE.getStatusCode()
                || status == Status.NOT_FOUND.getStatusCode())
        {
            InputStream is = (InputStream) response.getEntity();
            org.jdom.input.SAXBuilder builder = new org.jdom.input.SAXBuilder(false);
            org.jdom.Document jDoc = builder.build(is);
            assertEquals(jDoc.getRootElement().getName(), "response");
        }
        else if (status != 200)
        {
            throw new RuntimeException("Failed : HTTP error code : " + status);
        }
    }

    /**
     * Streams the response entity to the given file, echoing each line to
     * stdout. Both streams are closed even on failure (the original leaked
     * the reader always and the writer on exception).
     */
    private void writeResponseToFile(Response response, String fileName) throws IOException
    {
        File myFile = new File(fileName);
        System.out.println("writing data to file " + myFile.getAbsolutePath());
        FileWriter myWriter = new FileWriter(myFile);
        try
        {
            BufferedReader br = new BufferedReader(
                    new InputStreamReader((InputStream) response.getEntity()));
            try
            {
                String output;
                System.out.println("Output from Server .... \n");
                while ((output = br.readLine()) != null)
                {
                    myWriter.write(output);
                    System.out.println(output);
                }
                myWriter.flush();
            }
            finally
            {
                br.close();
            }
        }
        finally
        {
            myWriter.close();
        }
    }

    /**
     * Uses Nested Search Criteria for search
     * Verifies that the results are returned
     * Verifies size of the result set
     * Verifies that none of the attributes are null
     *
     * @throws Exception
     */
    public void testGet() throws Exception
    {
        try {
            String id = getFirstSiteId();
            if (id.equals(""))
                return;

            String url = baseURL + "/rest/Site/" + id;
            WebClient client = WebClient.create(url);
            client.type("application/xml").accept("application/xml");
            Response response = client.get();

            checkResponse(response);
            writeResponseToFile(response, "Site" + "XML.xml");
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        }
    }

    public void testSearch() throws Exception
    {
        try {
            String url = baseURL + "/rest/Site/search;id=*";
            WebClient client = WebClient.create(url);
            client.type("application/xml").accept("application/xml");
            Response response = client.get();

            checkResponse(response);
            writeResponseToFile(response, "Site_Search" + "XML.xml");
        } catch (Exception e) {
            e.printStackTrace();
            // Rethrow for consistency with the other tests; the original
            // silently swallowed failures here, hiding broken searches.
            throw e;
        }
    }

    public void testDelete() throws Exception
    {
        try {
            String id = getFirstSiteId();
            if (id.equals(""))
                return;

            String url = baseURL + "/rest/Site/" + id;
            WebClient client = WebClient.create(url);
            Response response = client.delete();

            checkResponse(response);
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        }
    }

    public void testPost() throws Exception
    {
        try {
            DefaultHttpClient httpClient = new DefaultHttpClient();
            try
            {
                String url = baseURL + "/rest/Site";
                HttpPost postRequest = new HttpPost(url);

                // testGet() produces the XML payload this test replays.
                File myFile = new File("Site" + "XML.xml");
                if (!myFile.exists())
                {
                    testGet();
                    myFile = new File("Site" + "XML.xml");
                    if (!myFile.exists())
                        return;
                }

                FileEntity input = new FileEntity(myFile);
                input.setContentType("application/xml");
                System.out.println("input: " + myFile);
                postRequest.setEntity(input);

                HttpResponse response = httpClient.execute(postRequest);
                BufferedReader br = new BufferedReader(
                        new InputStreamReader((response.getEntity().getContent())));
                try
                {
                    String output;
                    System.out.println("Output from Server .... \n");
                    while ((output = br.readLine()) != null) {
                        System.out.println(output);
                    }
                }
                finally
                {
                    br.close();
                }
            }
            finally
            {
                // Always release the connection manager, even on failure.
                httpClient.getConnectionManager().shutdown();
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        }
    }

    public void testPut() throws Exception
    {
        try {
            DefaultHttpClient httpClient = new DefaultHttpClient();
            try
            {
                String url = baseURL + "/rest/Site";
                HttpPut putRequest = new HttpPut(url);

                // testGet() produces the XML payload this test replays.
                File myFile = new File("Site" + "XML.xml");
                if (!myFile.exists())
                {
                    testGet();
                    myFile = new File("Site" + "XML.xml");
                    if (!myFile.exists())
                        return;
                }

                FileEntity input = new FileEntity(myFile);
                input.setContentType("application/xml");
                putRequest.setEntity(input);

                HttpResponse response = httpClient.execute(putRequest);
                if (response.getEntity() != null)
                {
                    BufferedReader br = new BufferedReader(
                            new InputStreamReader((response.getEntity().getContent())));
                    try
                    {
                        String output;
                        System.out.println("Output from Server .... \n");
                        while ((output = br.readLine()) != null) {
                            System.out.println(output);
                        }
                    }
                    finally
                    {
                        br.close();
                    }
                }
            }
            finally
            {
                httpClient.getConnectionManager().shutdown();
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        }
    }
}
| |
package org.gdg.frisbee.android.fragment;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.AsyncTask;
import android.os.Bundle;
import android.preference.CheckBoxPreference;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.PreferenceManager;
import android.support.v4.preference.PreferenceFragment;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.LinearLayout;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.google.analytics.tracking.android.GoogleAnalytics;
import com.google.android.gms.auth.GoogleAuthException;
import com.google.android.gms.auth.GoogleAuthUtil;
import com.google.android.gms.common.Scopes;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.gcm.GoogleCloudMessaging;
import com.google.android.gms.plus.Plus;
import java.io.IOException;
import org.gdg.frisbee.android.Const;
import org.gdg.frisbee.android.R;
import org.gdg.frisbee.android.activity.GdgActivity;
import org.gdg.frisbee.android.api.ApiRequest;
import org.gdg.frisbee.android.api.GdgX;
import org.gdg.frisbee.android.api.model.Chapter;
import org.gdg.frisbee.android.api.model.Directory;
import org.gdg.frisbee.android.api.model.GcmRegistrationResponse;
import org.gdg.frisbee.android.app.App;
import org.gdg.frisbee.android.cache.ModelCache;
import org.gdg.frisbee.android.widget.UpcomingEventWidgetProvider;
import timber.log.Timber;
/**
 * Settings screen. Wires the preference widgets to the "gdg"
 * SharedPreferences and pushes GCM registration and home-chapter changes to
 * the GDG-X backend. Fixes: Timber throwable-first logging (the original
 * passed throwables as format arguments, losing the stack trace), the
 * "gailed" typo, and printStackTrace() replaced by Timber in setHomeGdg.
 */
public class SettingsFragment extends PreferenceFragment {

    private static final String LOG_TAG = "GDG-SettingsFragment";

    private PreferenceManager mPreferenceManager;
    private GdgX mXClient;
    private GoogleCloudMessaging mGcm;
    private SharedPreferences mPreferences;
    private GoogleApiClient mGoogleApiClient;

    /** Pushes the newly selected home chapter to the backend and refreshes widgets. */
    private Preference.OnPreferenceChangeListener mOnHomeGdgPreferenceChange = new Preference.OnPreferenceChangeListener() {
        @Override
        public boolean onPreferenceChange(Preference preference, Object o) {
            final String homeGdg = (String) o;

            if (mGoogleApiClient.isConnected() && mPreferences.getBoolean("gcm", true)) {
                setHomeGdg(homeGdg);
            }
            // Update widgets to show newest chosen GdgHome events
            // TODO: Make it into class which broadcasts update need to all interested entities like MainActivity and Widgets
            App.getInstance().startService(new Intent(App.getInstance(), UpcomingEventWidgetProvider.UpdateService.class));

            return true;
        }
    };

    /** Registers or unregisters this device for GCM when the checkbox changes. */
    private Preference.OnPreferenceChangeListener mOnGcmPreferenceChange = new Preference.OnPreferenceChangeListener() {
        @Override
        public boolean onPreferenceChange(Preference preference, Object o) {
            final boolean enableGcm = (Boolean) o;

            if (mGoogleApiClient.isConnected()) {
                mLoading.setVisibility(View.VISIBLE);
                mLoading.startAnimation(AnimationUtils.loadAnimation(getActivity(), R.anim.fade_in));

                new AsyncTask<Void, Void, Void>() {
                    @Override
                    protected Void doInBackground(Void... voids) {
                        try {
                            // NOTE(review): the scope string has a space after "oauth2:";
                            // GoogleAuthUtil conventionally expects "oauth2:<scope>".
                            // Left unchanged — confirm against the working backend first.
                            String token = GoogleAuthUtil.getToken(getActivity(), Plus.AccountApi.getAccountName(mGoogleApiClient), "oauth2: " + Scopes.PLUS_LOGIN);
                            mXClient.setToken(token);

                            if (!enableGcm) {
                                ApiRequest req = mXClient.unregisterGcm(mPreferences.getString(Const.SETTINGS_GCM_REG_ID, ""), new Response.Listener<GcmRegistrationResponse>() {
                                    @Override
                                    public void onResponse(GcmRegistrationResponse messageResponse) {
                                        mPreferences.edit()
                                                .putBoolean(Const.SETTINGS_GCM, false)
                                                .remove(Const.SETTINGS_GCM_REG_ID)
                                                .apply();
                                    }
                                }, new Response.ErrorListener() {
                                    @Override
                                    public void onErrorResponse(VolleyError volleyError) {
                                        // Throwable first so Timber logs the stack trace
                                        // (it was previously passed as a format argument).
                                        Timber.e(volleyError, "Fail");
                                    }
                                }
                                );
                                req.execute();
                            } else {
                                final String regid = mGcm.register(getString(R.string.gcm_sender_id));
                                ApiRequest req = mXClient.registerGcm(regid, new Response.Listener<GcmRegistrationResponse>() {
                                    @Override
                                    public void onResponse(GcmRegistrationResponse messageResponse) {
                                        mPreferences.edit()
                                                .putBoolean(Const.SETTINGS_GCM, true)
                                                .putString(Const.SETTINGS_GCM_REG_ID, regid)
                                                .putString(Const.SETTINGS_GCM_NOTIFICATION_KEY, messageResponse.getNotificationKey())
                                                .apply();
                                    }
                                }, new Response.ErrorListener() {
                                    @Override
                                    public void onErrorResponse(VolleyError volleyError) {
                                        Timber.e(volleyError, "Fail");
                                    }
                                }
                                );
                                req.execute();

                                setHomeGdg(mPreferences.getString(Const.SETTINGS_HOME_GDG, ""));
                            }
                        } catch (IOException e) {
                            // Typo "gailed" fixed; Timber now receives the throwable.
                            Timber.e(e, "(Un)Register GCM failed (IO)");
                        } catch (GoogleAuthException e) {
                            Timber.e(e, "(Un)Register GCM failed (Auth)");
                        }
                        return null;
                    }

                    @Override
                    protected void onPostExecute(Void o) {
                        super.onPostExecute(o);

                        // Fade the progress indicator out, then hide it.
                        Animation fadeOut = AnimationUtils.loadAnimation(getActivity(), R.anim.fade_out);
                        fadeOut.setAnimationListener(new Animation.AnimationListener() {
                            @Override
                            public void onAnimationStart(Animation animation) {
                            }

                            @Override
                            public void onAnimationEnd(Animation animation) {
                                mLoading.setVisibility(View.GONE);
                            }

                            @Override
                            public void onAnimationRepeat(Animation animation) {
                            }
                        });
                        mLoading.startAnimation(fadeOut);
                    }
                }.execute();
            }
            return true;
        }
    };

    /** Applies the analytics opt-in/opt-out immediately. */
    private Preference.OnPreferenceChangeListener mOnAnalyticsPreferenceChange = new Preference.OnPreferenceChangeListener() {
        @Override
        public boolean onPreferenceChange(Preference preference, Object o) {
            boolean analytics = (Boolean) o;
            GoogleAnalytics.getInstance(getActivity()).setAppOptOut(!analytics);
            return true;
        }
    };

    // Progress indicator shown while the GCM (un)registration runs.
    private LinearLayout mLoading;

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        mGoogleApiClient = ((GdgActivity) getActivity()).getGoogleApiClient();
    }

    @Override
    public void onStart() {
        super.onStart();
    }

    @Override
    public void onStop() {
        super.onStop();
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        mPreferenceManager = getPreferenceManager();
        mPreferenceManager.setSharedPreferencesName("gdg");

        mXClient = new GdgX();
        mGcm = GoogleCloudMessaging.getInstance(getActivity());
        mPreferences = mPreferenceManager.getSharedPreferences();

        addPreferencesFromResource(R.xml.settings);
        initPreferences();
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        mLoading = new LinearLayout(getActivity());
    }

    /** Populates the home-chapter list and attaches all change listeners. */
    private void initPreferences() {
        final ListPreference prefHomeGdgList = (ListPreference) findPreference(Const.SETTINGS_HOME_GDG);
        if (prefHomeGdgList != null) {
            App.getInstance().getModelCache().getAsync("chapter_list_hub", false, new ModelCache.CacheListener() {
                @Override
                public void onGet(Object item) {
                    Directory directory = (Directory) item;

                    CharSequence entries[] = new String[directory.getGroups().size()];
                    CharSequence entryValues[] = new String[directory.getGroups().size()];

                    int i = 0;
                    for (Chapter chapter : directory.getGroups()) {
                        entries[i] = chapter.getName();
                        entryValues[i] = chapter.getGplusId();
                        i++;
                    }
                    prefHomeGdgList.setEntries(entries);
                    prefHomeGdgList.setEntryValues(entryValues);
                }

                @Override
                public void onNotFound(String key) {
                }
            });
            prefHomeGdgList.setOnPreferenceChangeListener(mOnHomeGdgPreferenceChange);
        }

        CheckBoxPreference prefGcm = (CheckBoxPreference) findPreference(Const.SETTINGS_GCM);
        if (prefGcm != null) {
            prefGcm.setOnPreferenceChangeListener(mOnGcmPreferenceChange);
        }

        CheckBoxPreference prefGoogleSignIn = (CheckBoxPreference) findPreference("gdg_signed_in");
        if (prefGoogleSignIn != null) {
            prefGoogleSignIn.setOnPreferenceChangeListener(new Preference.OnPreferenceChangeListener() {
                @Override
                public boolean onPreferenceChange(Preference preference, Object o) {
                    boolean signedIn = (Boolean) o;

                    if (!signedIn) {
                        if (mGoogleApiClient.isConnected()) {
                            Plus.AccountApi.clearDefaultAccount(mGoogleApiClient);
                            mGoogleApiClient.disconnect();
                            mGoogleApiClient.connect();
                        }
                    } else {
                        if (!mGoogleApiClient.isConnected()) {
                            mGoogleApiClient.connect();
                        }
                    }
                    // TODO: Re-implement logout....
                    return true;
                }
            });
        }

        CheckBoxPreference prefAnalytics = (CheckBoxPreference) findPreference("analytics");
        if (prefAnalytics != null) {
            prefAnalytics.setOnPreferenceChangeListener(mOnAnalyticsPreferenceChange);
        }
    }

    /** Persists the chosen home chapter to the backend on a background thread. */
    private void setHomeGdg(final String homeGdg) {
        new AsyncTask<Void, Void, Void>() {
            @Override
            protected Void doInBackground(Void... voids) {
                try {
                    String token = GoogleAuthUtil.getToken(getActivity(), Plus.AccountApi.getAccountName(((GdgActivity) getActivity()).getGoogleApiClient()), "oauth2: " + Scopes.PLUS_LOGIN);
                    mXClient.setToken(token);
                    mXClient.setHomeGdg(homeGdg, null, null).execute();
                } catch (IOException e) {
                    // Timber instead of printStackTrace(), consistent with the class.
                    Timber.e(e, "setHomeGdg failed (IO)");
                } catch (GoogleAuthException e) {
                    Timber.e(e, "setHomeGdg failed (Auth)");
                }
                return null;
            }
        }.execute();
    }

    // TODO: Re-Implement with GMS 4.3
    public void onSignInFailed() {
        Timber.d("onSignInFailed");
        mPreferences.edit().putBoolean(Const.SETTINGS_SIGNED_IN, false).apply();
        CheckBoxPreference prefGoogleSignIn = (CheckBoxPreference) findPreference("gdg_signed_in");
        if (prefGoogleSignIn != null) {
            prefGoogleSignIn.setChecked(false);
        }
    }

    @Override
    public void onActivityResult(int requestCode, int responseCode, Intent data) {
        super.onActivityResult(requestCode, responseCode, data);
    }
}
| |
package com.gurkensalat.osm.mosques.service;
import com.gurkensalat.osm.entity.OsmEntityType;
import com.gurkensalat.osm.entity.OsmNode;
import com.gurkensalat.osm.entity.OsmRoot;
import com.gurkensalat.osm.entity.OsmWay;
import com.gurkensalat.osm.entity.PlaceType;
import com.gurkensalat.osm.mosques.entity.OsmMosquePlace;
import com.gurkensalat.osm.mosques.messaging.OsmServiceMessaging;
import com.gurkensalat.osm.mosques.repository.OsmMosquePlaceRepository;
import com.gurkensalat.osm.repository.OsmParserRepository;
import com.gurkensalat.osm.repository.OsmParserRepositoryImpl;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import javax.annotation.Priority;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.time.LocalDateTime;
import static org.apache.commons.lang3.StringUtils.isEmpty;
import static org.apache.commons.lang3.StringUtils.substring;
import static org.apache.commons.lang3.StringUtils.trimToEmpty;
@Component
@Qualifier(OsmServiceMessaging.KIND_SYNC)
@Priority(2)
@Slf4j
public class OsmConverterServiceImpl implements OsmConverterService
{
// @Autowired
// private OsmTagRepository osmTagRepository;
// @Autowired
// TODO fix autowiring of osmParserRepository...
private OsmParserRepository osmParserRepository;
@Autowired
private OsmMosquePlaceRepository osmMosquePlaceRepository;
@Value("${osm.data.location}")
private String dataLocation;
/**
 * Imports OSM nodes from a data file below the configured data location and
 * persists each one as a mosque place.
 *
 * @param path request-supplied file name; sanitized before use
 * @return statistics (counts and timing) for this import run
 */
public OsmConverterResult importNodes(String path)
{
    log.info("Request to import nodes from {} arrived.", path);

    // TODO Hackity Hack - fix autowiring of osmParserRepository...
    osmParserRepository = new OsmParserRepositoryImpl();

    OsmConverterResult stats = new OsmConverterResult();
    stats.setWhat("nodes from file");
    stats.setPath(path);
    stats.setStart(LocalDateTime.now());

    try
    {
        File inputFile = sanitizePath(path);
        OsmRoot parsed = osmParserRepository.parse(inputFile);
        log.info("Read {} nodes from {}", parsed.getNodes().size(), inputFile.getName());

        stats.setNodes(parsed.getNodes().size());
        stats.setWays(parsed.getWays().size());

        for (OsmNode osmNode : parsed.getNodes())
        {
            try
            {
                boolean persisted = (persistOsmNode(osmNode) != null);
                if (persisted)
                {
                    stats.setPlaces(stats.getPlaces() + 1);
                    log.debug("Persisted {} of {} places", stats.getPlaces(), stats.getNodes());
                }
            }
            catch (Exception e)
            {
                // A single broken node must not abort the whole import.
                log.error("While persisting OSM node", e);
            }
        }
    }
    catch (UnsupportedEncodingException e)
    {
        log.error("While sanitizing path", e);
    }

    stats.setEnd(LocalDateTime.now());
    return stats;
}
/**
 * Imports OSM ways from a data file below the configured data location and
 * persists each one as a mosque place.
 *
 * @param path request-supplied file name; sanitized before use
 * @return statistics (counts and timing) for this import run
 */
public OsmConverterResult importWays(String path)
{
    log.info("Request to import ways from {} arrived.", path);

    // TODO Hackity Hack - fix autowiring of osmParserRepository...
    osmParserRepository = new OsmParserRepositoryImpl();

    OsmConverterResult result = new OsmConverterResult();
    result.setWhat("ways from file");
    result.setPath(path);
    result.setStart(LocalDateTime.now());

    try
    {
        File dataFile = sanitizePath(path);
        OsmRoot root = osmParserRepository.parse(dataFile);
        // Fixed: the original logged root.getNodes().size() as the way count.
        log.info("Read {} ways from {}", root.getWays().size(), dataFile.getName());

        result.setNodes(root.getNodes().size());
        result.setWays(root.getWays().size());

        for (OsmWay way : root.getWays())
        {
            try
            {
                if (persistOsmWay(way) != null)
                {
                    result.setPlaces(result.getPlaces() + 1);
                    log.debug("Persisted {} of {} places", result.getPlaces(), result.getWays());
                }
            }
            catch (Exception e)
            {
                // One broken way must not abort the whole import
                // (consistent with importNodes).
                log.error("While persisting OSM way", e);
            }
        }
    }
    catch (UnsupportedEncodingException e)
    {
        log.error("While sanitizing path", e);
    }

    result.setEnd(LocalDateTime.now());
    return result;
}
/* package protected */
/* package protected */
/**
 * Resolves a request-supplied file name against the configured data location,
 * stripping anything that could escape it.
 *
 * @param path URL-encoded file name from the request; may be empty
 * @return the data location itself when {@code path} is empty, otherwise a
 *         file directly inside it
 * @throws UnsupportedEncodingException never in practice (UTF-8 is built in)
 */
File sanitizePath(String path) throws UnsupportedEncodingException
{
    File dataFile = new File(dataLocation);

    if (!(isEmpty(path)))
    {
        path = URLDecoder.decode(path, StandardCharsets.UTF_8.toString());

        // Strip both path-separator flavours and parent references so the
        // request cannot escape the data location: previously only "/" was
        // removed, so a bare ".." still resolved to the parent directory.
        path = path.replaceAll("[/\\\\]", "");
        while (path.contains(".."))
        {
            path = path.replace("..", "");
        }

        dataFile = new File(dataFile, path);
    }

    log.info("Data Directory is {}", dataFile.getAbsolutePath());

    return dataFile;
}
/**
 * Re-imports a single OSM node directly from the OSM API server.
 *
 * @param id numeric OSM node id, as a string
 * @return statistics (counts and timing) for this import run
 */
@Override
public OsmConverterResult fetchAndImportNode(String id)
{
    log.info("Request to re-import node {} arrived.", id);

    // TODO Hackity Hack - fix autowiring of osmParserRepository...
    osmParserRepository = new OsmParserRepositoryImpl();

    OsmConverterResult stats = new OsmConverterResult();
    stats.setWhat("nodes from server");
    stats.setPath(id);
    stats.setStart(LocalDateTime.now());

    long osmId = Long.parseLong(id);
    OsmRoot parsed = osmParserRepository.loadNodeFromServer(osmId);
    log.info("Read {} nodes and {} ways from server.", parsed.getNodes().size(), parsed.getWays().size());

    stats.setNodes(parsed.getNodes().size());
    stats.setWays(parsed.getWays().size());

    // Deleted-node handling (root.isGone() -> delete) is not implemented yet.
    for (OsmNode osmNode : parsed.getNodes())
    {
        if (persistOsmNode(osmNode, null, null) != null)
        {
            stats.setPlaces(stats.getPlaces() + 1);
        }
    }

    stats.setEnd(LocalDateTime.now());
    return stats;
}
/**
 * Re-imports a single OSM way directly from the OSM API server.
 *
 * @param id numeric OSM way id, as a string
 * @return statistics (counts and timing) for this import run
 */
@Override
public OsmConverterResult fetchAndImportWay(String id)
{
    log.info("Request to re-import way {} arrived.", id);

    // TODO Hackity Hack - fix autowiring of osmParserRepository...
    osmParserRepository = new OsmParserRepositoryImpl();

    OsmConverterResult stats = new OsmConverterResult();
    stats.setWhat("ways from server");
    stats.setPath(id);
    stats.setStart(LocalDateTime.now());

    long osmId = Long.parseLong(id);
    OsmRoot parsed = osmParserRepository.loadWayFromServer(osmId);
    log.info("Read {} nodes and {} ways from server.", parsed.getNodes().size(), parsed.getWays().size());

    stats.setNodes(parsed.getNodes().size());
    stats.setWays(parsed.getWays().size());

    // Deleted-way handling (root.isGone() -> delete) is not implemented yet.
    for (OsmWay osmWay : parsed.getWays())
    {
        if (persistOsmWay(osmWay) != null)
        {
            stats.setPlaces(stats.getPlaces() + 1);
        }
    }

    stats.setEnd(LocalDateTime.now());
    return stats;
}
// /*
// log.info("About to import OSM data from {} / {}", dataLocation, path);
//
// log.info("OSM Repository is: {}", osmParserRepository);
//
// log.debug("Place Repository is: {}", osmMosquePlaceRepository);
//
// // TODO find a way to invalidate all tags belonging to nodes in a country...
// // osmTagRepository.invalidateAll();
//
// // First, import everything so we won't miss anything.
// for (String continent : Continents.getContinents().keySet())
// {
// importData(dataDirectory, continent, "all");
// }
//
// for (String country : Countries.getCountries().keySet())
// {
// String countryCode = Countries.getCountries().get(country);
// osmMosquePlaceRepository.invalidateByCountryCode(countryCode);
//
// if ("germany".equals(country))
// {
// for (String state : Countries.getGermanyStates())
// {
// importData(dataDirectory, country, state);
// }
// }
// else
// {
// importData(dataDirectory, country, "all");
// }
// }
//
// statisticsService.calculate();
//
// // Lastly, remove all invalid places
// osmMosquePlaceRepository.deleteAllInvalid();
//
// // Now, return the amount of items in the database
// long loaded = osmMosquePlaceRepository.count();
// log.info("Loaded {} places into database", loaded);
//
// return new ImportDataResponse("O.K., Massa!", loaded);
// }
// */
//
// /*
// private void importData(File dataDirectory, String country, String state)
// {
// String countryCode = Countries.getCountries().get(country);
// if (isEmpty(countryCode))
// {
// // Hack to identify continental data
// countryCode = Continents.getContinents().get(country);
// if (isEmpty(countryCode))
// {
// // Still empty? Then, special hack for "world" :)
// countryCode = "ZZ";
// }
// }
//
// File dataFile = new File(dataDirectory, country + "-" + state + "-religion-muslim" + "-node" + ".osm");
//
// OsmRoot root = osmParserRepository.parse(dataFile);
// for (OsmNode node : root.getNodes())
// {
// persistOsmNode(node, countryCode, state);
// }
//
// log.info("Read {} nodes from {}", root.getNodes().size(), dataFile.getName());
//*/
//
// private void deleteOsmNode(long id)
// {
// log.info("Deleting node {}", id);
// String key = Long.toString(id);
// List<OsmMosquePlace> places = osmMosquePlaceRepository.findByKey(key);
// if ((places != null) && (places.size() > 0))
// {
// OsmMosquePlace place = places.get(0);
// osmMosquePlaceRepository.delete(place.getId());
// }
// }
// Convenience overload: persist a node without country/state backfill hints.
private OsmMosquePlace persistOsmNode(OsmNode node)
{
    final String noCountryHint = null;
    final String noStateHint = null;
    return persistOsmNode(node, noCountryHint, noStateHint);
}
/**
 * Converts an OSM node into an OsmMosquePlace (keyed by the raw node id)
 * and persists it.
 *
 * @param node        parsed OSM node
 * @param countryCode fallback country when the OSM address lacks one; may be null
 * @param state       fallback state when the OSM address lacks one; may be null
 * @return the persisted place, or null when persistence failed
 */
private OsmMosquePlace persistOsmNode(OsmNode node, String countryCode, String state)
{
    log.debug("Read node: {}, {}, {}", node, node.getLat(), node.getLon());

    String key = Long.toString(node.getId());

    // Rebuild the place representation from the raw OSM data.
    OsmMosquePlace candidate = new OsmMosquePlace(node);
    candidate.setKey(key);
    candidate.setType(OsmEntityType.NODE);
    candidate.setPlaceType(PlaceType.OSM_PLACE_OF_WORSHIP);
    candidate.setCountryFromOSM(candidate.getAddress().getCountry());

    OsmMosquePlace saved = persistOsmMosquePlace(candidate, key, countryCode, state);
    if (saved != null && saved.isValid())
    {
        // Tag persistence is currently disabled:
        // persistTags(saved.getId(), node.getTags());
    }

    return saved;
}
// private void deleteOsmWay(long id)
// {
// log.info("Deleting way {}", id);
// String key = Long.toString(id + OsmMosquePlace.getWayOffset());
// List<OsmMosquePlace> places = osmMosquePlaceRepository.findByKey(key);
// if ((places != null) && (places.size() > 0))
// {
// OsmMosquePlace place = places.get(0);
// osmMosquePlaceRepository.delete(place.getId());
// }
// }
// Convenience overload: persist a way without country/state backfill hints.
private OsmMosquePlace persistOsmWay(OsmWay way)
{
    final String noCountryHint = null;
    final String noStateHint = null;
    return persistOsmWay(way, noCountryHint, noStateHint);
}
/**
 * Converts an OSM way into an OsmMosquePlace (keyed by the way id shifted by
 * the way offset, so way keys cannot collide with node keys) and persists it.
 *
 * @param way         parsed OSM way
 * @param countryCode fallback country when the OSM address lacks one; may be null
 * @param state       fallback state when the OSM address lacks one; may be null
 * @return the persisted place, or null when persistence failed
 */
private OsmMosquePlace persistOsmWay(OsmWay way, String countryCode, String state)
{
    log.debug("Read way: {}, {}, {}", way, way.getLat(), way.getLon());

    String key = Long.toString(way.getId() + OsmMosquePlace.getWayOffset());

    // Rebuild the place representation from the raw OSM data.
    OsmMosquePlace candidate = new OsmMosquePlace(way);
    candidate.setKey(key);
    candidate.setType(OsmEntityType.WAY);
    candidate.setPlaceType(PlaceType.OSM_PLACE_OF_WORSHIP);
    candidate.setCountryFromOSM(candidate.getAddress().getCountry());

    OsmMosquePlace saved = persistOsmMosquePlace(candidate, key, countryCode, state);
    if (saved != null && saved.isValid())
    {
        // Tag persistence is currently disabled:
        // persistTags(saved.getId(), way.getTags());
    }

    return saved;
}
/**
 * Inserts or updates the OsmMosquePlace identified by {@code key}.
 * Missing address fields are backfilled from the given hints, over-long
 * name/website values are truncated to the column width, and an existing row
 * (matched by key) is updated in place while valid coordinates are preserved.
 *
 * @param tempPlace   freshly built place from the OSM data
 * @param key         unique place key (node id, or way id + offset)
 * @param countryCode fallback country; may be null
 * @param state       fallback state; may be null
 * @return the saved entity, or null when persistence failed
 */
private OsmMosquePlace persistOsmMosquePlace(OsmMosquePlace tempPlace, String key, String countryCode, String state)
{
    // Column width limit for the NAME and WEBSITE fields (was a magic 79
    // repeated three times).
    final int maxFieldLength = 79;

    OsmMosquePlace place = null;

    if (isEmpty(tempPlace.getAddress().getState()))
    {
        tempPlace.getAddress().setState(state);
    }

    if (isEmpty(tempPlace.getAddress().getCountry()))
    {
        tempPlace.getAddress().setCountry(countryCode);
    }

    String placeName = trimToEmpty(tempPlace.getName());
    if (placeName.length() > maxFieldLength)
    {
        // Fixed log message: the closing parenthesis was missing.
        log.error("Place name too long ({} chars)", placeName.length());
        log.error(" was '{}'", placeName);
        tempPlace.setName(substring(placeName, 0, maxFieldLength));
    }

    tempPlace.getContact().setWebsite(substring(tempPlace.getContact().getWebsite(), 0, maxFieldLength));

    try
    {
        place = osmMosquePlaceRepository.findByKey(key).orElse(null);
        if (place == null)
        {
            // Not present yet: insert the freshly built place.
            tempPlace.setCreationTime(LocalDateTime.now());
            tempPlace.setModificationTime(LocalDateTime.now());
            place = osmMosquePlaceRepository.save(tempPlace);
        }
        else
        {
            log.debug("Found pre-existing entity {} / {}", place.getId(), place.getVersion());
            log.debug(" reloaded: {} / {}", place.getId(), place.getVersion());
        }

        // TODO maybe better to do a deep-copy of the old place?
        double oldLat = place.getLat();
        double oldLon = place.getLon();

        tempPlace.copyTo(place);

        // Keep previously known coordinates when the new data carries none.
        if (place.getLat() == 0)
        {
            place.setLat(oldLat);
        }

        if (place.getLon() == 0)
        {
            place.setLon(oldLon);
        }

        place.setValid(true);
        place.setModificationTime(LocalDateTime.now());
        place = osmMosquePlaceRepository.save(place);

        log.info("Saved Place {}", place);
    }
    catch (Exception e)
    {
        log.error("While persisting place", e);
        // Single diagnostic line; the original logged tempPlace twice,
        // once mislabelled as "OSM node".
        log.info("Place: {}", tempPlace);
    }

    return place;
}
// private void persistTags(long parentId, List<OsmWayTag> tags)
// {
// TODO analog to persistTags(OsmNodeTag)
//
// List<OsmNodeTag> tags = new ArrayList<>();
// for (OsmWayTag wayTag : way.getTags())
// {
// OsmNodeTag tag = new OsmNodeTag();
// tag.setKey(wayTag.getKey());
// tag.setValue(wayTag.getValue());
// tags.add(tag);
// }
//
// persistTags(place.getId(), tags);
// }
// private void persistTags(long parentId, List<OsmNodeTag> tags)
// {
// OsmTag osmTag = null;
//
// try
// {
// // Now, save the tags
// // TODO allow for Strings as node ids too
// osmTagRepository.deleteByParentTableAndParentId("OSM_PLACES", parentId);
// for (OsmNodeTag tag : tags)
// {
// // TODO allow for creation of lists of OsmTag entities from OsmNode objects
// // TODO allow for creation of OsmTag entities from OsmNodeTag objects
// osmTag = new OsmTag();
// osmTag.setParentTable("OSM_PLACES");
// osmTag.setParentId(parentId);
// osmTag.setKey(tag.getKey());
// osmTag.setValue(tag.getValue());
// osmTag.setValid(true);
//
// if (osmTag.getValue().length() > 79)
// {
// log.error("Cutting down overly long tag value");
// log.info(" tag was: {} / '{}'", osmTag.getValue().length(), osmTag.getValue());
// osmTag.setValue(StringUtils.substring(osmTag.getValue(), 0, 79));
// log.info(" saving: '{}'", osmTag.getValue().length(), osmTag.getValue());
// }
//
// osmTag = osmTagRepository.save(osmTag);
// log.debug(" saved tag {}", osmTag);
// }
// }
// catch (Exception e)
// {
// log.error("While persisting place", e);
// log.info("parent : {}", parentId);
// log.info(" tag: {}", osmTag);
// }
// }
}
| |
/*
* Copyright (c) 2010-2013 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.repo.sql.data.common;
import com.evolveum.midpoint.prism.Item;
import com.evolveum.midpoint.prism.PrismContext;
import com.evolveum.midpoint.prism.PrismPropertyValue;
import com.evolveum.midpoint.prism.PrismValue;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.polystring.PolyString;
import com.evolveum.midpoint.repo.sql.data.RepositoryContext;
import com.evolveum.midpoint.repo.sql.data.common.id.RObjectTextInfoId;
import com.evolveum.midpoint.repo.sql.query2.definition.NotQueryable;
import com.evolveum.midpoint.repo.sql.util.RUtil;
import com.evolveum.midpoint.schema.util.FullTextSearchConfigurationUtil;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.xml.ns._public.common.common_3.*;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.annotations.ForeignKey;
import org.jetbrains.annotations.NotNull;
import javax.persistence.*;
import java.io.Serializable;
import java.util.*;
import java.util.Objects;
import static com.evolveum.midpoint.repo.sql.data.common.RObjectTextInfo.TABLE_NAME;
/**
 * One full-text-search row of a repository object: a chunk of normalized,
 * space-separated words extracted from the object's indexed items. Each row
 * holds at most {@link #MAX_TEXT_SIZE} characters; longer word lists are split
 * across several rows. Every chunk starts and ends with a single space,
 * presumably so that a substring search for {@code " word "} matches whole
 * words only — TODO confirm against the query implementation.
 *
 * @author mederly
 */
@Entity
@IdClass(RObjectTextInfoId.class)
@Table(name = TABLE_NAME)
public class RObjectTextInfo implements Serializable {

    private static final Trace LOGGER = TraceManager.getTrace(RObjectTextInfo.class);

    public static final String TABLE_NAME = "m_object_text_info";
    public static final String COLUMN_OWNER_OID = "owner_oid";
    public static final String F_TEXT = "text";

    /** Maximum length of the {@code text} column; chunks never exceed this. */
    public static final int MAX_TEXT_SIZE = 255;

    private RObject owner;
    private String ownerOid;
    private String text;

    public RObjectTextInfo() {
    }

    public RObjectTextInfo(RObject owner, String text) {
        this.owner = owner;
        this.text = text;
    }

    @ForeignKey(name = "fk_object_text_info_owner")
    @MapsId("owner")
    @ManyToOne(fetch = FetchType.LAZY)
    @NotQueryable
    public RObject getOwner() {
        return owner;
    }

    @Id
    @Column(name = COLUMN_OWNER_OID, length = RUtil.COLUMN_LENGTH_OID)
    @NotQueryable
    public String getOwnerOid() {
        // derive the oid lazily from the owner when it was not set explicitly
        if (ownerOid == null && owner != null) {
            ownerOid = owner.getOid();
        }
        return ownerOid;
    }

    public void setOwner(RObject owner) {
        this.owner = owner;
    }

    public void setOwnerOid(String ownerOid) {
        this.ownerOid = ownerOid;
    }

    @Id
    @Column(name = "text", length = MAX_TEXT_SIZE)
    public String getText() {
        return text;
    }

    public void setText(String text) {
        this.text = text;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (!(o instanceof RObjectTextInfo))
            return false;
        RObjectTextInfo that = (RObjectTextInfo) o;
        // identity = composite id (owner oid + text), matching @IdClass
        return Objects.equals(getOwnerOid(), that.getOwnerOid()) &&
                Objects.equals(getText(), that.getText());
    }

    @Override
    public int hashCode() {
        return Objects.hash(getOwnerOid(), getText());
    }

    /**
     * Builds the set of text-info rows for the given object, according to the
     * repository's full-text-search configuration. Returns an empty set when
     * full-text search is disabled.
     *
     * @param object            the prism object being indexed
     * @param repo              the repository entity that owns the rows
     * @param repositoryContext access to configuration and the prism context
     * @return rows covering all normalized words of the configured items
     */
    public static <T extends ObjectType> Set<RObjectTextInfo> createItemsSet(@NotNull ObjectType object, @NotNull RObject repo,
            @NotNull RepositoryContext repositoryContext) {
        FullTextSearchConfigurationType config = repositoryContext.repositoryService.getFullTextSearchConfiguration();
        if (!FullTextSearchConfigurationUtil.isEnabled(config)) {
            return Collections.emptySet();
        }

        Set<ItemPath> paths = FullTextSearchConfigurationUtil.getFullTextSearchItemPaths(config, object.getClass());

        List<PrismValue> values = new ArrayList<>();
        for (ItemPath path : paths) {
            Object o = object.asPrismObject().find(path);
            if (o == null) {
                // shouldn't occur
            } else if (o instanceof PrismValue) {
                values.add((PrismValue) o);
            } else if (o instanceof Item) {
                values.addAll(((Item<?, ?>) o).getValues());
            } else {
                throw new IllegalStateException("Unexpected value " + o + " in " + object + " at " + path);
            }
        }

        List<String> allWords = new ArrayList<>();          // not a (hash) set in order to preserve order
        for (PrismValue value : values) {
            if (value == null) {
                continue;
            }
            if (value instanceof PrismPropertyValue) {
                Object realValue = value.getRealValue();
                if (realValue == null) {
                    // skip
                } else if (realValue instanceof String) {
                    append(allWords, (String) realValue, repositoryContext.prismContext);
                } else if (realValue instanceof PolyString) {
                    append(allWords, (PolyString) realValue, repositoryContext.prismContext);
                } else {
                    append(allWords, realValue.toString(), repositoryContext.prismContext);
                }
            }
        }
        LOGGER.trace("Indexing {}:\n  items: {}\n  values: {}\n  words: {}", object, paths, values, allWords);
        return createItemsSet(repo, allWords);
    }

    /**
     * Packs the word list into rows of at most {@link #MAX_TEXT_SIZE} characters.
     * A single word longer than the limit is split across rows.
     */
    private static Set<RObjectTextInfo> createItemsSet(RObject repo, List<String> allWords) {
        Set<RObjectTextInfo> rv = new HashSet<>();
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < allWords.size(); i++) {
            String word = allWords.get(i);
            if (sb.length() + word.length() + 2 <= MAX_TEXT_SIZE) {
                sb.append(" ").append(word);
            } else {
                if (sb.length() > 0) {
                    // flush the current chunk...
                    sb.append(" ");
                    rv.add(new RObjectTextInfo(repo, sb.toString()));
                    sb = new StringBuilder();
                    // FIX: re-process the current word; previously the loop moved on
                    // and the word that triggered the flush was silently dropped.
                    i--;
                } else {
                    // a problem - the word alone exceeds the column: split it
                    LOGGER.warn("Word too long to be correctly indexed: {}", word);
                    rv.add(new RObjectTextInfo(repo, " " + word.substring(0, MAX_TEXT_SIZE - 2) + " "));
                    allWords.set(i, word.substring(MAX_TEXT_SIZE - 2));
                    i--;        // to reiterate
                }
            }
        }
        if (sb.length() > 0) {
            sb.append(" ");
            rv.add(new RObjectTextInfo(repo, sb.toString()));
        }
        return rv;
    }

    /**
     * Normalizes the text and adds each distinct non-blank word to {@code allWords},
     * preserving first-seen order.
     */
    private static void append(List<String> allWords, String text, PrismContext prismContext) {
        if (StringUtils.isBlank(text)) {
            return;
        }
        String normalized = prismContext.getDefaultPolyStringNormalizer().normalize(text);
        String[] words = StringUtils.split(normalized);
        for (String word : words) {
            if (StringUtils.isNotBlank(word)) {
                if (!allWords.contains(word)) {
                    allWords.add(word);
                }
            }
        }
    }

    /** PolyString variant: indexes the original (non-normalized) form. */
    private static void append(List<String> allWords, PolyString text, PrismContext prismContext) {
        if (text != null) {
            append(allWords, text.getOrig(), prismContext);
        }
    }

    @Override
    public String toString() {
        return "RObjectTextInfo{" +
                "ownerOid='" + getOwnerOid() + '\'' +
                ", text='" + text + '\'' +
                '}';
    }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.storage.s3;
import com.google.common.base.Predicate;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import com.google.common.io.ByteSource;
import com.google.common.io.Files;
import com.google.inject.Inject;
import io.druid.java.util.common.CompressionUtils;
import io.druid.java.util.common.FileUtils;
import io.druid.java.util.common.IAE;
import io.druid.java.util.common.IOE;
import io.druid.java.util.common.MapUtils;
import io.druid.java.util.common.StringUtils;
import io.druid.java.util.common.UOE;
import io.druid.java.util.common.logger.Logger;
import io.druid.segment.loading.DataSegmentPuller;
import io.druid.segment.loading.SegmentLoadingException;
import io.druid.segment.loading.URIDataPuller;
import io.druid.timeline.DataSegment;
import org.jets3t.service.S3ServiceException;
import org.jets3t.service.ServiceException;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.model.StorageObject;
import javax.tools.FileObject;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.Writer;
import java.net.URI;
import java.util.Map;
import java.util.concurrent.Callable;
/**
 * A data segment puller that also handles URI data pulls.
 */
public class S3DataSegmentPuller implements DataSegmentPuller, URIDataPuller
{
  public static final int DEFAULT_RETRY_COUNT = 3;

  /**
   * Wraps an S3 object as a read-only {@link FileObject}. The initial HEAD
   * ({@code getObjectDetails}) is promoted to a full GET only when the input
   * stream is first opened; subsequent opens reuse the fetched object.
   *
   * @param uri      an s3:// (or scheme-prefixed) URI identifying the object
   * @param s3Client the jets3t client used for the requests
   * @throws ServiceException if the object details cannot be fetched
   */
  public static FileObject buildFileObject(final URI uri, final RestS3Service s3Client) throws ServiceException
  {
    final S3Coords coords = new S3Coords(checkURI(uri));
    final StorageObject s3Obj = s3Client.getObjectDetails(coords.bucket, coords.path);
    final String path = uri.getPath();

    return new FileObject()
    {
      final Object inputStreamOpener = new Object();
      volatile boolean streamAcquired = false;
      volatile StorageObject storageObject = s3Obj;

      @Override
      public URI toUri()
      {
        return uri;
      }

      @Override
      public String getName()
      {
        final String ext = Files.getFileExtension(path);
        return Files.getNameWithoutExtension(path) + (Strings.isNullOrEmpty(ext) ? "" : ("." + ext));
      }

      @Override
      public InputStream openInputStream() throws IOException
      {
        try {
          synchronized (inputStreamOpener) {
            if (streamAcquired) {
              return storageObject.getDataInputStream();
            }
            // lazily promote to full GET
            storageObject = s3Client.getObject(s3Obj.getBucketName(), s3Obj.getKey());
            final InputStream stream = storageObject.getDataInputStream();
            streamAcquired = true;
            return stream;
          }
        }
        catch (ServiceException e) {
          throw new IOE(e, "Could not load S3 URI [%s]", uri);
        }
      }

      @Override
      public OutputStream openOutputStream() throws IOException
      {
        throw new UOE("Cannot stream S3 output");
      }

      @Override
      public Reader openReader(boolean ignoreEncodingErrors) throws IOException
      {
        throw new UOE("Cannot open reader");
      }

      @Override
      public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException
      {
        throw new UOE("Cannot open character sequence");
      }

      @Override
      public Writer openWriter() throws IOException
      {
        throw new UOE("Cannot open writer");
      }

      @Override
      public long getLastModified()
      {
        return s3Obj.getLastModifiedDate().getTime();
      }

      @Override
      public boolean delete()
      {
        throw new UOE("Cannot delete S3 items anonymously. jetS3t doesn't support authenticated deletes easily.");
      }
    };
  }

  public static final String scheme = S3StorageDruidModule.SCHEME;

  private static final Logger log = new Logger(S3DataSegmentPuller.class);

  protected static final String BUCKET = "bucket";
  protected static final String KEY = "key";

  protected final RestS3Service s3Client;

  @Inject
  public S3DataSegmentPuller(
      RestS3Service s3Client
  )
  {
    this.s3Client = s3Client;
  }

  @Override
  public void getSegmentFiles(final DataSegment segment, final File outDir) throws SegmentLoadingException
  {
    getSegmentFiles(new S3Coords(segment), outDir);
  }

  /**
   * Downloads and unpacks the segment at the given coordinates into {@code outDir}.
   * Supports .zip and .gz payloads; on any failure the output directory is removed.
   *
   * @throws SegmentLoadingException when the object is missing, of unknown type,
   *                                 or the transfer fails
   */
  public FileUtils.FileCopyResult getSegmentFiles(final S3Coords s3Coords, final File outDir)
      throws SegmentLoadingException
  {
    log.info("Pulling index at path[%s] to outDir[%s]", s3Coords, outDir);

    if (!isObjectInBucket(s3Coords)) {
      throw new SegmentLoadingException("IndexFile[%s] does not exist.", s3Coords);
    }

    try {
      org.apache.commons.io.FileUtils.forceMkdir(outDir);

      final URI uri = URI.create(StringUtils.format("s3://%s/%s", s3Coords.bucket, s3Coords.path));
      final ByteSource byteSource = new ByteSource()
      {
        @Override
        public InputStream openStream() throws IOException
        {
          try {
            return buildFileObject(uri, s3Client).openInputStream();
          }
          catch (ServiceException e) {
            // surface recoverable errors as IOException so the retry wrapper kicks in
            if (e.getCause() != null) {
              if (S3Utils.S3RETRY.apply(e)) {
                throw new IOException("Recoverable exception", e);
              }
            }
            throw Throwables.propagate(e);
          }
        }
      };
      if (CompressionUtils.isZip(s3Coords.path)) {
        final FileUtils.FileCopyResult result = CompressionUtils.unzip(
            byteSource,
            outDir,
            S3Utils.S3RETRY,
            false
        );
        log.info("Loaded %d bytes from [%s] to [%s]", result.size(), s3Coords.toString(), outDir.getAbsolutePath());
        return result;
      }
      if (CompressionUtils.isGz(s3Coords.path)) {
        final String fname = Files.getNameWithoutExtension(uri.getPath());
        final File outFile = new File(outDir, fname);

        final FileUtils.FileCopyResult result = CompressionUtils.gunzip(byteSource, outFile, S3Utils.S3RETRY);
        log.info("Loaded %d bytes from [%s] to [%s]", result.size(), s3Coords.toString(), outFile.getAbsolutePath());
        return result;
      }
      throw new IAE("Do not know how to load file type at [%s]", uri.toString());
    }
    catch (Exception e) {
      try {
        org.apache.commons.io.FileUtils.deleteDirectory(outDir);
      }
      catch (IOException ioe) {
        log.warn(
            ioe,
            "Failed to remove output directory [%s] for segment pulled from [%s]",
            outDir.getAbsolutePath(),
            s3Coords.toString()
        );
      }
      // FIX: pass the message through "%s" instead of using it as the format
      // string itself -- a '%' inside the message would break formatting.
      throw new SegmentLoadingException(e, "%s", e.getMessage());
    }
  }

  /**
   * Normalizes the custom Druid scheme to plain "s3"; rejects any other scheme.
   */
  public static URI checkURI(URI uri)
  {
    if (uri.getScheme().equalsIgnoreCase(scheme)) {
      uri = URI.create("s3" + uri.toString().substring(scheme.length()));
    } else if (!uri.getScheme().equalsIgnoreCase("s3")) {
      throw new IAE("Don't know how to load scheme for URI [%s]", uri.toString());
    }
    return uri;
  }

  @Override
  public InputStream getInputStream(URI uri) throws IOException
  {
    try {
      return buildFileObject(uri, s3Client).openInputStream();
    }
    catch (ServiceException e) {
      throw new IOE(e, "Could not load URI [%s]", uri);
    }
  }

  @Override
  public Predicate<Throwable> shouldRetryPredicate()
  {
    // Yay! smart retries!
    return new Predicate<Throwable>()
    {
      @Override
      public boolean apply(Throwable e)
      {
        if (e == null) {
          return false;
        }
        if (e instanceof ServiceException) {
          return S3Utils.isServiceExceptionRecoverable((ServiceException) e);
        }
        if (S3Utils.S3RETRY.apply(e)) {
          return true;
        }
        // Look all the way down the cause chain, just in case something wraps it deep.
        return apply(e.getCause());
      }
    };
  }

  /**
   * Returns the "version" (aka last modified timestamp) of the URI
   *
   * @param uri The URI to check the last timestamp
   *
   * @return The time in ms of the last modification of the URI in String format
   *
   * @throws IOException
   */
  @Override
  public String getVersion(URI uri) throws IOException
  {
    try {
      final FileObject object = buildFileObject(uri, s3Client);
      return StringUtils.format("%d", object.getLastModified());
    }
    catch (ServiceException e) {
      if (S3Utils.isServiceExceptionRecoverable(e)) {
        // The recoverable logic is always true for IOException, so we want to only pass IOException if it is recoverable
        throw new IOE(e, "Could not fetch last modified timestamp from URI [%s]", uri);
      } else {
        throw Throwables.propagate(e);
      }
    }
  }

  /** HEAD-checks the object's existence, with S3 retry semantics. */
  private boolean isObjectInBucket(final S3Coords coords) throws SegmentLoadingException
  {
    try {
      return S3Utils.retryS3Operation(
          new Callable<Boolean>()
          {
            @Override
            public Boolean call() throws Exception
            {
              return S3Utils.isObjectInBucket(s3Client, coords.bucket, coords.path);
            }
          }
      );
    }
    catch (S3ServiceException | IOException e) {
      throw new SegmentLoadingException(e, "S3 fail! Key[%s]", coords);
    }
    catch (Exception e) {
      throw Throwables.propagate(e);
    }
  }

  /**
   * A (bucket, key) pair identifying one S3 object; the leading '/' of a URI
   * path is stripped.
   */
  protected static class S3Coords
  {
    String bucket;
    String path;

    public S3Coords(URI uri)
    {
      if (!"s3".equalsIgnoreCase(uri.getScheme())) {
        throw new IAE("Unsupported scheme: [%s]", uri.getScheme());
      }
      bucket = uri.getHost();
      String path = uri.getPath();
      if (path.startsWith("/")) {
        path = path.substring(1);
      }
      this.path = path;
    }

    public S3Coords(DataSegment segment)
    {
      Map<String, Object> loadSpec = segment.getLoadSpec();
      bucket = MapUtils.getString(loadSpec, BUCKET);
      path = MapUtils.getString(loadSpec, KEY);
      if (path.startsWith("/")) {
        path = path.substring(1);
      }
    }

    public S3Coords(String bucket, String key)
    {
      this.bucket = bucket;
      this.path = key;
    }

    @Override
    public String toString()
    {
      return StringUtils.format("s3://%s/%s", bucket, path);
    }
  }
}
| |
package natlab.tame.mc4.test;
import java.io.File;
import java.io.FilenameFilter;
import java.util.*;
import natlab.FlowAnalysisTestTool;
import natlab.tame.builtin.Builtin;
import natlab.tame.builtin.classprop.ClassPropTool;
import natlab.tame.callgraph.SimpleFunctionCollection;
import natlab.tame.callgraph.StaticFunction;
import natlab.tame.classes.reference.ClassReference;
import natlab.tame.classes.reference.PrimitiveClassReference;
import natlab.tame.valueanalysis.IntraproceduralValueAnalysis;
import natlab.tame.valueanalysis.ValueSet;
import natlab.tame.valueanalysis.aggrvalue.AggrValue;
import natlab.tame.valueanalysis.simplematrix.SimpleMatrixValue;
import natlab.tame.valueanalysis.value.*;
import natlab.toolkits.filehandling.genericFile.GenericFile;
import natlab.toolkits.path.FileEnvironment;
/**
 * Driver for the mc4 value-analysis unit tests: collects the benchmark {@code .m}
 * files, runs the intraprocedural value analysis on each, compares the outcome
 * with the expected MATLAB result stored in the sibling {@code .xml} file, and
 * prints aggregate statistics.
 */
public class Test {
    // NOTE(review): Windows-style separators; this path will not resolve on
    // Unix-like systems -- confirm intended platform.
    static String testDir = "languages\\Natlab\\src\\natlab\\Static\\mc4\\test\\unit";

    /**
     * Returns the benchmark files in the given directory: {@code .m} files
     * starting with a lower-case letter that have a matching {@code .xml}
     * result file next to them.
     */
    private static List<GenericFile> getBenchmarksInDir(File dir){
        LinkedList<GenericFile> result = new LinkedList<GenericFile>();
        // (a stray, unused duplicate "result" field inside this anonymous class was removed)
        for (File f : dir.listFiles(new FilenameFilter() {
            public boolean accept(File adir, String name) {
                return name.endsWith(".m")
                    && (name.toLowerCase().charAt(0) == name.charAt(0))
                    && new File(adir,name.replaceFirst(".m", ".xml")).exists();
            }
        })){
            result.add(GenericFile.create(f));
        }
        return result;
    }

    /**
     * Collects all unit-test files: benchmarks in every direct subdirectory of
     * {@link #testDir}, plus those one level deeper.
     */
    public static List<GenericFile> getUnitTestsFiles(){
        LinkedList<GenericFile> result = new LinkedList<GenericFile>();
        File dir = new File(testDir);
        for (File d : dir.listFiles()){
            if (d.isDirectory()){
                result.addAll(getBenchmarksInDir(d));
                for (File d2 : d.listFiles()){
                    if (d2.isDirectory()){
                        result.addAll(getBenchmarksInDir(d2));
                    }
                }
            }
        }
        return result;
    }

    /**
     * Reads the expected MATLAB result for a test: the {@code .xml} file that
     * sits next to the {@code .m} file.
     */
    public static ResultValue.XMLResult getExpectedResult(GenericFile testFile){
        //read the expected matlab result from xml
        GenericFile xmlFile = GenericFile.create(testFile.getPath().replace(".m", ".xml"));
        ResultValue.XMLResult matlabResult = ResultValue.readResult(xmlFile);
        return matlabResult;
    }

    /**
     * Runs the value analysis on one test file, calling the inlined function
     * with a single scalar double argument. Returns an empty result when the
     * analysis hits an unsupported operation.
     */
    public static Res<?> runTest(GenericFile testFile){
        //load the unit test
        FileEnvironment fileEnvironment = new FileEnvironment(testFile);
        SimpleFunctionCollection functions = new SimpleFunctionCollection(fileEnvironment);
        StaticFunction aFunction = functions.getAsInlinedStaticFunction();
        IntraproceduralValueAnalysis<AggrValue<SimpleMatrixValue>> classAnalysis =
            new IntraproceduralValueAnalysis<AggrValue<SimpleMatrixValue>>(null,
                aFunction,SimpleMatrixValue.FACTORY,
                Args.<AggrValue<SimpleMatrixValue>>newInstance(new SimpleMatrixValue(PrimitiveClassReference.DOUBLE)));
        try{
            FlowAnalysisTestTool test = new FlowAnalysisTestTool(classAnalysis);
            // run for its side effects; the textual report is not used here
            test.run(true,true);
        } catch (UnsupportedOperationException e){
            System.err.println(e.getMessage());
            return Res.<AggrValue<SimpleMatrixValue>>newInstance();
        }
        System.out.println("test result for "+testFile.getName()+": "+classAnalysis.getResult());
        return classAnalysis.getResult();
    }

    /**
     * Classifies one analysis result against the expected MATLAB result.
     */
    public static class TestResult{
        //the analysis should be aware of the following errors
        public static HashSet<String> knownErrors = new HashSet<String>(Arrays.asList(new String[]{
             "MATLAB:TooManyInputs"
            ,"MATLAB:UndefinedFunction"
            ,"MATLAB:minrhs"
            //,"MATLAB:mixedClasses" //gets raised when mixing non-scalar
        }));
        String message = "";
        ResultType type;
        ResultValue.XMLResult expectedResult;

        /** Outcome categories; {@code success} is set per constant via an instance initializer. */
        enum ResultType{
            MATCH{{success = true;}},
            SUBSET_MATCH{{success = true;}},
            RESULT_MISMATCH{{success = false; }},
            ANAL_ERR_MATLAB_PASS{{success = false;}},
            PASSING_ANAL_PASS_MATLAB_ERR{{success = true;}},
            FAILING_ANAL_PASS_MATLAB_ERR{{success = false;}},
            ANAL_ERR_MATLAB_ERR{{success = true;}},
            ANAL_MULTIPLE_RETURN_VALUES{{success = false;}},
            EXCEPTION{{success = false;}},
            ANAL_EMPTY_RETURN{{success = false;}};
            boolean success;
        }
        public boolean isSuccess(){ return type.success; }

        public TestResult(Res<?> result, ResultValue.XMLResult matlabResult){
            expectedResult = matlabResult;
            if (result.size() == 0){
                // analysis bailed out with an exception
                type = ResultType.EXCEPTION;
                message = type.toString();
            } else if (result.size() > 1){
                type = ResultType.ANAL_MULTIPLE_RETURN_VALUES;
                message = type.toString();
            } else {
                ValueSet<?> valueSet = result.get(0);
                if (valueSet.size() == 0){
                    type = ResultType.ANAL_EMPTY_RETURN;
                    message = type.toString();
                }
                else if (!result.isViable()){ //analysis error
                    if (!matlabResult.success){ //matlab error
                        type = ResultType.ANAL_ERR_MATLAB_ERR;
                    } else {
                        type = ResultType.ANAL_ERR_MATLAB_PASS;
                        message = ("-a expected:\n"+matlabResult+"\n-received:\n"+valueSet);
                    }
                } else { //analysis pass
                    if (matlabResult.success == true){ //matlab pass
                        if (valueSet.contains(matlabResult.resultValue.matlabClass)){
                            // exact match when the class is the only candidate
                            if (valueSet.size() == 1){
                                type = ResultType.MATCH;
                            } else {
                                type = ResultType.SUBSET_MATCH;
                            }
                        } else {
                            type = ResultType.RESULT_MISMATCH;
                            message = ("-b expected:\n"+matlabResult+"\n-received:\n"+valueSet);
                        }
                    } else { //matlab fail
                        //fail if the errors is among known errors
                        if (knownErrors.contains(expectedResult.error)){
                            message = ("-b expected:\n"+matlabResult.error+"-"+matlabResult.message+"\n-received:\n"+valueSet);
                            type = ResultType.FAILING_ANAL_PASS_MATLAB_ERR;
                        } else {
                            type = ResultType.PASSING_ANAL_PASS_MATLAB_ERR;
                        }
                    }
                }
            }
        }
    }

    /** Runs every unit test, classifies the outcomes, and prints statistics. */
    public static void main(String[] args) {
        List<GenericFile> tests = getUnitTestsFiles();
        List<Res<?>> results = new LinkedList<Res<?>>();
        int failing = 0;
        int i = 0;
        for (GenericFile testFile : tests){
            System.out.println(testFile);
            results.add(runTest(testFile));
        }

        //print results
        HashMap<String,Integer> resultStats = new HashMap<String,Integer>();
        HashMap<String,Integer> passingErrors = new HashMap<String,Integer>();
        for (i = 0; i < results.size();i++){
            TestResult testResult = new TestResult(results.get(i),getExpectedResult(tests.get(i)));
            if (!testResult.isSuccess()){
                failing++;
                System.out.println("failed: "+tests.get(i));
                System.out.println(testResult.message);
            } else {
                // tally which known MATLAB errors the analysis passed over
                if (testResult.type == TestResult.ResultType.PASSING_ANAL_PASS_MATLAB_ERR){
                    String k = testResult.expectedResult.error;
                    if (!passingErrors.containsKey(k)) passingErrors.put(k,0);
                    passingErrors.put(k,passingErrors.get(k)+1);
                }
            }
            String resultName = testResult.type.toString();
            if (!resultStats.containsKey(resultName)) resultStats.put(resultName, 0);
            resultStats.put(resultName,resultStats.get(resultName)+1);
        }

        //print all errors
        for (String k : passingErrors.keySet()){
            System.out.println(k+" "+passingErrors.get(k));
        }
        System.out.println("");

        //print stats
        for (String k : resultStats.keySet()){
            System.out.println(k+": "+resultStats.get(k));
        }
        System.out.println("total: "+results.size()+" (passing "+(results.size()-failing)+"/failing "+failing+")");
    }
}
| |
/*
* Copyright 2012-2015 WSO2, Inc. (http://wso2.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom;
import static org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EditorUtils.COMPLEX_ENDPOINT_RESOURCE_DIR;
import static org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EditorUtils.SYNAPSE_CONFIG_DIR;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Date;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.gmf.runtime.diagram.ui.commands.ICommandProxy;
import org.eclipse.gmf.runtime.emf.type.core.commands.SetValueCommand;
import org.eclipse.gmf.runtime.emf.type.core.requests.SetRequest;
import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.IEditorDescriptor;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IEditorReference;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.part.FileEditorInput;
import org.wso2.developerstudio.eclipse.artifact.endpoint.validators.EndPointTemplateList;
import org.wso2.developerstudio.eclipse.artifact.sequence.validators.SequenceTemplate;
import org.wso2.developerstudio.eclipse.gmf.esb.ArtifactType;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.FailoverEndPoint;
import org.wso2.developerstudio.eclipse.gmf.esb.LoadBalanceEndPoint;
import org.wso2.developerstudio.eclipse.gmf.esb.ParentEndPoint;
import org.wso2.developerstudio.eclipse.gmf.esb.RecipientListEndPoint;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.utils.OpenEditorUtils;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbEditorInput;
import org.wso2.developerstudio.eclipse.logging.core.IDeveloperStudioLog;
import org.wso2.developerstudio.eclipse.logging.core.Logger;
import org.wso2.developerstudio.eclipse.platform.core.templates.ArtifactTemplate;
import org.wso2.developerstudio.eclipse.platform.ui.editor.Openable;
import org.wso2.developerstudio.eclipse.platform.ui.startup.ESBGraphicalEditor;
/**
 * Edit part for complex (failover / load-balance / recipient-list) endpoints.
 * Opens a dedicated editor page backed by an XML artifact created from the
 * matching endpoint template.
 */
public class ComplexFiguredAbstractEndpoint extends AbstractEndpoint {

    private static IDeveloperStudioLog log = Logger
            .getLog("org.wso2.developerstudio.eclipse.gmf.esb.diagram");

    public ComplexFiguredAbstractEndpoint(View view) {
        super(view);
    }

    /** Complex endpoints draw no main node figure of their own. */
    protected NodeFigure createMainFigure() {
        return null;
    }

    /**
     * Opens the editor page for this endpoint. When the endpoint has no name
     * yet, a timestamp-based name is generated and set on the model
     * asynchronously on the UI thread before the page is opened.
     */
    public void openPage() {
        final EObject endpoint = (ParentEndPoint) ((org.eclipse.gmf.runtime.notation.impl.NodeImpl) getModel())
                .getElement();
        String name;
        long lDateTime = new Date().getTime();
        final String endpointName = String.valueOf(lDateTime);
        if ((((ParentEndPoint) endpoint).getName() == null)
                || ((ParentEndPoint) endpoint).getName().trim().equals("")) {
            // assign the generated name to the model via an undoable command
            Display.getDefault().asyncExec(new Runnable() {
                public void run() {
                    SetRequest setRequest = new SetRequest(getEditingDomain(),
                            endpoint, EsbPackage.eINSTANCE
                                    .getParentEndPoint_Name(), endpointName);
                    SetValueCommand operation = new SetValueCommand(setRequest) {
                        public boolean canUndo() {
                            return true;
                        }

                        public boolean canRedo() {
                            return true;
                        }
                    };
                    getEditDomain().getCommandStack().execute(
                            new ICommandProxy(operation));
                }
            });
            name = endpointName;
        } else {
            name = ((ParentEndPoint) endpoint).getName();
        }
        // both branches previously issued this identical call; collapsed here
        createFiles(endpoint, name, "complex_endpoint_" + name + ".esb_diagram", "complex_endpoint_"
                + name + ".esb");
    }

    /**
     * Creates (or re-opens) the XML artifact backing the given complex endpoint
     * under {@code SYNAPSE_CONFIG_DIR/complex-endpoints/} and opens it in the
     * ESB editor.
     *
     * @param endpoint the endpoint model element (failover/load-balance/recipient-list)
     * @param name     the endpoint name; used as the artifact file name
     * @param fileURI1 legacy diagram URI (currently unused, kept for API compatibility)
     * @param fileURI2 legacy model URI (currently unused, kept for API compatibility)
     * @return {@code true} on success, {@code false} when creating or opening failed
     */
    public boolean createFiles(EObject endpoint, String name, String fileURI1, String fileURI2) {
        IProject currentProject = getActiveProject();
        IFile fileTobeOpened = null;
        try {
            IFolder iFolder = currentProject.getFolder(SYNAPSE_CONFIG_DIR + "/complex-endpoints/");
            if (!iFolder.exists()) {
                iFolder.create(IResource.NONE, true, null);
            }
            fileTobeOpened = iFolder.getFile(name + ".xml");
            if (fileTobeOpened.exists()) {
                OpenEditorUtils oeUtils = new OpenEditorUtils();
                oeUtils.openSeparateEditor(fileTobeOpened);
            } else {
                String path = fileTobeOpened.getParent().getFullPath() + "/";
                ArtifactTemplate complexEndpointArtifactTemplate = null;
                // indices follow the fixed ordering of EndPointTemplateList
                if (endpoint instanceof FailoverEndPoint) {
                    complexEndpointArtifactTemplate = EndPointTemplateList.getArtifactTemplates()[4];
                } else if (endpoint instanceof LoadBalanceEndPoint) {
                    complexEndpointArtifactTemplate = EndPointTemplateList.getArtifactTemplates()[5];
                } else if (endpoint instanceof RecipientListEndPoint) {
                    complexEndpointArtifactTemplate = EndPointTemplateList.getArtifactTemplates()[6];
                }
                if (complexEndpointArtifactTemplate == null) {
                    // previously this fell through to a NullPointerException caught below
                    log.error("No artifact template for endpoint type " + endpoint);
                    return false;
                }
                fileTobeOpened.create(complexEndpointArtifactTemplate.getTemplateDataStream(), true,
                        new NullProgressMonitor());
                // NOTE(review): assumes getTemplateDataStream() returns a fresh stream
                // on each call, since the one above was already consumed -- confirm.
                String source = org.wso2.developerstudio.eclipse.utils.file.FileUtils
                        .getContentAsString(complexEndpointArtifactTemplate.getTemplateDataStream());
                source = source.replaceAll("\\{", "<").replaceAll("\\}", ">");
                source = StringUtils.replace(source, "<ep.name>", name);
                source = MessageFormat.format(source, name);
                Openable openable = ESBGraphicalEditor.getOpenable();
                openable.editorOpen(fileTobeOpened.getName(), ArtifactType.ENDPOINT.getLiteral(), path, source);
            }
        } catch (Exception e) {
            log.error("Cannot open file " + fileTobeOpened, e);
            return false;
        }
        return true;
    }

    /**
     * Determines the project owning the currently active ESB editor's XML
     * resource; returns {@code null} when no suitable editor is open.
     */
    private IProject getActiveProject() {
        IEditorPart editorPart = null;
        IProject activeProject = null;
        IEditorReference editorReferences[] = PlatformUI.getWorkbench()
                .getActiveWorkbenchWindow().getActivePage()
                .getEditorReferences();
        for (int i = 0; i < editorReferences.length; i++) {
            IEditorPart editor = editorReferences[i].getEditor(false);
            if (editor != null) {
                editorPart = editor.getSite().getWorkbenchWindow()
                        .getActivePage().getActiveEditor();
            }
            if (editorPart != null) {
                EsbEditorInput input = (EsbEditorInput) editorPart
                        .getEditorInput();
                IFile file = input.getXmlResource();
                activeProject = file.getProject();
            }
        }
        return activeProject;
    }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2017 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.jsoninput;
import static org.junit.Assert.*;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.when;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import junit.framework.ComparisonFailure;
import org.apache.commons.io.IOUtils;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.pentaho.di.core.KettleClientEnvironment;
import org.pentaho.di.core.RowSet;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleFileException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.fileinput.FileInputList;
import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.core.logging.LoggingObjectInterface;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaInteger;
import org.pentaho.di.core.row.value.ValueMetaNumber;
import org.pentaho.di.core.row.value.ValueMetaString;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.i18n.LanguageChoice;
import org.pentaho.di.trans.step.RowAdapter;
import org.pentaho.di.trans.step.StepErrorMeta;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.steps.mock.StepMockHelper;
public class JsonInputTest {
protected static final String BASE_RAM_DIR = "ram:/jsonInputTest/";
protected StepMockHelper<JsonInputMeta, JsonInputData> helper;
protected static final String getBasicTestJson() {
try {
// Note: Ultimately this would go in src/test/resources but our project is not setup for that yet.
InputStream is = JsonInputTest.class.getResourceAsStream( "/json/sample.json" );
return IOUtils.toString( is );
} catch ( IOException e ) {
throw new RuntimeException( "Unable to read sample JSON file.", e );
}
}
  /** Initializes the Kettle client environment once for the whole test class. */
  @BeforeClass
  public static void init() throws KettleException {
    KettleClientEnvironment.init();
  }
  /** Creates the step mock helper and stubs logging and transformation state before each test. */
  @Before
  public void setUp() {
    helper =
      new StepMockHelper<JsonInputMeta, JsonInputData>( "json input test", JsonInputMeta.class, JsonInputData.class );
    // Any logging object asked for gets the shared mock log channel.
    when( helper.logChannelInterfaceFactory.create( any(), any( LoggingObjectInterface.class ) ) ).thenReturn(
      helper.logChannelInterface );
    // Steps check trans.isRunning() before processing rows; pretend it is running.
    when( helper.trans.isRunning() ).thenReturn( true );
  }
  /** Releases the mock helper resources after each test. */
  @After
  public void tearDown() {
    helper.cleanUp();
  }
@Test
public void testAttrFilter() throws Exception {
final String jsonInputField = getBasicTestJson();
testSimpleJsonPath( "$..book[?(@.isbn)].author", new ValueMetaString( "author w/ isbn" ),
new Object[][] { new Object[] { jsonInputField } },
new Object[][] { new Object[] { jsonInputField, "Herman Melville" },
new Object[] { jsonInputField, "J. R. R. Tolkien" } } );
}
@Test
public void testChildDot() throws Exception {
final String jsonInputField = getBasicTestJson();
testSimpleJsonPath( "$.store.bicycle.color", new ValueMetaString( "bcol" ),
new Object[][] { new Object[] { jsonInputField } },
new Object[][] { new Object[] { jsonInputField, "red" } } );
testSimpleJsonPath( "$.store.bicycle.price", new ValueMetaNumber( "p" ),
new Object[][] { new Object[] { jsonInputField } },
new Object[][] { new Object[] { jsonInputField, 19.95 } } );
}
@Test
public void testChildBrackets() throws Exception {
final String jsonInputField = getBasicTestJson();
testSimpleJsonPath( "$.['store']['bicycle']['color']", new ValueMetaString( "bcol" ),
new Object[][] { new Object[] { jsonInputField } },
new Object[][] { new Object[] { jsonInputField, "red" } } );
}
@Test
public void testChildBracketsNDots() throws Exception {
final String jsonInputField = getBasicTestJson();
testSimpleJsonPath( "$.['store'].['bicycle'].['color']", new ValueMetaString( "bcol" ),
new Object[][] { new Object[] { jsonInputField } },
new Object[][] { new Object[] { jsonInputField, "red" } } );
}
@Test
public void testIndex() throws Exception {
final String jsonInputField = getBasicTestJson();
testSimpleJsonPath( "$..book[2].title", new ValueMetaString( "title" ),
new Object[][] { new Object[] { jsonInputField } },
new Object[][] { new Object[] { jsonInputField, "Moby Dick" } } );
}
@Test
public void testIndexFirst() throws Exception {
final String jsonInputField = getBasicTestJson();
testSimpleJsonPath( "$..book[:2].category", new ValueMetaString( "category" ),
new Object[][] { new Object[] { jsonInputField } },
new Object[][] { new Object[] { jsonInputField, "reference" },
new Object[] { jsonInputField, "fiction" } } );
}
  /** The [-1:] slice on an object array returns the last book, serialized as a JSON string. */
  @Test
  public void testIndexLastObj() throws Exception {
    final String jsonInputField = getBasicTestJson();
    JsonInput jsonInput =
        createBasicTestJsonInput( "$..book[-1:]", new ValueMetaString( "last book" ), "json",
            new Object[] { jsonInputField } );
    RowComparatorListener rowComparator = new RowComparatorListener(
        new Object[] { jsonInputField,
          "{ \"category\": \"fiction\",\n"
              + " \"author\": \"J. R. R. Tolkien\",\n"
              + " \"title\": \"The Lord of the Rings\",\n"
              + " \"isbn\": \"0-395-19395-8\",\n"
              + " \"price\": 22.99\n"
              + "}\n" } );
    // Compare column 1 structurally (JSON equality) rather than as raw text.
    rowComparator.setComparator( 1, new JsonComparison() );
    jsonInput.addRowListener( rowComparator );
    processRows( jsonInput, 2 );
    Assert.assertEquals( 1, jsonInput.getLinesWritten() );
  }
@Test
public void testIndexList() throws Exception {
final String jsonInputField = getBasicTestJson();
testSimpleJsonPath( "$..book[1,3].price", new ValueMetaNumber( "price" ),
new Object[][] { new Object[] { jsonInputField } },
new Object[][] { new Object[] { jsonInputField, 12.99 },
new Object[] { jsonInputField, 22.99 } } );
}
  /** Single-field extraction: the ISBN of every book that has one; the source column is kept. */
  @Test
  public void testSingleField() throws Exception {
    JsonInputField isbn = new JsonInputField( "isbn" );
    isbn.setPath( "$..book[?(@.isbn)].isbn" );
    isbn.setType( ValueMetaInterface.TYPE_STRING );
    JsonInputMeta meta = createSimpleMeta( "json", isbn );
    JsonInput jsonInput = createJsonInput( "json", meta, new Object[] { getBasicTestJson() } );
    RowComparatorListener rowComparator = new RowComparatorListener(
        new Object[] { null, "0-553-21311-3" },
        new Object[] { null, "0-395-19395-8" } );
    // Column 0 is the (large) source JSON document; skip comparing it.
    rowComparator.setComparator( 0, null );
    jsonInput.addRowListener( rowComparator );
    processRows( jsonInput, 3 );
    Assert.assertEquals( "error", 0, jsonInput.getErrors() );
    Assert.assertEquals( "lines written", 2, jsonInput.getLinesWritten() );
  }
  /** Two path expressions with matching cardinality are zipped into the same output rows. */
  @Test
  public void testDualExp() throws Exception {
    JsonInputField isbn = new JsonInputField( "isbn" );
    isbn.setPath( "$..book[?(@.isbn)].isbn" );
    isbn.setType( ValueMetaInterface.TYPE_STRING );
    JsonInputField price = new JsonInputField( "price" );
    price.setPath( "$..book[?(@.isbn)].price" );
    price.setType( ValueMetaInterface.TYPE_NUMBER );
    JsonInputMeta meta = createSimpleMeta( "json", isbn, price );
    JsonInput jsonInput = createJsonInput( "json", meta, new Object[] { getBasicTestJson() } );
    RowComparatorListener rowComparator = new RowComparatorListener(
        new Object[] { null, "0-553-21311-3", 8.99 },
        new Object[] { null, "0-395-19395-8", 22.99 } );
    // Column 0 is the source JSON document; skip comparing it.
    rowComparator.setComparator( 0, null );
    jsonInput.addRowListener( rowComparator );
    processRows( jsonInput, 3 );
    Assert.assertEquals( "error", 0, jsonInput.getErrors() );
    Assert.assertEquals( "lines written", 2, jsonInput.getLinesWritten() );
  }
  /** Two paths with different match counts (2 vs 4) must fail with a descriptive error. */
  @Test
  public void testDualExpMismatchError() throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    helper.redirectLog( out, LogLevel.ERROR );
    JsonInputField isbn = new JsonInputField( "isbn" );
    isbn.setPath( "$..book[?(@.isbn)].isbn" );
    isbn.setType( ValueMetaInterface.TYPE_STRING );
    JsonInputField price = new JsonInputField( "price" );
    price.setPath( "$..book[*].price" );
    price.setType( ValueMetaInterface.TYPE_NUMBER );
    // Force en_US so the expected (English) error text matches regardless of host locale.
    try ( LocaleChange enUS = new LocaleChange( Locale.US ) ) {
      JsonInputMeta meta = createSimpleMeta( "json", isbn, price );
      JsonInput jsonInput = createJsonInput( "json", meta, new Object[] { getBasicTestJson() } );
      processRows( jsonInput, 3 );
      Assert.assertEquals( "error", 1, jsonInput.getErrors() );
      Assert.assertEquals( "rows written", 0, jsonInput.getLinesWritten() );
      String errors = IOUtils.toString( new ByteArrayInputStream( out.toByteArray() ), StandardCharsets.UTF_8.name() );
      String expectedError =
          "The data structure is not the same inside the resource!"
              + " We found 4 values for json path [$..book[*].price],"
              + " which is different that the number returned for path [$..book[?(@.isbn)].isbn] (2 values)."
              + " We MUST have the same number of values for all paths.";
      Assert.assertTrue( "expected error", errors.contains( expectedError ) );
    }
  }
  /** A path that matches nothing fails the row when ignoreMissingPath is off. */
  @Test
  public void testBadExp() throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    helper.redirectLog( out, LogLevel.ERROR );
    // Force en_US so the expected (English) error text matches regardless of host locale.
    try ( LocaleChange enUS = new LocaleChange( Locale.US ) ) {
      ValueMetaString outputMeta = new ValueMetaString( "result" );
      JsonInputField jpath = new JsonInputField( outputMeta.getName() );
      jpath.setPath( "$..fail" );
      jpath.setType( outputMeta.getType() );
      JsonInputMeta jsonInputMeta = createSimpleMeta( "json", jpath );
      jsonInputMeta.setIgnoreMissingPath( false );
      JsonInput jsonInput = createJsonInput( "json", jsonInputMeta, new Object[] { getBasicTestJson() } );
      processRows( jsonInput, 2 );
      Assert.assertEquals( "errors", 1, jsonInput.getErrors() );
      Assert.assertEquals( "rows written", 0, jsonInput.getLinesWritten() );
      String expectedError = "We can not find any data with path [$..fail]";
      String errors = IOUtils.toString( new ByteArrayInputStream( out.toByteArray() ), StandardCharsets.UTF_8.name() );
      Assert.assertTrue( "error", errors.contains( expectedError ) );
    }
  }
@Test
public void testRemoveSourceField() throws Exception {
final String inCol = "json";
JsonInputField jpath = new JsonInputField( "isbn" );
jpath.setPath( "$..book[*].isbn" );
jpath.setType( ValueMetaInterface.TYPE_STRING );
JsonInputMeta meta = createSimpleMeta( inCol, jpath );
meta.setRemoveSourceField( true );
meta.setIgnoreMissingPath( true );
JsonInput jsonInput = createJsonInput( "json", meta, new Object[] { getBasicTestJson() } );
RowComparatorListener rowComparator = new RowComparatorListener(
new Object[] { "0-553-21311-3" },
new Object[] { "0-395-19395-8" } );
jsonInput.addRowListener( rowComparator );
processRows( jsonInput, 4 );
Assert.assertEquals( "errors", 0, jsonInput.getErrors() );
Assert.assertEquals( "lines written", 2, jsonInput.getLinesWritten() );
}
@Test
public void testRowLimit() throws Exception {
final String inCol = "json";
JsonInputField jpath = new JsonInputField( "isbn" );
jpath.setPath( "$..book[*].isbn" );
jpath.setType( ValueMetaInterface.TYPE_STRING );
JsonInputMeta meta = createSimpleMeta( inCol, jpath );
meta.setRemoveSourceField( true );
meta.setIgnoreMissingPath( true );
meta.setRowLimit( 2 );
JsonInput jsonInput = createJsonInput( "json", meta, new Object[] { getBasicTestJson() } );
processRows( jsonInput, 4 );
Assert.assertEquals( "errors", 0, jsonInput.getErrors() );
Assert.assertEquals( "lines written", 2, jsonInput.getLinesWritten() );
}
@Test
public void testSmallDoubles() throws Exception {
// legacy parser handles these but positive exp would read null
for ( String nbr : new String[] { "1e-20", "1.52999996e-20", "2.05E-20" } ) {
final String ibgNbrInput =
"{ \"number\": " + nbr + " }";
testSimpleJsonPath( "$.number", new ValueMetaNumber( "not so big number" ),
new Object[][] { new Object[] { ibgNbrInput } },
new Object[][] { new Object[] { ibgNbrInput, Double.parseDouble( nbr ) } } );
}
}
  /** A path selecting a jagged (nested) array serializes it to a compact JSON string. */
  @Test
  public void testJgdArray() throws Exception {
    final String input =
        " { \"arr\": [ [ { \"a\": 1, \"b\": 1}, { \"a\": 1, \"b\": 2} ], [ {\"a\": 3, \"b\": 4 } ] ] }";
    JsonInput jsonInput =
        createBasicTestJsonInput( "$.arr", new ValueMetaString( "array" ), "json", new Object[] { input } );
    RowComparatorListener rowComparator =
        new RowComparatorListener(
            new Object[] { input, "[[{\"a\":1,\"b\":1},{\"a\":1,\"b\":2}],[{\"a\":3,\"b\":4}]]" } );
    // Compare column 1 structurally (JSON equality) rather than as raw text.
    rowComparator.setComparator( 1, new JsonComparison() );
    jsonInput.addRowListener( rowComparator );
    processRows( jsonInput, 2 );
    Assert.assertEquals( 1, jsonInput.getLinesWritten() );
  }
  /** With ignoreMissingPath on, $..price yields one row per matched value (5 in the sample doc). */
  @Test
  public void testDefaultLeafToNull() throws Exception {
    JsonInputField noPath = new JsonInputField( "price" );
    noPath.setPath( "$..price" );
    noPath.setType( ValueMetaInterface.TYPE_STRING );
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    helper.redirectLog( out, LogLevel.ERROR );
    JsonInputMeta meta = createSimpleMeta( "json", noPath );
    meta.setIgnoreMissingPath( true );
    meta.setRemoveSourceField( true );
    final String input = getBasicTestJson();
    JsonInput jsonInput = createJsonInput( "json", meta, new Object[] { input } );
    processRows( jsonInput, 8 );
    disposeJsonInput( jsonInput );
    // presumably 4 book prices + the bicycle price — TODO confirm against sample.json
    Assert.assertEquals( 5, jsonInput.getLinesWritten() );
  }
  /**
   * With ignoreMissingPath on, rows whose input is null, empty, or missing a field are
   * still emitted, with null for the unmatched columns, rather than being skipped.
   */
  @Test
  public void testIfIgnorePathDoNotSkipRowIfInputIsNullOrFieldNotFound() throws Exception {
    final String input1 = "{ \"value1\": \"1\",\n"
        + " \"value2\": \"2\",\n"
        + "}";
    final String input2 = "{ \"value1\": \"3\""
        + "}";
    final String input3 = "{ \"value2\": \"4\""
        + "}";
    final String input4 = "{ \"value1\": null,\n"
        + " \"value2\": null,\n"
        + "}";
    final String input5 = "{}";
    final String input6 = null;
    final String inCol = "input";
    JsonInputField aField = new JsonInputField();
    aField.setName( "a" );
    aField.setPath( "$.value1" );
    aField.setType( ValueMetaInterface.TYPE_STRING );
    JsonInputField bField = new JsonInputField();
    bField.setName( "b" );
    bField.setPath( "$.value2" );
    bField.setType( ValueMetaInterface.TYPE_STRING );
    JsonInputMeta meta = createSimpleMeta( inCol, aField, bField );
    meta.setIgnoreMissingPath( true );
    JsonInput step = createJsonInput( inCol, meta, new Object[] { input1 },
        new Object[] { input2 },
        new Object[] { input3 },
        new Object[] { input4 },
        new Object[] { input5 },
        new Object[] { input6 }
    );
    step.addRowListener(
        new RowComparatorListener(
            new Object[]{ input1, "1", "2" }, new Object[]{ input2, "3", null }, new Object[]{ input3, null, "4" },
            new Object[]{ input4, null, null }, new Object[]{ input5, null, null }, new Object[]{ input6, null, null } ) );
    // NOTE(review): 6 input rows but only 5 processRow calls — confirm the last
    // (null) input actually gets processed and verified.
    processRows( step, 5 );
  }
  /** Deep-scan match order: the reference implementation emits matches breadth-first. */
  @Test
  public void testBfsMatchOrder() throws Exception {
    // streaming will be dfs..ref impl is bfs
    String input = "{ \"a\": { \"a\" : { \"b\" :2 } , \"b\":1 } }";
    JsonInput jsonInput =
        createBasicTestJsonInput( "$..a.b", new ValueMetaInteger( "b" ), "in", new Object[] { input } );
    // This RowComparatorListener constructor takes the step first; presumably it
    // registers itself as a listener — no explicit addRowListener call here.
    RowComparatorListener rowComparator = new RowComparatorListener( jsonInput,
        new Object[] { input, 1L },
        new Object[] { input, 2L } );
    rowComparator.setComparator( 0, null );
    processRows( jsonInput, 2 );
    Assert.assertEquals( 2, jsonInput.getLinesWritten() );
  }
  /**
   * setRepeated(true) on field "b": when a row's value is null, the last non-null
   * value is carried forward (row 3 repeats b=2), while field "a" stays null.
   */
  @Test
  public void testRepeatFieldSingleObj() throws Exception {
    final String input = " { \"items\": [ "
        + "{ \"a\": 1, \"b\": null }, "
        + "{ \"a\":null, \"b\":2 }, "
        + "{ \"a\":3, \"b\":null }, "
        + "{ \"a\":4, \"b\":4 } ] }";
    final String inCol = "input";
    JsonInputField aField = new JsonInputField();
    aField.setName( "a" );
    aField.setPath( "$.items[*].a" );
    aField.setType( ValueMetaInterface.TYPE_INTEGER );
    JsonInputField bField = new JsonInputField();
    bField.setName( "b" );
    bField.setPath( "$.items[*].b" );
    bField.setType( ValueMetaInterface.TYPE_INTEGER );
    bField.setRepeated( true );
    JsonInputMeta meta = createSimpleMeta( inCol, aField, bField );
    meta.setIgnoreMissingPath( true );
    JsonInput step = createJsonInput( inCol, meta, new Object[] { input } );
    step.addRowListener(
        new RowComparatorListener(
            new Object[] { input, 1L, null },
            new Object[] { input, null, 2L },
            new Object[] { input, 3L, 2L },
            new Object[] { input, 4L, 4L } ) );
    processRows( step, 4 );
    Assert.assertEquals( 4, step.getLinesWritten() );
  }
@Test
public void testPathMissingIgnore() throws Exception {
final String input = "{ \"value1\": \"1\",\n"
+ " \"value2\": \"2\",\n"
+ "}";
final String inCol = "input";
JsonInputField aField = new JsonInputField();
aField.setName( "a" );
aField.setPath( "$.value1" );
aField.setType( ValueMetaInterface.TYPE_STRING );
JsonInputField bField = new JsonInputField();
bField.setName( "b" );
bField.setPath( "$.value2" );
bField.setType( ValueMetaInterface.TYPE_STRING );
JsonInputField cField = new JsonInputField();
cField.setName( "c" );
cField.setPath( "$.notexistpath.value3" );
cField.setType( ValueMetaInterface.TYPE_STRING );
JsonInputMeta meta = createSimpleMeta( inCol, aField, bField, cField );
meta.setIgnoreMissingPath( true );
JsonInput step = createJsonInput( inCol, meta, new Object[] { input } );
step.addRowListener(
new RowComparatorListener(
new Object[] { input, "1", "2", null } ) );
processRows( step, 1 );
Assert.assertEquals( 1, step.getLinesWritten() );
}
/**
* PDI-10384 Huge numbers causing exception in JSON input step<br>
*/
@Test
public void testLargeDoubles() throws Exception {
// legacy mode yields null for these
for ( String nbr : new String[] { "1e20", "2.05E20", "1.52999996e20" } ) {
final String ibgNbrInput =
"{ \"number\": " + nbr + " }";
testSimpleJsonPath( "$.number", new ValueMetaNumber( "not so big number" ),
new Object[][] { new Object[] { ibgNbrInput } },
new Object[][] { new Object[] { ibgNbrInput, Double.parseDouble( nbr ) } } );
}
}
  /** The predicate [?(@.nval)] matches an object whose property exists, even if its value is null. */
  @Test
  public void testNullProp() throws Exception {
    final String input = "{ \"obj\": [ { \"nval\": null, \"val\": 2 }, { \"val\": 1 } ] }";
    JsonInput jsonInput =
        createBasicTestJsonInput( "$.obj[?(@.nval)].val", new ValueMetaString( "obj" ), "json", new Object[] { input } );
    RowComparatorListener rowComparator = new RowComparatorListener(
        new Object[] { input, "2" } );
    // Compare column 1 structurally (JSON equality) rather than as raw text.
    rowComparator.setComparator( 1, new JsonComparison() );
    jsonInput.addRowListener( rowComparator );
    processRows( jsonInput, 2 );
    // in jsonpath 2.0->2.1, null value properties started being counted as existing
    Assert.assertEquals( 1, jsonInput.getLinesWritten() );
  }
  /**
   * With ignoreMissingPath, two paths of different cardinality still pair up:
   * books lacking an ISBN produce null in the isbn column instead of an error.
   */
  @Test
  public void testDualExpMismatchPathLeafToNull() throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    helper.redirectLog( out, LogLevel.ERROR );
    JsonInputField isbn = new JsonInputField( "isbn" );
    isbn.setPath( "$..book[*].isbn" );
    isbn.setType( ValueMetaInterface.TYPE_STRING );
    JsonInputField price = new JsonInputField( "price" );
    price.setPath( "$..book[*].price" );
    price.setType( ValueMetaInterface.TYPE_NUMBER );
    JsonInputMeta meta = createSimpleMeta( "json", isbn, price );
    meta.setIgnoreMissingPath( true );
    meta.setRemoveSourceField( true );
    JsonInput jsonInput = createJsonInput( "json", meta, new Object[] { getBasicTestJson() } );
    RowComparatorListener rowComparator = new RowComparatorListener(
        new Object[] { null, 8.95d },
        new Object[] { null, 12.99d },
        new Object[] { "0-553-21311-3", 8.99d },
        new Object[] { "0-395-19395-8", 22.99d } );
    jsonInput.addRowListener( rowComparator );
    processRows( jsonInput, 5 );
    Assert.assertEquals( out.toString(), 0, jsonInput.getErrors() );
    Assert.assertEquals( "rows written", 4, jsonInput.getLinesWritten() );
  }
@Test
public void testSingleObjPred() throws Exception {
ByteArrayOutputStream out = new ByteArrayOutputStream();
helper.redirectLog( out, LogLevel.ERROR );
JsonInputField bic = new JsonInputField( "color" );
bic.setPath( "$.store.bicycle[?(@.price)].color" );
bic.setType( ValueMetaInterface.TYPE_STRING );
JsonInputMeta meta = createSimpleMeta( "json", bic );
meta.setRemoveSourceField( true );
JsonInput jsonInput = createJsonInput( "json", meta, new Object[] { getBasicTestJson() } );
RowComparatorListener rowComparator = new RowComparatorListener(
new Object[] { "red" } );
jsonInput.addRowListener( rowComparator );
processRows( jsonInput, 2 );
Assert.assertEquals( out.toString(), 0, jsonInput.getErrors() );
Assert.assertEquals( "rows written", 1, jsonInput.getLinesWritten() );
}
  /** A path selecting a JSON array serializes the whole array to one compact JSON string. */
  @Test
  public void testArrayOut() throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    helper.redirectLog( out, LogLevel.ERROR );
    JsonInputField byc = new JsonInputField( "books (array)" );
    byc.setPath( "$.store.book" );
    byc.setType( ValueMetaInterface.TYPE_STRING );
    JsonInputMeta meta = createSimpleMeta( "json", byc );
    meta.setRemoveSourceField( true );
    JsonInput jsonInput = createJsonInput( "json", meta, new Object[] { getBasicTestJson() } );
    RowComparatorListener rowComparator = new RowComparatorListener(
        new Object[] {
          "[{\"category\":\"reference\",\"author\":\"Nigel Rees\",\"title\":\"Sayings of the Century\",\"price\":8.95},"
              + "{\"category\":\"fiction\",\"author\":\"Evelyn Waugh\",\"title\":\"Sword of Honour\",\"price\":12.99},"
              + "{\"category\":\"fiction\",\"author\":\"Herman Melville\",\"title\":\"Moby Dick\","
              + "\"isbn\":\"0-553-21311-3\",\"price\":8.99},{\"category\":\"fiction\",\"author\":\"J. R. R. Tolkien\","
              + "\"title\":\"The Lord of the Rings\",\"isbn\":\"0-395-19395-8\",\"price\":22.99}]" } );
    jsonInput.addRowListener( rowComparator );
    processRows( jsonInput, 2 );
    Assert.assertEquals( out.toString(), 0, jsonInput.getErrors() );
    Assert.assertEquals( "rows written", 1, jsonInput.getLinesWritten() );
  }
@Test
public void testObjectOut() throws Exception {
ByteArrayOutputStream out = new ByteArrayOutputStream();
helper.redirectLog( out, LogLevel.ERROR );
JsonInputField bic = new JsonInputField( "the bicycle (obj)" );
bic.setPath( "$.store.bicycle" );
bic.setType( ValueMetaInterface.TYPE_STRING );
JsonInputMeta meta = createSimpleMeta( "json", bic );
meta.setRemoveSourceField( true );
JsonInput jsonInput = createJsonInput( "json", meta, new Object[] { getBasicTestJson() } );
RowComparatorListener rowComparator = new RowComparatorListener(
new Object[] { "{\"color\":\"red\",\"price\":19.95}" } );
jsonInput.addRowListener( rowComparator );
processRows( jsonInput, 2 );
Assert.assertEquals( out.toString(), 0, jsonInput.getErrors() );
Assert.assertEquals( "rows written", 1, jsonInput.getLinesWritten() );
}
  /** The [*] wildcard on an object yields one row per property value, rendered as strings. */
  @Test
  public void testBicycleAsterisk() throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    helper.redirectLog( out, LogLevel.ERROR );
    JsonInputField byc = new JsonInputField( "badger" );
    byc.setPath( "$.store.bicycle[*]" );
    byc.setType( ValueMetaInterface.TYPE_STRING );
    JsonInputMeta meta = createSimpleMeta( "json", byc );
    meta.setRemoveSourceField( true );
    JsonInput jsonInput = createJsonInput( "json", meta, new Object[] { getBasicTestJson() } );
    RowComparatorListener rowComparator = new RowComparatorListener(
        new Object[] { "red" },
        new Object[] { "19.95" } );
    jsonInput.addRowListener( rowComparator );
    processRows( jsonInput, 2 );
    assertEquals( out.toString(), 0, jsonInput.getErrors() );
    assertEquals( "rows written", 2, jsonInput.getLinesWritten() );
  }
  /** Null input rows pass through as (null, null) rows; valid rows are processed normally. */
  @Test
  public void testNullInputs() throws Exception {
    final String jsonInputField = getBasicTestJson();
    testSimpleJsonPath( "$..book[?(@.isbn)].author", new ValueMetaString( "author w/ isbn" ),
        new Object[][] {
          new Object[] { null },
          new Object[] { jsonInputField },
          new Object[] { null } },
        new Object[][] {
          new Object[] { null, null },
          new Object[] { jsonInputField, "Herman Melville" },
          new Object[] { jsonInputField, "J. R. R. Tolkien" },
          new Object[] { null, null }
        } );
  }
/**
* File tests
*/
  /** A file list containing null entries produces one step error per null file. */
  @Test
  public void testNullFileList() throws Exception {
    ByteArrayOutputStream err = new ByteArrayOutputStream();
    helper.redirectLog( err, LogLevel.ERROR );
    try {
      JsonInputField price = new JsonInputField();
      price.setName( "price" );
      price.setType( ValueMetaInterface.TYPE_NUMBER );
      price.setPath( "$..book[*].price" );
      // Two null file entries -> two errors expected below.
      List<FileObject> fileList = Arrays.asList( null, null );
      JsonInputMeta meta = createFileListMeta( fileList );
      meta.setInputFields( new JsonInputField[] { price } );
      meta.setIncludeRowNumber( true );
      meta.setRowNumberField( "rownbr" );
      meta.setShortFileNameField( "fname" );
      JsonInput jsonInput = createJsonInput( meta );
      processRows( jsonInput, 5 );
      disposeJsonInput( jsonInput );
      assertEquals( err.toString(), 2, jsonInput.getErrors() );
    } finally {
      deleteFiles();
    }
  }
  /** Reads two RAM-VFS files in sequence; row numbers continue across files and fname tracks the source. */
  @Test
  public void testFileList() throws Exception {
    ByteArrayOutputStream err = new ByteArrayOutputStream();
    helper.redirectLog( err, LogLevel.ERROR );
    final String input1 = getBasicTestJson();
    final String input2 = "{ \"store\": { \"book\": [ { \"price\": 9.99 } ] } }";
    try ( FileObject fileObj1 = KettleVFS.getFileObject( BASE_RAM_DIR + "test1.json" );
        FileObject fileObj2 = KettleVFS.getFileObject( BASE_RAM_DIR + "test2.json" ) ) {
      try ( OutputStream out = fileObj1.getContent().getOutputStream() ) {
        out.write( input1.getBytes() );
      }
      try ( OutputStream out = fileObj2.getContent().getOutputStream() ) {
        out.write( input2.getBytes() );
      }
      JsonInputField price = new JsonInputField();
      price.setName( "price" );
      price.setType( ValueMetaInterface.TYPE_NUMBER );
      price.setPath( "$..book[*].price" );
      List<FileObject> fileList = Arrays.asList( fileObj1, fileObj2 );
      JsonInputMeta meta = createFileListMeta( fileList );
      meta.setInputFields( new JsonInputField[] { price } );
      meta.setIncludeRowNumber( true );
      meta.setRowNumberField( "rownbr" );
      meta.setShortFileNameField( "fname" );
      JsonInput jsonInput = createJsonInput( meta );
      // Expected columns: price, running row number, short file name.
      RowComparatorListener rowComparator = new RowComparatorListener(
          new Object[] { 8.95d, 1L, "test1.json" },
          new Object[] { 12.99d, 2L, "test1.json" },
          new Object[] { 8.99d, 3L, "test1.json" },
          new Object[] { 22.99d, 4L, "test1.json" },
          new Object[] { 9.99d, 5L, "test2.json" } );
      jsonInput.addRowListener( rowComparator );
      processRows( jsonInput, 5 );
      disposeJsonInput( jsonInput );
      assertEquals( err.toString(), 0, jsonInput.getErrors() );
    } finally {
      deleteFiles();
    }
  }
  /** An empty file list with doNotFailIfNoFile=false must log "No file(s) specified!". */
  @Test
  public void testNoFilesInListError() throws Exception {
    ByteArrayOutputStream err = new ByteArrayOutputStream();
    helper.redirectLog( err, LogLevel.ERROR );
    JsonInputMeta meta = createFileListMeta( Collections.<FileObject>emptyList() );
    meta.setDoNotFailIfNoFile( false );
    JsonInputField price = new JsonInputField();
    price.setName( "price" );
    price.setType( ValueMetaInterface.TYPE_NUMBER );
    price.setPath( "$..book[*].price" );
    meta.setInputFields( new JsonInputField[] { price } );
    // Force en_US so the expected (English) message matches regardless of host locale.
    try ( LocaleChange enUS = new LocaleChange( Locale.US ) ) {
      JsonInput jsonInput = createJsonInput( meta );
      processRows( jsonInput, 1 );
    }
    String errMsgs = err.toString();
    assertTrue( errMsgs, errMsgs.contains( "No file(s) specified!" ) );
  }
  /** Reads a JSON entry from inside a zip archive via the VFS "zip:...!/" URL scheme. */
  @Test
  public void testZipFileInput() throws Exception {
    ByteArrayOutputStream err = new ByteArrayOutputStream();
    helper.redirectLog( err, LogLevel.ERROR );
    final String input = getBasicTestJson();
    try ( FileObject fileObj = KettleVFS.getFileObject( BASE_RAM_DIR + "test.zip" ) ) {
      fileObj.createFile();
      // Build a one-entry zip (test.json) holding the sample document.
      try ( OutputStream out = fileObj.getContent().getOutputStream() ) {
        try ( ZipOutputStream zipOut = new ZipOutputStream( out ) ) {
          ZipEntry jsonFile = new ZipEntry( "test.json" );
          zipOut.putNextEntry( jsonFile );
          zipOut.write( input.getBytes() );
          zipOut.closeEntry();
          zipOut.flush();
        }
      }
      JsonInputField price = new JsonInputField();
      price.setName( "price" );
      price.setType( ValueMetaInterface.TYPE_NUMBER );
      price.setPath( "$..book[*].price" );
      JsonInputMeta meta = createSimpleMeta( "in file", price );
      meta.setIsAFile( true );
      meta.setRemoveSourceField( true );
      JsonInput jsonInput = createJsonInput( "in file", meta, new Object[][] {
        new Object[] { "zip:" + BASE_RAM_DIR + "test.zip!/test.json" }
      } );
      RowComparatorListener rowComparator = new RowComparatorListener(
          new Object[] { 8.95d },
          new Object[] { 12.99d },
          new Object[] { 8.99d },
          new Object[] { 22.99d } );
      jsonInput.addRowListener( rowComparator );
      processRows( jsonInput, 5 );
      Assert.assertEquals( err.toString(), 0, jsonInput.getErrors() );
    } finally {
      deleteFiles();
    }
  }
  /**
   * Additional file-metadata output fields (extension, dir path, size, hidden flag,
   * last-modified, URI, root URI) are populated per source file; size and timestamp
   * are checked with custom comparators since their exact values vary.
   */
  @Test
  public void testExtraFileFields() throws Exception {
    ByteArrayOutputStream err = new ByteArrayOutputStream();
    helper.redirectLog( err, LogLevel.ERROR );
    final String input1 = getBasicTestJson();
    final String input2 = "{ \"store\": { \"bicycle\": { \"color\": \"blue\" } } }";
    final String path1 = BASE_RAM_DIR + "test1.json";
    final String path2 = BASE_RAM_DIR + "test2.js";
    try ( FileObject fileObj1 = KettleVFS.getFileObject( path1 );
        FileObject fileObj2 = KettleVFS.getFileObject( path2 ) ) {
      try ( OutputStream out = fileObj1.getContent().getOutputStream() ) {
        out.write( input1.getBytes() );
      }
      try ( OutputStream out = fileObj2.getContent().getOutputStream() ) {
        out.write( input2.getBytes() );
      }
      JsonInputField color = new JsonInputField();
      color.setName( "color" );
      color.setType( ValueMetaInterface.TYPE_STRING );
      color.setPath( "$.store.bicycle.color" );
      JsonInputMeta meta = createSimpleMeta( "in file", color );
      meta.setInFields( true );
      meta.setIsAFile( true );
      meta.setRemoveSourceField( true );
      meta.setExtensionField( "extension" );
      meta.setPathField( "dir path" );
      meta.setSizeField( "size" );
      meta.setIsHiddenField( "hidden?" );
      meta.setLastModificationDateField( "last modified" );
      meta.setUriField( "URI" );
      meta.setRootUriField( "root URI" );
      // custom checkers for size and last modified
      // The -1L and new Date( 0 ) placeholders are overridden by the comparators below.
      RowComparatorListener rowComparator = new RowComparatorListener(
          new Object[] { "red",
            "json", "ram:///jsonInputTest", -1L, false, new Date( 0 ), "ram:///jsonInputTest/test1.json", "ram:///" },
          new Object[] { "blue",
            "js", "ram:///jsonInputTest", -1L, false, new Date( 0 ), "ram:///jsonInputTest/test2.js", "ram:///" } );
      rowComparator.setComparator( 3, new RowComparatorListener.Comparison<Object>() {
        @Override
        public boolean equals( Object expected, Object actual ) throws Exception {
          // just want a valid size
          return ( (long) actual ) > 0L;
        }
      } );
      rowComparator.setComparator( 5, new RowComparatorListener.Comparison<Object>() {
        @Override
        public boolean equals( Object expected, Object actual ) throws Exception {
          return ( (Date) actual ).after( new Date( 0 ) );
        }
      } );
      JsonInput jsonInput = createJsonInput( "in file", meta, new Object[][] {
        new Object[] { path1 },
        new Object[] { path2 }
      } );
      jsonInput.addRowListener( rowComparator );
      processRows( jsonInput, 3 );
      Assert.assertEquals( err.toString(), 0, jsonInput.getErrors() );
    } finally {
      deleteFiles();
    }
  }
/**
 * An empty input file with {@code ignoreEmptyFile == false} must be reported:
 * the step should log an "is empty!" message (checked against the en_US locale).
 */
@Test
public void testZeroSizeFile() throws Exception {
ByteArrayOutputStream log = new ByteArrayOutputStream();
helper.redirectLog( log, LogLevel.BASIC );
// force en_US so the expected message text is deterministic
try ( FileObject fileObj = KettleVFS.getFileObject( BASE_RAM_DIR + "test.json" );
LocaleChange enUs = new LocaleChange( Locale.US ); ) {
// create the file but write nothing to it
fileObj.createFile();
JsonInputField price = new JsonInputField();
price.setName( "price" );
price.setType( ValueMetaInterface.TYPE_NUMBER );
price.setPath( "$..book[*].price" );
JsonInputMeta meta = createSimpleMeta( "in file", price );
meta.setIsAFile( true );
meta.setRemoveSourceField( true );
meta.setIgnoreEmptyFile( false );
JsonInput jsonInput = createJsonInput( "in file", meta, new Object[][] {
new Object[] { BASE_RAM_DIR + "test.json" }
} );
processRows( jsonInput, 1 );
String logMsgs = log.toString();
assertTrue( logMsgs, logMsgs.contains( "is empty!" ) );
} finally {
deleteFiles();
}
}
/**
 * PDI-13859: field names containing special characters such as parentheses
 * must be addressable with the bracket-and-quote JSONPath notation,
 * e.g. {@code $.['b(1)']}.
 */
@Test
public void testBracketEscape() throws Exception {
String input = "{\"a\":1,\"b(1)\":2}";
testSimpleJsonPath( "$.['b(1)']", new ValueMetaInteger( "b(1)" ),
new Object[][] { new Object[] { input } },
new Object[][] { new Object[] { input, 2L } } );
}
/**
 * Malformed JSON input ({@code "{{"}) must yield exactly one step error, no
 * written rows, and a logged "Error parsing string" message (en_US locale).
 */
@Test
public void testBadInput() throws Exception {
ByteArrayOutputStream out = new ByteArrayOutputStream();
helper.redirectLog( out, LogLevel.ERROR );
JsonInputField isbn = new JsonInputField( "isbn" );
isbn.setPath( "$..book[?(@.isbn)].isbn" );
isbn.setType( ValueMetaInterface.TYPE_STRING );
// deliberately invalid JSON
String input = "{{";
try ( LocaleChange enUS = new LocaleChange( Locale.US ) ) {
JsonInputMeta meta = createSimpleMeta( "json", isbn );
JsonInput jsonInput = createJsonInput( "json", meta, new Object[] { input } );
processRows( jsonInput, 3 );
Assert.assertEquals( "error", 1, jsonInput.getErrors() );
Assert.assertEquals( "rows written", 0, jsonInput.getLinesWritten() );
String errors = IOUtils.toString( new ByteArrayInputStream( out.toByteArray() ), StandardCharsets.UTF_8.name() );
Assert.assertTrue( "expected error", errors.contains( "Error parsing string" ) );
}
}
/**
 * With step error handling enabled, a malformed input row must be redirected
 * to the error stream (original value preserved in the error row) while the
 * valid row is still written normally.
 */
@Test
public void testErrorRedirect() throws Exception {
JsonInputField field = new JsonInputField( "value" );
field.setPath( "$.value" );
field.setType( ValueMetaInterface.TYPE_STRING );
// first row is invalid JSON, second is well-formed
String input1 = "{{";
String input2 = "{ \"value\": \"ok\" }";
JsonInputMeta meta = createSimpleMeta( "json", field );
meta.setRemoveSourceField( true );
when( helper.stepMeta.isDoingErrorHandling() ).thenReturn( true );
JsonInput jsonInput = createJsonInput( "json", meta, new Object[] { input1 }, new Object[] { input2 } );
StepErrorMeta errMeta = new StepErrorMeta( jsonInput, helper.stepMeta );
errMeta.setEnabled( true );
errMeta.setErrorFieldsValuename( "err field" );
when( helper.stepMeta.getStepErrorMeta() ).thenReturn( errMeta );
// collect rows that the step diverts to the error stream
final List<Object[]> errorLines = new ArrayList<>();
jsonInput.addRowListener( new RowComparatorListener( new Object[] { "ok" } ) {
@Override
public void errorRowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) throws KettleStepException {
errorLines.add( row );
}
} );
processRows( jsonInput, 3 );
Assert.assertEquals( "fwd error", 1, errorLines.size() );
Assert.assertEquals( "input in err line", input1, errorLines.get( 0 )[ 0 ] );
Assert.assertEquals( "rows written", 1, jsonInput.getLinesWritten() );
}
/**
 * Reading source data from a URL: a fetch that cannot be completed must be
 * counted as a step error.
 * NOTE(review): assumes nothing serves http://localhost/test.json in the test
 * environment, so the read fails -- confirm if this becomes flaky.
 */
@Test
public void testUrlInput() throws Exception {
JsonInputField field = new JsonInputField( "value" );
field.setPath( "$.value" );
field.setType( ValueMetaInterface.TYPE_STRING );
String input1 = "http://localhost/test.json";
JsonInputMeta meta = createSimpleMeta( "json", field );
meta.setReadUrl( true );
JsonInput jsonInput = createJsonInput( "json", meta, new Object[] { input1 } );
processRows( jsonInput, 3 );
Assert.assertEquals( 1, jsonInput.getErrors() );
}
/**
 * Builds a minimal step configuration that reads JSON from the given input
 * column, extracts the supplied JSONPath fields, and tolerates missing paths.
 */
protected JsonInputMeta createSimpleMeta( String inputColumn, JsonInputField... jsonPathFields ) {
  final JsonInputMeta simpleMeta = new JsonInputMeta();
  simpleMeta.setDefault();
  simpleMeta.setInFields( true );
  simpleMeta.setFieldValue( inputColumn );
  simpleMeta.setInputFields( jsonPathFields );
  simpleMeta.setIgnoreMissingPath( true );
  return simpleMeta;
}
/**
 * Removes every file created under the test's RAM-filesystem base directory.
 */
private void deleteFiles() throws FileSystemException, KettleFileException {
try ( FileObject baseDir = KettleVFS.getFileObject( BASE_RAM_DIR ) ) {
baseDir.deleteAll();
}
}
/**
 * Creates and initialises a JsonInput step instance for the given metadata,
 * wired to the shared mock helper objects.
 */
protected JsonInput createJsonInput( JsonInputMeta meta ) {
  final JsonInputData stepData = new JsonInputData();
  final JsonInput step =
      new JsonInput( helper.stepMeta, helper.stepDataInterface, 0, helper.transMeta, helper.trans );
  step.init( meta, stepData );
  return step;
}
/**
 * Releases the step's resources after a test run.
 */
protected void disposeJsonInput( JsonInput jsonInput ) {
jsonInput.dispose( null, helper.stepDataInterface );
}
/**
 * Builds a step configuration whose file list is fixed to the given files,
 * bypassing the usual file-name resolution logic.
 */
protected JsonInputMeta createFileListMeta( final List<FileObject> files ) {
// anonymous subclass pins getFileInputList() to the supplied files
JsonInputMeta meta = new JsonInputMeta() {
@Override
public FileInputList getFileInputList( VariableSpace space ) {
return new FileInputList() {
@Override
public List<FileObject> getFiles() {
return files;
}
@Override
public int nrOfFiles() {
return files.size();
}
};
}
};
meta.setDefault();
meta.setInFields( false );
meta.setIgnoreMissingPath( false );
return meta;
}
/**
 * Runs a single-field JSONPath extraction over the given input rows and
 * asserts the produced rows, the written-row count, and that no errors occur.
 */
protected void testSimpleJsonPath( String jsonPath,
ValueMetaInterface outputMeta,
Object[][] inputRows, Object[][] outputRows ) throws Exception {
final String inCol = "in";
JsonInput jsonInput = createBasicTestJsonInput( jsonPath, outputMeta, inCol, inputRows );
RowComparatorListener rowComparator = new RowComparatorListener( outputRows );
jsonInput.addRowListener( rowComparator );
// one extra call lets the step signal completion
processRows( jsonInput, outputRows.length + 1 );
Assert.assertEquals( "rows written", outputRows.length, jsonInput.getLinesWritten() );
Assert.assertEquals( "errors", 0, jsonInput.getErrors() );
}
/**
 * Drives the step until it reports completion or {@code maxCalls} invocations
 * of {@code processRow} have been made, whichever comes first.
 */
protected void processRows( StepInterface step, final int maxCalls ) throws Exception {
  int calls = 0;
  while ( calls < maxCalls
      && step.processRow( helper.processRowsStepMetaInterface, helper.processRowsStepDataInterface ) ) {
    calls++;
  }
}
/**
 * Convenience factory: builds a step that extracts one JSONPath expression
 * from {@code inCol}, typed and named after {@code outputMeta}.
 */
protected JsonInput createBasicTestJsonInput( String jsonPath, ValueMetaInterface outputMeta, final String inCol,
Object[]... inputRows ) {
  final JsonInputField pathField = new JsonInputField( outputMeta.getName() );
  pathField.setPath( jsonPath );
  pathField.setType( outputMeta.getType() );
  return createJsonInput( inCol, createSimpleMeta( inCol, pathField ), inputRows );
}
/**
 * Overload of {@code createJsonInput} with no variable space.
 */
protected JsonInput createJsonInput( final String inCol, JsonInputMeta meta, Object[]... inputRows ) {
return createJsonInput( inCol, meta, null, inputRows );
}
/**
 * Creates an initialised JsonInput step fed by a mock row set containing
 * {@code inputRows} in a single string column {@code inCol}, optionally
 * inheriting the given variable space.
 */
protected JsonInput createJsonInput( final String inCol, JsonInputMeta meta, VariableSpace variables, Object[]... inputRows ) {
JsonInputData data = new JsonInputData();
JsonInput jsonInput = new JsonInput( helper.stepMeta, helper.stepDataInterface, 0, helper.transMeta, helper.trans );
RowSet input = helper.getMockInputRowSet( inputRows );
RowMetaInterface rowMeta = createRowMeta( new ValueMetaString( inCol ) );
input.setRowMeta( rowMeta );
jsonInput.getInputRowSets().add( input );
jsonInput.setInputRowMeta( rowMeta );
// variables may be null; the step then starts with an empty variable space
jsonInput.initializeVariablesFrom( variables );
jsonInput.init( meta, data );
return jsonInput;
}
/**
 * Row listener that asserts each row written by a step matches the
 * corresponding row of expected data, column by column.
 * <p>
 * Columns are compared through the row's {@link ValueMetaInterface} by
 * default; a custom {@link Comparison} can be registered per column (via
 * {@link #setComparator(int, Comparison)}) for values that cannot be matched
 * exactly, such as file sizes or timestamps.
 */
protected static class RowComparatorListener extends RowAdapter {

  Object[][] data;
  int rowNbr = 0;
  private Map<Integer, Comparison<Object>> comparators = new HashMap<>();

  public RowComparatorListener( Object[]... data ) {
    this.data = data;
  }

  /** Registers itself on {@code step} in addition to storing the expected rows. */
  public RowComparatorListener( StepInterface step, Object[]... data ) {
    this.data = data;
    step.addRowListener( this );
  }

  /**
   * Registers a custom comparison for one column.
   *
   * @param colIdx     zero-based index of the column to compare
   * @param comparator comparison to apply; a {@code null} comparator accepts any value
   */
  public void setComparator( int colIdx, Comparison<Object> comparator ) {
    comparators.put( colIdx, comparator );
  }

  @Override
  public void rowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) throws KettleStepException {
    if ( rowNbr >= data.length ) {
      // Fix: was `"" + rowNbr + 1`, which string-concatenated instead of adding
      // (row index 2 was reported as "21"); parenthesise the arithmetic.
      throw new ComparisonFailure( "too many output rows",
        String.valueOf( data.length ), String.valueOf( rowNbr + 1 ) );
    } else {
      for ( int i = 0; i < data[ rowNbr ].length; i++ ) {
        try {
          boolean eq = true;
          if ( comparators.containsKey( i ) ) {
            Comparison<Object> comp = comparators.get( i );
            if ( comp != null ) {
              eq = comp.equals( data[ rowNbr ][ i ], row[ i ] );
            }
          } else {
            ValueMetaInterface valueMeta = rowMeta.getValueMeta( i );
            eq = valueMeta.compare( data[ rowNbr ][ i ], row[ i ] ) == 0;
          }
          if ( !eq ) {
            throw new ComparisonFailure( String.format( "Mismatch row %d, column %d", rowNbr, i ),
              rowMeta.getString( data[ rowNbr ] ), rowMeta.getString( row ) );
          }
        } catch ( Exception e ) {
          // the comparison itself failed (e.g. incompatible types); surface as an assertion error
          // (ComparisonFailure is an Error, so it is not swallowed by this catch)
          throw new AssertionError( String.format( "Value type at row %d, column %d", rowNbr, i ), e );
        }
      }
      rowNbr++;
    }
  }

  /** Pluggable column comparison; may throw to signal an unusable value. */
  protected interface Comparison<T> {
    boolean equals( T expected, T actual ) throws Exception;
  }
}
/**
 * Column comparison that treats both values as JSON documents and compares
 * them structurally (object key order is ignored) via {@code jsonEquals}.
 */
protected static class JsonComparison implements RowComparatorListener.Comparison<Object> {
@Override
public boolean equals( Object expected, Object actual ) throws Exception {
return jsonEquals( (String) expected, (String) actual );
}
}
/**
 * Temporarily switches the Kettle default locale; restores the previous
 * locale on {@link #close()}, so it can be used in try-with-resources.
 */
protected static class LocaleChange implements AutoCloseable {
// locale in effect before the switch, restored on close()
private Locale original;
public LocaleChange( Locale newLocale ) {
original = LanguageChoice.getInstance().getDefaultLocale();
LanguageChoice.getInstance().setDefaultLocale( newLocale );
}
@Override
public void close() throws Exception {
LanguageChoice.getInstance().setDefaultLocale( original );
}
}
/**
 * Deep-equality check for two JSON documents; object key order is ignored
 * because both sides are parsed into trees before comparison.
 */
protected static final boolean jsonEquals( String json1, String json2 ) throws Exception {
  final ObjectMapper mapper = new ObjectMapper();
  final JsonNode leftTree = mapper.readTree( json1 );
  final JsonNode rightTree = mapper.readTree( json2 );
  return leftTree.equals( rightTree );
}
/**
 * Builds a row metadata object from the given value metas, in order.
 */
protected static RowMetaInterface createRowMeta( ValueMetaInterface... valueMetas ) {
RowMeta rowMeta = new RowMeta();
rowMeta.setValueMetaList( Arrays.asList( valueMetas ) );
return rowMeta;
}
/**
 * Variable substitution applied while resolving a JSONPath (here
 * {@code ${category}}) must not overwrite the path stored in the step
 * metadata itself.
 */
@Test
public void testJsonInputMetaInputFieldsNotOverwritten() throws Exception {
JsonInputField inputField = new JsonInputField();
final String PATH = "$..book[?(@.category=='${category}')].price";
inputField.setPath( PATH );
inputField.setType( ValueMetaInterface.TYPE_STRING );
final JsonInputMeta inputMeta = createSimpleMeta( "json", inputField );
VariableSpace variables = new Variables();
variables.setVariable( "category", "fiction" );
JsonInput jsonInput = createJsonInput( "json", inputMeta, variables, new Object[] { getBasicTestJson() } );
processRows( jsonInput, 2 );
// the meta must still hold the unresolved path after processing
assertEquals( "Meta input fields paths should be the same after processRows", PATH, inputMeta.getInputFields()[0].getPath() );
}
}
| |
/*
Copyright (c) 2000-2012 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.plugin.medknow;
import java.io.InputStream;
import java.util.regex.Pattern;
import org.lockss.config.ConfigManager;
import org.lockss.config.Configuration;
import org.lockss.daemon.ConfigParamDescr;
import org.lockss.extractor.ArticleMetadataExtractor;
import org.lockss.extractor.FileMetadataExtractor;
import org.lockss.extractor.MetadataTarget;
import org.lockss.plugin.*;
import org.lockss.plugin.simulated.SimulatedArchivalUnit;
import org.lockss.plugin.simulated.SimulatedContentGenerator;
import org.lockss.test.*;
import org.lockss.util.*;
import java.util.Iterator;
/*
* Stores sample article URLs with different format: abstract, full text HTML
* and full text PDF.
*
* Issue table of content:
* http://www.medknowarticleiteratortest.org/showBackIssue.asp?issn=0189-6725;year=2012;volume=9;issue=1
*
* Articles:
* Abstract - http://www.afrjpaedsurg.org/article.asp?issn=0189-6725;year=2012;volume=9;issue=1;spage=3;epage=7;aulast=Ibekwe;type=0
* Full-text HTML - http://www.afrjpaedsurg.org/article.asp?issn=0189-6725;year=2012;volume=9;issue=1;spage=3;epage=7;aulast=Ibekwe
* Full-text PDF - http://www.afrjpaedsurg.org/article.asp?issn=0189-6725;year=2012;volume=9;issue=1;spage=3;epage=7;aulast=Ibekwe;type=2
* Full-text Mobile- http://www.afrjpaedsurg.org/article.asp?issn=0189-6725;year=2012;volume=9;issue=1;spage=3;epage=7;aulast=Ibekwe;type=3
* Full-text EPUB- http://www.afrjpaedsurg.org/article.asp?issn=0189-6725;year=2012;volume=9;issue=1;spage=3;epage=7;aulast=Ibekwe;type=4
*/
/**
 * Article iterator test for the Medknow plugin: crawls a simulated archival
 * unit, stores sample abstract (type=0) and full-text PDF (type=2) article
 * URLs, then checks that the iterator emits ArticleFiles carrying those URLs
 * in the expected roles.
 */
public class TestMedknowArticleIteratorFactory extends ArticleIteratorTestCase {
private SimulatedArchivalUnit sau; // Simulated AU to generate content
private final String PLUGIN_NAME = "org.lockss.plugin.medknow.MedknowPlugin";
// configuration parameter keys defined by the plugin framework
static final String BASE_URL_KEY = ConfigParamDescr.BASE_URL.getKey();
static final String JOURNAL_ISSN_KEY = ConfigParamDescr.JOURNAL_ISSN.getKey();
static final String YEAR_KEY = ConfigParamDescr.YEAR.getKey();
static final String VOLUME_NAME_KEY = ConfigParamDescr.VOLUME_NAME.getKey();
// AU parameters for the journal under test
private final String BASE_URL = "http://www.afrjpaedsurg.org/";
private final String JOURNAL_ISSN = "0189-6725";
private final String VOLUME_NAME = "9";
private final String YEAR = "2012";
private static final int DEFAULT_FILESIZE = 3000;
// the two article URLs the iterator is expected to find (abstract and PDF)
private final String EXPECTED_ABS_URL = "http://www.afrjpaedsurg.org/article.asp?issn=0189-6725;year=2012;volume=9;issue=1;spage=3;epage=7;aulast=Ibekwe;type=0";
private final String EXPECTED_PDF_URL = "http://www.afrjpaedsurg.org/article.asp?issn=0189-6725;year=2012;volume=9;issue=1;spage=3;epage=7;aulast=Ibekwe;type=2";
protected String cuRole = null;
ArticleMetadataExtractor.Emitter emitter;
protected boolean emitDefaultIfNone = false;
FileMetadataExtractor me = null;
MetadataTarget target;
// creates the plugin AU plus a simulated AU that generates crawlable content
public void setUp() throws Exception {
super.setUp();
String tempDirPath = setUpDiskSpace();
au = createAu();
sau = PluginTestUtil.createAndStartSimAu(simAuConfig(tempDirPath));
}
public void tearDown() throws Exception {
sau.deleteContentTree();
super.tearDown();
}
protected ArchivalUnit createAu() throws ArchivalUnit.ConfigurationException {
return
PluginTestUtil.createAndStartAu(PLUGIN_NAME, MedknowAuConfig());
}
// configuration for the simulated content generator (depth/branching/file types)
Configuration simAuConfig(String rootPath) {
Configuration conf = ConfigManager.newConfiguration();
conf.put("root", rootPath);
conf.put("base_url", "http://www.afrjpaedsurg.org/");
conf.put("depth", "1");
conf.put("branch", "4");
conf.put("numFiles", "7");
conf.put("fileTypes",
"" + (SimulatedContentGenerator.FILE_TYPE_PDF)
+ (SimulatedContentGenerator.FILE_TYPE_HTML)
+ (SimulatedContentGenerator.FILE_TYPE_TXT));
conf.put("binFileSize", "" + DEFAULT_FILESIZE);
return conf;
}
// Set configuration attributes to create plugin AU (archival unit)
Configuration MedknowAuConfig() {
Configuration conf = ConfigManager.newConfiguration();
conf.put(BASE_URL_KEY, BASE_URL);
conf.put(JOURNAL_ISSN_KEY, JOURNAL_ISSN);
conf.put(VOLUME_NAME_KEY, VOLUME_NAME);
conf.put(YEAR_KEY, YEAR);
return conf;
}
// the iterator must be rooted at the AU's base URL
public void testRoots() throws Exception {
SubTreeArticleIterator artIter = createSubTreeIter();
assertEquals(ListUtil.list("http://www.afrjpaedsurg.org/"),
getRootUrls(artIter));
}
// URL pattern checks: valid type=0 article matches, malformed/foreign URLs do not
public void testUrlsWithPrefixes() throws Exception {
SubTreeArticleIterator artIter = createSubTreeIter();
Pattern pat = getPattern(artIter);
assertNotMatchesRE(pat, "http://www.afrjpaedsurg.org/article.asp?issn=0189-6725;year=2012;volume=9;issue=1;spage=3;epage=7;aulast=Ibekwe;type=wrong");
assertMatchesRE(pat, "http://www.afrjpaedsurg.org/article.asp?issn=0189-6725;year=2012;volume=9;issue=1;spage=3;epage=7;aulast=Ibekwe;type=0");
assertNotMatchesRE(pat, "http://www.example.com/content/");
assertNotMatchesRE(pat, "http://www.example.com/content/j");
assertNotMatchesRE(pat, "http://www.example.com/content/j0123/j383.pdfwrong");
}
// end-to-end: store abstract + PDF content, then verify role URLs on each ArticleFiles
public void testCreateArticleFiles() throws Exception {
PluginTestUtil.crawlSimAu(sau);
// create urls to store in UrlCacher
String[] urls = { BASE_URL + "article.asp?issn=0189-6725;year=2012;volume=9;issue=1;spage=3;epage=7;aulast=Ibekwe;type=0",
BASE_URL + "article.asp?issn=0189-6725;year=2012;volume=9;issue=1;spage=3;epage=7;aulast=Ibekwe;type=2" };
// get cached url content type and properties from simulated contents
// for UrclCacher.storeContent()
CachedUrl cuAbs = null;
CachedUrl cuPdf = null;
// borrow one HTML and one PDF cached URL from the simulated crawl
for (CachedUrl cu : AuUtil.getCuIterable(sau)) {
if (cuPdf == null
&& cu.getContentType().toLowerCase().startsWith(Constants.MIME_TYPE_PDF)) {
//log.info("pdf contenttype: " + cu.getContentType());
cuPdf = cu;
} else if (cuAbs == null
&& cu.getContentType().toLowerCase().startsWith(Constants.MIME_TYPE_HTML)) {
// log.info("abs html contenttype: " + cu.getContentType());
cuAbs = cu;
}
if (cuPdf != null && cuAbs != null) {
break;
}
}
// store content using cached url content type and properties
UrlCacher uc;
for (String url : urls) {
//log.info("url: " + url);
InputStream input = null;
CIProperties props = null;
if (url.contains("type=0")) {
input = cuAbs.getUnfilteredInputStream();
props = cuAbs.getProperties();
} else if (url.contains("type=2")) {
input = cuPdf.getUnfilteredInputStream();
props = cuPdf.getProperties();
}
UrlData ud = new UrlData(input, props, url);
uc = au.makeUrlCacher(ud);
uc.storeContent();
}
// get article iterator, get article files and the appropriate urls according
// to their roles.
String [] expectedUrls = { EXPECTED_ABS_URL,
EXPECTED_PDF_URL };
for (SubTreeArticleIterator artIter = createSubTreeIter(); artIter.hasNext(); ) {
ArticleFiles af = artIter.next();
String[] actualUrls = { af.getRoleUrl(ArticleFiles.ROLE_ABSTRACT),
af.getRoleUrl(ArticleFiles.ROLE_FULL_TEXT_PDF) };
//log.info("actualUrls: " + actualUrls.length);
for (int i = 0;i< actualUrls.length; i++) {
//log.info("expected url: " + expectedUrls[i]);
//log.info(" actual url: " + actualUrls[i]);
assertEquals(expectedUrls[i], actualUrls[i]);
}
}
}
}
| |
package com.axelby.gpodder;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.Vector;
import javax.net.ssl.HttpsURLConnection;
import net.iHarder.Base64;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.util.Log;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
/**
 * HTTP client for the gpodder.net subscription-sync web service: fetches the
 * service configuration, authenticates a session, manages the "podax" device
 * name, and synchronises podcast subscription changes.
 *
 * Fixes applied in review:
 * - authenticate()/setDeviceName() returned {@code true} even when a network
 *   exception occurred; they now report failure.
 * - Changes.isEmpty() had inverted logic (returned true when NON-empty).
 * - the configuration refresh interval (update_timeout, expressed in seconds
 *   per the gpodder clientconfig API) was added as MILLISECONDs, expiring the
 *   cached config after ~10 minutes instead of a week.
 * - clientconfig.json is now fetched over https.
 * - JsonReaders and the SQLite database handle are closed on all paths.
 */
public class Client {
    private static class Config {
        // endpoint defaults, used when clientconfig.json cannot be retrieved
        public String mygpo = "https://gpodder.net/";
        public String mygpo_feedservice = "https://mygpo-feedservice.appspot.com/";
        // lifetime of the retrieved configuration, in seconds (default: one week)
        public long update_timeout = 604800L;
    }

    private static Config _config;
    private static Calendar _configRefresh = null;

    /** Refreshes the cached service configuration when missing or expired. */
    public static void verifyCurrentConfig() {
        if (_configRefresh == null || _configRefresh.before(new GregorianCalendar())) {
            _config = retrieveGPodderConfig();

            // do NOT use basic auth over HTTP without SSL
            if (_config.mygpo.startsWith("http://"))
                _config.mygpo = "https://" + _config.mygpo.substring(7);
            if (_config.mygpo_feedservice.startsWith("http://"))
                _config.mygpo_feedservice = "https://" + _config.mygpo_feedservice.substring(7);

            _configRefresh = new GregorianCalendar();
            // update_timeout is in seconds; the previous code added it as
            // Calendar.MILLISECOND, which made the config expire far too early.
            _configRefresh.add(Calendar.SECOND, (int) _config.update_timeout);
        }
    }

    /**
     * Downloads gpodder.net's clientconfig.json. Falls back to the built-in
     * defaults (see {@link Config}) on any network or parse error.
     */
    private static Config retrieveGPodderConfig() {
        Config config = new Config();
        JsonReader reader = null;
        try {
            // fetch over https so the endpoint configuration cannot be tampered with
            URL url = new URL("https://gpodder.net/clientconfig.json");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.addRequestProperty("User-Agent", "gpodder_provider/1.4");
            reader = new JsonReader(new InputStreamReader(conn.getInputStream()));
            reader.beginObject();

            // "mygpo": { "baseurl": ... }
            reader.nextName();
            reader.beginObject();
            reader.nextName();
            config.mygpo = reader.nextString();
            reader.endObject();

            // "mygpo-feedservice": { "baseurl": ... }
            reader.nextName();
            reader.beginObject();
            reader.nextName();
            config.mygpo_feedservice = reader.nextString();
            reader.endObject();

            // "update_timeout": seconds until the config should be re-fetched
            reader.nextName();
            config.update_timeout = reader.nextLong();

            reader.endObject();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // previously leaked on exception paths
            if (reader != null)
                try { reader.close(); } catch (IOException ignored) { }
        }
        return config;
    }

    private Context _context;
    private String _username;
    private String _password;
    private String _sessionId;

    public Client(Context context, String username, String password) {
        _context = context;
        _username = username;
        _password = password;
    }

    /** Writes {@code toPost} as the request body, closing the stream afterwards. */
    private void writePost(HttpsURLConnection conn, String toPost)
            throws IOException {
        conn.setDoOutput(true);
        OutputStream output = null;
        try {
            output = conn.getOutputStream();
            output.write(toPost.getBytes());
        } finally {
            if (output != null) try { output.close(); } catch (IOException logOrIgnore) {}
        }
    }

    /**
     * Opens an https connection carrying either the session cookie (after
     * {@link #authenticate()}) or basic-auth credentials.
     */
    public HttpsURLConnection createConnection(URL url) throws IOException, Exception {
        HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
        if (_sessionId == null) {
            // basic authentication
            String toBase64 = _username + ":" + _password;
            conn.addRequestProperty("Authorization", "basic " + new String(Base64.encodeBytes(toBase64.getBytes())));
        } else {
            conn.addRequestProperty("Cookie", "sessionid=" + _sessionId);
        }
        conn.addRequestProperty("User-Agent", "GPodder.net Account for Android");
        return conn;
    }

    /**
     * Logs in and captures the session cookie.
     *
     * @return true only when the server accepted the credentials
     */
    public boolean authenticate() {
        verifyCurrentConfig();

        HttpsURLConnection conn = null;
        try {
            URL url = new URL(_config.mygpo + "api/2/auth/" + _username + "/login.json");
            conn = createConnection(url);
            writePost(conn, " ");
            conn.connect();

            if (conn.getResponseCode() != 200)
                return false;

            // remember the session id so later requests skip basic auth
            for (String val : conn.getHeaderFields().get("Set-Cookie")) {
                String[] data = val.split(";")[0].split("=");
                if (data[0].equals("sessionid"))
                    _sessionId = data[1];
            }
            return true;
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (conn != null)
                conn.disconnect();
        }
        // was `return true` -- a failed request must not report success
        return false;
    }

    /**
     * Looks up the caption of this app's device ("podax") on the server.
     *
     * @return the caption, or null when absent or on error
     */
    public String getDeviceName() {
        verifyCurrentConfig();

        HttpsURLConnection conn = null;
        try {
            URL url = new URL(_config.mygpo + "api/2/devices/" + _username + ".json");
            conn = createConnection(url);
            conn.connect();

            if (conn.getResponseCode() != 200)
                return null;

            JsonReader reader = new JsonReader(new InputStreamReader(conn.getInputStream()));
            try {
                reader.beginArray();
                while (reader.hasNext()) {
                    reader.beginObject();
                    String id = null;
                    String caption = null;
                    while (reader.hasNext()) {
                        String key = reader.nextName();
                        if (key.equals("id"))
                            id = reader.nextString();
                        else if (key.equals("caption"))
                            caption = reader.nextString();
                        else
                            reader.skipValue();
                    }
                    if (id != null && id.equals("podax"))
                        return caption;
                    reader.endObject();
                }
                reader.endArray();
            } finally {
                reader.close();
            }
            return null;
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (conn != null)
                conn.disconnect();
        }
        return null;
    }

    /**
     * Registers or renames this app's device on the server.
     *
     * @return true only when the server accepted the update
     */
    public boolean setDeviceName(String deviceName) {
        verifyCurrentConfig();

        HttpsURLConnection conn = null;
        try {
            URL url = new URL(_config.mygpo + "api/2/devices/" + _username + "/podax.json");
            conn = createConnection(url);
            writePost(conn, "{\"caption\": \"" + deviceName + "\", \"type\": \"mobile\"}");
            conn.connect();
            return conn.getResponseCode() == 200;
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (conn != null)
                conn.disconnect();
        }
        // was `return true` -- a failed request must not report success
        return false;
    }

    /** Result of a subscription-changes query. */
    public class Changes {
        public Vector<String> added = new Vector<String>();
        public Vector<String> removed = new Vector<String>();
        public int timestamp = 0;

        /** True once a server response populated the timestamp. */
        public boolean isValid() { return timestamp != 0; }
        /**
         * True when no subscriptions were added or removed.
         * Fix: the previous implementation returned the opposite.
         */
        public boolean isEmpty() { return added.size() == 0 && removed.size() == 0; }
    }

    /**
     * Fetches subscriptions added/removed since {@code lastCheck}. Returns an
     * empty (invalid) {@link Changes} on any error.
     */
    public Changes getSubscriptionChanges(int lastCheck) {
        verifyCurrentConfig();

        Changes changes = new Changes();
        HttpsURLConnection conn = null;
        JsonReader reader = null;
        try {
            URL url = new URL(_config.mygpo + "api/2/subscriptions/" + _username + "/podax.json?since=" + String.valueOf(lastCheck));
            conn = createConnection(url);
            conn.connect();

            if (conn.getResponseCode() != 200)
                return changes;

            reader = new JsonReader(new InputStreamReader(conn.getInputStream()));
            reader.beginObject();
            while (reader.hasNext()) {
                String key = reader.nextName();
                if (key.equals("timestamp")) {
                    changes.timestamp = reader.nextInt();
                } else if (key.equals("add")) {
                    reader.beginArray();
                    while (reader.hasNext())
                        changes.added.add(reader.nextString());
                    reader.endArray();
                } else if (key.equals("remove")) {
                    reader.beginArray();
                    while (reader.hasNext())
                        changes.removed.add(reader.nextString());
                    reader.endArray();
                }
            }
            reader.endObject();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // previously leaked on exception paths
            if (reader != null)
                try { reader.close(); } catch (IOException ignored) { }
            if (conn != null)
                conn.disconnect();
        }
        return changes;
    }

    /**
     * Pushes locally pending subscription additions/removals to the server and
     * clears the pending tables once the server acknowledges them.
     */
    public void syncDiffs() {
        verifyCurrentConfig();

        SQLiteDatabase db = new DBAdapter(_context).getWritableDatabase();
        try {
            Vector<String> toAdd = new Vector<String>();
            Cursor c = db.rawQuery("SELECT url FROM pending_add", null);
            while (c.moveToNext())
                toAdd.add(c.getString(0));
            c.close();

            Vector<String> toRemove = new Vector<String>();
            c = db.rawQuery("SELECT url FROM pending_remove", null);
            while (c.moveToNext())
                toRemove.add(c.getString(0));
            c.close();

            if (toAdd.size() == 0 && toRemove.size() == 0)
                return;

            HttpsURLConnection conn = null;
            try {
                URL url = new URL(_config.mygpo + "api/2/subscriptions/" + _username + "/podax.json");
                conn = createConnection(url);
                conn.setDoOutput(true);

                OutputStreamWriter streamWriter = new OutputStreamWriter(conn.getOutputStream());
                JsonWriter writer = new JsonWriter(streamWriter);
                writer.beginObject();
                // rectify removed urls by adding them
                writer.name("add");
                writer.beginArray();
                for (String s : toAdd)
                    writer.value(s);
                writer.endArray();
                // rectify added urls by removing them
                writer.name("remove");
                writer.beginArray();
                for (String s : toRemove)
                    writer.value(s);
                writer.endArray();
                writer.endObject();
                writer.close();

                conn.connect();
                if (conn.getResponseCode() != 200)
                    return;

                // clear out the pending tables; the server has acknowledged the diff
                db.execSQL("DELETE FROM pending_add");
                db.execSQL("DELETE FROM pending_remove");

                // no need to handle the output
                Log.d("gpodder", "done syncing");
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                // previously the connection was never released here
                if (conn != null)
                    conn.disconnect();
            }
        } finally {
            // previously leaked on the early returns and on exceptions
            db.close();
        }
    }
}
| |
/*
* Copyright (c) 2002-2016, the original author or authors.
*
* This software is distributable under the BSD license. See the terms of the
* BSD license in the documentation provided with this software.
*
* http://www.opensource.org/licenses/bsd-license.php
*/
package jline;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import jline.internal.Configuration;
import jline.internal.Log;
import org.fusesource.jansi.internal.WindowsSupport;
import org.fusesource.jansi.internal.Kernel32;
import static org.fusesource.jansi.internal.Kernel32.*;
import static jline.WindowsTerminal.ConsoleMode.ENABLE_ECHO_INPUT;
import static jline.WindowsTerminal.ConsoleMode.ENABLE_LINE_INPUT;
import static jline.WindowsTerminal.ConsoleMode.ENABLE_PROCESSED_INPUT;
import static jline.WindowsTerminal.ConsoleMode.ENABLE_WINDOW_INPUT;
/**
* Terminal implementation for Microsoft Windows. Terminal initialization in
* {@link #init} is accomplished by extracting the
* <em>jline_<i>version</i>.dll</em>, saving it to the system temporary
* directoy (determined by the setting of the <em>java.io.tmpdir</em> System
* property), loading the library, and then calling the Win32 APIs <a
* href="http://msdn.microsoft.com/library/default.asp?
* url=/library/en-us/dllproc/base/setconsolemode.asp">SetConsoleMode</a> and
* <a href="http://msdn.microsoft.com/library/default.asp?
* url=/library/en-us/dllproc/base/getconsolemode.asp">GetConsoleMode</a> to
* disable character echoing.
* <p/>
* <p>
* By default, the {@link #wrapInIfNeeded(java.io.InputStream)} method will attempt
* to test to see if the specified {@link InputStream} is {@link System#in} or a wrapper
* around {@link FileDescriptor#in}, and if so, will bypass the character reading to
* directly invoke the readc() method in the JNI library. This is so the class
* can read special keys (like arrow keys) which are otherwise inaccessible via
* the {@link System#in} stream. Using JNI reading can be bypassed by setting
* the <code>jline.WindowsTerminal.directConsole</code> system property
* to <code>false</code>.
* </p>
*
* @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
* @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
* @since 2.0
*/
public class WindowsTerminal
extends TerminalSupport
{
public static final String DIRECT_CONSOLE = WindowsTerminal.class.getName() + ".directConsole";
public static final String ANSI = WindowsTerminal.class.getName() + ".ansi";
private boolean directConsole;
private int originalMode;
/**
 * Creates a Windows terminal; the {@code true} passed to the superclass
 * presumably marks it as a supported (fully functional) terminal -- confirm
 * against TerminalSupport's constructor.
 */
public WindowsTerminal() throws Exception {
super(true);
}
/**
 * Initialises the terminal: enables ANSI handling if configured, saves the
 * current console mode for {@link #restore()}, and disables character echo.
 */
@Override
public void init() throws Exception {
super.init();
setAnsiSupported(Configuration.getBoolean(ANSI, true));
//
// FIXME: Need a way to disable direct console and sysin detection muck
//
setDirectConsole(Configuration.getBoolean(DIRECT_CONSOLE, true));
// remember the mode in effect before we touched anything, then turn echo off
this.originalMode = getConsoleMode();
setConsoleMode(originalMode & ~ENABLE_ECHO_INPUT.code);
setEchoEnabled(false);
}
/**
 * Restore the original terminal configuration, which can be used when
 * shutting down the console reader. The ConsoleReader cannot be
 * used after calling this method.
 */
@Override
public void restore() throws Exception {
// restore the console mode captured in init()
setConsoleMode(originalMode);
super.restore();
}
/** Console width in columns, or the default when the native call reports none. */
@Override
public int getWidth() {
    final int width = getWindowsTerminalWidth();
    if (width < 1) {
        return DEFAULT_WIDTH;
    }
    return width;
}
/** Console height in rows, or the default when the native call reports none. */
@Override
public int getHeight() {
    final int height = getWindowsTerminalHeight();
    if (height < 1) {
        return DEFAULT_HEIGHT;
    }
    return height;
}
/**
 * Enables or disables character echo by toggling the console input-mode
 * flags; echo, line and window input are switched together (see comment).
 */
@Override
public void setEchoEnabled(final boolean enabled) {
// Must set these four modes at the same time to make it work fine.
if (enabled) {
setConsoleMode(getConsoleMode() |
ENABLE_ECHO_INPUT.code |
ENABLE_LINE_INPUT.code |
ENABLE_WINDOW_INPUT.code);
}
else {
setConsoleMode(getConsoleMode() &
~(ENABLE_LINE_INPUT.code |
ENABLE_ECHO_INPUT.code |
ENABLE_WINDOW_INPUT.code));
}
super.setEchoEnabled(enabled);
}
/**
 * Clears ENABLE_PROCESSED_INPUT so Ctrl+C is delivered as input rather than
 * being handled by the console.
 */
public void disableInterruptCharacter() {
setConsoleMode(getConsoleMode() &
~(ENABLE_PROCESSED_INPUT.code));
}
public void enableInterruptCharacter() {
setConsoleMode(getConsoleMode() |
ENABLE_PROCESSED_INPUT.code);
}
/**
* Whether or not to allow the use of the JNI console interaction.
*/
public void setDirectConsole(final boolean flag) {
this.directConsole = flag;
Log.debug("Direct console: ", flag);
}
/**
* Whether or not to allow the use of the JNI console interaction.
*/
public Boolean getDirectConsole() {
return directConsole;
}
@Override
public InputStream wrapInIfNeeded(InputStream in) throws IOException {
if (directConsole && isSystemIn(in)) {
return new InputStream() {
private byte[] buf = null;
int bufIdx = 0;
@Override
public int read() throws IOException {
while (buf == null || bufIdx == buf.length) {
buf = readConsoleInput();
bufIdx = 0;
}
int c = buf[bufIdx] & 0xFF;
bufIdx++;
return c;
}
};
} else {
return super.wrapInIfNeeded(in);
}
}
protected boolean isSystemIn(final InputStream in) throws IOException {
if (in == null) {
return false;
}
else if (in == System.in) {
return true;
}
else if (in instanceof FileInputStream && ((FileInputStream) in).getFD() == FileDescriptor.in) {
return true;
}
return false;
}
@Override
public String getOutputEncoding() {
int codepage = getConsoleOutputCodepage();
//http://docs.oracle.com/javase/6/docs/technotes/guides/intl/encoding.doc.html
String charsetMS = "ms" + codepage;
if (java.nio.charset.Charset.isSupported(charsetMS)) {
return charsetMS;
}
String charsetCP = "cp" + codepage;
if (java.nio.charset.Charset.isSupported(charsetCP)) {
return charsetCP;
}
Log.debug("can't figure out the Java Charset of this code page (" + codepage + ")...");
return super.getOutputEncoding();
}
//
// Native Bits
//
private static int getConsoleMode() {
return WindowsSupport.getConsoleMode();
}
private static void setConsoleMode(int mode) {
WindowsSupport.setConsoleMode(mode);
}
private byte[] readConsoleInput() {
// XXX does how many events to read in one call matter?
INPUT_RECORD[] events = null;
try {
events = WindowsSupport.readConsoleInput(1);
} catch (IOException e) {
Log.debug("read Windows console input error: ", e);
}
if (events == null) {
return new byte[0];
}
StringBuilder sb = new StringBuilder();
for (int i = 0; i < events.length; i++ ) {
KEY_EVENT_RECORD keyEvent = events[i].keyEvent;
//Log.trace(keyEvent.keyDown? "KEY_DOWN" : "KEY_UP", "key code:", keyEvent.keyCode, "char:", (long)keyEvent.uchar);
if (keyEvent.keyDown) {
if (keyEvent.uchar > 0) {
// support some C1 control sequences: ALT + [@-_] (and [a-z]?) => ESC <ascii>
// http://en.wikipedia.org/wiki/C0_and_C1_control_codes#C1_set
final int altState = KEY_EVENT_RECORD.LEFT_ALT_PRESSED | KEY_EVENT_RECORD.RIGHT_ALT_PRESSED;
// Pressing "Alt Gr" is translated to Alt-Ctrl, hence it has to be checked that Ctrl is _not_ pressed,
// otherwise inserting of "Alt Gr" codes on non-US keyboards would yield errors
final int ctrlState = KEY_EVENT_RECORD.LEFT_CTRL_PRESSED | KEY_EVENT_RECORD.RIGHT_CTRL_PRESSED;
if (((keyEvent.uchar >= '@' && keyEvent.uchar <= '_') || (keyEvent.uchar >= 'a' && keyEvent.uchar <= 'z'))
&& ((keyEvent.controlKeyState & altState) != 0) && ((keyEvent.controlKeyState & ctrlState) == 0)) {
sb.append('\u001B'); // ESC
}
sb.append(keyEvent.uchar);
continue;
}
// virtual keycodes: http://msdn.microsoft.com/en-us/library/windows/desktop/dd375731(v=vs.85).aspx
// just add support for basic editing keys (no control state, no numpad keys)
String escapeSequence = null;
switch (keyEvent.keyCode) {
case 0x21: // VK_PRIOR PageUp
escapeSequence = "\u001B[5~";
break;
case 0x22: // VK_NEXT PageDown
escapeSequence = "\u001B[6~";
break;
case 0x23: // VK_END
escapeSequence = "\u001B[4~";
break;
case 0x24: // VK_HOME
escapeSequence = "\u001B[1~";
break;
case 0x25: // VK_LEFT
escapeSequence = "\u001B[D";
break;
case 0x26: // VK_UP
escapeSequence = "\u001B[A";
break;
case 0x27: // VK_RIGHT
escapeSequence = "\u001B[C";
break;
case 0x28: // VK_DOWN
escapeSequence = "\u001B[B";
break;
case 0x2D: // VK_INSERT
escapeSequence = "\u001B[2~";
break;
case 0x2E: // VK_DELETE
escapeSequence = "\u001B[3~";
break;
default:
break;
}
if (escapeSequence != null) {
for (int k = 0; k < keyEvent.repeatCount; k++) {
sb.append(escapeSequence);
}
}
} else {
// key up event
// support ALT+NumPad input method
if (keyEvent.keyCode == 0x12/*VK_MENU ALT key*/ && keyEvent.uchar > 0) {
sb.append(keyEvent.uchar);
}
}
}
return sb.toString().getBytes();
}
private static int getConsoleOutputCodepage() {
return Kernel32.GetConsoleOutputCP();
}
private static int getWindowsTerminalWidth() {
return WindowsSupport.getWindowsTerminalWidth();
}
private static int getWindowsTerminalHeight() {
return WindowsSupport.getWindowsTerminalHeight();
}
/**
* Console mode
* <p/>
* Constants copied <tt>wincon.h</tt>.
*/
public static enum ConsoleMode
{
/**
* The ReadFile or ReadConsole function returns only when a carriage return
* character is read. If this mode is disable, the functions return when one
* or more characters are available.
*/
ENABLE_LINE_INPUT(2),
/**
* Characters read by the ReadFile or ReadConsole function are written to
* the active screen buffer as they are read. This mode can be used only if
* the ENABLE_LINE_INPUT mode is also enabled.
*/
ENABLE_ECHO_INPUT(4),
/**
* CTRL+C is processed by the system and is not placed in the input buffer.
* If the input buffer is being read by ReadFile or ReadConsole, other
* control keys are processed by the system and are not returned in the
* ReadFile or ReadConsole buffer. If the ENABLE_LINE_INPUT mode is also
* enabled, backspace, carriage return, and linefeed characters are handled
* by the system.
*/
ENABLE_PROCESSED_INPUT(1),
/**
* User interactions that change the size of the console screen buffer are
* reported in the console's input buffee. Information about these events
* can be read from the input buffer by applications using
* theReadConsoleInput function, but not by those using ReadFile
* orReadConsole.
*/
ENABLE_WINDOW_INPUT(8),
/**
* If the mouse pointer is within the borders of the console window and the
* window has the keyboard focus, mouse events generated by mouse movement
* and button presses are placed in the input buffer. These events are
* discarded by ReadFile or ReadConsole, even when this mode is enabled.
*/
ENABLE_MOUSE_INPUT(16),
/**
* When enabled, text entered in a console window will be inserted at the
* current cursor location and all text following that location will not be
* overwritten. When disabled, all following text will be overwritten. An OR
* operation must be performed with this flag and the ENABLE_EXTENDED_FLAGS
* flag to enable this functionality.
*/
ENABLE_PROCESSED_OUTPUT(1),
/**
* This flag enables the user to use the mouse to select and edit text. To
* enable this option, use the OR to combine this flag with
* ENABLE_EXTENDED_FLAGS.
*/
ENABLE_WRAP_AT_EOL_OUTPUT(2),;
public final int code;
ConsoleMode(final int code) {
this.code = code;
}
}
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.master;
import static alluxio.util.network.NetworkAddressUtils.ServiceType;
import alluxio.AlluxioURI;
import alluxio.executor.ExecutorServiceBuilder;
import alluxio.RuntimeConstants;
import alluxio.conf.PropertyKey;
import alluxio.conf.ServerConfiguration;
import alluxio.grpc.GrpcServer;
import alluxio.grpc.GrpcServerAddress;
import alluxio.grpc.GrpcServerBuilder;
import alluxio.grpc.GrpcService;
import alluxio.grpc.JournalDomain;
import alluxio.master.journal.DefaultJournalMaster;
import alluxio.master.journal.JournalMasterClientServiceHandler;
import alluxio.master.journal.JournalSystem;
import alluxio.master.journal.JournalUtils;
import alluxio.master.journal.raft.RaftJournalSystem;
import alluxio.metrics.MetricKey;
import alluxio.metrics.MetricsSystem;
import alluxio.resource.CloseableResource;
import alluxio.security.user.ServerUserState;
import alluxio.underfs.MasterUfsManager;
import alluxio.underfs.UnderFileSystem;
import alluxio.underfs.UnderFileSystemConfiguration;
import alluxio.util.CommonUtils.ProcessType;
import alluxio.util.JvmPauseMonitor;
import alluxio.util.URIUtils;
import alluxio.util.network.NetworkAddressUtils;
import alluxio.web.MasterWebServer;
import com.google.common.base.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.URI;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
import javax.annotation.concurrent.NotThreadSafe;
import javax.annotation.concurrent.ThreadSafe;
/**
 * An Alluxio Master which runs a web and rpc server to handle FileSystem operations.
 */
@NotThreadSafe
public class AlluxioMasterProcess extends MasterProcess {
  private static final Logger LOG = LoggerFactory.getLogger(AlluxioMasterProcess.class);
  /** The master registry. */
  private final MasterRegistry mRegistry;
  /** The JVMMonitor Progress. */
  private JvmPauseMonitor mJvmPauseMonitor;
  /** The connect address for the rpc server. */
  final InetSocketAddress mRpcConnectAddress;
  /** The manager of safe mode state. */
  protected final SafeModeManager mSafeModeManager;
  /** Master context. */
  protected final MasterContext mContext;
  /** The manager for creating and restoring backups. */
  private final BackupManager mBackupManager;
  /** The manager of all ufs. */
  private final MasterUfsManager mUfsManager;
  /** Executor backing the RPC server; created in createRPCServer(), shut down in stopServing(). */
  private AlluxioExecutorService mRPCExecutor = null;
  /**
   * Creates a new {@link AlluxioMasterProcess}.
   *
   * @param journalSystem the journal system to use; must already be formatted
   */
  AlluxioMasterProcess(JournalSystem journalSystem) {
    super(journalSystem, ServiceType.MASTER_RPC, ServiceType.MASTER_WEB);
    mRpcConnectAddress = NetworkAddressUtils.getConnectAddress(ServiceType.MASTER_RPC,
        ServerConfiguration.global());
    try {
      // Refuse to start against an unformatted journal rather than silently
      // running without persisted state.
      if (!mJournalSystem.isFormatted()) {
        throw new RuntimeException(
            String.format("Journal %s has not been formatted!", mJournalSystem));
      }
      // Create masters.
      mRegistry = new MasterRegistry();
      mSafeModeManager = new DefaultSafeModeManager();
      mBackupManager = new BackupManager(mRegistry);
      String baseDir = ServerConfiguration.get(PropertyKey.MASTER_METASTORE_DIR);
      mUfsManager = new MasterUfsManager();
      mContext = CoreMasterContext.newBuilder()
          .setJournalSystem(mJournalSystem)
          .setSafeModeManager(mSafeModeManager)
          .setBackupManager(mBackupManager)
          .setBlockStoreFactory(MasterUtils.getBlockStoreFactory(baseDir))
          .setInodeStoreFactory(MasterUtils.getInodeStoreFactory(baseDir))
          .setStartTimeMs(mStartTimeMs)
          .setPort(NetworkAddressUtils
              .getPort(ServiceType.MASTER_RPC, ServerConfiguration.global()))
          .setUfsManager(mUfsManager)
          .build();
      MasterUtils.createMasters(mRegistry, mContext);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  @Override
  public <T extends Master> T getMaster(Class<T> clazz) {
    return mRegistry.get(clazz);
  }
  /**
   * @return true if Alluxio is running in safe mode, false otherwise
   */
  public boolean isInSafeMode() {
    return mSafeModeManager.isInSafeMode();
  }
  /**
   * @return the bound web address, or null when the web server has not been started
   */
  @Override
  @Nullable
  public InetSocketAddress getWebAddress() {
    if (mWebServer != null) {
      return new InetSocketAddress(mWebServer.getBindHost(), mWebServer.getLocalPort());
    }
    return null;
  }
  @Override
  public InetSocketAddress getRpcAddress() {
    return mRpcConnectAddress;
  }
  @Override
  public void start() throws Exception {
    LOG.info("Starting...");
    // Ordering matters: the journal must be running and this process must hold
    // primacy before the masters start and the servers begin accepting requests.
    mJournalSystem.start();
    mJournalSystem.gainPrimacy();
    startMasters(true);
    startServing();
  }
  @Override
  public void stop() throws Exception {
    LOG.info("Stopping...");
    // Tear down in roughly the reverse order of start().
    stopRejectingServers();
    if (isServing()) {
      stopServing();
    }
    mJournalSystem.stop();
    closeMasters();
    LOG.info("Stopped.");
  }
  /**
   * Restores master metadata from the given backup.
   *
   * @param backup URI of the backup; a local-filesystem URI is read directly,
   *        anything else is opened through the root UFS
   */
  private void initFromBackup(AlluxioURI backup) throws IOException {
    CloseableResource<UnderFileSystem> ufsResource;
    if (URIUtils.isLocalFilesystem(backup.toString())) {
      UnderFileSystem ufs = UnderFileSystem.Factory.create("/",
          UnderFileSystemConfiguration.defaults(ServerConfiguration.global()));
      // Local UFS is cheap to create, so the close is a no-op.
      ufsResource = new CloseableResource<UnderFileSystem>(ufs) {
        @Override
        public void closeResource() { }
      };
    } else {
      ufsResource = mUfsManager.getRoot().acquireUfsResource();
    }
    try (CloseableResource<UnderFileSystem> closeUfs = ufsResource;
        InputStream ufsIn = closeUfs.get().open(backup.getPath())) {
      LOG.info("Initializing metadata from backup {}", backup);
      mBackupManager.initFromBackup(ufsIn);
    }
  }
  /**
   * Starts all masters, including block master, FileSystem master, and additional masters.
   *
   * @param isLeader if the Master is leader
   */
  protected void startMasters(boolean isLeader) throws IOException {
    LOG.info("Starting all masters as: {}.", (isLeader) ? "leader" : "follower");
    if (isLeader) {
      if (ServerConfiguration.isSet(PropertyKey.MASTER_JOURNAL_INIT_FROM_BACKUP)) {
        AlluxioURI backup =
            new AlluxioURI(ServerConfiguration.get(PropertyKey.MASTER_JOURNAL_INIT_FROM_BACKUP));
        // Only restore from backup into a freshly-formatted (empty) journal;
        // otherwise the existing journal state wins.
        if (mJournalSystem.isEmpty()) {
          initFromBackup(backup);
        } else {
          LOG.info("The journal system is not freshly formatted, skipping restoring backup from "
              + backup);
        }
      }
      mSafeModeManager.notifyPrimaryMasterStarted();
    } else {
      // Followers reject requests until they gain primacy.
      startRejectingServers();
    }
    mRegistry.start(isLeader);
    // Signal state-lock-manager that masters are ready.
    mContext.getStateLockManager().mastersStartedCallback();
    LOG.info("All masters started.");
  }
  /**
   * Stops all masters, including block master, fileSystem master and additional masters.
   */
  protected void stopMasters() {
    try {
      LOG.info("Stopping all masters.");
      mRegistry.stop();
      LOG.info("All masters stopped.");
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
  /**
   * Closes all masters, including block master, fileSystem master and additional masters.
   */
  protected void closeMasters() {
    try {
      LOG.info("Closing all masters.");
      mRegistry.close();
      LOG.info("Closed all masters.");
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
  /**
   * Starts serving web ui server, resetting master web port, adding the metrics servlet to the web
   * server and starting web ui.
   */
  protected void startServingWebServer() {
    stopRejectingWebServer();
    mWebServer =
        new MasterWebServer(ServiceType.MASTER_WEB.getServiceName(), mWebBindAddress, this);
    // reset master web port
    // start web ui
    mWebServer.start();
  }
  /**
   * Starts jvm monitor process, to monitor jvm.
   */
  protected void startJvmMonitorProcess() {
    if (ServerConfiguration.getBoolean(PropertyKey.MASTER_JVM_MONITOR_ENABLED)) {
      mJvmPauseMonitor = new JvmPauseMonitor(
          ServerConfiguration.getMs(PropertyKey.JVM_MONITOR_SLEEP_INTERVAL_MS),
          ServerConfiguration.getMs(PropertyKey.JVM_MONITOR_WARN_THRESHOLD_MS),
          ServerConfiguration.getMs(PropertyKey.JVM_MONITOR_INFO_THRESHOLD_MS));
      mJvmPauseMonitor.start();
      // Export pause-monitor counters as gauges so they show up in the metrics system.
      MetricsSystem.registerGaugeIfAbsent(
          MetricsSystem.getMetricName(MetricKey.TOTAL_EXTRA_TIME.getName()),
          mJvmPauseMonitor::getTotalExtraTime);
      MetricsSystem.registerGaugeIfAbsent(
          MetricsSystem.getMetricName(MetricKey.INFO_TIME_EXCEEDED.getName()),
          mJvmPauseMonitor::getInfoTimeExceeded);
      MetricsSystem.registerGaugeIfAbsent(
          MetricsSystem.getMetricName(MetricKey.WARN_TIME_EXCEEDED.getName()),
          mJvmPauseMonitor::getWarnTimeExceeded);
    }
  }
  /**
   * Starts serving, letting {@link MetricsSystem} start sink and starting the web ui server and RPC
   * Server. Note: this method blocks until the RPC server is shut down via #stopServing.
   *
   * @param startMessage empty string or the message that the master gains the leadership
   * @param stopMessage empty string or the message that the master loses the leadership
   */
  protected void startServing(String startMessage, String stopMessage) {
    MetricsSystem.startSinks(ServerConfiguration.get(PropertyKey.METRICS_CONF_FILE));
    startServingRPCServer();
    LOG.info("Alluxio master web server version {} starting{}. webAddress={}",
        RuntimeConstants.VERSION, startMessage, mWebBindAddress);
    startServingWebServer();
    startJvmMonitorProcess();
    LOG.info(
        "Alluxio master version {} started{}. bindAddress={}, connectAddress={}, webAddress={}",
        RuntimeConstants.VERSION, startMessage, mRpcBindAddress, mRpcConnectAddress,
        mWebBindAddress);
    // Blocks until RPC server is shut down. (via #stopServing)
    mGrpcServer.awaitTermination();
    LOG.info("Alluxio master ended {}", stopMessage);
  }
  /**
   * Starts the gRPC server. The AlluxioMaster registers the Services of registered
   * {@link Master}s and meta services.
   */
  protected void startServingRPCServer() {
    stopRejectingRpcServer();
    LOG.info("Starting gRPC server on address:{}", mRpcBindAddress);
    mGrpcServer = createRPCServer();
    try {
      // Start serving.
      mGrpcServer.start();
      mSafeModeManager.notifyRpcServerStarted();
      // Acquire and log bind port from newly started server.
      InetSocketAddress listeningAddress = InetSocketAddress
          .createUnresolved(mRpcBindAddress.getHostName(), mGrpcServer.getBindPort());
      LOG.info("gRPC server listening on: {}", listeningAddress);
    } catch (IOException e) {
      LOG.error("gRPC serving failed.", e);
      throw new RuntimeException("gRPC serving failed");
    }
  }
  /**
   * Builds (but does not start) the gRPC server, wiring in the RPC executor,
   * flow-control/keep-alive settings, and the service handlers of every
   * registered master plus the journal master service.
   *
   * @return the configured, unstarted server
   */
  private GrpcServer createRPCServer() {
    // Create an executor for Master RPC server.
    mRPCExecutor = ExecutorServiceBuilder.buildExecutorService(
        ExecutorServiceBuilder.RpcExecutorHost.MASTER);
    MetricsSystem.registerGaugeIfAbsent(MetricKey.MASTER_RPC_QUEUE_LENGTH.getName(),
        mRPCExecutor::getRpcQueueLength);
    // Create underlying gRPC server.
    GrpcServerBuilder builder = GrpcServerBuilder
        .forAddress(GrpcServerAddress.create(mRpcConnectAddress.getHostName(), mRpcBindAddress),
            ServerConfiguration.global(), ServerUserState.global())
        .executor(mRPCExecutor)
        .flowControlWindow(
            (int) ServerConfiguration.getBytes(PropertyKey.MASTER_NETWORK_FLOWCONTROL_WINDOW))
        .keepAliveTime(
            ServerConfiguration.getMs(PropertyKey.MASTER_NETWORK_KEEPALIVE_TIME_MS),
            TimeUnit.MILLISECONDS)
        .keepAliveTimeout(
            ServerConfiguration.getMs(PropertyKey.MASTER_NETWORK_KEEPALIVE_TIMEOUT_MS),
            TimeUnit.MILLISECONDS)
        .permitKeepAlive(
            ServerConfiguration.getMs(PropertyKey.MASTER_NETWORK_PERMIT_KEEPALIVE_TIME_MS),
            TimeUnit.MILLISECONDS)
        .maxInboundMessageSize((int) ServerConfiguration.getBytes(
            PropertyKey.MASTER_NETWORK_MAX_INBOUND_MESSAGE_SIZE));
    // Bind manifests of each Alluxio master to RPC server.
    for (Master master : mRegistry.getServers()) {
      registerServices(builder, master.getServices());
    }
    // Bind manifest of Alluxio JournalMaster service.
    // TODO(ggezer) Merge this with registerServices() logic.
    builder.addService(alluxio.grpc.ServiceType.JOURNAL_MASTER_CLIENT_SERVICE,
        new GrpcService(new JournalMasterClientServiceHandler(
            new DefaultJournalMaster(JournalDomain.MASTER, mJournalSystem))));
    // Builds a server that is not started yet.
    return builder.build();
  }
  /**
   * Stops serving, trying stop RPC server and web ui server and letting {@link MetricsSystem} stop
   * all the sinks.
   */
  protected void stopServing() throws Exception {
    if (isServing()) {
      if (!mGrpcServer.shutdown()) {
        LOG.warn("Alluxio master RPC server shutdown timed out.");
      }
    }
    if (mRPCExecutor != null) {
      mRPCExecutor.shutdownNow();
      try {
        mRPCExecutor.awaitTermination(
            ServerConfiguration.getMs(PropertyKey.NETWORK_CONNECTION_SERVER_SHUTDOWN_TIMEOUT),
            TimeUnit.MILLISECONDS);
      } catch (InterruptedException ie) {
        // Preserve the interrupt status for callers further up the stack.
        Thread.currentThread().interrupt();
      }
    }
    if (mJvmPauseMonitor != null) {
      mJvmPauseMonitor.stop();
    }
    if (mWebServer != null) {
      mWebServer.stop();
      mWebServer = null;
    }
    MetricsSystem.stopSinks();
  }
  @Override
  public String toString() {
    return "Alluxio master @" + mRpcConnectAddress;
  }
  /**
   * Factory for creating {@link AlluxioMasterProcess}.
   */
  @ThreadSafe
  public static final class Factory {
    /**
     * Creates a new {@link AlluxioMasterProcess}.
     *
     * @return a new instance of {@link MasterProcess} using the given sockets for the master
     */
    public static AlluxioMasterProcess create() {
      URI journalLocation = JournalUtils.getJournalLocation();
      JournalSystem journalSystem = new JournalSystem.Builder()
          .setLocation(journalLocation).build(ProcessType.MASTER);
      if (ServerConfiguration.getBoolean(PropertyKey.ZOOKEEPER_ENABLED)) {
        Preconditions.checkState(!(journalSystem instanceof RaftJournalSystem),
            "Raft-based embedded journal and Zookeeper cannot be used at the same time.");
        // ZK-based HA: primacy is decided by a Zookeeper primary selector.
        PrimarySelector primarySelector = PrimarySelector.Factory.createZkPrimarySelector();
        return new FaultTolerantAlluxioMasterProcess(journalSystem, primarySelector);
      } else if (journalSystem instanceof RaftJournalSystem) {
        // Embedded-journal HA: primacy is decided by the Raft quorum itself.
        PrimarySelector primarySelector = ((RaftJournalSystem) journalSystem).getPrimarySelector();
        return new FaultTolerantAlluxioMasterProcess(journalSystem, primarySelector);
      }
      // Single-master deployment.
      return new AlluxioMasterProcess(journalSystem);
    }
    private Factory() {} // prevent instantiation
  }
}
| |
/*******************************************************************************
* Copyright (c) 2000, 2013 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.internal.corext.codemanipulation;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.eclipse.jdt.core.ICompilationUnit;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.IPackageFragment;
import org.eclipse.jdt.core.IType;
import org.eclipse.jdt.core.ITypeRoot;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.jdt.core.dom.ASTNode;
import org.eclipse.jdt.core.dom.AbstractTypeDeclaration;
import org.eclipse.jdt.core.dom.CompilationUnit;
import org.eclipse.jdt.core.dom.IBinding;
import org.eclipse.jdt.core.dom.ITypeBinding;
import org.eclipse.jdt.core.dom.Modifier;
import org.eclipse.jdt.core.dom.Name;
import org.eclipse.jdt.core.dom.SimpleName;
import org.eclipse.jdt.core.dom.rewrite.ImportRewrite;
import org.eclipse.jdt.core.dom.rewrite.ImportRewrite.ImportRewriteContext;
import org.eclipse.jdt.internal.corext.dom.ScopeAnalyzer;
import org.eclipse.jdt.internal.corext.util.JavaModelUtil;
/**
* This <code>ImportRewriteContext</code> is aware of all the types visible in
* <code>compilationUnit</code> at <code>position</code>.
* <p>
* <b>Note:</b> This context only works if the AST was created with bindings!
* </p>
*/
public class ContextSensitiveImportRewriteContext extends ImportRewriteContext {
    private final CompilationUnit fCompilationUnit;
    private final int fPosition;
    // Lazily computed caches, filled on first use by getDeclarationsInScope()
    // and getImportedNames() respectively.
    private IBinding[] fDeclarationsInScope;
    private Name[] fImportedNames;
    private final ImportRewrite fImportRewrite;
    /**
     * Creates an import rewrite context at the given node's start position.
     *
     * @param node the node to use as context
     * @param importRewrite the import rewrite
     *
     * @since 3.6
     */
    public ContextSensitiveImportRewriteContext(ASTNode node, ImportRewrite importRewrite) {
        this((CompilationUnit)node.getRoot(), node.getStartPosition(), importRewrite);
    }
    /**
     * Creates an import rewrite context at the given start position.
     *
     * @param compilationUnit the root (must have resolved bindings)
     * @param position the context position
     * @param importRewrite the import rewrite
     */
    public ContextSensitiveImportRewriteContext(CompilationUnit compilationUnit, int position, ImportRewrite importRewrite) {
        fCompilationUnit= compilationUnit;
        fPosition= position;
        fImportRewrite= importRewrite;
        fDeclarationsInScope= null;
        fImportedNames= null;
    }
    /**
     * Decides whether the type <code>qualifier.name</code> can be referred to by its
     * simple name at this context's position. Consults, in order: bindings visible in
     * scope, already-imported names, the compilation unit's own top-level types (and
     * their private enclosing declarations), imports added to the rewrite so far, and
     * — for <code>java.lang</code> — the other types of the unit's own package.
     * Anything unresolved here is delegated to the rewrite's default context.
     *
     * @return <code>RES_NAME_FOUND</code> when the simple name already resolves to
     *         this type, <code>RES_NAME_CONFLICT</code> when it resolves to a
     *         different one, otherwise the default context's answer
     */
    @Override
    public int findInContext(String qualifier, String name, int kind) {
        IBinding[] declarationsInScope= getDeclarationsInScope();
        for (int i= 0; i < declarationsInScope.length; i++) {
            if (declarationsInScope[i] instanceof ITypeBinding) {
                ITypeBinding typeBinding= (ITypeBinding)declarationsInScope[i];
                if (isSameType(typeBinding, qualifier, name)) {
                    return RES_NAME_FOUND;
                } else if (isConflicting(typeBinding, name)) {
                    return RES_NAME_CONFLICT;
                }
            } else if (declarationsInScope[i] != null) {
                // Non-type declarations (variables, methods) also shadow the simple name.
                if (isConflicting(declarationsInScope[i], name)) {
                    return RES_NAME_CONFLICT;
                }
            }
        }
        Name[] names= getImportedNames();
        for (int i= 0; i < names.length; i++) {
            IBinding binding= names[i].resolveBinding();
            if (binding instanceof ITypeBinding && !binding.isRecovered()) {
                ITypeBinding typeBinding= (ITypeBinding)binding;
                if (isConflictingType(typeBinding, qualifier, name)) {
                    return RES_NAME_CONFLICT;
                }
            }
        }
        List<AbstractTypeDeclaration> list= fCompilationUnit.types();
        for (Iterator<AbstractTypeDeclaration> iter= list.iterator(); iter.hasNext();) {
            AbstractTypeDeclaration type= iter.next();
            ITypeBinding binding= type.resolveBinding();
            if (binding != null) {
                if (isSameType(binding, qualifier, name)) {
                    return RES_NAME_FOUND;
                } else {
                    // If the sought type is nested inside this top-level type,
                    // a private enclosing declaration makes it unimportable.
                    ITypeBinding decl= containingDeclaration(binding, qualifier, name);
                    while (decl != null && !decl.equals(binding)) {
                        int modifiers= decl.getModifiers();
                        if (Modifier.isPrivate(modifiers))
                            return RES_NAME_CONFLICT;
                        decl= decl.getDeclaringClass();
                    }
                }
            }
        }
        String[] addedImports= fImportRewrite.getAddedImports();
        String qualifiedName= JavaModelUtil.concatenateName(qualifier, name);
        for (int i= 0; i < addedImports.length; i++) {
            String addedImport= addedImports[i];
            if (qualifiedName.equals(addedImport)) {
                return RES_NAME_FOUND;
            } else {
                if (isConflicting(name, addedImport))
                    return RES_NAME_CONFLICT;
            }
        }
        if (qualifier.equals("java.lang")) { //$NON-NLS-1$
            //No explicit import statement required
            ITypeRoot typeRoot= fCompilationUnit.getTypeRoot();
            if (typeRoot != null) {
                IPackageFragment packageFragment= (IPackageFragment) typeRoot.getParent();
                try {
                    // A same-named type in the unit's own package shadows java.lang.
                    ICompilationUnit[] compilationUnits= packageFragment.getCompilationUnits();
                    for (int i= 0; i < compilationUnits.length; i++) {
                        ICompilationUnit cu= compilationUnits[i];
                        IType[] allTypes= cu.getAllTypes();
                        for (int j= 0; j < allTypes.length; j++) {
                            IType type= allTypes[j];
                            String packageTypeName= type.getFullyQualifiedName();
                            if (isConflicting(name, packageTypeName))
                                return RES_NAME_CONFLICT;
                        }
                    }
                } catch (JavaModelException e) {
                    // Ignore - the package's types could not be enumerated;
                    // fall through to the default context below.
                }
            }
        }
        return fImportRewrite.getDefaultImportRewriteContext().findInContext(qualifier, name, kind);
    }
    /**
     * @return true if the simple (last-segment) name of <code>importt</code>
     *         equals <code>name</code>
     */
    private boolean isConflicting(String name, String importt) {
        int index= importt.lastIndexOf('.');
        String importedName;
        if (index == -1) {
            importedName= importt;
        } else {
            importedName= importt.substring(index + 1, importt.length());
        }
        if (importedName.equals(name)) {
            return true;
        }
        return false;
    }
    /**
     * Depth-first searches the member types of <code>binding</code> for the type
     * <code>qualifier.name</code>.
     *
     * @return the matching nested type binding, or null if none is found
     */
    private ITypeBinding containingDeclaration(ITypeBinding binding, String qualifier, String name) {
        ITypeBinding[] declaredTypes= binding.getDeclaredTypes();
        for (int i= 0; i < declaredTypes.length; i++) {
            ITypeBinding childBinding= declaredTypes[i];
            if (isSameType(childBinding, qualifier, name)) {
                return childBinding;
            } else {
                ITypeBinding result= containingDeclaration(childBinding, qualifier, name);
                if (result != null) {
                    return result;
                }
            }
        }
        return null;
    }
    /** @return true if the binding's simple name equals <code>name</code> */
    private boolean isConflicting(IBinding binding, String name) {
        return binding.getName().equals(name);
    }
    /** @return true if the binding's qualified name equals <code>qualifier.name</code> */
    private boolean isSameType(ITypeBinding binding, String qualifier, String name) {
        String qualifiedName= JavaModelUtil.concatenateName(qualifier, name);
        return binding.getQualifiedName().equals(qualifiedName);
    }
    /**
     * @return true if the binding's declaration has the same simple name as
     *         <code>name</code> but is a different type than <code>qualifier.name</code>
     */
    private boolean isConflictingType(ITypeBinding binding, String qualifier, String name) {
        binding= binding.getTypeDeclaration();
        return !isSameType(binding, qualifier, name) && isConflicting(binding, name);
    }
    /**
     * Lazily computes all method, type and variable declarations visible at
     * <code>fPosition</code>.
     */
    private IBinding[] getDeclarationsInScope() {
        if (fDeclarationsInScope == null) {
            ScopeAnalyzer analyzer= new ScopeAnalyzer(fCompilationUnit);
            fDeclarationsInScope= analyzer.getDeclarationsInScope(fPosition, ScopeAnalyzer.METHODS | ScopeAnalyzer.TYPES | ScopeAnalyzer.VARIABLES);
        }
        return fDeclarationsInScope;
    }
    /**
     * Lazily collects the names referenced by the compilation unit's import
     * declarations.
     */
    private Name[] getImportedNames() {
        if (fImportedNames == null) {
            IJavaProject project= null;
            IJavaElement javaElement= fCompilationUnit.getJavaElement();
            if (javaElement != null)
                project= javaElement.getJavaProject();
            List<SimpleName> imports= new ArrayList<SimpleName>();
            ImportReferencesCollector.collect(fCompilationUnit, project, null, imports, null);
            fImportedNames= imports.toArray(new Name[imports.size()]);
        }
        return fImportedNames;
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.job.config;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.config.DataDescription.DataFormat;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import java.time.DateTimeException;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.Is.is;
public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescription> {
public void testDefault() {
DataDescription dataDescription = new DataDescription.Builder().build();
assertThat(dataDescription.getFormat(), equalTo(DataFormat.XCONTENT));
assertThat(dataDescription.getTimeField(), equalTo("time"));
assertThat(dataDescription.getTimeFormat(), equalTo("epoch_ms"));
assertThat(dataDescription.getFieldDelimiter(), is(nullValue()));
assertThat(dataDescription.getQuoteCharacter(), is(nullValue()));
}
public void testDefaultDelimited() {
DataDescription.Builder dataDescriptionBuilder = new DataDescription.Builder();
dataDescriptionBuilder.setFormat(DataFormat.DELIMITED);
DataDescription dataDescription = dataDescriptionBuilder.build();
assertThat(dataDescription.getFormat(), equalTo(DataFormat.DELIMITED));
assertThat(dataDescription.getTimeField(), equalTo("time"));
assertThat(dataDescription.getTimeFormat(), equalTo("epoch_ms"));
assertThat(dataDescription.getFieldDelimiter(), is('\t'));
assertThat(dataDescription.getQuoteCharacter(), is('"'));
}
    public void testVerify_GivenValidFormat() {
        // Passes if no exception is thrown: each of these time formats must be
        // accepted by the builder's validation. No explicit assertions needed.
        DataDescription.Builder description = new DataDescription.Builder();
        description.setTimeFormat("epoch");
        description.setTimeFormat("epoch_ms");
        description.setTimeFormat("yyyy-MM-dd HH");
    }
    @AwaitsFix(bugUrl = "https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8206980")
    public void testVerify_GivenValidFormat_Java11Bug() {
        // Passes if setTimeFormat does not throw. Muted via @AwaitsFix while the
        // referenced JDK parsing bug makes this pattern fail on affected releases.
        DataDescription.Builder description = new DataDescription.Builder();
        description.setTimeFormat("yyyy.MM.dd G 'at' HH:mm:ss z");
    }
public void testVerify_GivenInValidFormat() {
DataDescription.Builder description = new DataDescription.Builder();
expectThrows(IllegalArgumentException.class, () -> description.setTimeFormat(null));
ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> description.setTimeFormat("invalid"));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, "invalid"), e.getMessage());
e = expectThrows(ElasticsearchException.class, () -> description.setTimeFormat(""));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, ""), e.getMessage());
e = expectThrows(ElasticsearchException.class, () -> description.setTimeFormat("y-M-dd"));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, "y-M-dd"), e.getMessage());
expectThrows(ElasticsearchException.class, () -> description.setTimeFormat("YYY-mm-UU hh:mm:ssY"));
Throwable cause = e.getCause();
assertNotNull(cause);
assertThat(cause, instanceOf(DateTimeException.class));
}
public void testTransform_GivenDelimitedAndEpoch() {
    // Delimited data whose timestamps are already epoch seconds needs no transform.
    DataDescription.Builder builder = new DataDescription.Builder();
    builder.setFormat(DataFormat.DELIMITED);
    builder.setTimeFormat("epoch");
    DataDescription description = builder.build();
    assertFalse(description.transform());
}
public void testTransform_GivenDelimitedAndEpochMs() {
    // Millisecond epoch timestamps require a transform even for delimited data.
    DataDescription.Builder builder = new DataDescription.Builder();
    builder.setFormat(DataFormat.DELIMITED);
    builder.setTimeFormat("epoch_ms");
    DataDescription description = builder.build();
    assertTrue(description.transform());
}
public void testIsTransformTime_GivenTimeFormatIsEpoch() {
    // "epoch" is the native time representation, so no time transform is needed.
    DataDescription.Builder builder = new DataDescription.Builder();
    builder.setTimeFormat("epoch");
    assertFalse(builder.build().isTransformTime());
}
public void testIsTransformTime_GivenTimeFormatIsEpochMs() {
    // Millisecond timestamps must be converted, so a time transform is required.
    DataDescription.Builder builder = new DataDescription.Builder();
    builder.setTimeFormat("epoch_ms");
    assertTrue(builder.build().isTransformTime());
}
public void testIsTransformTime_GivenTimeFormatPattern() {
    // A date-time pattern always needs parsing, hence a time transform.
    DataDescription.Builder builder = new DataDescription.Builder();
    builder.setTimeFormat("yyyy-MM-dd HH:mm:ss.SSSZ");
    assertTrue(builder.build().isTransformTime());
}
/**
 * Creates a builder pre-populated with the baseline configuration shared by
 * all the equals() tests below; each test then overrides exactly one property
 * to prove that property participates in equals().
 */
private static DataDescription.Builder createFullyPopulatedBuilder() {
    DataDescription.Builder builder = new DataDescription.Builder();
    builder.setFormat(DataFormat.XCONTENT);
    builder.setQuoteCharacter('"');
    builder.setTimeField("timestamp");
    builder.setTimeFormat("epoch");
    builder.setFieldDelimiter(',');
    return builder;
}

public void testEquals_GivenDifferentDateFormat() {
    DataDescription description1 = createFullyPopulatedBuilder().build();
    DataDescription.Builder builder2 = createFullyPopulatedBuilder();
    builder2.setFormat(DataFormat.DELIMITED);
    DataDescription description2 = builder2.build();
    // equals() must be symmetric.
    assertFalse(description1.equals(description2));
    assertFalse(description2.equals(description1));
}

public void testEquals_GivenDifferentQuoteCharacter() {
    DataDescription description1 = createFullyPopulatedBuilder().build();
    DataDescription.Builder builder2 = createFullyPopulatedBuilder();
    builder2.setQuoteCharacter('\'');
    DataDescription description2 = builder2.build();
    assertFalse(description1.equals(description2));
    assertFalse(description2.equals(description1));
}

public void testEquals_GivenDifferentTimeField() {
    DataDescription description1 = createFullyPopulatedBuilder().build();
    DataDescription.Builder builder2 = createFullyPopulatedBuilder();
    builder2.setTimeField("time");
    DataDescription description2 = builder2.build();
    assertFalse(description1.equals(description2));
    assertFalse(description2.equals(description1));
}

public void testEquals_GivenDifferentTimeFormat() {
    DataDescription description1 = createFullyPopulatedBuilder().build();
    DataDescription.Builder builder2 = createFullyPopulatedBuilder();
    builder2.setTimeFormat("epoch_ms");
    DataDescription description2 = builder2.build();
    assertFalse(description1.equals(description2));
    assertFalse(description2.equals(description1));
}

public void testEquals_GivenDifferentFieldDelimiter() {
    DataDescription description1 = createFullyPopulatedBuilder().build();
    DataDescription.Builder builder2 = createFullyPopulatedBuilder();
    builder2.setFieldDelimiter(';');
    DataDescription description2 = builder2.build();
    assertFalse(description1.equals(description2));
    assertFalse(description2.equals(description1));
}
public void testInvalidDataFormat() throws Exception {
    // An unknown enum value for "format" must surface as an XContentParseException
    // whose root cause is the enum-lookup IllegalArgumentException.
    BytesArray payload = new BytesArray("{ \"format\":\"INEXISTENT_FORMAT\" }");
    XContentParser parser = JsonXContent.jsonXContent
            .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, payload.streamInput());
    XContentParseException ex =
            expectThrows(XContentParseException.class, () -> DataDescription.STRICT_PARSER.apply(parser, null));
    assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [format]"));
    Throwable rootCause = ex.getCause();
    assertNotNull(rootCause);
    assertThat(rootCause, instanceOf(IllegalArgumentException.class));
    assertThat(rootCause.getMessage(),
            containsString("No enum constant org.elasticsearch.xpack.core.ml.job.config.DataDescription.DataFormat.INEXISTENT_FORMAT"));
}
public void testInvalidFieldDelimiter() throws Exception {
    // "field_delimiter" must be exactly one character; a two-character value
    // fails parsing with an IllegalArgumentException as the root cause.
    BytesArray payload = new BytesArray("{ \"field_delimiter\":\",,\" }");
    XContentParser parser = JsonXContent.jsonXContent
            .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, payload.streamInput());
    XContentParseException ex =
            expectThrows(XContentParseException.class, () -> DataDescription.STRICT_PARSER.apply(parser, null));
    assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [field_delimiter]"));
    Throwable rootCause = ex.getCause();
    assertNotNull(rootCause);
    assertThat(rootCause, instanceOf(IllegalArgumentException.class));
    assertThat(rootCause.getMessage(),
            containsString("String must be a single character, found [,,]"));
}
public void testInvalidQuoteCharacter() throws Exception {
    // "quote_character" must be exactly one character; two quotes fail parsing
    // with an IllegalArgumentException as the root cause.
    BytesArray payload = new BytesArray("{ \"quote_character\":\"''\" }");
    XContentParser parser = JsonXContent.jsonXContent
            .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, payload.streamInput());
    XContentParseException ex =
            expectThrows(XContentParseException.class, () -> DataDescription.STRICT_PARSER.apply(parser, null));
    assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [quote_character]"));
    Throwable rootCause = ex.getCause();
    assertNotNull(rootCause);
    assertThat(rootCause, instanceOf(IllegalArgumentException.class));
    assertThat(rootCause.getMessage(), containsString("String must be a single character, found ['']"));
}
@Override
protected DataDescription createTestInstance() {
    // Randomly populate each optional property independently. The draw order
    // matters for reproducibility with a fixed seed, so it mirrors the
    // property order of the builder.
    DataDescription.Builder builder = new DataDescription.Builder();
    if (randomBoolean()) {
        builder.setFormat(randomFrom(DataFormat.values()));
    }
    if (randomBoolean()) {
        builder.setTimeField(randomAlphaOfLengthBetween(1, 20));
    }
    if (randomBoolean()) {
        // Bias towards the two special epoch formats, otherwise use a pattern.
        final String timeFormat;
        if (randomBoolean()) {
            timeFormat = DataDescription.EPOCH;
        } else if (randomBoolean()) {
            timeFormat = DataDescription.EPOCH_MS;
        } else {
            timeFormat = "yyyy-MM-dd HH:mm:ss.SSS";
        }
        builder.setTimeFormat(timeFormat);
    }
    if (randomBoolean()) {
        builder.setFieldDelimiter(randomAlphaOfLength(1).charAt(0));
    }
    if (randomBoolean()) {
        builder.setQuoteCharacter(randomAlphaOfLength(1).charAt(0));
    }
    return builder.build();
}
@Override
protected Reader<DataDescription> instanceReader() {
    // Deserialise via the stream-input constructor.
    return in -> new DataDescription(in);
}
@Override
protected DataDescription doParseInstance(XContentParser parser) {
    // Round-trip through the strict parser used for user-supplied configs.
    DataDescription.Builder builder = DataDescription.STRICT_PARSER.apply(parser, null);
    return builder.build();
}
/**
 * Returns a copy of {@code instance} with exactly one randomly chosen property
 * changed, used by the serialization test framework to verify equals/hashCode.
 *
 * Fixes: added the missing {@code @Override} annotation (consistent with the
 * other overridden framework methods above) and removed a stray semicolon that
 * followed the method's closing brace.
 */
@Override
protected DataDescription mutateInstance(DataDescription instance) throws java.io.IOException {
    DataFormat format = instance.getFormat();
    String timeField = instance.getTimeField();
    String timeFormat = instance.getTimeFormat();
    Character delimiter = instance.getFieldDelimiter();
    Character quoteChar = instance.getQuoteCharacter();
    switch (between(0, 4)) {
    case 0:
        // Flip between the two formats so the mutation always differs.
        if (format == DataFormat.DELIMITED) {
            format = DataFormat.XCONTENT;
        } else {
            format = DataFormat.DELIMITED;
        }
        break;
    case 1:
        timeField += randomAlphaOfLengthBetween(1, 10);
        break;
    case 2:
        // This pattern is never produced by createTestInstance, so it is
        // guaranteed to differ from the original.
        timeFormat = "yyyy-MM-dd-HH-mm-ss";
        break;
    case 3:
        // Toggle between present and absent to cover the null case.
        if (delimiter == null) {
            delimiter = randomAlphaOfLength(1).charAt(0);
        } else {
            delimiter = null;
        }
        break;
    case 4:
        if (quoteChar == null) {
            quoteChar = randomAlphaOfLength(1).charAt(0);
        } else {
            quoteChar = null;
        }
        break;
    default:
        throw new AssertionError("Illegal randomisation branch");
    }
    return new DataDescription(format, timeField, timeFormat, delimiter, quoteChar);
}
}