gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package org.spoofax.interpreter.terms;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.opentest4j.TestAbortedException;
import org.spoofax.terms.attachments.ITermAttachment;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.List;
import static org.junit.jupiter.api.Assertions.*;
import static org.spoofax.TestUtils.TEST_INSTANCE_NOT_CREATED;
/**
 * Tests the {@link IStrategoInt} interface.
 */
@DisplayName("IStrategoInt")
@SuppressWarnings("unused")
public interface IStrategoIntTests {

    interface Fixture extends IStrategoTermTests.Fixture {

        /**
         * Creates a new instance of {@link IStrategoInt} for testing (with fixed hashCode 0).
         *
         * @param value the value of the term; or {@code null} to use a sensible default
         * @param annotations the annotations of the term; or {@code null} to use a sensible default
         * @param attachments the attachments of the term; or {@code null} to use a sensible default
         * @return the created object
         * @throws org.opentest4j.TestAbortedException when an instance with the given parameters could not be created
         */
        IStrategoInt createIStrategoInt(@Nullable Integer value, @Nullable IStrategoList annotations,
                                        @Nullable List<ITermAttachment> attachments);

        @Override
        default IStrategoTerm createIStrategoTerm(@Nullable List<IStrategoTerm> subterms,
                                                  @Nullable IStrategoList annotations,
                                                  @Nullable List<ITermAttachment> attachments) {
            // An int term has no subterms, so only requests without subterms can be satisfied here.
            if (subterms != null && subterms.size() != 0) throw new TestAbortedException(TEST_INSTANCE_NOT_CREATED);
            return createIStrategoInt(null, annotations, attachments);
        }
    }

    /**
     * Tests the {@link IStrategoInt#intValue()} method.
     */
    @DisplayName("intValue()")
    interface IntValueTests extends Fixture {

        @Test
        @DisplayName("returns the value of the term")
        default void returnsTheValueOfTheTerm() {
            // Arrange
            int value = 10;
            IStrategoInt sut = createIStrategoInt(value, null, null);

            // Act
            int result = sut.intValue();

            // Assert
            assertEquals(value, result);
        }
    }

    /**
     * Tests the {@link IStrategoInt#getType()} method.
     */
    @DisplayName("getType()")
    interface GetTypeTests extends Fixture, IStrategoTermTests.GetTypeTests {

        @Test
        @DisplayName("returns the correct term type")
        default void returnsTheCorrectTermType() {
            // Arrange
            IStrategoInt sut = createIStrategoInt(null, null, null);

            // Act
            TermType result = sut.getType();

            // Assert
            assertEquals(TermType.INT, result);
        }
    }

    /**
     * Tests the {@link IStrategoInt#getSubtermCount()} method.
     */
    @DisplayName("getSubtermCount()")
    interface GetSubtermCountTests extends Fixture, IStrategoTermTests.GetSubtermCountTests {

        @Test
        // Fixed: was the raw method identifier "alwaysReturnsZero"; siblings use readable phrases.
        @DisplayName("always returns zero")
        default void alwaysReturnsZero() {
            // Arrange
            IStrategoTerm sut = createIStrategoInt(null, null, null);

            // Act
            int result = sut.getSubtermCount();

            // Assert
            assertEquals(0, result);
        }
    }

    /**
     * Tests the {@link IStrategoInt#getAllSubterms()} method.
     */
    // Fixed: display name said "getAllSubterms(int)" but the method under test takes no arguments.
    @DisplayName("getAllSubterms()")
    interface GetAllSubtermsTests extends Fixture, IStrategoTermTests.GetAllSubtermsTests {

        @Test
        @DisplayName("always returns empty array")
        default void alwaysReturnsEmptyArray() {
            // Arrange
            IStrategoTerm sut = createIStrategoInt(null, null, null);

            // Act
            IStrategoTerm[] result = sut.getAllSubterms();

            // Assert
            assertEquals(0, result.length);
        }
    }

    /**
     * Tests the {@link IStrategoInt#getSubterms()} method.
     */
    // Fixed: display name said "getSubterms(int)" but the method under test takes no arguments.
    @DisplayName("getSubterms()")
    interface GetSubtermsTests extends Fixture, IStrategoTermTests.GetSubtermsTests {

        @Test
        @DisplayName("always returns empty list")
        default void alwaysReturnsEmptyList() {
            // Arrange
            IStrategoTerm sut = createIStrategoInt(null, null, null);

            // Act
            List<IStrategoTerm> result = sut.getSubterms();

            // Assert
            assertEquals(0, result.size());
        }
    }

    /**
     * Tests the {@link IStrategoInt#match(IStrategoTerm)} method.
     */
    @DisplayName("match(IStrategoTerm)")
    interface MatchTests extends Fixture, IStrategoTermTests.MatchTests {

        @Test
        @DisplayName("when both have the same value, returns true")
        default void whenBothHaveTheSameValue_returnsTrue() {
            // Arrange
            IStrategoInt sut = createIStrategoInt(42, null, null);
            IStrategoInt other = createIStrategoInt(42, null, null);

            // Act
            boolean result = sut.match(other);

            // Assert
            assertTrue(result);
        }

        @Test
        @DisplayName("when other has different value, returns false")
        default void whenOtherHasDifferentValue_returnsFalse() {
            // Arrange
            IStrategoInt sut = createIStrategoInt(42, null, null);
            IStrategoInt other = createIStrategoInt(1337, null, null);

            // Act
            boolean result = sut.match(other);

            // Assert
            assertFalse(result);
        }
    }

    /**
     * Tests the {@link IStrategoInt#toString(int)} and {@link IStrategoInt#toString()} methods.
     */
    @DisplayName("toString(..)")
    interface ToStringTests extends Fixture, IStrategoTermTests.ToStringTests {

        @Test
        @DisplayName("returns the correct string representation")
        default void returnsTheCorrectStringRepresentation() {
            // Arrange
            IStrategoInt sut = createIStrategoInt(42, null, null);

            // Act
            String result = sut.toString();

            // Assert
            assertEquals("42", result);
        }
    }

    /**
     * Tests the {@link IStrategoInt#writeAsString(Appendable, int)} and {@link IStrategoInt#writeAsString(Appendable)}
     * methods.
     */
    @DisplayName("writeAsString(..)")
    interface WriteAsStringTests extends Fixture, IStrategoTermTests.WriteAsStringTests {

        @Test
        @DisplayName("returns the correct string representation")
        default void returnsTheCorrectStringRepresentation() throws IOException {
            // Arrange
            StringBuilder sb = new StringBuilder();
            IStrategoInt sut = createIStrategoInt(42, null, null);

            // Act
            sut.writeAsString(sb);

            // Assert
            assertEquals("42", sb.toString());
        }
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.servicebus.implementation;
import com.azure.core.annotation.BodyParam;
import com.azure.core.annotation.Delete;
import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.Put;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.servicebus.fluent.RulesClient;
import com.azure.resourcemanager.servicebus.fluent.models.RuleInner;
import com.azure.resourcemanager.servicebus.models.RuleListResult;
import reactor.core.publisher.Mono;
/** An instance of this class provides access to all the operations defined in RulesClient. */
public final class RulesClientImpl implements RulesClient {
// Logger for this client. NOTE(review): not referenced anywhere in the visible portion of this
// class — presumably used by generated code further down, or retained by the generator; confirm.
private final ClientLogger logger = new ClientLogger(RulesClientImpl.class);

/** The proxy service used to perform REST calls. */
private final RulesService service;

/** The service client containing this operation class. */
private final ServiceBusManagementClientImpl client;
/**
 * Initializes an instance of RulesClientImpl.
 *
 * @param client the instance of the service client containing this operation class.
 */
RulesClientImpl(ServiceBusManagementClientImpl client) {
    // Build the RestProxy implementation of RulesService over the shared HTTP pipeline and serializer.
    this.service = RestProxy.create(RulesService.class, client.getHttpPipeline(), client.getSerializerAdapter());
    this.client = client;
}
/**
 * The interface defining all the services for ServiceBusManagementClientRules to be used by the proxy service to
 * perform REST calls. Implemented at runtime by {@code RestProxy}; the annotations fully describe each request.
 */
@Host("{$host}")
@ServiceInterface(name = "ServiceBusManagement")
private interface RulesService {
    // GET first page of rules under a topic subscription; paging continues via listBySubscriptionsNext.
    @Headers({"Content-Type: application/json"})
    @Get(
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus"
            + "/namespaces/{namespaceName}/topics/{topicName}/subscriptions/{subscriptionName}/rules")
    @ExpectedResponses({200})
    @UnexpectedResponseExceptionType(ManagementException.class)
    Mono<Response<RuleListResult>> listBySubscriptions(
        @HostParam("$host") String endpoint,
        @PathParam("resourceGroupName") String resourceGroupName,
        @PathParam("namespaceName") String namespaceName,
        @PathParam("topicName") String topicName,
        @PathParam("subscriptionName") String subscriptionName,
        @QueryParam("api-version") String apiVersion,
        @PathParam("subscriptionId") String subscriptionId,
        @QueryParam("$skip") Integer skip,
        @QueryParam("$top") Integer top,
        @HeaderParam("Accept") String accept,
        Context context);

    // PUT create-or-update of a single rule; body carries the rule definition.
    @Headers({"Content-Type: application/json"})
    @Put(
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus"
            + "/namespaces/{namespaceName}/topics/{topicName}/subscriptions/{subscriptionName}/rules/{ruleName}")
    @ExpectedResponses({200})
    @UnexpectedResponseExceptionType(ManagementException.class)
    Mono<Response<RuleInner>> createOrUpdate(
        @HostParam("$host") String endpoint,
        @PathParam("resourceGroupName") String resourceGroupName,
        @PathParam("namespaceName") String namespaceName,
        @PathParam("topicName") String topicName,
        @PathParam("subscriptionName") String subscriptionName,
        @PathParam("ruleName") String ruleName,
        @QueryParam("api-version") String apiVersion,
        @PathParam("subscriptionId") String subscriptionId,
        @BodyParam("application/json") RuleInner parameters,
        @HeaderParam("Accept") String accept,
        Context context);

    // DELETE a single rule; 204 is accepted for an already-absent resource.
    @Headers({"Content-Type: application/json"})
    @Delete(
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus"
            + "/namespaces/{namespaceName}/topics/{topicName}/subscriptions/{subscriptionName}/rules/{ruleName}")
    @ExpectedResponses({200, 204})
    @UnexpectedResponseExceptionType(ManagementException.class)
    Mono<Response<Void>> delete(
        @HostParam("$host") String endpoint,
        @PathParam("resourceGroupName") String resourceGroupName,
        @PathParam("namespaceName") String namespaceName,
        @PathParam("topicName") String topicName,
        @PathParam("subscriptionName") String subscriptionName,
        @PathParam("ruleName") String ruleName,
        @QueryParam("api-version") String apiVersion,
        @PathParam("subscriptionId") String subscriptionId,
        @HeaderParam("Accept") String accept,
        Context context);

    // GET a single rule by name.
    @Headers({"Content-Type: application/json"})
    @Get(
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus"
            + "/namespaces/{namespaceName}/topics/{topicName}/subscriptions/{subscriptionName}/rules/{ruleName}")
    @ExpectedResponses({200})
    @UnexpectedResponseExceptionType(ManagementException.class)
    Mono<Response<RuleInner>> get(
        @HostParam("$host") String endpoint,
        @PathParam("resourceGroupName") String resourceGroupName,
        @PathParam("namespaceName") String namespaceName,
        @PathParam("topicName") String topicName,
        @PathParam("subscriptionName") String subscriptionName,
        @PathParam("ruleName") String ruleName,
        @QueryParam("api-version") String apiVersion,
        @PathParam("subscriptionId") String subscriptionId,
        @HeaderParam("Accept") String accept,
        Context context);

    // GET the next page using the opaque nextLink returned by a previous list response.
    @Headers({"Content-Type: application/json"})
    @Get("{nextLink}")
    @ExpectedResponses({200})
    @UnexpectedResponseExceptionType(ManagementException.class)
    Mono<Response<RuleListResult>> listBySubscriptionsNext(
        @PathParam(value = "nextLink", encoded = true) String nextLink,
        @HostParam("$host") String endpoint,
        @HeaderParam("Accept") String accept,
        Context context);
}
/**
 * List all the rules within given topic-subscription (first/single page).
 *
 * @param resourceGroupName Name of the Resource group within the Azure subscription.
 * @param namespaceName The namespace name.
 * @param topicName The topic name.
 * @param subscriptionName The subscription name.
 * @param skip Skip is only used if a previous operation returned a partial result. If a previous response contains
 *     a nextLink element, the value of the nextLink element will include a skip parameter that specifies a starting
 *     point to use for subsequent calls.
 * @param top May be used to limit the number of results to the most recent N usageDetails.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the response of the List rule operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<RuleInner>> listBySubscriptionsSinglePageAsync(
    String resourceGroupName,
    String namespaceName,
    String topicName,
    String subscriptionName,
    Integer skip,
    Integer top) {
    // Fail fast (as a Mono error, not a throw) when required client settings or parameters are missing.
    if (this.client.getEndpoint() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getEndpoint() is required and cannot be null."));
    }
    if (resourceGroupName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
    }
    if (namespaceName == null) {
        return Mono.error(new IllegalArgumentException("Parameter namespaceName is required and cannot be null."));
    }
    if (topicName == null) {
        return Mono.error(new IllegalArgumentException("Parameter topicName is required and cannot be null."));
    }
    if (subscriptionName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter subscriptionName is required and cannot be null."));
    }
    if (this.client.getSubscriptionId() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getSubscriptionId() is required and cannot be null."));
    }
    final String accept = "application/json";
    // withContext captures the subscriber's reactor Context for the proxy call; the service call is
    // deferred until subscription.
    return FluxUtil
        .withContext(
            context ->
                service
                    .listBySubscriptions(
                        this.client.getEndpoint(),
                        resourceGroupName,
                        namespaceName,
                        topicName,
                        subscriptionName,
                        this.client.getApiVersion(),
                        this.client.getSubscriptionId(),
                        skip,
                        top,
                        accept,
                        context))
        // Unwrap RuleListResult into a PagedResponse carrying the items and the nextLink for paging.
        .<PagedResponse<RuleInner>>map(
            res ->
                new PagedResponseBase<>(
                    res.getRequest(),
                    res.getStatusCode(),
                    res.getHeaders(),
                    res.getValue().value(),
                    res.getValue().nextLink(),
                    null))
        // Merge the service client's default context into the subscriber context (read-only view).
        .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
/**
 * List all the rules within given topic-subscription (first/single page), using an explicit context.
 *
 * @param resourceGroupName Name of the Resource group within the Azure subscription.
 * @param namespaceName The namespace name.
 * @param topicName The topic name.
 * @param subscriptionName The subscription name.
 * @param skip Skip is only used if a previous operation returned a partial result. If a previous response contains
 *     a nextLink element, the value of the nextLink element will include a skip parameter that specifies a starting
 *     point to use for subsequent calls.
 * @param top May be used to limit the number of results to the most recent N usageDetails.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the response of the List rule operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<RuleInner>> listBySubscriptionsSinglePageAsync(
    String resourceGroupName,
    String namespaceName,
    String topicName,
    String subscriptionName,
    Integer skip,
    Integer top,
    Context context) {
    // Fail fast (as a Mono error, not a throw) when required client settings or parameters are missing.
    if (this.client.getEndpoint() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getEndpoint() is required and cannot be null."));
    }
    if (resourceGroupName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
    }
    if (namespaceName == null) {
        return Mono.error(new IllegalArgumentException("Parameter namespaceName is required and cannot be null."));
    }
    if (topicName == null) {
        return Mono.error(new IllegalArgumentException("Parameter topicName is required and cannot be null."));
    }
    if (subscriptionName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter subscriptionName is required and cannot be null."));
    }
    if (this.client.getSubscriptionId() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getSubscriptionId() is required and cannot be null."));
    }
    final String accept = "application/json";
    // Merge the caller-supplied context with the service client's default context.
    context = this.client.mergeContext(context);
    return service
        .listBySubscriptions(
            this.client.getEndpoint(),
            resourceGroupName,
            namespaceName,
            topicName,
            subscriptionName,
            this.client.getApiVersion(),
            this.client.getSubscriptionId(),
            skip,
            top,
            accept,
            context)
        // Unwrap RuleListResult into a PagedResponse carrying the items and the nextLink for paging.
        .map(
            res ->
                new PagedResponseBase<>(
                    res.getRequest(),
                    res.getStatusCode(),
                    res.getHeaders(),
                    res.getValue().value(),
                    res.getValue().nextLink(),
                    null));
}
/**
 * List all the rules within given topic-subscription as a lazily paged stream.
 *
 * @param resourceGroupName Name of the Resource group within the Azure subscription.
 * @param namespaceName The namespace name.
 * @param topicName The topic name.
 * @param subscriptionName The subscription name.
 * @param skip Skip is only used if a previous operation returned a partial result. If a previous response contains
 *     a nextLink element, the value of the nextLink element will include a skip parameter that specifies a starting
 *     point to use for subsequent calls.
 * @param top May be used to limit the number of results to the most recent N usageDetails.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the response of the List rule operation.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<RuleInner> listBySubscriptionsAsync(
    String resourceGroupName,
    String namespaceName,
    String topicName,
    String subscriptionName,
    Integer skip,
    Integer top) {
    // First page comes from the parameterized single-page call; subsequent pages follow nextLink.
    return new PagedFlux<>(
        () ->
            listBySubscriptionsSinglePageAsync(
                resourceGroupName, namespaceName, topicName, subscriptionName, skip, top),
        this::listBySubscriptionsNextSinglePageAsync);
}
/**
 * List all the rules within given topic-subscription, with default paging options.
 *
 * @param resourceGroupName Name of the Resource group within the Azure subscription.
 * @param namespaceName The namespace name.
 * @param topicName The topic name.
 * @param subscriptionName The subscription name.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the response of the List rule operation.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<RuleInner> listBySubscriptionsAsync(
    String resourceGroupName, String namespaceName, String topicName, String subscriptionName) {
    // Delegate to the full overload with $skip/$top unset instead of duplicating the
    // PagedFlux wiring; null query parameters are simply omitted from the request.
    return listBySubscriptionsAsync(resourceGroupName, namespaceName, topicName, subscriptionName, null, null);
}
/**
 * List all the rules within given topic-subscription as a lazily paged stream, using an explicit context.
 *
 * @param resourceGroupName Name of the Resource group within the Azure subscription.
 * @param namespaceName The namespace name.
 * @param topicName The topic name.
 * @param subscriptionName The subscription name.
 * @param skip Skip is only used if a previous operation returned a partial result. If a previous response contains
 *     a nextLink element, the value of the nextLink element will include a skip parameter that specifies a starting
 *     point to use for subsequent calls.
 * @param top May be used to limit the number of results to the most recent N usageDetails.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the response of the List rule operation.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
private PagedFlux<RuleInner> listBySubscriptionsAsync(
    String resourceGroupName,
    String namespaceName,
    String topicName,
    String subscriptionName,
    Integer skip,
    Integer top,
    Context context) {
    // Both the first-page and next-page retrievers share the caller-provided context.
    return new PagedFlux<>(
        () ->
            listBySubscriptionsSinglePageAsync(
                resourceGroupName, namespaceName, topicName, subscriptionName, skip, top, context),
        link -> listBySubscriptionsNextSinglePageAsync(link, context));
}
/**
 * List all the rules within given topic-subscription, with default paging options.
 *
 * @param resourceGroupName Name of the Resource group within the Azure subscription.
 * @param namespaceName The namespace name.
 * @param topicName The topic name.
 * @param subscriptionName The subscription name.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the response of the List rule operation.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<RuleInner> listBySubscriptions(
    String resourceGroupName, String namespaceName, String topicName, String subscriptionName) {
    // Delegate to the existing 4-argument async overload instead of re-stating the null
    // $skip/$top defaults here; behavior is identical.
    return new PagedIterable<>(
        listBySubscriptionsAsync(resourceGroupName, namespaceName, topicName, subscriptionName));
}
/**
 * List all the rules within given topic-subscription as a blocking iterable, using an explicit context.
 *
 * @param resourceGroupName Name of the Resource group within the Azure subscription.
 * @param namespaceName The namespace name.
 * @param topicName The topic name.
 * @param subscriptionName The subscription name.
 * @param skip Skip is only used if a previous operation returned a partial result. If a previous response contains
 *     a nextLink element, the value of the nextLink element will include a skip parameter that specifies a starting
 *     point to use for subsequent calls.
 * @param top May be used to limit the number of results to the most recent N usageDetails.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the response of the List rule operation.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<RuleInner> listBySubscriptions(
    String resourceGroupName,
    String namespaceName,
    String topicName,
    String subscriptionName,
    Integer skip,
    Integer top,
    Context context) {
    // Wrap the context-aware async paged stream in a blocking iterable.
    PagedFlux<RuleInner> pagedFlux =
        listBySubscriptionsAsync(
            resourceGroupName, namespaceName, topicName, subscriptionName, skip, top, context);
    return new PagedIterable<>(pagedFlux);
}
/**
 * Creates a new rule and updates an existing rule.
 *
 * @param resourceGroupName Name of the Resource group within the Azure subscription.
 * @param namespaceName The namespace name.
 * @param topicName The topic name.
 * @param subscriptionName The subscription name.
 * @param ruleName The rule name.
 * @param parameters Parameters supplied to create a rule.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return description of Rule Resource.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<RuleInner>> createOrUpdateWithResponseAsync(
    String resourceGroupName,
    String namespaceName,
    String topicName,
    String subscriptionName,
    String ruleName,
    RuleInner parameters) {
    // Fail fast (as a Mono error, not a throw) when required client settings or parameters are missing.
    if (this.client.getEndpoint() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getEndpoint() is required and cannot be null."));
    }
    if (resourceGroupName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
    }
    if (namespaceName == null) {
        return Mono.error(new IllegalArgumentException("Parameter namespaceName is required and cannot be null."));
    }
    if (topicName == null) {
        return Mono.error(new IllegalArgumentException("Parameter topicName is required and cannot be null."));
    }
    if (subscriptionName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter subscriptionName is required and cannot be null."));
    }
    if (ruleName == null) {
        return Mono.error(new IllegalArgumentException("Parameter ruleName is required and cannot be null."));
    }
    if (this.client.getSubscriptionId() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getSubscriptionId() is required and cannot be null."));
    }
    if (parameters == null) {
        return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
    } else {
        // Model-level validation of the request body; throws IllegalArgumentException on bad input.
        parameters.validate();
    }
    final String accept = "application/json";
    // withContext captures the subscriber's reactor Context; the PUT is deferred until subscription.
    return FluxUtil
        .withContext(
            context ->
                service
                    .createOrUpdate(
                        this.client.getEndpoint(),
                        resourceGroupName,
                        namespaceName,
                        topicName,
                        subscriptionName,
                        ruleName,
                        this.client.getApiVersion(),
                        this.client.getSubscriptionId(),
                        parameters,
                        accept,
                        context))
        // Merge the service client's default context into the subscriber context (read-only view).
        .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
/**
 * Creates a new rule and updates an existing rule, using an explicit context.
 *
 * @param resourceGroupName Name of the Resource group within the Azure subscription.
 * @param namespaceName The namespace name.
 * @param topicName The topic name.
 * @param subscriptionName The subscription name.
 * @param ruleName The rule name.
 * @param parameters Parameters supplied to create a rule.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return description of Rule Resource.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<RuleInner>> createOrUpdateWithResponseAsync(
    String resourceGroupName,
    String namespaceName,
    String topicName,
    String subscriptionName,
    String ruleName,
    RuleInner parameters,
    Context context) {
    // Fail fast (as a Mono error, not a throw) when required client settings or parameters are missing.
    if (this.client.getEndpoint() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getEndpoint() is required and cannot be null."));
    }
    if (resourceGroupName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
    }
    if (namespaceName == null) {
        return Mono.error(new IllegalArgumentException("Parameter namespaceName is required and cannot be null."));
    }
    if (topicName == null) {
        return Mono.error(new IllegalArgumentException("Parameter topicName is required and cannot be null."));
    }
    if (subscriptionName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter subscriptionName is required and cannot be null."));
    }
    if (ruleName == null) {
        return Mono.error(new IllegalArgumentException("Parameter ruleName is required and cannot be null."));
    }
    if (this.client.getSubscriptionId() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getSubscriptionId() is required and cannot be null."));
    }
    if (parameters == null) {
        return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
    } else {
        // Model-level validation of the request body; throws IllegalArgumentException on bad input.
        parameters.validate();
    }
    final String accept = "application/json";
    // Merge the caller-supplied context with the service client's default context.
    context = this.client.mergeContext(context);
    return service
        .createOrUpdate(
            this.client.getEndpoint(),
            resourceGroupName,
            namespaceName,
            topicName,
            subscriptionName,
            ruleName,
            this.client.getApiVersion(),
            this.client.getSubscriptionId(),
            parameters,
            accept,
            context);
}
/**
 * Creates a new rule and updates an existing rule, returning only the resource body.
 *
 * @param resourceGroupName Name of the Resource group within the Azure subscription.
 * @param namespaceName The namespace name.
 * @param topicName The topic name.
 * @param subscriptionName The subscription name.
 * @param ruleName The rule name.
 * @param parameters Parameters supplied to create a rule.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return description of Rule Resource.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<RuleInner> createOrUpdateAsync(
    String resourceGroupName,
    String namespaceName,
    String topicName,
    String subscriptionName,
    String ruleName,
    RuleInner parameters) {
    // Unwrap the Response body; a null body completes the Mono empty.
    return createOrUpdateWithResponseAsync(
            resourceGroupName, namespaceName, topicName, subscriptionName, ruleName, parameters)
        .flatMap(res -> Mono.justOrEmpty(res.getValue()));
}
/**
 * Creates a new rule and updates an existing rule, blocking until the call completes.
 *
 * @param resourceGroupName Name of the Resource group within the Azure subscription.
 * @param namespaceName The namespace name.
 * @param topicName The topic name.
 * @param subscriptionName The subscription name.
 * @param ruleName The rule name.
 * @param parameters Parameters supplied to create a rule.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return description of Rule Resource.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public RuleInner createOrUpdate(
    String resourceGroupName,
    String namespaceName,
    String topicName,
    String subscriptionName,
    String ruleName,
    RuleInner parameters) {
    // Block on the async variant; yields null when the service sends no body.
    Mono<RuleInner> response =
        createOrUpdateAsync(resourceGroupName, namespaceName, topicName, subscriptionName, ruleName, parameters);
    return response.block();
}
/**
 * Creates a new rule and updates an existing rule, blocking and returning the full HTTP response.
 *
 * @param resourceGroupName Name of the Resource group within the Azure subscription.
 * @param namespaceName The namespace name.
 * @param topicName The topic name.
 * @param subscriptionName The subscription name.
 * @param ruleName The rule name.
 * @param parameters Parameters supplied to create a rule.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return description of Rule Resource.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RuleInner> createOrUpdateWithResponse(
    String resourceGroupName,
    String namespaceName,
    String topicName,
    String subscriptionName,
    String ruleName,
    RuleInner parameters,
    Context context) {
    // Block on the context-aware async variant and surface the raw Response.
    Mono<Response<RuleInner>> response =
        createOrUpdateWithResponseAsync(
            resourceGroupName, namespaceName, topicName, subscriptionName, ruleName, parameters, context);
    return response.block();
}
/**
 * Deletes an existing rule.
 *
 * @param resourceGroupName Name of the Resource group within the Azure subscription.
 * @param namespaceName The namespace name.
 * @param topicName The topic name.
 * @param subscriptionName The subscription name.
 * @param ruleName The rule name.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the completion.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Void>> deleteWithResponseAsync(
    String resourceGroupName, String namespaceName, String topicName, String subscriptionName, String ruleName) {
    // Fail fast (as a Mono error, not a throw) when required client settings or parameters are missing.
    if (this.client.getEndpoint() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getEndpoint() is required and cannot be null."));
    }
    if (resourceGroupName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
    }
    if (namespaceName == null) {
        return Mono.error(new IllegalArgumentException("Parameter namespaceName is required and cannot be null."));
    }
    if (topicName == null) {
        return Mono.error(new IllegalArgumentException("Parameter topicName is required and cannot be null."));
    }
    if (subscriptionName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter subscriptionName is required and cannot be null."));
    }
    if (ruleName == null) {
        return Mono.error(new IllegalArgumentException("Parameter ruleName is required and cannot be null."));
    }
    if (this.client.getSubscriptionId() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getSubscriptionId() is required and cannot be null."));
    }
    final String accept = "application/json";
    // withContext captures the subscriber's reactor Context; the DELETE is deferred until subscription.
    return FluxUtil
        .withContext(
            context ->
                service
                    .delete(
                        this.client.getEndpoint(),
                        resourceGroupName,
                        namespaceName,
                        topicName,
                        subscriptionName,
                        ruleName,
                        this.client.getApiVersion(),
                        this.client.getSubscriptionId(),
                        accept,
                        context))
        // Merge the service client's default context into the subscriber context (read-only view).
        .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
    /**
     * Deletes an existing rule.
     *
     * @param resourceGroupName Name of the Resource group within the Azure subscription.
     * @param namespaceName The namespace name.
     * @param topicName The topic name.
     * @param subscriptionName The subscription name.
     * @param ruleName The rule name.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion, i.e. a {@code Mono} emitting the service {@code Response} with no body.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<Void>> deleteWithResponseAsync(
        String resourceGroupName,
        String namespaceName,
        String topicName,
        String subscriptionName,
        String ruleName,
        Context context) {
        // Same validation as the public overload: report the first missing required value
        // as an error signal on the returned Mono.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (namespaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter namespaceName is required and cannot be null."));
        }
        if (topicName == null) {
            return Mono.error(new IllegalArgumentException("Parameter topicName is required and cannot be null."));
        }
        if (subscriptionName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter subscriptionName is required and cannot be null."));
        }
        if (ruleName == null) {
            return Mono.error(new IllegalArgumentException("Parameter ruleName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        // Unlike the public overload, the caller supplies the Context explicitly; merge it
        // with the client's stored context and invoke the proxy directly.
        context = this.client.mergeContext(context);
        return service
            .delete(
                this.client.getEndpoint(),
                resourceGroupName,
                namespaceName,
                topicName,
                subscriptionName,
                ruleName,
                this.client.getApiVersion(),
                this.client.getSubscriptionId(),
                accept,
                context);
    }
/**
* Deletes an existing rule.
*
* @param resourceGroupName Name of the Resource group within the Azure subscription.
* @param namespaceName The namespace name.
* @param topicName The topic name.
* @param subscriptionName The subscription name.
* @param ruleName The rule name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Void> deleteAsync(
String resourceGroupName, String namespaceName, String topicName, String subscriptionName, String ruleName) {
return deleteWithResponseAsync(resourceGroupName, namespaceName, topicName, subscriptionName, ruleName)
.flatMap((Response<Void> res) -> Mono.empty());
}
/**
* Deletes an existing rule.
*
* @param resourceGroupName Name of the Resource group within the Azure subscription.
* @param namespaceName The namespace name.
* @param topicName The topic name.
* @param subscriptionName The subscription name.
* @param ruleName The rule name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void delete(
String resourceGroupName, String namespaceName, String topicName, String subscriptionName, String ruleName) {
deleteAsync(resourceGroupName, namespaceName, topicName, subscriptionName, ruleName).block();
}
/**
* Deletes an existing rule.
*
* @param resourceGroupName Name of the Resource group within the Azure subscription.
* @param namespaceName The namespace name.
* @param topicName The topic name.
* @param subscriptionName The subscription name.
* @param ruleName The rule name.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> deleteWithResponse(
String resourceGroupName,
String namespaceName,
String topicName,
String subscriptionName,
String ruleName,
Context context) {
return deleteWithResponseAsync(resourceGroupName, namespaceName, topicName, subscriptionName, ruleName, context)
.block();
}
    /**
     * Retrieves the description for the specified rule.
     *
     * @param resourceGroupName Name of the Resource group within the Azure subscription.
     * @param namespaceName The namespace name.
     * @param topicName The topic name.
     * @param subscriptionName The subscription name.
     * @param ruleName The rule name.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return description of Rule Resource along with the full HTTP {@code Response}, on
     *     successful completion of the {@code Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<RuleInner>> getWithResponseAsync(
        String resourceGroupName, String namespaceName, String topicName, String subscriptionName, String ruleName) {
        // Validate client state and required arguments up front; failures are surfaced as
        // error signals on the returned Mono, reporting the first missing value only.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (namespaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter namespaceName is required and cannot be null."));
        }
        if (topicName == null) {
            return Mono.error(new IllegalArgumentException("Parameter topicName is required and cannot be null."));
        }
        if (subscriptionName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter subscriptionName is required and cannot be null."));
        }
        if (ruleName == null) {
            return Mono.error(new IllegalArgumentException("Parameter ruleName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        // Defer the proxy call until subscription so the subscriber's Reactor context is
        // available, then append the client's stored context for downstream policies.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .get(
                            this.client.getEndpoint(),
                            resourceGroupName,
                            namespaceName,
                            topicName,
                            subscriptionName,
                            ruleName,
                            this.client.getApiVersion(),
                            this.client.getSubscriptionId(),
                            accept,
                            context))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }
    /**
     * Retrieves the description for the specified rule.
     *
     * @param resourceGroupName Name of the Resource group within the Azure subscription.
     * @param namespaceName The namespace name.
     * @param topicName The topic name.
     * @param subscriptionName The subscription name.
     * @param ruleName The rule name.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return description of Rule Resource along with the full HTTP {@code Response}, on
     *     successful completion of the {@code Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<RuleInner>> getWithResponseAsync(
        String resourceGroupName,
        String namespaceName,
        String topicName,
        String subscriptionName,
        String ruleName,
        Context context) {
        // Same validation as the public overload: report the first missing required value
        // as an error signal on the returned Mono.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (namespaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter namespaceName is required and cannot be null."));
        }
        if (topicName == null) {
            return Mono.error(new IllegalArgumentException("Parameter topicName is required and cannot be null."));
        }
        if (subscriptionName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter subscriptionName is required and cannot be null."));
        }
        if (ruleName == null) {
            return Mono.error(new IllegalArgumentException("Parameter ruleName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        // The caller supplies the Context explicitly; merge it with the client's stored
        // context and invoke the proxy directly.
        context = this.client.mergeContext(context);
        return service
            .get(
                this.client.getEndpoint(),
                resourceGroupName,
                namespaceName,
                topicName,
                subscriptionName,
                ruleName,
                this.client.getApiVersion(),
                this.client.getSubscriptionId(),
                accept,
                context);
    }
/**
* Retrieves the description for the specified rule.
*
* @param resourceGroupName Name of the Resource group within the Azure subscription.
* @param namespaceName The namespace name.
* @param topicName The topic name.
* @param subscriptionName The subscription name.
* @param ruleName The rule name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return description of Rule Resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<RuleInner> getAsync(
String resourceGroupName, String namespaceName, String topicName, String subscriptionName, String ruleName) {
return getWithResponseAsync(resourceGroupName, namespaceName, topicName, subscriptionName, ruleName)
.flatMap(
(Response<RuleInner> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Retrieves the description for the specified rule.
*
* @param resourceGroupName Name of the Resource group within the Azure subscription.
* @param namespaceName The namespace name.
* @param topicName The topic name.
* @param subscriptionName The subscription name.
* @param ruleName The rule name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return description of Rule Resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public RuleInner get(
String resourceGroupName, String namespaceName, String topicName, String subscriptionName, String ruleName) {
return getAsync(resourceGroupName, namespaceName, topicName, subscriptionName, ruleName).block();
}
/**
* Retrieves the description for the specified rule.
*
* @param resourceGroupName Name of the Resource group within the Azure subscription.
* @param namespaceName The namespace name.
* @param topicName The topic name.
* @param subscriptionName The subscription name.
* @param ruleName The rule name.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return description of Rule Resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RuleInner> getWithResponse(
String resourceGroupName,
String namespaceName,
String topicName,
String subscriptionName,
String ruleName,
Context context) {
return getWithResponseAsync(resourceGroupName, namespaceName, topicName, subscriptionName, ruleName, context)
.block();
}
    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the response of the List rule operation, as a single {@code PagedResponse} page.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<RuleInner>> listBySubscriptionsNextSinglePageAsync(String nextLink) {
        // Validate inputs; failures are surfaced as error signals on the returned Mono.
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        // Call the follow-up-page endpoint, then repackage the raw response as a
        // PagedResponseBase exposing the item list and the link to the next page.
        return FluxUtil
            .withContext(
                context -> service.listBySubscriptionsNext(nextLink, this.client.getEndpoint(), accept, context))
            .<PagedResponse<RuleInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }
    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the response of the List rule operation, as a single {@code PagedResponse} page.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<RuleInner>> listBySubscriptionsNextSinglePageAsync(String nextLink, Context context) {
        // Validate inputs; failures are surfaced as error signals on the returned Mono.
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        // The caller supplies the Context explicitly; merge it with the client's stored
        // context, call the follow-up-page endpoint, and repackage as a paged response.
        context = this.client.mergeContext(context);
        return service
            .listBySubscriptionsNext(nextLink, this.client.getEndpoint(), accept, context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.siyeh.ig.logging;
import com.intellij.codeInspection.LocalInspectionTool;
import com.siyeh.ig.LightJavaInspectionTestCase;
/**
 * Tests for {@code PlaceholderCountMatchesArgumentCountInspection}: verifies that logging
 * calls whose number of {@code {}} placeholders disagrees with the number of supplied
 * arguments are flagged, and that legitimate patterns (trailing throwable, array
 * arguments, escaped braces, non-logger lookalikes) are left alone. The expected warning
 * is embedded in each snippet between {@code /*...*}{@code /} and {@code /**}{@code /} markers.
 */
@SuppressWarnings("PlaceholderCountMatchesArgumentCount")
public class PlaceholderCountMatchesArgumentCountInspectionTest extends LightJavaInspectionTestCase {

  @Override
  protected LocalInspectionTool getInspection() {
    return new PlaceholderCountMatchesArgumentCountInspection();
  }

  // Minimal source-level stand-ins for the SLF4J and Log4j2 APIs so the snippets
  // compile inside the test fixture without the real logging jars on the classpath.
  @Override
  protected String[] getEnvironmentClasses() {
    return new String[]{
      "package org.slf4j; public interface Logger { void info(String format, Object... arguments); }",
      "package org.slf4j; public class LoggerFactory { public static Logger getLogger(Class clazz) { return null; }}",
      "package org.apache.logging.log4j;" +
      "import org.apache.logging.log4j.util.Supplier;" +
      "public interface Logger {" +
      "  void info(String message, Object... params);" +
      "  void fatal(String message, Object... params);" +
      "  void error(Supplier<?> var1, Throwable var2);" +
      "  LogBuilder atInfo();" +
      "  LogBuilder atFatal();" +
      "  LogBuilder atError();" +
      "}",
      "package org.apache.logging.log4j;" +
      "public class LogManager {" +
      "  public static Logger getLogger() {" +
      "    return null;" +
      "  }" +
      "}",
      "package org.apache.logging.log4j.util;" +
      "public interface Supplier<T> {" +
      "  T get();" +
      "}",
      "package org.apache.logging.log4j;" +
      "import org.apache.logging.log4j.util.Supplier;" +
      "public interface LogBuilder {" +
      "  public void log(String format, Object p0);" +
      "  public void log(String format, Object... params);" +
      "  public void log(String format, Supplier<?>... params);" +
      "}"
    };
  }

  // Log4j2 Logger: too few args is flagged, too many is flagged, supplier+throwable error() is not.
  public void testLog4j2() {
    doTest("import org.apache.logging.log4j.*;\n" +
           "class Logging {\n" +
           "  private static final Logger LOG = LogManager.getLogger();\n" +
           "  void m(int i) {\n" +
           "    LOG.info(/*Fewer arguments provided (1) than placeholders specified (3)*/\"hello? {}{}{}\"/**/, i);\n" +
           "    LOG.fatal(/*More arguments provided (1) than placeholders specified (0)*/\"you got me \"/**/, i);\n" +
           "    LOG.error(() -> \"\", new Exception());\n" +
           "  }\n" +
           "}");
  }

  // Log4j2 fluent LogBuilder API: the same count checks apply, including Supplier varargs.
  public void testLog4j2LogBuilder() {
    doTest("import org.apache.logging.log4j.*;\n" +
           "class Logging {\n" +
           "  private static final Logger LOG = LogManager.getLogger();\n" +
           "  void m(int i) {\n" +
           "    LOG.atInfo().log(/*Fewer arguments provided (1) than placeholders specified (3)*/\"hello? {}{}{}\"/**/, i);\n" +
           "    LOG.atFatal().log(/*More arguments provided (2) than placeholders specified (0)*/\"you got me \"/**/, i, i);\n" +
           "    LOG.atError().log(/*More arguments provided (1) than placeholders specified (0)*/\"what does the supplier say? \"/**/, () -> \"\");\n" +
           "  }\n" +
           "}");
  }

  // A single exception argument is treated as the throwable, not a placeholder argument.
  public void testOneExceptionArgument() {
    doTest("import org.slf4j.*;" +
           "class X {" +
           "  void foo() {" +
           "    RuntimeException e = new RuntimeException();" +
           "    LoggerFactory.getLogger(X.class).info(/*Fewer arguments provided (0) than placeholders specified (1)*/\"this: {}\"/**/, e);" +
           "  }" +
           "}");
  }

  // Exception filling the last of two placeholders is accepted.
  public void testExceptionTwoPlaceholders() {
    doTest("import org.slf4j.*;" +
           "class X {" +
           "  void foo() {" +
           "    RuntimeException e = new RuntimeException();" +
           "    LoggerFactory.getLogger(X.class).info(\"1: {} e: {}\", 1, e);" +
           "  }" +
           "}");
  }

  // Three placeholders but only one value plus the exception: flagged as too few.
  public void testExceptionThreePlaceholder() {
    doTest("import org.slf4j.*;" +
           "class X {" +
           "  void foo() {" +
           "    RuntimeException e = new RuntimeException();" +
           "    LoggerFactory.getLogger(X.class).info(/*Fewer arguments provided (1) than placeholders specified (3)*/\"1: {} {} {}\"/**/, 1, e);" +
           "  }" +
           "}");
  }

  // Matching counts produce no warning.
  public void testNoWarn() {
    doTest("import org.slf4j.*;\n" +
           "class X {\n" +
           "  void foo() {\n" +
           "    Logger logger = LoggerFactory.getLogger(X.class);\n" +
           "    logger.info(\"string {}\", 1);\n" +
           "  }\n" +
           "}"
    );
  }

  // More placeholders than arguments: flagged.
  public void testMorePlaceholders() {
    doTest("import org.slf4j.*;\n" +
           "class X {\n" +
           "  void foo() {\n" +
           "    Logger logger = LoggerFactory.getLogger(X.class);\n" +
           "    logger.info(/*Fewer arguments provided (1) than placeholders specified (2)*/\"string {}{}\"/**/, 1);\n" +
           "  }\n" +
           "}"
    );
  }

  // More arguments than placeholders: flagged.
  public void testFewerPlaceholders() {
    doTest("import org.slf4j.*;\n" +
           "class X {\n" +
           "  void foo() {\n" +
           "    Logger logger = LoggerFactory.getLogger(X.class);\n" +
           "    logger.info(/*More arguments provided (1) than placeholders specified (0)*/\"string\"/**/, 1);\n" +
           "  }\n" +
           "}"
    );
  }

  // Trailing throwable after matched placeholders is accepted.
  public void testThrowable() {
    doTest("import org.slf4j.*;\n" +
           "class X {\n" +
           "  void foo() {\n" +
           "    Logger logger = LoggerFactory.getLogger(X.class);\n" +
           "    logger.info(\"string {}\", 1, new RuntimeException());\n" +
           "  }\n" +
           "}"
    );
  }

  // A multi-catch exception variable passed as the sole argument is not flagged.
  public void testMultiCatch() {
    doTest("import org.slf4j.*;\n" +
           "class X {\n" +
           "  private static final Logger logger = LoggerFactory.getLogger( X.class );\n" +
           "  public void multiCatch() {\n" +
           "    try {\n" +
           "      method();\n" +
           "    } catch ( FirstException|SecondException e ) {\n" +
           "      logger.info( \"failed with first or second\", e );\n" +
           "    }\n" +
           "  }\n" +
           "  public void method() throws FirstException, SecondException {}\n" +
           "  public static class FirstException extends Exception { }\n" +
           "  public static class SecondException extends Exception { }\n" +
           "}");
  }

  // Methods that merely look like SLF4J on unrelated types must not be flagged.
  public void testNoSlf4j() {
    doTest("class FalsePositiveSLF4J {\n" +
           "  public void method( DefinitelyNotSLF4J definitelyNotSLF4J ) {\n" +
           "    definitelyNotSLF4J.info( \"not a trace message\", \"not a trace parameter\" );\n" +
           "  }\n" +
           "  public interface DefinitelyNotSLF4J {\n" +
           "    void info( String firstParameter, Object secondParameter );\n" +
           "  }\n" +
           "}");
  }

  // An explicit Object[] whose length matches the placeholder count is accepted.
  public void testArrayArgument() {
    doTest("import org.slf4j.*;" +
           "class X {" +
           "  Logger LOG = LoggerFactory.getLogger( X.class );" +
           "  void m(String a, int b, Object c) {" +
           "    LOG.info(\"schnizzle {} for blurb {} in quark {}\", new Object[] {a, b, c});" +
           "  }" +
           "}");
  }

  // An Object[] with a trailing exception element is accepted.
  public void testArrayWithException() {
    doTest("import org.slf4j.*;" +
           "class X {" +
           "  void z(int i) {" +
           "    RuntimeException e = new RuntimeException();" +
           "    LoggerFactory.getLogger(X.class).info(\"Freak mulching accident {} : {}\", new Object[] {i, e.getMessage(), e});" +
           "  }" +
           "}");
  }

  // An array variable of unknown length cannot be counted, so no warning is issued.
  public void testUncountableArray() {
    doTest("import org.slf4j.*;" +
           "class X {" +
           "  Logger LOG = LoggerFactory.getLogger( X.class );" +
           "  void m(Object[] objects) {" +
           "    LOG.info(\"deep cover {} quantum disstressor {} at light speed {}\", objects);" +
           "  }" +
           "}");
  }

  // Placeholders inside a constant referenced as the format string are still counted.
  public void testConstant() {
    doTest("import org.slf4j.*;" +
           "class X {" +
           "  Logger LOG = LoggerFactory.getLogger(X.class);" +
           "  private static final String message = \"HELLO {}\";" +
           "  void m() {" +
           "    LOG.info(/*Fewer arguments provided (0) than placeholders specified (1)*/message/**/);" +
           "  }" +
           "}");
  }

  // Placeholders assembled from a concatenation of constants and literals are counted.
  public void testNonConstantString() {
    doTest("import org.slf4j.*;" +
           "class X {" +
           "  Logger LOG = LoggerFactory.getLogger(X.class);" +
           "  private static final String S = \"{}\";" +
           "  void m() {" +
           "    LOG.info(/*Fewer arguments provided (0) than placeholders specified (3)*/S +\"{}\" + (1 + 2) + '{' + '}' +Integer.class/**/);" +
           "  }" +
           "}");
  }

  // An even number of backslashes before {} leaves the placeholder active.
  public void testEscaping1() {
    doTest("import org.slf4j.*;" +
           "class X {" +
           "  Logger LOG = LoggerFactory.getLogger(X.class);" +
           "  void m() {" +
           "    LOG.info(\"Created registry key {}\\\\\\\\{}\", 1, 2);" +
           "  }" +
           "}");
  }

  // A single backslash escapes the second {}, so only one placeholder remains.
  public void testEscaping2() {
    doTest("import org.slf4j.*;" +
           "class X {" +
           "  Logger LOG = LoggerFactory.getLogger(X.class);" +
           "  void m() {" +
           "    LOG.info(/*More arguments provided (2) than placeholders specified (1)*/\"Created registry key {}\\\\{}\"/**/, 1, 2);" +
           "  }" +
           "}");
  }

  // Null or empty format strings with an exception argument are not flagged.
  public void testNullArgument() {
    doTest("import org.slf4j.*;" +
           "class X {" +
           "  Logger LOG = LoggerFactory.getLogger(X.class);" +
           "  void m() {" +
           "    LOG.info(null, new Exception());" +
           "    LOG.info(\"\", new Exception());" +
           "  }" +
           "}");
  }
}
| |
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kns.web.struts.action;
import org.apache.commons.beanutils.ConvertUtils;
import org.apache.commons.beanutils.converters.BigDecimalConverter;
import org.apache.commons.beanutils.converters.BigIntegerConverter;
import org.apache.commons.beanutils.converters.BooleanConverter;
import org.apache.commons.beanutils.converters.ByteConverter;
import org.apache.commons.beanutils.converters.CharacterConverter;
import org.apache.commons.beanutils.converters.DoubleConverter;
import org.apache.commons.beanutils.converters.FloatConverter;
import org.apache.commons.beanutils.converters.IntegerConverter;
import org.apache.commons.beanutils.converters.LongConverter;
import org.apache.commons.beanutils.converters.ShortConverter;
import org.apache.commons.collections.iterators.IteratorEnumeration;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.struts.action.ActionServlet;
import org.kuali.rice.core.api.config.ConfigurationException;
import org.kuali.rice.core.api.config.property.ConfigContext;
import org.kuali.rice.core.framework.config.module.ModuleConfigurer;
import org.kuali.rice.core.framework.config.module.WebModuleConfiguration;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
/**
 * Struts action servlet that dynamically adds the Struts module configurations of the
 * installed Rice modules to its own init parameters, and optionally forces a character
 * encoding onto each request/response pair.
 *
 * @deprecated KNS Struts deprecated, use KRAD and the Spring MVC framework.
 */
@Deprecated
public class KualiActionServlet extends ActionServlet {
    private static final Logger LOG = Logger.getLogger(KualiActionServlet.class);

    // KULRICE-8176: KFS Notes/Attachments Tab Functionality for Note Text Error - Visible/Special characters, spaces, or tabs
    // Character encoding forced onto each request/response in process(); blank/null means "leave as-is".
    private String parameterEncoding = "";

    /**
     * <p>Initialize other global characteristics of the controller servlet.</p>
     * Overridden to remove the ConvertUtils.deregister() command that caused problems
     * with the concurrent data dictionary load. (KULRNE-4405)
     *
     * @exception ServletException if we cannot initialize these resources
     */
    @Override
    protected void initOther() throws ServletException {
        String value = getServletConfig().getInitParameter("config");
        if (value != null) {
            config = value;
        }

        // Backwards compatibility for form beans of Java wrapper classes
        // Set to true for strict Struts 1.0 compatibility
        value = getServletConfig().getInitParameter("convertNull");
        if ("true".equalsIgnoreCase(value)
            || "yes".equalsIgnoreCase(value)
            || "on".equalsIgnoreCase(value)
            || "y".equalsIgnoreCase(value)
            || "1".equalsIgnoreCase(value)) {
            convertNull = true;
        }

        if (convertNull) {
            // Register converters whose default is null (instead of zero/false) so that
            // blank form fields convert to null wrapper values.
            ConvertUtils.register(new BigDecimalConverter(null), BigDecimal.class);
            ConvertUtils.register(new BigIntegerConverter(null), BigInteger.class);
            ConvertUtils.register(new BooleanConverter(null), Boolean.class);
            ConvertUtils.register(new ByteConverter(null), Byte.class);
            ConvertUtils.register(new CharacterConverter(null), Character.class);
            ConvertUtils.register(new DoubleConverter(null), Double.class);
            ConvertUtils.register(new FloatConverter(null), Float.class);
            ConvertUtils.register(new IntegerConverter(null), Integer.class);
            ConvertUtils.register(new LongConverter(null), Long.class);
            ConvertUtils.register(new ShortConverter(null), Short.class);
        }

        // KULRICE-8176: KFS Notes/Attachments Tab Functionality for Note Text Error - Visible/Special characters, spaces, or tabs
        // May be null when the init parameter is absent; process() checks with isNotBlank.
        parameterEncoding = getServletConfig().getInitParameter("PARAMETER_ENCODING");
    }

    // Lazily built wrapper that augments the container-provided ServletConfig with the
    // Struts configuration of each locally-running Rice module.
    KualiActionServletConfig serverConfigOverride = null;

    /**
     * Returns a {@link KualiActionServletConfig} wrapper around the container's config.
     * NOTE(review): the lazy initialization is unsynchronized; servlet initialization is
     * normally single-threaded, but concurrent first calls could build the wrapper twice
     * (harmless, as the wrapper is stateless after construction) — confirm if this servlet
     * is ever accessed before init completes.
     */
    @Override
    public ServletConfig getServletConfig() {
        if ( serverConfigOverride == null ) {
            ServletConfig sConfig = super.getServletConfig();
            if ( sConfig == null ) {
                return null;
            }
            serverConfigOverride = new KualiActionServletConfig(sConfig);
        }
        return serverConfigOverride;
    }

    /**
     * A custom ServletConfig implementation which dynamically includes web content based on the installed modules in the RiceConfigurer object.
     * Accomplishes this by implementing custom
     * {@link #getInitParameter(String)} and {@link #getInitParameterNames()} methods.
     *
     * <p>Declared {@code static}: it uses no state from the enclosing servlet, so holding
     * the implicit outer-instance reference of a non-static inner class would be wasteful.
     */
    private static class KualiActionServletConfig implements ServletConfig {

        private final ServletConfig wrapped;
        private final Map<String,String> initParameters = new HashMap<String, String>();

        public KualiActionServletConfig(ServletConfig wrapped) {
            this.wrapped = wrapped;
            // copy out all the init parameters so they can be augmented
            @SuppressWarnings("unchecked")
            final Enumeration<String> initParameterNames = wrapped.getInitParameterNames();
            while ( initParameterNames.hasMoreElements() ) {
                String paramName = initParameterNames.nextElement();
                initParameters.put( paramName, wrapped.getInitParameter(paramName) );
            }
            // loop over the installed modules, adding their struts configuration to the servlet
            // if they have a web interface
            final Collection<ModuleConfigurer> riceModules = ModuleConfigurer.getCurrentContextConfigurers();
            if ( LOG.isInfoEnabled() ) {
                LOG.info( "Configuring init parameters of the KualiActionServlet from riceModules: " + riceModules );
            }
            for ( ModuleConfigurer module : riceModules ) {
                // only install the web configuration if the module has web content
                // and it is running in a "local" mode
                // in "embedded" or "remote" modes, the UIs are hosted on a central server
                if ( module.shouldRenderWebInterface() ) {
                    WebModuleConfiguration webModuleConfiguration = module.getWebModuleConfiguration();
                    if (webModuleConfiguration == null) {
                        throw new ConfigurationException("Attempting to load WebModuleConfiguration for module '" + module.getModuleName() + "' but no configuration was provided!");
                    }
                    if ( LOG.isInfoEnabled() ) {
                        LOG.info( "Configuring Web Content for Module: " + webModuleConfiguration.getModuleName()
                                + " / " + webModuleConfiguration.getWebModuleStrutsConfigName()
                                + " / " + webModuleConfiguration.getWebModuleStrutsConfigurationFiles()
                                + " / Base URL: " + webModuleConfiguration.getWebModuleBaseUrl() );
                    }
                    // never clobber an explicitly configured init parameter of the same name
                    if ( !initParameters.containsKey( webModuleConfiguration.getWebModuleStrutsConfigName() ) ) {
                        initParameters.put( webModuleConfiguration.getWebModuleStrutsConfigName(), webModuleConfiguration.getWebModuleStrutsConfigurationFiles() );
                    }
                }
            }
        }

        @Override
        public String getInitParameter(String name) {
            return initParameters.get(name);
        }

        @Override
        @SuppressWarnings("unchecked")
        public Enumeration<String> getInitParameterNames() {
            return new IteratorEnumeration( initParameters.keySet().iterator() );
        }

        @Override
        public ServletContext getServletContext() {
            return wrapped.getServletContext();
        }

        @Override
        public String getServletName() {
            return wrapped.getServletName();
        }
    }

    /**
     * KULRICE-8176: KFS Notes/Attachments Tab Functionality for Note Text Error - Visible/Special characters, spaces, or tabs
     *
     * Applies the configured PARAMETER_ENCODING to the request and response (when one is
     * set) before delegating to the standard Struts request processing.
     *
     * @see org.apache.struts.action.ActionServlet#process(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
     */
    @Override
    protected void process(HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException {
        if (StringUtils.isNotBlank(parameterEncoding)) {
            request.setCharacterEncoding(parameterEncoding);
            response.setCharacterEncoding(parameterEncoding);
        }

        super.process(request, response);
    }
}
| |
/*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.orient.core.sql.operator;
import com.orientechnologies.common.collection.OMultiValue;
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.db.record.ORecordElement;
import com.orientechnologies.orient.core.exception.ORecordNotFoundException;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.query.OQueryRuntimeValueMulti;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterCondition;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterItemFieldAll;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterItemFieldAny;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* TRAVERSE operator.
*
* @author Luca Garulli
*
*/
public class OQueryOperatorTraverse extends OQueryOperatorEqualityNotNulls {
private int startDeepLevel = 0; // FIRST
private int endDeepLevel = -1; // INFINITE
private String[] cfgFields;
public OQueryOperatorTraverse() {
super("TRAVERSE", 5, false, 1, true);
}
public OQueryOperatorTraverse(final int startDeepLevel, final int endDeepLevel, final String[] iFieldList) {
this();
this.startDeepLevel = startDeepLevel;
this.endDeepLevel = endDeepLevel;
this.cfgFields = iFieldList;
}
@Override
public String getSyntax() {
return "<left> TRAVERSE[(<begin-deep-level> [,<maximum-deep-level> [,<fields>]] )] ( <conditions> )";
}
@Override
protected boolean evaluateExpression(final OIdentifiable iRecord, final OSQLFilterCondition iCondition, final Object iLeft,
final Object iRight, final OCommandContext iContext) {
final OSQLFilterCondition condition;
final Object target;
if (iCondition.getLeft() instanceof OSQLFilterCondition) {
condition = (OSQLFilterCondition) iCondition.getLeft();
target = iRight;
} else {
condition = (OSQLFilterCondition) iCondition.getRight();
target = iLeft;
}
final Set<ORID> evaluatedRecords = new HashSet<ORID>();
return traverse(target, condition, 0, evaluatedRecords, iContext);
}
@SuppressWarnings("unchecked")
private boolean traverse(Object iTarget, final OSQLFilterCondition iCondition, final int iLevel,
final Set<ORID> iEvaluatedRecords, final OCommandContext iContext) {
if (endDeepLevel > -1 && iLevel > endDeepLevel)
return false;
if (iTarget instanceof OIdentifiable) {
if (iEvaluatedRecords.contains(((OIdentifiable) iTarget).getIdentity()))
// ALREADY EVALUATED
return false;
// TRANSFORM THE ORID IN ODOCUMENT
iTarget = ((OIdentifiable) iTarget).getRecord();
}
if (iTarget instanceof ODocument) {
final ODocument target = (ODocument) iTarget;
iEvaluatedRecords.add(target.getIdentity());
if (target.getInternalStatus() == ORecordElement.STATUS.NOT_LOADED)
try {
target.load();
} catch (final ORecordNotFoundException e) {
// INVALID RID
return false;
}
if (iLevel >= startDeepLevel && (Boolean) iCondition.evaluate(target, null, iContext) == Boolean.TRUE)
return true;
// TRAVERSE THE DOCUMENT ITSELF
if (cfgFields != null)
for (final String cfgField : cfgFields) {
if (cfgField.equalsIgnoreCase(OSQLFilterItemFieldAny.FULL_NAME)) {
// ANY
for (final String fieldName : target.fieldNames())
if (traverse(target.rawField(fieldName), iCondition, iLevel + 1, iEvaluatedRecords, iContext))
return true;
} else if (cfgField.equalsIgnoreCase(OSQLFilterItemFieldAny.FULL_NAME)) {
// ALL
for (final String fieldName : target.fieldNames())
if (!traverse(target.rawField(fieldName), iCondition, iLevel + 1, iEvaluatedRecords, iContext))
return false;
return true;
} else {
if (traverse(target.rawField(cfgField), iCondition, iLevel + 1, iEvaluatedRecords, iContext))
return true;
}
}
} else if (iTarget instanceof OQueryRuntimeValueMulti) {
final OQueryRuntimeValueMulti multi = (OQueryRuntimeValueMulti) iTarget;
for (final Object o : multi.getValues()) {
if (traverse(o, iCondition, iLevel + 1, iEvaluatedRecords, iContext) == Boolean.TRUE)
return true;
}
} else if (iTarget instanceof Map<?, ?>) {
final Map<Object, Object> map = (Map<Object, Object>) iTarget;
for (final Object o : map.values()) {
if (traverse(o, iCondition, iLevel + 1, iEvaluatedRecords, iContext) == Boolean.TRUE)
return true;
}
} else if (OMultiValue.isMultiValue(iTarget)) {
final Iterable<Object> collection = OMultiValue.getMultiValueIterable(iTarget, false);
for (final Object o : collection) {
if (traverse(o, iCondition, iLevel + 1, iEvaluatedRecords, iContext) == Boolean.TRUE)
return true;
}
} else if (iTarget instanceof Iterator) {
final Iterator iterator = (Iterator) iTarget;
while (iterator.hasNext()) {
if (traverse(iterator.next(), iCondition, iLevel + 1, iEvaluatedRecords, iContext) == Boolean.TRUE)
return true;
}
}
return false;
}
@Override
public OQueryOperator configure(final List<String> iParams) {
if (iParams == null)
return this;
final int start = !iParams.isEmpty() ? Integer.parseInt(iParams.get(0)) : startDeepLevel;
final int end = iParams.size() > 1 ? Integer.parseInt(iParams.get(1)) : endDeepLevel;
String[] fields = new String[] { "any()" };
if (iParams.size() > 2) {
String f = iParams.get(2);
if (f.startsWith("'") || f.startsWith("\""))
f = f.substring(1, f.length() - 1);
fields = f.split(",");
}
return new OQueryOperatorTraverse(start, end, fields);
}
public int getStartDeepLevel() {
return startDeepLevel;
}
public int getEndDeepLevel() {
return endDeepLevel;
}
public String[] getCfgFields() {
return cfgFields;
}
@Override
public OIndexReuseType getIndexReuseType(final Object iLeft, final Object iRight) {
return OIndexReuseType.NO_INDEX;
}
@Override
public String toString() {
return String.format("%s(%d,%d,%s)", keyword, startDeepLevel, endDeepLevel, Arrays.toString(cfgFields));
}
@Override
public ORID getBeginRidRange(Object iLeft, Object iRight) {
return null;
}
@Override
public ORID getEndRidRange(Object iLeft, Object iRight) {
return null;
}
}
| |
package com.github.flowersinthesand.spheres.portal;
import java.io.IOException;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.net.URI;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.flowersinthesand.spheres.Action;
import com.github.flowersinthesand.spheres.Actions;
import com.github.flowersinthesand.spheres.ConcurrentActions;
import com.github.flowersinthesand.spheres.SerializableAction;
import com.github.flowersinthesand.spheres.SessionBaseSupport;
import com.github.flowersinthesand.spheres.Socket;
import com.github.flowersinthesand.spheres.VoidAction;
/**
 * Session implementation layered on a {@link Socket}: JSON frames received as
 * text are decoded and dispatched to handlers registered via {@link #on}, and
 * outgoing events are serialized by {@link #send}, optionally carrying a reply
 * callback that is resolved when the remote side answers with a "reply" event.
 */
public class DefaultSession extends SessionBaseSupport implements Session {
  // Jackson's ObjectMapper is thread-safe and expensive to build: cache it as
  // static final (it was previously a mutable static).
  private static final ObjectMapper mapper = new ObjectMapper();
  // Registered event handlers keyed by event type; assigned once, so final.
  private final ConcurrentMap<String, ActionsHolder<?>> holders = new ConcurrentHashMap<>();
  // Pending reply callbacks keyed by the id of the event awaiting an answer.
  private final ConcurrentMap<String, Action<Object>> replies = new ConcurrentHashMap<>();

  /**
   * Wires this session to the socket: relays socket close to the "close" event,
   * decodes incoming text frames and fires the matching handlers, and installs
   * the internal "reply" handler that resolves pending reply callbacks.
   *
   * @param socket the transport socket this session wraps
   */
  public DefaultSession(Socket socket) {
    super(socket);
    final Actions<Void> closeActions = new ConcurrentActions<Void>(new Actions.Options().once(true).memory(true));
    holders.put("close", new ActionsHolder<>(Void.class, closeActions));
    socket.closeAction(new VoidAction() {
      @Override
      public void on() {
        closeActions.fire();
      }
    });
    socket.textAction(new Action<String>() {
      @Override
      public void on(String text) {
        // Each frame is a JSON object: { id, type, data, reply }.
        Map<String, Object> m;
        try {
          m = mapper.readValue(text, new TypeReference<Map<String, Object>>() {});
        } catch (IOException e) {
          throw new RuntimeException(e);
        }
        @SuppressWarnings("unchecked")
        ActionsHolder<Object> holder = (ActionsHolder<Object>) holders.get((String) m.get("type"));
        if (holder != null) {
          Object data;
          if (Reply.class == holder.dataType.getRawClass()) {
            // Handler declared Action<Reply<T>>: hand it a reply-capable wrapper
            // whose payload is converted to T (the contained type argument).
            data = new SimpleReply(m, holder.dataType.containedType(0));
          } else {
            data = mapper.convertValue(m.get("data"), holder.dataType);
          }
          holder.actions.fire(data);
        }
      }
    });
    on("reply", new Action<Map<String, Object>>() {
      @Override
      public void on(Map<String, Object> data) {
        // The remote side answered an event we sent with a reply callback:
        // look it up by event id, convert the payload to the callback's
        // declared type, and invoke it exactly once (remove before firing).
        String eventId = (String) data.get("id");
        Object response = data.get("data");
        Action<Object> reply = replies.remove(eventId);
        if (reply != null) {
          reply.on(mapper.convertValue(response, mapper.constructType(findRequiredDataType(reply.getClass()))));
        }
      }
    });
  }

  /**
   * Reply handed to handlers of events that requested an answer. Sends at most
   * one "reply" event back; a second done/fail is an error.
   */
  private class SimpleReply implements Reply<Object> {
    private final AtomicBoolean sent = new AtomicBoolean();
    private final Map<String, Object> m;
    private final Object data;

    public SimpleReply(Map<String, Object> m, JavaType type) {
      this.m = m;
      data = mapper.convertValue(m.get("data"), type);
    }

    @Override
    public Object data() {
      return data;
    }

    @Override
    public void done() {
      done(null);
    }

    @Override
    public void done(Object value) {
      sendReply(value, false);
    }

    @Override
    public void fail() {
      fail(null);
    }

    @Override
    public void fail(Object value) {
      sendReply(value, true);
    }

    // Sends the reply exactly once; compareAndSet guards against double-send.
    private void sendReply(Object value, boolean exception) {
      if (sent.compareAndSet(false, true)) {
        Map<String, Object> result = new LinkedHashMap<String, Object>();
        result.put("id", m.get("id"));
        result.put("data", value);
        result.put("exception", exception);
        DefaultSession.this.send("reply", result);
      } else {
        throw new IllegalStateException("The reply for the event [" + m + "] is already sent");
      }
    }
  }

  @Override
  public String id() {
    return socket.id();
  }

  @Override
  public URI uri() {
    return socket.uri();
  }

  @Override
  public Map<String, List<String>> params() {
    return socket.params();
  }

  /**
   * Registers a handler for the given event. The handler's declared data type
   * (resolved reflectively) drives JSON conversion of incoming payloads.
   */
  @SuppressWarnings({ "unchecked", "rawtypes" })
  @Override
  public Session on(String event, Action<?> action) {
    ActionsHolder<?> holder = holders.get(event);
    if (holder == null) {
      Type type = findRequiredDataType(action.getClass());
      ActionsHolder<?> value = new ActionsHolder<>(type, new ConcurrentActions<>(new Actions.Options()));
      // putIfAbsent keeps the race between concurrent first registrations safe.
      holder = holders.putIfAbsent(event, value);
      if (holder == null) {
        holder = value;
      }
    }
    holder.actions.add((Action) action);
    return this;
  }

  @SuppressWarnings({ "unchecked", "rawtypes" })
  @Override
  public Session off(String event, Action<?> action) {
    ActionsHolder<?> holder = holders.get(event);
    if (holder != null) {
      // To delete the given action and its proxy added by once
      // (ActionProxy.equals unwraps to the underlying action).
      holder.actions.remove(new ActionProxy(action));
    }
    return this;
  }

  /** Registers a handler that removes itself after its first invocation. */
  @SuppressWarnings({ "unchecked", "rawtypes" })
  @Override
  public Session once(final String event, final Action<?> action) {
    return on(event, new ActionProxy(action) {
      @Override
      public void on(Object object) {
        off(event, this);
        super.on(object);
      }
    });
  }

  /**
   * Walks the class hierarchy looking for the type argument T of an implemented
   * Action&lt;T&gt; or SerializableAction&lt;T&gt; interface; null if none is found.
   */
  private Type findRequiredDataType(Class<?> clazz) {
    for (Type genericInterface : clazz.getGenericInterfaces()) {
      if (genericInterface instanceof ParameterizedType) {
        ParameterizedType parameterizedType = (ParameterizedType) genericInterface;
        Type rawType = parameterizedType.getRawType();
        if (rawType == Action.class || rawType == SerializableAction.class) {
          return parameterizedType.getActualTypeArguments()[0];
        }
      }
    }
    Class<?> superClass = clazz.getSuperclass();
    return superClass != null ? findRequiredDataType(superClass) : null;
  }

  @Override
  public Set<String> tags() {
    return socket.tags();
  }

  @Override
  public Session tag(String... name) {
    socket.tags().addAll(Arrays.asList(name));
    return this;
  }

  @Override
  public Session untag(String... name) {
    socket.tags().removeAll(Arrays.asList(name));
    return this;
  }

  @Override
  public Session send(String event) {
    return send(event, null);
  }

  @Override
  public Session send(String event, Object data) {
    return send(event, data, null);
  }

  /**
   * Serializes and sends an event; when a reply callback is supplied it is
   * parked in {@link #replies} until the remote "reply" event arrives.
   */
  @SuppressWarnings("unchecked")
  @Override
  public Session send(String event, Object data, Action<?> reply) {
    String eventId = UUID.randomUUID().toString();
    Map<String, Object> message = new LinkedHashMap<String, Object>();
    message.put("id", eventId);
    message.put("type", event);
    message.put("data", data);
    message.put("reply", reply != null);
    String text;
    try {
      text = mapper.writeValueAsString(message);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
    socket.send(text);
    if (reply != null) {
      replies.put(eventId, (Action<Object>) reply);
    }
    return this;
  }

  @Override
  public void close() {
    socket.close();
  }

  /** Pairs an event's resolved payload type with its registered actions. */
  private static class ActionsHolder<T> {
    final JavaType dataType;
    final Actions<T> actions;

    public ActionsHolder(Type dataType, Actions<T> actions) {
      this.dataType = mapper.constructType(dataType);
      this.actions = actions;
    }
  }

  /**
   * Delegating wrapper whose equals/hashCode forward to the wrapped action so
   * that off() can remove a once()-installed proxy by the original action.
   */
  private static class ActionProxy<T> implements Action<T> {
    private final Action<T> action;

    public ActionProxy(Action<T> action) {
      this.action = action;
    }

    @Override
    public void on(T object) {
      action.on(object);
    }

    @Override
    public int hashCode() {
      return action.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
      if (obj instanceof ActionProxy) {
        obj = ((ActionProxy<?>) obj).action;
      }
      return action.equals(obj);
    }

    @Override
    public String toString() {
      return "Proxy for " + action.toString();
    }
  }
}
| |
/*
Copyright (c) 2010 Adrian Colomitchi
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
package com.caffeineowl.graphics.samples;
import java.awt.BasicStroke;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Stroke;
import java.awt.font.LineMetrics;
import java.awt.geom.CubicCurve2D;
import java.awt.geom.GeneralPath;
import java.awt.geom.QuadCurve2D;
import java.awt.geom.CubicCurve2D.Double;
import javax.swing.JFrame;
import com.caffeineowl.graphics.bezier.BezierUtils;
import com.caffeineowl.graphics.bezier.CubicFlatnessAlgorithm;
import com.caffeineowl.graphics.bezier.CubicSegmentConsumer;
import com.caffeineowl.graphics.bezier.CubicSubdivisionCriterion;
import com.caffeineowl.graphics.bezier.QuadFlatnessAlgorithm;
import com.caffeineowl.graphics.bezier.QuadSegmentConsumer;
import com.caffeineowl.graphics.bezier.QuadSubdivisionCriterion;
import com.caffeineowl.graphics.bezier.flatnessalgos.ConvexHullSubdivCriterion;
import com.caffeineowl.graphics.bezier.flatnessalgos.LineDefectFlatnessAlgo;
import com.caffeineowl.graphics.bezier.flatnessalgos.LineDefectSubdivCriterion;
import com.caffeineowl.graphics.bezier.flatnessalgos.SimpleConvexHullFlatness;
import com.caffeineowl.graphics.bezier.flatnessalgos.SimpleConvexHullSubdivCriterion;
/**
 * Panel that flattens the displayed Bezier curve (cubic or quadratic) into a
 * polyline by adaptive halving, using a configurable flatness criterion and
 * distance metric, and paints the polyline plus the resulting segment count.
 */
public class BezierFlatteningPanel
extends BezierPanel {

  /** Font used to print the segment count in the corner of the panel. */
  static Font numSegsFont=new Font("monospaced", Font.PLAIN, 12);

  /**
   * Collects the flattening segments (cubic or quadratic) into a GeneralPath
   * polyline and counts them; reset whenever a segment with startT == 0 arrives.
   */
  static class QuadOrCubicSegsFormatter
  implements CubicSegmentConsumer, QuadSegmentConsumer {
    GeneralPath segsChain;
    int numSegs;

    public QuadOrCubicSegsFormatter() {
      this.segsChain=new GeneralPath();
    }

    public int getNumSegs() {
      return this.numSegs;
    }

    @Override
    public void processSegment(CubicCurve2D segment, double startT, double endT) {
      // startT == 0 marks the first segment of a fresh flattening run.
      if(0.0==startT) {
        this.segsChain.reset();
        this.segsChain.moveTo(segment.getX1(), segment.getY1());
        this.numSegs=0;
      }
      this.segsChain.lineTo(segment.getX2(), segment.getY2());
      this.numSegs++;
    }

    @Override
    public void processSegment(QuadCurve2D segment, double startT, double endT) {
      // startT == 0 marks the first segment of a fresh flattening run.
      if(0.0==startT) {
        this.segsChain.reset();
        this.segsChain.moveTo(segment.getX1(), segment.getY1());
        this.numSegs=0;
      }
      this.segsChain.lineTo(segment.getX2(), segment.getY2());
      this.numSegs++;
    }

    public GeneralPath getSegsChain() {
      return this.segsChain;
    }
  }

  /**
   * Listener that re-runs adaptive-halving flattening whenever the served
   * panel's curve changes, then schedules a repaint.
   */
  static class FlattenerByAdaptiveHalving
  implements BezierPanelListener {
    BezierFlatteningPanel served;

    FlattenerByAdaptiveHalving(BezierFlatteningPanel parent) {
      this.served=parent;
    }

    @Override
    public void curveChanged(BezierPanel panel) {
      if(this.served==panel) {
        if(this.served.isRepresentingCubic()) {
          CubicSubdivisionCriterion crit=this.served.createCubicSubdivCriterion();
          CubicCurve2D c=this.served.getRepresentedCubic();
          BezierUtils.adaptiveHalving(c, crit, this.served.getSegsFormatter());
        }
        else {
          QuadSubdivisionCriterion crit=this.served.createQuadSubdivCriterion();
          QuadCurve2D c=this.served.getRepresentedQuad();
          BezierUtils.adaptiveHalving(c, crit, this.served.getSegsFormatter());
        }
        if(this.served.isDisplayable()) {
          this.served.repaint(33); // coalesce repaints within ~33ms (about 30fps)
        }
      }
    }
  }

  /** Which flatness criterion drives the subdivision. */
  public enum FlatnessAlgoType {
    SIMPLE_CONVEX_HULL,
    ROBUST_CONVEX_HULL,
    LINE_DEFECT
  };

  /** Distance metric used by the line-defect criterion (cubics only). */
  public enum DistanceType {
    EUCLID,
    MANHATTAN,
    CHEBYSHEV
  };

  FlatnessAlgoType flatnessAlgoType=FlatnessAlgoType.ROBUST_CONVEX_HULL;
  DistanceType distanceType=DistanceType.EUCLID;
  double tolerance=50.0;
  Color linePaint;
  Stroke lineStroke;
  QuadOrCubicSegsFormatter segsFormatter;
  FlattenerByAdaptiveHalving flattener;

  /**
   * @param representingCubic true to start with a cubic curve, false for a quad
   */
  BezierFlatteningPanel(boolean representingCubic) {
    super();
    this.setRepresentingCubic(representingCubic);
    // appearance: dashed magenta polyline for the flattened chain
    this.linePaint=Color.magenta;
    this.lineStroke=new BasicStroke(
      1.0f,
      BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND, 10.0f,
      new float[] {5, 5}, 0.0f
    );
    // logic
    this.segsFormatter=new QuadOrCubicSegsFormatter();
    this.flattener=new FlattenerByAdaptiveHalving(this);
    this.addCurveChangeListener(this.flattener);
    this.flattener.curveChanged(this); // ask to recompute
  }

  /** Builds the subdivision criterion for cubics from the current settings. */
  protected final CubicSubdivisionCriterion createCubicSubdivCriterion() {
    CubicSubdivisionCriterion toRet=null;
    switch(this.flatnessAlgoType) {
      case LINE_DEFECT:
        switch(this.distanceType) {
          case CHEBYSHEV:
            toRet=new LineDefectSubdivCriterion(LineDefectFlatnessAlgo.CBSV_DIST, this.tolerance);
            break;
          case MANHATTAN:
            toRet=new LineDefectSubdivCriterion(LineDefectFlatnessAlgo.MNHT_DIST, this.tolerance);
            break;
          default:
            toRet=new LineDefectSubdivCriterion(LineDefectFlatnessAlgo.EUCL_DIST, this.tolerance);
            break;
        }
        break;
      case SIMPLE_CONVEX_HULL:
        toRet=new SimpleConvexHullSubdivCriterion(this.tolerance);
        break;
      default:
        toRet=new ConvexHullSubdivCriterion(this.tolerance);
        break;
    }
    return toRet;
  }

  /**
   * Builds the subdivision criterion for quads. Note the distance type is not
   * applied here; the quad line-defect criterion uses its default metric.
   */
  protected final QuadSubdivisionCriterion createQuadSubdivCriterion() {
    QuadSubdivisionCriterion toRet=null;
    switch(this.flatnessAlgoType) {
      case LINE_DEFECT:
        toRet=new LineDefectSubdivCriterion(this.tolerance);
        break;
      case SIMPLE_CONVEX_HULL:
        toRet=new SimpleConvexHullSubdivCriterion(this.tolerance);
        break;
      default:
        toRet=new ConvexHullSubdivCriterion(this.tolerance);
        break;
    }
    return toRet;
  }

  public QuadOrCubicSegsFormatter getSegsFormatter() {
    return this.segsFormatter;
  }

  public double getTolerance() {
    return this.tolerance;
  }

  public FlatnessAlgoType getFlatnessAlgoType() {
    return this.flatnessAlgoType;
  }

  public void setFlatnessAlgoType(FlatnessAlgoType flatnessAlgoType) {
    this.flatnessAlgoType=flatnessAlgoType;
    this.flattener.curveChanged(this); // recompute
  }

  public DistanceType getDistanceType() {
    return this.distanceType;
  }

  public void setDistanceType(DistanceType distanceType) {
    this.distanceType=distanceType;
    this.flattener.curveChanged(this); // recompute
  }

  public void setTolerance(double tolerance) {
    this.tolerance=tolerance;
    this.flattener.curveChanged(this); // recompute
  }

  public Color getLinePaint() {
    return this.linePaint;
  }

  public void setLinePaint(Color linePaint) {
    this.linePaint=linePaint;
    this.repaint(50);
  }

  public Stroke getLineStroke() {
    return this.lineStroke;
  }

  public void setLineStroke(Stroke lineStroke) {
    this.lineStroke=lineStroke;
    this.repaint(50);
  }

  public GeneralPath getSegsChain() {
    return this.segsFormatter.getSegsChain();
  }

  /** Paints the curve (super), the flattened chain, and the segment count. */
  @Override
  protected void paintComponent(Graphics g) {
    super.paintComponent(g); // paint the curve
    Graphics2D g2=(Graphics2D)g;
    g2.setPaint(this.getLinePaint());
    g2.setStroke(this.getLineStroke());
    g2.draw(this.getSegsChain());
    // printing the number of resulted segments
    // (StringBuilder: this is a single-threaded local build; StringBuffer's
    // synchronization was unnecessary)
    StringBuilder numTxt=new StringBuilder("Num segs: ");
    numTxt.append(this.segsFormatter.getNumSegs());
    g2.setFont(BezierFlatteningPanel.numSegsFont);
    LineMetrics lm=
      BezierFlatteningPanel.numSegsFont.getLineMetrics(
        numTxt.toString(), g2.getFontRenderContext()
      )
    ;
    g2.setPaint(this.getDotPaint());
    g2.drawString(numTxt.toString(), 10, this.getHeight()-10-lm.getAscent());
  }

  @Override
  public void setBounds(int x, int y, int w, int h) {
    super.setBounds(x, y, w, h);
    if(null!=this.flattener) { // null while still inside the super constructor
      this.flattener.curveChanged(this); // ask a recomputation
    }
  }

  @Override
  public void setRepresentingCubic(boolean b) {
    super.setRepresentingCubic(b);
    if(null!=this.flattener) { // null while still inside the super constructor
      this.flattener.curveChanged(this); // ask a recomputation
    }
  }
}
| |
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2004, 2005, 2006, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.tool.assessment.data.dao.assessment;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sakaiproject.tool.assessment.data.dao.shared.TypeD;
import org.sakaiproject.tool.assessment.data.ifc.assessment.AssessmentAccessControlIfc;
import org.sakaiproject.tool.assessment.data.ifc.assessment.AssessmentFeedbackIfc;
import org.sakaiproject.tool.assessment.data.ifc.assessment.EvaluationModelIfc;
import org.sakaiproject.tool.assessment.data.ifc.shared.TypeIfc;
//import org.sakaiproject.tool.assessment.facade.TypeFacadeQueriesAPI;
//import org.sakaiproject.tool.assessment.services.PersistenceService;
public class AssessmentBaseData
implements java.io.Serializable,
org.sakaiproject.tool.assessment.data.ifc.assessment.AssessmentBaseIfc
{
static Logger errorLogger = LoggerFactory.getLogger("errorLogger");
private static final long serialVersionUID = 7526471155622776147L;
public static final int TITLE_LENGTH = 255;
private Long assessmentBaseId;
private Boolean isTemplate;
private Long parentId;
private String title;
private String description;
private String comments;
private Long typeId;
private Integer instructorNotification;
private Integer testeeNotification;
private Integer multipartAllowed;
private Integer status;
private String createdBy;
private Date createdDate;
private String lastModifiedBy;
private Date lastModifiedDate;
private AssessmentAccessControlIfc assessmentAccessControl;
private EvaluationModelIfc evaluationModel;
private AssessmentFeedbackIfc assessmentFeedback;
private Set assessmentMetaDataSet;
private HashMap assessmentMetaDataMap = new HashMap();
private HashMap assessmentFeedbackMap = new HashMap();
private Set securedIPAddressSet;
private Integer questionSize;
public AssessmentBaseData() {}
/**
* This is a cheap object created for holding just the Id & title. This is
* by AssessmentFacadeQueries.getTitleXXX() when we only need the Id & title
* and nothing else. This object is not used for persistence.
* @param assessmentBaseId
* @param title
*/
public AssessmentBaseData(Long assessmentBaseId, String title){
this.assessmentBaseId = assessmentBaseId;
this.title = title;
}
/**
* This is another cheap object created for holding just the Id, title &
* lastModifiedDate. This object is merely used for display. It is not used
* for persistence.
*/
public AssessmentBaseData(Long assessmentBaseId, String title,Date lastModifiedDate){
this.assessmentBaseId = assessmentBaseId;
this.title = title;
this.lastModifiedDate = lastModifiedDate;
}
/**
* This is another cheap object created for holding just the Id, title &
* lastModifiedDate. This object is merely used for display. It is not used
* for persistence.
*/
public AssessmentBaseData(Long assessmentBaseId, String title,Date lastModifiedDate, String lastModifiedBy){
this.assessmentBaseId = assessmentBaseId;
this.title = title;
this.lastModifiedDate = lastModifiedDate;
this.lastModifiedBy = lastModifiedBy;
}
public AssessmentBaseData(Long assessmentBaseId, String title,Date lastModifiedDate, String lastModifiedBy, Integer questionSize){
this.assessmentBaseId = assessmentBaseId;
this.title = title;
this.lastModifiedDate = lastModifiedDate;
this.lastModifiedBy = lastModifiedBy;
this.questionSize = questionSize;
}
public AssessmentBaseData(Long assessmentBaseId, String title,Date lastModifiedDate, Long typeId){
this.assessmentBaseId = assessmentBaseId;
this.title = title;
this.lastModifiedDate = lastModifiedDate;
this.typeId = typeId;
}
public AssessmentBaseData(Boolean isTemplate, Long parentId,
String title, String description, String comments,
Long typeId,
Integer instructorNotification, Integer testeeNotification,
Integer multipartAllowed, Integer status, String createdBy,
Date createdDate, String lastModifiedBy,
Date lastModifiedDate) {
this.isTemplate = isTemplate;
this.parentId = parentId;
this.title = title;
this.description = description;
this.comments = comments;
this.typeId = typeId;
this.instructorNotification = instructorNotification;
this.testeeNotification = testeeNotification;
this.multipartAllowed = multipartAllowed;
this.status = status;
this.createdBy = createdBy;
this.createdDate = createdDate;
this.lastModifiedBy = lastModifiedBy;
this.lastModifiedDate = lastModifiedDate;
}
public Long getAssessmentBaseId() {
return this.assessmentBaseId;
}
public void setAssessmentBaseId(Long assessmentBaseId) {
this.assessmentBaseId = assessmentBaseId;
}
public Boolean getIsTemplate() {
return this.isTemplate;
}
public void setIsTemplate(Boolean isTemplate) {
this.isTemplate = isTemplate;
}
public Long getParentId() {
return this.parentId;
}
public void setParentId(Long parentId) {
this.parentId = parentId;
}
public String getTitle() {
return this.title;
}
public void setTitle(String title) {
this.title = title;
}
public String getDescription() {
return this.description;
}
public void setDescription(String description) {
this.description = description;
}
public String getComments() {
return this.comments;
}
public void setComments(String comments) {
this.comments = comments;
}
public Integer getInstructorNotification() {
return this.instructorNotification;
}
public void setInstructorNotification(Integer instructorNotification) {
this.instructorNotification = instructorNotification;
}
public Integer getTesteeNotification() {
return this.testeeNotification;
}
public void setTesteeNotification(Integer testeeNotification) {
this.testeeNotification = testeeNotification;
}
public Integer getMultipartAllowed() {
return this.multipartAllowed;
}
public void setMultipartAllowed(Integer multipartAllowed) {
this.multipartAllowed = multipartAllowed;
}
public Long getTypeId() {
return this.typeId;
}
public void setTypeId(Long typeId) {
this.typeId = typeId;
}
public Integer getStatus() {
return this.status;
}
public void setStatus(Integer status) {
this.status = status;
}
public String getCreatedBy() {
return this.createdBy;
}
public void setCreatedBy(String createdBy) {
this.createdBy = createdBy;
}
public Date getCreatedDate() {
return this.createdDate;
}
public void setCreatedDate(Date createdDate) {
this.createdDate = createdDate;
}
public String getLastModifiedBy() {
return this.lastModifiedBy;
}
public void setLastModifiedBy(String lastModifiedBy) {
this.lastModifiedBy = lastModifiedBy;
}
public Date getLastModifiedDate() {
return this.lastModifiedDate;
}
public void setLastModifiedDate(Date lastModifiedDate) {
this.lastModifiedDate = lastModifiedDate;
}
public AssessmentAccessControlIfc getAssessmentAccessControl() {
return this.assessmentAccessControl;
}
public void setAssessmentAccessControl(AssessmentAccessControlIfc assessmentAccessControl) {
this.assessmentAccessControl = assessmentAccessControl;
}
public EvaluationModelIfc getEvaluationModel() {
return this.evaluationModel;
}
public void setEvaluationModel(EvaluationModelIfc evaluationModel) {
this.evaluationModel = evaluationModel;
}
public AssessmentFeedbackIfc getAssessmentFeedback() {
return this.assessmentFeedback;
}
public void setAssessmentFeedback(AssessmentFeedbackIfc assessmentFeedback) {
this.assessmentFeedback = assessmentFeedback;
}
public Set getSecuredIPAddressSet() {
return securedIPAddressSet;
}
public void setSecuredIPAddressSet(Set securedIPAddressSet) {
this.securedIPAddressSet = securedIPAddressSet;
}
public Set getAssessmentMetaDataSet() {
return assessmentMetaDataSet;
}
public void setAssessmentMetaDataSet(Set assessmentMetaDataSet) {
this.assessmentMetaDataSet = assessmentMetaDataSet;
this.assessmentMetaDataMap = getAssessmentMetaDataMap(assessmentMetaDataSet);
}
public HashMap getAssessmentMetaDataMap(Set assessmentMetaDataSet) {
HashMap assessmentMetaDataMap = new HashMap();
if (assessmentMetaDataSet != null){
for (Iterator i = assessmentMetaDataSet.iterator(); i.hasNext(); ) {
AssessmentMetaData assessmentMetaData = (AssessmentMetaData) i.next();
assessmentMetaDataMap.put(assessmentMetaData.getLabel(), assessmentMetaData.getEntry());
}
}
return assessmentMetaDataMap;
}
public HashMap getAssessmentMetaDataMap() {
HashMap assessmentMetaDataMap = new HashMap();
if (this.assessmentMetaDataSet != null){
for (Iterator i = this.assessmentMetaDataSet.iterator(); i.hasNext(); ) {
AssessmentMetaData assessmentMetaData = (AssessmentMetaData) i.next();
assessmentMetaDataMap.put(assessmentMetaData.getLabel(), assessmentMetaData.getEntry());
}
}
return assessmentMetaDataMap;
}
public String getAssessmentMetaDataByLabel(String label) {
return (String)this.assessmentMetaDataMap.get(label);
}
/**
 * Adds a new metadata entry or updates an existing one, then refreshes the
 * cached label-to-entry map.
 *
 * @param label the metadata label (map key)
 * @param entry the metadata value; null or blank values are not added as new entries
 */
public void addAssessmentMetaData(String label, String entry) {
  if (this.assessmentMetaDataMap.get(label) != null) {
    // Label already present: update every matching item in the set.
    Iterator iter = this.assessmentMetaDataSet.iterator();
    while (iter.hasNext()) {
      AssessmentMetaData metadata = (AssessmentMetaData) iter.next();
      if (metadata.getLabel().equals(label))
        metadata.setEntry(entry);
    }
  }
  else { // add
    // Guard against null as well as blank entries; the original called
    // entry.trim() unconditionally and threw NullPointerException on null.
    if (entry != null && !("").equals(entry.trim())) {
      this.assessmentMetaDataSet.add(new AssessmentMetaData(this, label, entry));
    }
  }
  // Rebuild the cached map in both branches. The original only rebuilt it in
  // the add branch, so getAssessmentMetaDataByLabel() returned stale values
  // after an update.
  setAssessmentMetaDataSet(this.assessmentMetaDataSet);
}
/** Alias for {@link #addAssessmentMetaData(String, String)}, which already handles updates. */
public void updateAssessmentMetaData(String label, String entry) {
  addAssessmentMetaData(label, entry);
}
// Custom serialization hook; currently just performs default serialization.
private void writeObject(java.io.ObjectOutputStream out) throws IOException {
  out.defaultWriteObject();
}
// Custom deserialization hook; currently just performs default deserialization.
private void readObject(java.io.ObjectInputStream in) throws IOException,
    ClassNotFoundException {
  in.defaultReadObject();
}
/**
 * Returns the type of this assessment.
 * <p>
 * NOTE(review): the lookup below is commented out, so this method currently
 * always returns null - confirm that callers tolerate a null type.
 */
public TypeIfc getType() {
  /*
  TypeFacadeQueriesAPI typeFacadeQueries = PersistenceService.getInstance().getTypeFacadeQueries();
  TypeIfc type = typeFacadeQueries.getTypeFacadeById(this.typeId);
  TypeD typeD = new TypeD(type.getAuthority(), type.getDomain(),
                          type.getKeyword(), type.getDescription());
  typeD.setTypeId(this.typeId);
  return typeD;
  */
  return null;
}
/**
 * Returns the number of questions in this assessment.
 *
 * @return the question count
 */
public Integer getQuestionSize() {
  return questionSize;
}
/** Sets the number of questions in this assessment. */
public void setQuestionSize(Integer questionSize) {
  this.questionSize = questionSize;
}
}
| |
/*
*/
package naoth.me.controls.motionneteditor;
import naoth.me.core.Transition;
import java.awt.Color;
import java.awt.Rectangle;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionListener;
import java.awt.geom.Point2D;
/**
*
* @author Heinrich Mellmann
*/
/**
 * Arrow-shaped control that renders a {@link Transition} between two
 * {@link KeyFrameControl}s on a {@link MotionNetEditorPanel}. It tracks mouse
 * movement for hover/selection highlighting, follows drags of its endpoint
 * key frames, and offers a context menu for removing the transition.
 *
 * @author Heinrich Mellmann
 */
public class TransitionControl extends ArrowControl implements MouseMotionListener
{
    private final Color mouseOverColor = new Color(128, 128, 193);
    private final Color mouseOutColor = new Color(200, 200, 225);
    private final Color selectedColor = new Color(128, 128, 193);

    private boolean selected;

    /** @return true when this transition is currently selected */
    public boolean isSelected() {
        return selected;
    }

    private boolean focused;

    private KeyFrameControl startKeyFrame;
    private KeyFrameControl endKeyFrame;
    private Transition transition;

    // Arrow endpoints in canvas coordinates, recomputed by update().
    private Point2D from;
    private Point2D to;

    private MotionNetEditorPanel canvas;

    public TransitionControl(MotionNetEditorPanel canvas, Transition transition, KeyFrameControl startKeyFrame, KeyFrameControl endKeyFrame)
    {
        this.canvas = canvas;
        this.setBounds(canvas.getBounds());
        this.startKeyFrame = startKeyFrame;
        this.endKeyFrame = endKeyFrame;
        this.transition = transition;

        // Follow drags of either endpoint so the arrow stays attached to it.
        if(this.startKeyFrame != null)
            this.startKeyFrame.addMouseMotionListener(this);
        if(this.endKeyFrame != null)
            this.endKeyFrame.addMouseMotionListener(this);

        addMouseMotionListener(this);

        this.setOpaque(false);
        this.setRequestFocusEnabled(true);
        setColor(mouseOutColor);

        this.addMouseListener(new MouseAdapter() {
            @Override
            public void mouseClicked(MouseEvent e) {
                if(isContain(e.getPoint()))
                    setSelected(true);
            }

            @Override
            public void mousePressed(MouseEvent e) {
                //if(e.isPopupTrigger())
                {
                    // Only offer the removal menu when the press actually hits
                    // the arrow; otherwise defer to the parent's popup menu.
                    if(isContain(e.getPoint()))
                        initComponents();
                    else
                    {
                        setInheritsPopupMenu(true);
                        setComponentPopupMenu(null);
                    }
                }
            }
        });

        update();
    }

    private void moveToBack()
    {
        canvas.moveToBack(this);
    }

    /**
     * @return the transition rendered by this control
     * @deprecated misspelled name kept for backward compatibility;
     *             use {@link #getTransition()} instead
     */
    @Deprecated
    public Transition getTransion()
    {
        return getTransition();
    }

    /** @return the transition rendered by this control */
    public Transition getTransition()
    {
        return this.transition;
    }

    // Builds the right-click popup menu with a "Remove Transition" entry and
    // installs it on this control.
    private void initComponents() {
        javax.swing.JPopupMenu jPopupMenu = new javax.swing.JPopupMenu();
        javax.swing.JMenuItem jMenuItemDeleteTransition = new javax.swing.JMenuItem();

        jMenuItemDeleteTransition.setText("Remove Transition");
        jPopupMenu.add(jMenuItemDeleteTransition);
        jMenuItemDeleteTransition.addMouseListener(new java.awt.event.MouseAdapter() {
            @Override
            public void mouseReleased(java.awt.event.MouseEvent evt) {
                removeThisControl();
            }
        });

        setComponentPopupMenu(jPopupMenu);
    }

    private void removeThisControl()
    {
        canvas.removeTransitionControl(this);
    }

    public void mouseMoved(MouseEvent e)
    {
        setFocused(isContain(e.getPoint()));
        // A selected transition keeps its highlight even after setFocused()
        // reset the color to the mouse-out color.
        if(selected) setColor(selectedColor);
    }

    public void mouseDragged(MouseEvent m)
    {
        // One of the attached key frames is being dragged: re-anchor the arrow.
        update();
    }

    /** @return true while the mouse hovers over the arrow */
    public boolean isFocused() {
        return focused;
    }

    public void setFocused(boolean focused) {
        if(focused)
        {
            this.setColor(mouseOverColor);
            this.canvas.moveToFront(this);
        }else
        {
            this.setColor(mouseOutColor);
            this.canvas.moveToBack(this);
        }

        if(focused == this.focused) return;
        this.focused = focused;
        repaint();
    }//end setFocused

    public void setSelected(boolean selected) {
        if(selected == this.selected) return;
        this.selected = selected;

        if(this.selected)
        {
            setColor(selectedColor);
            setDrawingStrokeWidth(2f);
            canvas.transitionControlSelected(this);
        }
        else
        {
            setColor(mouseOutColor);
            setDrawingStrokeWidth(1f);
        }
        this.repaint();
    }//end setSelected

    /**
     * Recomputes the arrow endpoints from the current positions of the two key
     * frame controls. The start point is pushed out of the start circle by its
     * radius so the arrow begins at the circle's border.
     */
    private void update()
    {
        if(endKeyFrame == null || startKeyFrame == null)
        {
            System.out.println("ups"); // debug leftover; kept to preserve behavior
            return;
        }

        double radius = (double)(startKeyFrame.getWidth()) / 2.0;
        // Centers of the start and end key frame circles.
        double c0x = startKeyFrame.getX() + radius;
        double c0y = startKeyFrame.getY() + radius;
        double c1x = endKeyFrame.getX() + radius;
        double c1y = endKeyFrame.getY() + radius;

        double length = Math.sqrt((c1x - c0x)*(c1x - c0x) + (c1y - c0y)*(c1y - c0y));
        // NOTE(review): if both centers coincide, length is 0 and delta becomes
        // infinite - confirm key frames can never fully overlap.
        double delta = radius/length;

        this.from = new Point2D.Double(c0x+delta*(c1x-c0x), c0y+delta*(c1y-c0y));
        this.to = new Point2D.Double(c1x, c1y);

        this.setP1(new Point2D.Double(from.getX() - this.getX(), from.getY() - this.getY()));
        this.setP2(new Point2D.Double(to.getX() - this.getX(), to.getY() - this.getY()));

        repaint();
    }//end update
}//end class TransitionControl
| |
/*
* Copyright (C) 2013, Daniel Abraham
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iojjj.androidbootstrap.utils.security;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Build;
import android.util.Base64;
import android.util.Log;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.SecureRandom;
import java.security.spec.InvalidKeySpecException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.spec.SecretKeySpec;
/**
* Wrapper class for Android's {@link android.content.SharedPreferences} interface, which adds a
* layer of encryption to the persistent storage and retrieval of sensitive
* key-value pairs of primitive data types.
* <p/>
* This class provides important - but nevertheless imperfect - protection
* against simple attacks by casual snoopers. It is crucial to remember that
* even encrypted data may still be susceptible to attacks, especially on rooted
* or stolen devices!
* <p/>
* This class requires API level 8 (Android 2.2, a.k.a. "Froyo") or greater.
*
* @see <a
* href="http://www.codeproject.com/Articles/549119/Encryption-Wrapper-for-Android-SharedPreferences">CodeProject
* article</a>
*/
public class SecurePreferences implements SharedPreferences {

    // Backing SharedPreferences file; every key and value stored in it has
    // been run through encrypt() first.
    private SharedPreferences sFile;

    // Caller-supplied key material; only used to derive the *name* of the
    // preference entry under which the generated AES key is stored.
    // NOTE(review): two of the three constructors never assign this field, so
    // init() would call generateAesKeyName(null) - confirm those constructors
    // are actually usable, or whether a key is always required.
    private byte[] key;

    // Decoded AES key bytes used by encrypt()/decrypt().
    private byte[] sKey;

    // Name of the backing preferences file.
    private String filename = "default_prefs.xml";

    /**
     * Constructor.
     *
     * @param context the caller's context
     */
    public SecurePreferences(Context context) {
        init(context);
    }

    /**
     * Constructor.
     *
     * @param context   the caller's context
     * @param prefsName name of the backing preferences file
     */
    public SecurePreferences(Context context, String prefsName) {
        filename = prefsName;
        init(context);
    }

    /**
     * Constructor.
     *
     * @param context  the caller's context
     * @param filename name of the backing preferences file
     * @param key      key material used to name the stored AES key entry
     */
    public SecurePreferences(Context context, String filename, byte[] key) {
        this.filename = filename;
        this.key = key;
        init(context);
    }

    /**
     * Opens the backing preferences file and loads (or generates and persists)
     * the AES key used for all subsequent encryption and decryption.
     *
     * @throws IllegalStateException if the key cannot be derived or decoded
     */
    private void init(Context context) {
        // Proxy design pattern
        if (sFile == null) {
            sFile = context.getSharedPreferences(filename, Context.MODE_PRIVATE);
        }
        // Initialize encryption/decryption key
        try {
            final String key = generateAesKeyName(this.key);
            String value = sFile.getString(key, null);
            if (value == null) {
                // First run: generate a fresh AES key and persist it encoded.
                value = SecurePreferences.generateAesKeyValue();
                sFile.edit().putString(key, value).apply();
            }
            sKey = SecurePreferences.decode(value);
        } catch (Exception e) {
            throw new IllegalStateException(e);
        }
    }

    // Base64 without padding or line wraps, matching decode() below.
    private static String encode(byte[] input) {
        return Base64.encodeToString(input, Base64.NO_PADDING | Base64.NO_WRAP);
    }

    private static byte[] decode(String input) {
        return Base64.decode(input, Base64.NO_PADDING | Base64.NO_WRAP);
    }

    // Derives the preference-entry name under which the AES key is stored.
    private static String generateAesKeyName(byte[] key)
            throws InvalidKeySpecException, NoSuchAlgorithmException {
        return SecurePreferences.encode(key);
    }

    /**
     * Generates a new random AES key and returns it Base64-encoded, trying
     * 256/192/128-bit key sizes in turn.
     */
    private static String generateAesKeyValue() throws NoSuchAlgorithmException {
        // Do *not* seed secureRandom! Automatically seeded from system entropy
        final SecureRandom random = new SecureRandom();
        // Use the largest AES key length which is supported by the OS
        KeyGenerator gen = null;
        try {
            // NOTE(review): "AES/CBC/PKCS5Padding" is a Cipher transformation,
            // not a KeyGenerator algorithm name, so this first attempt
            // presumably always fails and the fallback below is used - confirm.
            gen = KeyGenerator.getInstance("AES/CBC/PKCS5Padding", "SC");
        } catch (Exception e) {
            try {
                gen = KeyGenerator.getInstance("AES", "SC");
            } catch (NoSuchProviderException e1) {
                e1.printStackTrace();
            }
        }
        if (gen == null) throw new IllegalArgumentException();
        final KeyGenerator generator = gen;
        try {
            generator.init(256, random);
        } catch (Exception e) {
            try {
                generator.init(192, random);
            } catch (Exception e1) {
                generator.init(128, random);
            }
        }
        return SecurePreferences.encode(generator.generateKey().getEncoded());
    }

    /**
     * Encrypts and Base64-encodes the given text. Returns the input unchanged
     * when it is null or empty, and null when encryption fails.
     */
    private String encrypt(String cleartext) {
        if (cleartext == null || cleartext.length() == 0) {
            return cleartext;
        }
        try {
            // NOTE(review): the bare "AES" transformation defaults to ECB mode,
            // which is deterministic (identical plaintexts yield identical
            // ciphertexts). getAll()/getString() rely on this determinism for
            // key lookups, but it weakens confidentiality - consider an
            // IV-based mode if the storage format can ever be migrated.
            final Cipher cipher = Cipher.getInstance("AES", "SC");
            cipher.init(Cipher.ENCRYPT_MODE, new SecretKeySpec(
                    sKey, "AES"));
            return SecurePreferences.encode(cipher.doFinal(cleartext
                    .getBytes("UTF-8")));
        } catch (Exception e) {
            Log.w(SecurePreferences.class.getName(), "encrypt", e);
            return null;
        }
    }

    /**
     * Base64-decodes and decrypts the given ciphertext. Returns the input
     * unchanged when it is null or empty, and null when decryption fails.
     */
    private String decrypt(String ciphertext) {
        if (ciphertext == null || ciphertext.length() == 0) {
            return ciphertext;
        }
        try {
            final Cipher cipher = Cipher.getInstance("AES", "SC");
            cipher.init(Cipher.DECRYPT_MODE, new SecretKeySpec(
                    sKey, "AES"));
            return new String(cipher.doFinal(SecurePreferences
                    .decode(ciphertext)), "UTF-8");
        } catch (Exception e) {
            Log.w(SecurePreferences.class.getName(), "decrypt", e);
            return null;
        }
    }

    /** Returns all stored entries with both keys and values decrypted. */
    @Override
    public Map<String, String> getAll() {
        final Map<String, ?> encryptedMap = sFile.getAll();
        final Map<String, String> decryptedMap = new HashMap<String, String>(
                encryptedMap.size());
        for (Entry<String, ?> entry : encryptedMap.entrySet()) {
            try {
                decryptedMap.put(decrypt(entry.getKey()),
                        decrypt(entry.getValue().toString()));
            } catch (Exception e) {
                // Ignore unencrypted key/value pairs
            }
        }
        return decryptedMap;
    }

    @Override
    public String getString(String key, String defaultValue) {
        final String encryptedValue = sFile.getString(
                encrypt(key), null);
        return (encryptedValue != null) ? decrypt(encryptedValue) : defaultValue;
    }

    @Override
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    public Set<String> getStringSet(String key, Set<String> defaultValues) {
        final Set<String> encryptedSet = sFile.getStringSet(
                encrypt(key), null);
        if (encryptedSet == null) {
            return defaultValues;
        }
        final Set<String> decryptedSet = new HashSet<String>(
                encryptedSet.size());
        for (String encryptedValue : encryptedSet) {
            decryptedSet.add(decrypt(encryptedValue));
        }
        return decryptedSet;
    }

    @Override
    public int getInt(String key, int defaultValue) {
        final String encryptedValue = sFile.getString(
                encrypt(key), null);
        if (encryptedValue == null) {
            return defaultValue;
        }
        try {
            return Integer.parseInt(decrypt(encryptedValue));
        } catch (NumberFormatException e) {
            // Mirror SharedPreferences' contract of throwing ClassCastException
            // when the stored value has an unexpected type.
            throw new ClassCastException(e.getMessage());
        }
    }

    @Override
    public long getLong(String key, long defaultValue) {
        final String encryptedValue = sFile.getString(
                encrypt(key), null);
        if (encryptedValue == null) {
            return defaultValue;
        }
        try {
            return Long.parseLong(decrypt(encryptedValue));
        } catch (NumberFormatException e) {
            throw new ClassCastException(e.getMessage());
        }
    }

    @Override
    public float getFloat(String key, float defaultValue) {
        final String encryptedValue = sFile.getString(
                encrypt(key), null);
        if (encryptedValue == null) {
            return defaultValue;
        }
        try {
            return Float.parseFloat(decrypt(encryptedValue));
        } catch (NumberFormatException e) {
            throw new ClassCastException(e.getMessage());
        }
    }

    @Override
    public boolean getBoolean(String key, boolean defaultValue) {
        final String encryptedValue = sFile.getString(
                encrypt(key), null);
        if (encryptedValue == null) {
            return defaultValue;
        }
        try {
            // NOTE(review): Boolean.parseBoolean never throws
            // NumberFormatException, so this catch block is dead code.
            return Boolean.parseBoolean(decrypt(encryptedValue));
        } catch (NumberFormatException e) {
            throw new ClassCastException(e.getMessage());
        }
    }

    @Override
    public boolean contains(String key) {
        return sFile.contains(encrypt(key));
    }

    @Override
    public Editor edit() {
        return new Editor();
    }

    /**
     * Wrapper for Android's {@link android.content.SharedPreferences.Editor}.
     * <p/>
     * Used for modifying values in a {@link iojjj.androidbootstrap.utils.security.SecurePreferences} object. All
     * changes you make in an editor are batched, and not copied back to the
     * original {@link iojjj.androidbootstrap.utils.security.SecurePreferences} until you call {@link #commit()} or
     * {@link #apply()}.
     */
    public class Editor implements SharedPreferences.Editor {
        private SharedPreferences.Editor mEditor;

        /**
         * Constructor.
         */
        private Editor() {
            mEditor = sFile.edit();
        }

        @Override
        public SharedPreferences.Editor putString(String key, String value) {
            mEditor.putString(encrypt(key),
                    encrypt(value));
            return this;
        }

        @Override
        @TargetApi(Build.VERSION_CODES.HONEYCOMB)
        public SharedPreferences.Editor putStringSet(String key,
                Set<String> values) {
            final Set<String> encryptedValues = new HashSet<String>(
                    values.size());
            for (String value : values) {
                encryptedValues.add(encrypt(value));
            }
            mEditor.putStringSet(encrypt(key),
                    encryptedValues);
            return this;
        }

        @Override
        public SharedPreferences.Editor putInt(String key, int value) {
            // Non-string primitives are stored as encrypted strings; the
            // corresponding getters parse them back.
            mEditor.putString(encrypt(key),
                    encrypt(Integer.toString(value)));
            return this;
        }

        @Override
        public SharedPreferences.Editor putLong(String key, long value) {
            mEditor.putString(encrypt(key),
                    encrypt(Long.toString(value)));
            return this;
        }

        @Override
        public SharedPreferences.Editor putFloat(String key, float value) {
            mEditor.putString(encrypt(key),
                    encrypt(Float.toString(value)));
            return this;
        }

        @Override
        public SharedPreferences.Editor putBoolean(String key, boolean value) {
            mEditor.putString(encrypt(key),
                    encrypt(Boolean.toString(value)));
            return this;
        }

        @Override
        public SharedPreferences.Editor remove(String key) {
            mEditor.remove(encrypt(key));
            return this;
        }

        @Override
        public SharedPreferences.Editor clear() {
            mEditor.clear();
            return this;
        }

        @Override
        public boolean commit() {
            return mEditor.commit();
        }

        @Override
        @TargetApi(Build.VERSION_CODES.GINGERBREAD)
        public void apply() {
            // apply() is only available from API 9 (Gingerbread) on; fall back
            // to the synchronous commit() on older platforms.
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
                mEditor.apply();
            } else {
                commit();
            }
        }
    }

    // NOTE(review): listeners are registered on the backing file, so they
    // receive the *encrypted* key names in callbacks - confirm callers expect
    // that.
    @Override
    public void registerOnSharedPreferenceChangeListener(
            OnSharedPreferenceChangeListener listener) {
        sFile
                .registerOnSharedPreferenceChangeListener(listener);
    }

    @Override
    public void unregisterOnSharedPreferenceChangeListener(
            OnSharedPreferenceChangeListener listener) {
        sFile
                .unregisterOnSharedPreferenceChangeListener(listener);
    }
}
| |
/*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.internal.ui.compare;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.resources.IResource;
import org.eclipse.jface.text.Document;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.IDocumentPartitioner;
import org.eclipse.ui.services.IDisposable;
import org.eclipse.compare.CompareUI;
import org.eclipse.compare.IEditableContent;
import org.eclipse.compare.IEditableContentExtension;
import org.eclipse.compare.IResourceProvider;
import org.eclipse.compare.ISharedDocumentAdapter;
import org.eclipse.compare.IStreamContentAccessor;
import org.eclipse.compare.structuremergeviewer.DiffNode;
import org.eclipse.compare.structuremergeviewer.Differencer;
import org.eclipse.compare.structuremergeviewer.DocumentRangeNode;
import org.eclipse.compare.structuremergeviewer.ICompareInput;
import org.eclipse.compare.structuremergeviewer.IDiffContainer;
import org.eclipse.compare.structuremergeviewer.IDiffElement;
import org.eclipse.compare.structuremergeviewer.IStructureComparator;
import org.eclipse.compare.structuremergeviewer.StructureCreator;
import org.eclipse.compare.structuremergeviewer.StructureRootNode;
import org.eclipse.jdt.core.ICompilationUnit;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.core.ToolFactory;
import org.eclipse.jdt.core.compiler.IScanner;
import org.eclipse.jdt.core.compiler.ITerminalSymbols;
import org.eclipse.jdt.core.compiler.InvalidInputException;
import org.eclipse.jdt.core.dom.ASTParser;
import org.eclipse.jdt.core.dom.CompilationUnit;
import org.eclipse.jdt.ui.text.IJavaPartitions;
import org.eclipse.jdt.internal.ui.JavaPlugin;
import org.eclipse.jdt.internal.ui.javaeditor.ASTProvider;
/**
 * Structure creator that parses Java source into a tree of {@link JavaNode}s
 * so the compare framework can show and merge a structural diff of
 * compilation units.
 */
public class JavaStructureCreator extends StructureCreator {

    // Compiler options to use when the input cannot be associated with a Java
    // project; see createStructureComparator(...).
    private Map<String, String> fDefaultCompilerOptions;

    /**
     * A root node for the structure. It is similar to {@link StructureRootNode} but needed
     * to be a subclass of {@link JavaNode} because of the code used to build the structure.
     */
    private final class RootJavaNode extends JavaNode implements IDisposable {

        // The input object the structure was created for; used for saving and
        // for disconnecting the shared document adapter.
        private final Object fInput;
        private final boolean fEditable;
        private final ISharedDocumentAdapter fAdapter;

        private RootJavaNode(IDocument document, boolean editable, Object input, ISharedDocumentAdapter adapter) {
            super(document);
            this.fEditable = editable;
            fInput= input;
            fAdapter= adapter;
        }

        /* (non-Javadoc)
         * @see org.eclipse.compare.structuremergeviewer.DocumentRangeNode#isEditable()
         */
        @Override
        public boolean isEditable() {
            return fEditable;
        }

        /* (non-Javadoc)
         * @see org.eclipse.compare.structuremergeviewer.DocumentRangeNode#nodeChanged(org.eclipse.compare.structuremergeviewer.DocumentRangeNode)
         */
        @Override
        protected void nodeChanged(DocumentRangeNode node) {
            // Any change anywhere in the tree triggers a save of the input.
            save(this, fInput);
        }

        /* (non-Javadoc)
         * @see org.eclipse.ui.services.IDisposable#dispose()
         */
        public void dispose() {
            if (fAdapter != null) {
                fAdapter.disconnect(fInput);
            }
        }

        /* (non-Javadoc)
         * @see org.eclipse.compare.structuremergeviewer.DocumentRangeNode#getAdapter(java.lang.Class)
         */
        @Override
        public Object getAdapter(Class adapter) {
            if (adapter == ISharedDocumentAdapter.class) {
                return fAdapter;
            }
            return super.getAdapter(adapter);
        }

        /* (non-Javadoc)
         * @see org.eclipse.compare.structuremergeviewer.DocumentRangeNode#isReadOnly()
         */
        @Override
        public boolean isReadOnly() {
            if (fInput instanceof IEditableContentExtension) {
                IEditableContentExtension ext = (IEditableContentExtension) fInput;
                return ext.isReadOnly();
            }
            return super.isReadOnly();
        }

        /* (non-Javadoc)
         * @see org.eclipse.compare.structuremergeviewer.DocumentRangeNode#validateEdit(org.eclipse.swt.widgets.Shell)
         */
        @Override
        public IStatus validateEdit(Shell shell) {
            if (fInput instanceof IEditableContentExtension) {
                IEditableContentExtension ext = (IEditableContentExtension) fInput;
                return ext.validateEdit(shell);
            }
            return super.validateEdit(shell);
        }
    }

    /**
     * RewriteInfos are used temporarily when rewriting the diff tree
     * in order to combine similar diff nodes ("smart folding").
     */
    static class RewriteInfo {

        boolean fIsOut= false;

        JavaNode fAncestor= null;
        JavaNode fLeft= null;
        JavaNode fRight= null;

        ArrayList<IDiffElement> fChildren= new ArrayList<IDiffElement>();

        void add(IDiffElement diff) {
            fChildren.add(diff);
        }

        /**
         * Records the ancestor/left/right sides of the given diff. fIsOut is
         * set while recording and only cleared on success; if any side was
         * already occupied the early return leaves fIsOut set, permanently
         * disqualifying this info from combining (see matches()).
         */
        void setDiff(ICompareInput diff) {
            if (fIsOut)
                return;

            fIsOut= true;

            JavaNode a= (JavaNode) diff.getAncestor();
            JavaNode y= (JavaNode) diff.getLeft();
            JavaNode m= (JavaNode) diff.getRight();

            if (a != null) {
                if (fAncestor != null)
                    return;
                fAncestor= a;
            }
            if (y != null) {
                if (fLeft != null)
                    return;
                fLeft= y;
            }
            if (m != null) {
                if (fRight != null)
                    return;
                fRight= m;
            }

            fIsOut= false;
        }

        /**
         * @return true if some nodes could be successfully combined into one
         */
        boolean matches() {
            return !fIsOut && fAncestor != null && fLeft != null && fRight != null;
        }
    }

    public JavaStructureCreator() {
    }

    /** Sets the compiler options used when no Java project is available. */
    void setDefaultCompilerOptions(Map<String, String> compilerSettings) {
        fDefaultCompilerOptions= compilerSettings;
    }

    /**
     * @return the name that appears in the enclosing pane title bar
     */
    public String getName() {
        return CompareMessages.JavaStructureViewer_title;
    }

    /**
     * @param input implement the IStreamContentAccessor interface
     * @return a tree of JavaNodes for the given input.
     * In case of error null is returned.
     */
    @Override
    public IStructureComparator getStructure(final Object input) {
        String contents= null;
        char[] buffer= null;
        IDocument doc= CompareUI.getDocument(input);
        if (doc == null) {
            // No shared document available: read the contents from the stream
            // and wrap them in a fresh, partitioned document.
            if (input instanceof IStreamContentAccessor) {
                IStreamContentAccessor sca= (IStreamContentAccessor) input;
                try {
                    contents= JavaCompareUtilities.readString(sca);
                } catch (CoreException ex) {
                    // return null indicates the error.
                    return null;
                }
            }

            if (contents != null) {
                int n= contents.length();
                buffer= new char[n];
                contents.getChars(0, n, buffer, 0);

                doc= new Document(contents);
                setupDocument(doc);
            }
        }

        return createStructureComparator(input, buffer, doc, null, null);
    }

    /* (non-Javadoc)
     * @see org.eclipse.compare.structuremergeviewer.StructureCreator#createStructureComparator(java.lang.Object, org.eclipse.jface.text.IDocument, org.eclipse.compare.ISharedDocumentAdapter, org.eclipse.core.runtime.IProgressMonitor)
     */
    @Override
    protected IStructureComparator createStructureComparator(Object element,
            IDocument document, ISharedDocumentAdapter sharedDocumentAdapter,
            IProgressMonitor monitor) throws CoreException {
        return createStructureComparator(element, null, document, sharedDocumentAdapter, monitor);
    }

    /**
     * Parses the document into an AST and builds the JavaNode tree rooted at a
     * {@link RootJavaNode}.
     *
     * @param input the compare input element
     * @param buffer the source characters, or null to read them from the document
     * @param doc the document containing the source, may be null
     * @param adapter the shared document adapter, may be null
     * @param monitor a progress monitor, may be null
     * @return the root of the structure, or null when no document is available
     */
    private IStructureComparator createStructureComparator(final Object input, char[] buffer, IDocument doc, ISharedDocumentAdapter adapter, IProgressMonitor monitor) {
        String contents;
        Map<String, String> compilerOptions= null;

        // Prefer the compiler options of the containing Java project so the
        // parser matches the project's source/compliance settings.
        if (input instanceof IResourceProvider) {
            IResource resource= ((IResourceProvider) input).getResource();
            if (resource != null) {
                IJavaElement element= JavaCore.create(resource);
                if (element != null) {
                    IJavaProject javaProject= element.getJavaProject();
                    if (javaProject != null)
                        compilerOptions= javaProject.getOptions(true);
                }
            }
        }
        if (compilerOptions == null)
            compilerOptions= fDefaultCompilerOptions;

        if (doc != null) {
            boolean isEditable= false;
            if (input instanceof IEditableContent)
                isEditable= ((IEditableContent) input).isEditable();

            // we hook into the root node to intercept all node changes
            JavaNode root= new RootJavaNode(doc, isEditable, input, adapter);

            if (buffer == null) {
                contents= doc.get();
                int n= contents.length();
                buffer= new char[n];
                contents.getChars(0, n, buffer, 0);
            }

            ASTParser parser= ASTParser.newParser(ASTProvider.SHARED_AST_LEVEL);
            if (compilerOptions != null)
                parser.setCompilerOptions(compilerOptions);
            parser.setSource(buffer);
            parser.setFocalPosition(0);

            CompilationUnit cu= (CompilationUnit) parser.createAST(monitor);
            cu.accept(new JavaParseTreeBuilder(root, buffer, true));

            return root;
        }
        return null;
    }

    /**
     * Returns the contents of the given node as a string.
     * This string is used to test the content of a Java element
     * for equality. It is never shown in the UI, so any string representing
     * the content will do.
     * @param node must implement the IStreamContentAccessor interface
     * @param ignoreWhiteSpace if <code>true</code> all Java white space (including comments) is removed from the contents.
     * @return contents for equality test
     */
    public String getContents(Object node, boolean ignoreWhiteSpace) {
        if (! (node instanceof IStreamContentAccessor))
            return null;

        IStreamContentAccessor sca= (IStreamContentAccessor) node;
        String content= null;
        try {
            content= JavaCompareUtilities.readString(sca);
        } catch (CoreException ex) {
            JavaPlugin.log(ex);
            return null;
        }

        if (!ignoreWhiteSpace)
            return content;

        // replace whitespace by a single blank
        StringBuffer buf= new StringBuffer();
        char[] b= content.toCharArray();

        // to avoid the trouble when dealing with Unicode
        // we use the Java scanner to extract non-whitespace and comment tokens
        IScanner scanner= ToolFactory.createScanner(true, false, false, false); // however we request Whitespace and Comments
        scanner.setSource(b);
        try {
            int token;
            while ((token= scanner.getNextToken()) != ITerminalSymbols.TokenNameEOF) {
                char[] currentTokenSource= scanner.getCurrentTokenSource();
                switch (token) {
                case ITerminalSymbols.TokenNameCOMMENT_LINE:
                case ITerminalSymbols.TokenNameCOMMENT_BLOCK:
                case ITerminalSymbols.TokenNameCOMMENT_JAVADOC:
                    // Normalize whitespace inside comments as well, so only
                    // their word content takes part in the equality test.
                    StringBuffer tokenStr= new StringBuffer(currentTokenSource.length);
                    tokenStr.append(currentTokenSource);
                    StringTokenizer tokenizer= new StringTokenizer(tokenStr.toString());
                    while (tokenizer.hasMoreTokens()) {
                        buf.append(tokenizer.nextToken());
                        buf.append(' ');
                    }
                    break;
                default:
                    buf.append(currentTokenSource);
                    buf.append(' ');
                    break;
                }
            }
            content= buf.toString(); // success!
        } catch (InvalidInputException ex) {
            // NeedWork
        }
        return content;
    }

    /**
     * @return true since this IStructureCreator can rewrite the diff tree
     * in order to fold certain combinations of additions and deletions.
     */
    public boolean canRewriteTree() {
        return true;
    }

    /**
     * Tries to detect certain combinations of additions and deletions as renames or signature
     * changes and folders them into a single node.
     *
     * @param differencer the differencer
     * @param root the diff container root
     */
    public void rewriteTree(Differencer differencer, IDiffContainer root) {

        HashMap<String, RewriteInfo> map= new HashMap<String, RewriteInfo>(10);

        Object[] children= root.getChildren();
        for (int i= 0; i < children.length; i++) {
            DiffNode diff= (DiffNode) children[i];
            JavaNode jn= (JavaNode) diff.getId();

            if (jn == null)
                continue;
            int type= jn.getTypeCode();

            // we can only combine methods or constructors
            if (type == JavaNode.METHOD || type == JavaNode.CONSTRUCTOR) {

                // find or create a RewriteInfo for all methods with the same name
                String name= jn.extractMethodName();
                RewriteInfo nameInfo= map.get(name);
                if (nameInfo == null) {
                    nameInfo= new RewriteInfo();
                    map.put(name, nameInfo);
                }
                nameInfo.add(diff);

                // find or create a RewriteInfo for all methods with the same
                // (non-empty) argument list
                String argList= jn.extractArgumentList();
                RewriteInfo argInfo= null;
                if (argList != null && !argList.equals("()")) { //$NON-NLS-1$
                    argInfo= map.get(argList);
                    if (argInfo == null) {
                        argInfo= new RewriteInfo();
                        map.put(argList, argInfo);
                    }
                    argInfo.add(diff);
                }

                switch (diff.getKind() & Differencer.CHANGE_TYPE_MASK) {
                case Differencer.ADDITION:
                case Differencer.DELETION:
                    // we only consider addition and deletions
                    // since a rename or argument list change looks
                    // like a pair of addition and deletions
                    if (type != JavaNode.CONSTRUCTOR)
                        nameInfo.setDiff(diff);

                    if (argInfo != null)
                        argInfo.setDiff(diff);
                    break;
                default:
                    break;
                }
            }

            // recurse
            rewriteTree(differencer, diff);
        }

        // now we have to rebuild the diff tree according to the combined
        // changes
        Iterator<String> it= map.keySet().iterator();
        while (it.hasNext()) {
            String name= it.next();
            RewriteInfo i= map.get(name);
            if (i.matches()) { // we found a RewriteInfo that could be successfully combined

                // we have to find the differences of the newly combined node
                // (because in the first pass we only got a deletion and an addition)
                DiffNode d= (DiffNode) differencer.findDifferences(true, null, root, i.fAncestor, i.fLeft, i.fRight);
                if (d != null) {// there better should be a difference
                    d.setDontExpand(true);
                    Iterator<IDiffElement> it2= i.fChildren.iterator();
                    while (it2.hasNext()) {
                        IDiffElement rd= it2.next();
                        root.removeToRoot(rd);
                        d.add(rd);
                    }
                }
            }
        }
    }

    /**
     * The JavaHistoryAction uses this function to determine whether
     * a selected Java element can be replaced by some piece of
     * code from the local history.
     * @param je Java element
     * @return true if the given IJavaElement maps to a JavaNode
     */
    static boolean hasEdition(IJavaElement je) {
        return JavaElementHistoryPageSource.hasEdition(je);
    }

    /* (non-Javadoc)
     * @see org.eclipse.compare.structuremergeviewer.StructureCreator#getDocumentPartitioner()
     */
    @Override
    protected IDocumentPartitioner getDocumentPartitioner() {
        return JavaCompareUtilities.createJavaPartitioner();
    }

    /* (non-Javadoc)
     * @see org.eclipse.compare.structuremergeviewer.StructureCreator#getDocumentPartitioning()
     */
    @Override
    protected String getDocumentPartitioning() {
        return IJavaPartitions.JAVA_PARTITIONING;
    }

    /* (non-Javadoc)
     * @see org.eclipse.compare.structuremergeviewer.StructureCreator#getPath(java.lang.Object, java.lang.Object)
     */
    @Override
    protected String[] getPath(Object element, Object input) {
        if (element instanceof IJavaElement) {
            IJavaElement je = (IJavaElement) element;

            // build a path starting at the given Java element and walk
            // up the parent chain until we reach a IWorkingCopy or ICompilationUnit
            List<String> args= new ArrayList<String>();
            while (je != null) {
                // each path component has a name that uses the same
                // conventions as a JavaNode name
                String name= JavaCompareUtilities.getJavaElementID(je);
                if (name == null)
                    return null;
                args.add(name);
                if (je instanceof ICompilationUnit)
                    break;
                je= je.getParent();
            }

            // revert the path
            int n= args.size();
            String[] path= new String[n];
            for (int i= 0; i < n; i++)
                path[i]= args.get(n-1-i);

            return path;
        }
        return null;
    }
}
| |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wseemann.media.romote.util;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Bitmap.CompressFormat;
import android.graphics.Bitmap.Config;
import android.graphics.BitmapFactory;
import android.graphics.drawable.BitmapDrawable;
import android.os.Build.VERSION_CODES;
import android.os.Bundle;
import android.os.Environment;
import android.os.StatFs;
import android.util.Log;
import androidx.collection.LruCache;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentManager;
import wseemann.media.romote.BuildConfig;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.ref.SoftReference;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
/**
 * This class handles disk and memory caching of bitmaps in conjunction with the
 * {@link ImageWorker} class and its subclasses. Use
 * {@link ImageCache#getInstance(androidx.fragment.app.FragmentManager, ImageCacheParams)} to get an instance of this
 * class, although usually a cache should be added directly to an {@link ImageWorker} by calling
 * {@link ImageWorker#addImageCache(androidx.fragment.app.FragmentManager, ImageCacheParams)}.
 */
public class ImageCache {
    private static final String TAG = "ImageCache";

    // Default memory cache size in kilobytes
    private static final int DEFAULT_MEM_CACHE_SIZE = 1024 * 5; // 5MB

    // Default disk cache size in bytes
    private static final int DEFAULT_DISK_CACHE_SIZE = 1024 * 1024 * 10; // 10MB

    // Compression settings when writing images to disk cache
    private static final CompressFormat DEFAULT_COMPRESS_FORMAT = CompressFormat.JPEG;
    private static final int DEFAULT_COMPRESS_QUALITY = 70;
    private static final int DISK_CACHE_INDEX = 0;

    // Constants to easily toggle various caches
    private static final boolean DEFAULT_MEM_CACHE_ENABLED = true;
    private static final boolean DEFAULT_DISK_CACHE_ENABLED = true;
    private static final boolean DEFAULT_INIT_DISK_CACHE_ON_CREATE = false;

    private DiskLruCache mDiskLruCache;
    private LruCache<String, BitmapDrawable> mMemoryCache;
    private ImageCacheParams mCacheParams;
    // Guards mDiskLruCache and mDiskCacheStarting; getBitmapFromDiskCache() waits on it
    // until initDiskCache() flips mDiskCacheStarting and calls notifyAll().
    private final Object mDiskCacheLock = new Object();
    private boolean mDiskCacheStarting = true;
    private Set<SoftReference<Bitmap>> mReusableBitmaps;

    /**
     * Create a new ImageCache object using the specified parameters. This should not be
     * called directly by other classes, instead use
     * {@link ImageCache#getInstance(androidx.fragment.app.FragmentManager, ImageCacheParams)} to fetch an ImageCache
     * instance.
     *
     * @param cacheParams The cache parameters to use to initialize the cache
     */
    private ImageCache(ImageCacheParams cacheParams) {
        init(cacheParams);
    }

    /**
     * Return an {@link ImageCache} instance. A {@link RetainFragment} is used to retain the
     * ImageCache object across configuration changes such as a change in device orientation.
     *
     * @param fragmentManager The fragment manager to use when dealing with the retained fragment.
     * @param cacheParams The cache parameters to use if the ImageCache needs instantiation.
     * @return An existing retained ImageCache object or a new one if one did not exist
     */
    public static ImageCache getInstance(
            FragmentManager fragmentManager, ImageCacheParams cacheParams) {

        // Search for, or create an instance of the non-UI RetainFragment
        final RetainFragment mRetainFragment = findOrCreateRetainFragment(fragmentManager);

        // See if we already have an ImageCache stored in RetainFragment
        ImageCache imageCache = (ImageCache) mRetainFragment.getObject();

        // No existing ImageCache, create one and store it in RetainFragment
        if (imageCache == null) {
            imageCache = new ImageCache(cacheParams);
            mRetainFragment.setObject(imageCache);
        }

        return imageCache;
    }

    /**
     * Initialize the cache, providing all parameters.
     *
     * @param cacheParams The cache parameters to initialize the cache
     */
    private void init(ImageCacheParams cacheParams) {
        mCacheParams = cacheParams;

        //BEGIN_INCLUDE(init_memory_cache)
        // Set up memory cache
        if (mCacheParams.memoryCacheEnabled) {
            if (BuildConfig.DEBUG) {
                Log.d(TAG, "Memory cache created (size = " + mCacheParams.memCacheSize + ")");
            }

            // If we're running on Honeycomb or newer, create a set of reusable bitmaps that can be
            // populated into the inBitmap field of BitmapFactory.Options. Note that the set is
            // of SoftReferences which will actually not be very effective due to the garbage
            // collector being aggressive clearing Soft/WeakReferences. A better approach
            // would be to use strong references to the bitmaps, however this would require some
            // balancing of memory usage between this set and the bitmap LruCache. It would also
            // require knowledge of the expected size of the bitmaps. From Honeycomb to JellyBean
            // the size would need to be precise, from KitKat onward the size would just need to
            // be the upper bound (due to changes in how inBitmap can re-use bitmaps).
            if (Utils.hasHoneycomb()) {
                mReusableBitmaps =
                        Collections.synchronizedSet(new HashSet<SoftReference<Bitmap>>());
            }

            mMemoryCache = new LruCache<String, BitmapDrawable>(mCacheParams.memCacheSize) {

                /**
                 * Notify the removed entry that is no longer being cached
                 */
                @Override
                protected void entryRemoved(boolean evicted, String key,
                        BitmapDrawable oldValue, BitmapDrawable newValue) {
                    if (oldValue instanceof RecyclingBitmapDrawable) {
                        // The removed entry is a recycling drawable, so notify it
                        // that it has been removed from the memory cache
                        ((RecyclingBitmapDrawable) oldValue).setIsCached(false);
                    } else {
                        // The removed entry is a standard BitmapDrawable
                        if (Utils.hasHoneycomb()) {
                            // We're running on Honeycomb or later, so add the bitmap
                            // to a SoftReference set for possible use with inBitmap later
                            mReusableBitmaps.add(new SoftReference<Bitmap>(oldValue.getBitmap()));
                        }
                    }
                }

                /**
                 * Measure item size in kilobytes rather than units which is more practical
                 * for a bitmap cache
                 */
                @Override
                protected int sizeOf(String key, BitmapDrawable value) {
                    final int bitmapSize = getBitmapSize(value) / 1024;
                    // Never report 0 or the LruCache accounting would treat the entry as free
                    return bitmapSize == 0 ? 1 : bitmapSize;
                }
            };
        }
        //END_INCLUDE(init_memory_cache)

        // By default the disk cache is not initialized here as it should be initialized
        // on a separate thread due to disk access.
        if (cacheParams.initDiskCacheOnCreate) {
            // Set up disk cache
            initDiskCache();
        }
    }

    /**
     * Initializes the disk cache. Note that this includes disk access so this should not be
     * executed on the main/UI thread. By default an ImageCache does not initialize the disk
     * cache when it is created, instead you should call initDiskCache() to initialize it on a
     * background thread.
     */
    public void initDiskCache() {
        // Set up disk cache
        synchronized (mDiskCacheLock) {
            if (mDiskLruCache == null || mDiskLruCache.isClosed()) {
                File diskCacheDir = mCacheParams.diskCacheDir;
                if (mCacheParams.diskCacheEnabled && diskCacheDir != null) {
                    if (!diskCacheDir.exists()) {
                        diskCacheDir.mkdirs();
                    }
                    if (getUsableSpace(diskCacheDir) > mCacheParams.diskCacheSize) {
                        try {
                            mDiskLruCache = DiskLruCache.open(
                                    diskCacheDir, 1, 1, mCacheParams.diskCacheSize);
                            if (BuildConfig.DEBUG) {
                                Log.d(TAG, "Disk cache initialized");
                            }
                        } catch (final IOException e) {
                            mCacheParams.diskCacheDir = null;
                            Log.e(TAG, "initDiskCache - " + e);
                        }
                    }
                }
            }
            // Always unblock readers waiting in getBitmapFromDiskCache(), even when the
            // disk cache could not be created (they will then simply return null).
            mDiskCacheStarting = false;
            mDiskCacheLock.notifyAll();
        }
    }

    /**
     * Adds a bitmap to both memory and disk cache.
     * @param data Unique identifier for the bitmap to store
     * @param value The bitmap drawable to store
     */
    public void addBitmapToCache(String data, BitmapDrawable value) {
        //BEGIN_INCLUDE(add_bitmap_to_cache)
        if (data == null || value == null) {
            return;
        }

        // Add to memory cache
        if (mMemoryCache != null) {
            if (value instanceof RecyclingBitmapDrawable) {
                // The removed entry is a recycling drawable, so notify it
                // that it has been added into the memory cache
                ((RecyclingBitmapDrawable) value).setIsCached(true);
            }
            mMemoryCache.put(data, value);
        }

        synchronized (mDiskCacheLock) {
            // Add to disk cache
            if (mDiskLruCache != null) {
                final String key = hashKeyForDisk(data);
                OutputStream out = null;
                try {
                    DiskLruCache.Snapshot snapshot = mDiskLruCache.get(key);
                    if (snapshot == null) {
                        final DiskLruCache.Editor editor = mDiskLruCache.edit(key);
                        if (editor != null) {
                            out = editor.newOutputStream(DISK_CACHE_INDEX);
                            value.getBitmap().compress(
                                    mCacheParams.compressFormat, mCacheParams.compressQuality, out);
                            editor.commit();
                            out.close();
                        }
                    } else {
                        // Entry already cached; just release the snapshot's stream
                        snapshot.getInputStream(DISK_CACHE_INDEX).close();
                    }
                } catch (final Exception e) {
                    // Disk cache writes are best-effort; the memory cache entry above
                    // has already been stored, so just log and move on.
                    Log.e(TAG, "addBitmapToCache - " + e);
                } finally {
                    try {
                        if (out != null) {
                            out.close();
                        }
                    } catch (IOException ignored) {
                        // best-effort close; nothing sensible to do here
                    }
                }
            }
        }
        //END_INCLUDE(add_bitmap_to_cache)
    }

    /**
     * Get from memory cache.
     *
     * @param data Unique identifier for which item to get
     * @return The bitmap drawable if found in cache, null otherwise
     */
    public BitmapDrawable getBitmapFromMemCache(String data) {
        //BEGIN_INCLUDE(get_bitmap_from_mem_cache)
        BitmapDrawable memValue = null;

        if (mMemoryCache != null) {
            memValue = mMemoryCache.get(data);
        }

        if (BuildConfig.DEBUG && memValue != null) {
            Log.d(TAG, "Memory cache hit");
        }

        return memValue;
        //END_INCLUDE(get_bitmap_from_mem_cache)
    }

    /**
     * Get from disk cache. Blocks until the disk cache has finished initializing, so this
     * must not be called on the main/UI thread.
     *
     * @param data Unique identifier for which item to get
     * @return The bitmap if found in cache, null otherwise
     */
    public Bitmap getBitmapFromDiskCache(String data) {
        //BEGIN_INCLUDE(get_bitmap_from_disk_cache)
        final String key = hashKeyForDisk(data);
        Bitmap bitmap = null;

        synchronized (mDiskCacheLock) {
            // Wait for initDiskCache() to finish; it always clears the flag and notifies.
            while (mDiskCacheStarting) {
                try {
                    mDiskCacheLock.wait();
                } catch (InterruptedException ignored) {
                    // Deliberately keep waiting; interruption is not a cancellation
                    // signal for this cache.
                }
            }
            if (mDiskLruCache != null) {
                InputStream inputStream = null;
                try {
                    final DiskLruCache.Snapshot snapshot = mDiskLruCache.get(key);
                    if (snapshot != null) {
                        if (BuildConfig.DEBUG) {
                            Log.d(TAG, "Disk cache hit");
                        }
                        inputStream = snapshot.getInputStream(DISK_CACHE_INDEX);
                        if (inputStream != null) {
                            FileDescriptor fd = ((FileInputStream) inputStream).getFD();

                            // Decode bitmap, but we don't want to sample so give
                            // MAX_VALUE as the target dimensions
                            bitmap = ImageResizer.decodeSampledBitmapFromDescriptor(
                                    fd, Integer.MAX_VALUE, Integer.MAX_VALUE, this);
                        }
                    }
                } catch (final IOException e) {
                    Log.e(TAG, "getBitmapFromDiskCache - " + e);
                } finally {
                    try {
                        if (inputStream != null) {
                            inputStream.close();
                        }
                    } catch (IOException ignored) {
                        // best-effort close; nothing sensible to do here
                    }
                }
            }
            return bitmap;
        }
        //END_INCLUDE(get_bitmap_from_disk_cache)
    }

    /**
     * @param options - BitmapFactory.Options with out* options populated
     * @return Bitmap that can be used for inBitmap
     */
    protected Bitmap getBitmapFromReusableSet(BitmapFactory.Options options) {
        //BEGIN_INCLUDE(get_bitmap_from_reusable_set)
        Bitmap bitmap = null;

        if (mReusableBitmaps != null && !mReusableBitmaps.isEmpty()) {
            synchronized (mReusableBitmaps) {
                final Iterator<SoftReference<Bitmap>> iterator = mReusableBitmaps.iterator();
                Bitmap item;

                while (iterator.hasNext()) {
                    item = iterator.next().get();

                    if (null != item && item.isMutable()) {
                        // Check to see it the item can be used for inBitmap
                        if (canUseForInBitmap(item, options)) {
                            bitmap = item;

                            // Remove from reusable set so it can't be used again
                            iterator.remove();
                            break;
                        }
                    } else {
                        // Remove from the set if the reference has been cleared.
                        iterator.remove();
                    }
                }
            }
        }

        return bitmap;
        //END_INCLUDE(get_bitmap_from_reusable_set)
    }

    /**
     * Clears both the memory and disk cache associated with this ImageCache object. Note that
     * this includes disk access so this should not be executed on the main/UI thread.
     */
    public void clearCache() {
        if (mMemoryCache != null) {
            mMemoryCache.evictAll();
            if (BuildConfig.DEBUG) {
                Log.d(TAG, "Memory cache cleared");
            }
        }

        synchronized (mDiskCacheLock) {
            // Block readers until the cache has been re-initialized below.
            mDiskCacheStarting = true;
            if (mDiskLruCache != null && !mDiskLruCache.isClosed()) {
                try {
                    mDiskLruCache.delete();
                    if (BuildConfig.DEBUG) {
                        Log.d(TAG, "Disk cache cleared");
                    }
                } catch (IOException e) {
                    Log.e(TAG, "clearCache - " + e);
                }
                mDiskLruCache = null;
                initDiskCache();
            }
        }
    }

    /**
     * Flushes the disk cache associated with this ImageCache object. Note that this includes
     * disk access so this should not be executed on the main/UI thread.
     */
    public void flush() {
        synchronized (mDiskCacheLock) {
            if (mDiskLruCache != null) {
                try {
                    mDiskLruCache.flush();
                    if (BuildConfig.DEBUG) {
                        Log.d(TAG, "Disk cache flushed");
                    }
                } catch (IOException e) {
                    Log.e(TAG, "flush - " + e);
                }
            }
        }
    }

    /**
     * Closes the disk cache associated with this ImageCache object. Note that this includes
     * disk access so this should not be executed on the main/UI thread.
     */
    public void close() {
        synchronized (mDiskCacheLock) {
            if (mDiskLruCache != null) {
                try {
                    if (!mDiskLruCache.isClosed()) {
                        mDiskLruCache.close();
                        mDiskLruCache = null;
                        if (BuildConfig.DEBUG) {
                            Log.d(TAG, "Disk cache closed");
                        }
                    }
                } catch (IOException e) {
                    Log.e(TAG, "close - " + e);
                }
            }
        }
    }

    /**
     * A holder class that contains cache parameters.
     */
    public static class ImageCacheParams {
        public int memCacheSize = DEFAULT_MEM_CACHE_SIZE;
        public int diskCacheSize = DEFAULT_DISK_CACHE_SIZE;
        public File diskCacheDir;
        public CompressFormat compressFormat = DEFAULT_COMPRESS_FORMAT;
        public int compressQuality = DEFAULT_COMPRESS_QUALITY;
        public boolean memoryCacheEnabled = DEFAULT_MEM_CACHE_ENABLED;
        public boolean diskCacheEnabled = DEFAULT_DISK_CACHE_ENABLED;
        public boolean initDiskCacheOnCreate = DEFAULT_INIT_DISK_CACHE_ON_CREATE;

        /**
         * Create a set of image cache parameters that can be provided to
         * {@link ImageCache#getInstance(androidx.fragment.app.FragmentManager, ImageCacheParams)} or
         * {@link ImageWorker#addImageCache(androidx.fragment.app.FragmentManager, ImageCacheParams)}.
         * @param context A context to use.
         * @param diskCacheDirectoryName A unique subdirectory name that will be appended to the
         *                               application cache directory. Usually "cache" or "images"
         *                               is sufficient.
         */
        public ImageCacheParams(Context context, String diskCacheDirectoryName) {
            diskCacheDir = getDiskCacheDir(context, diskCacheDirectoryName);
        }

        /**
         * Sets the memory cache size based on a percentage of the max available VM memory.
         * Eg. setting percent to 0.2 would set the memory cache to one fifth of the available
         * memory. Throws {@link IllegalArgumentException} if percent is < 0.01 or > .8.
         * memCacheSize is stored in kilobytes instead of bytes as this will eventually be passed
         * to construct a LruCache which takes an int in its constructor.
         *
         * This value should be chosen carefully based on a number of factors
         * Refer to the corresponding Android Training class for more discussion:
         * http://developer.android.com/training/displaying-bitmaps/
         *
         * @param percent Percent of available app memory to use to size memory cache
         */
        public void setMemCacheSizePercent(float percent) {
            if (percent < 0.01f || percent > 0.8f) {
                throw new IllegalArgumentException("setMemCacheSizePercent - percent must be "
                        + "between 0.01 and 0.8 (inclusive)");
            }
            memCacheSize = Math.round(percent * Runtime.getRuntime().maxMemory() / 1024);
        }
    }

    /**
     * @param candidate - Bitmap to check
     * @param targetOptions - Options that have the out* value populated
     * @return true if <code>candidate</code> can be used for inBitmap re-use with
     *      <code>targetOptions</code>
     */
    @TargetApi(VERSION_CODES.KITKAT)
    private static boolean canUseForInBitmap(
            Bitmap candidate, BitmapFactory.Options targetOptions) {
        //BEGIN_INCLUDE(can_use_for_inbitmap)

        if (!Utils.hasKitKat()) {
            // On earlier versions, the dimensions must match exactly and the inSampleSize must be 1
            return candidate.getWidth() == targetOptions.outWidth
                    && candidate.getHeight() == targetOptions.outHeight
                    && targetOptions.inSampleSize == 1;
        }

        // From Android 4.4 (KitKat) onward we can re-use if the byte size of the new bitmap
        // is smaller than the reusable bitmap candidate allocation byte count.
        int width = targetOptions.outWidth / targetOptions.inSampleSize;
        int height = targetOptions.outHeight / targetOptions.inSampleSize;
        int byteCount = width * height * getBytesPerPixel(candidate.getConfig());
        return byteCount <= candidate.getAllocationByteCount();
        //END_INCLUDE(can_use_for_inbitmap)
    }

    /**
     * Return the byte usage per pixel of a bitmap based on its configuration.
     * @param config The bitmap configuration.
     * @return The byte usage per pixel.
     */
    private static int getBytesPerPixel(Config config) {
        if (config == Config.ARGB_8888) {
            return 4;
        } else if (config == Config.RGB_565) {
            return 2;
        } else if (config == Config.ARGB_4444) {
            return 2;
        } else if (config == Config.ALPHA_8) {
            return 1;
        }
        // Unknown/null config: assume the smallest footprint
        return 1;
    }

    /**
     * Get a usable cache directory (external if available, internal otherwise).
     *
     * @param context The context to use
     * @param uniqueName A unique directory name to append to the cache dir
     * @return The cache dir
     */
    public static File getDiskCacheDir(Context context, String uniqueName) {
        // Check if media is mounted or storage is built-in, if so, try and use external cache dir
        // otherwise use internal cache dir. Guard against getExternalCacheDir() returning null
        // (e.g. external storage briefly unavailable), which used to cause an NPE here.
        File externalCacheDir = null;
        if (Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())
                || !isExternalStorageRemovable()) {
            externalCacheDir = getExternalCacheDir(context);
        }
        final String cachePath = externalCacheDir != null
                ? externalCacheDir.getPath() : context.getCacheDir().getPath();

        return new File(cachePath + File.separator + uniqueName);
    }

    /**
     * A hashing method that changes a string (like a URL) into a hash suitable for using as a
     * disk filename.
     */
    public static String hashKeyForDisk(String key) {
        String cacheKey;
        try {
            final MessageDigest mDigest = MessageDigest.getInstance("MD5");
            // Use an explicit charset so the same key always hashes to the same file name;
            // the charset-less getBytes() depends on the platform default.
            mDigest.update(key.getBytes(StandardCharsets.UTF_8));
            cacheKey = bytesToHexString(mDigest.digest());
        } catch (NoSuchAlgorithmException e) {
            // MD5 unavailable (should not happen on Android) - fall back to hashCode
            cacheKey = String.valueOf(key.hashCode());
        }
        return cacheKey;
    }

    /**
     * Convert a byte array to its lower-case hex representation, zero-padding each byte.
     */
    private static String bytesToHexString(byte[] bytes) {
        // http://stackoverflow.com/questions/332079
        StringBuilder sb = new StringBuilder();
        for (byte b : bytes) {
            String hex = Integer.toHexString(0xFF & b);
            if (hex.length() == 1) {
                sb.append('0');
            }
            sb.append(hex);
        }
        return sb.toString();
    }

    /**
     * Get the size in bytes of a bitmap in a BitmapDrawable. Note that from Android 4.4 (KitKat)
     * onward this returns the allocated memory size of the bitmap which can be larger than the
     * actual bitmap data byte count (in the case it was re-used).
     *
     * @param value the drawable whose backing bitmap is measured
     * @return size in bytes
     */
    @TargetApi(VERSION_CODES.KITKAT)
    public static int getBitmapSize(BitmapDrawable value) {
        Bitmap bitmap = value.getBitmap();

        // From KitKat onward use getAllocationByteCount() as allocated bytes can potentially be
        // larger than bitmap byte count.
        if (Utils.hasKitKat()) {
            return bitmap.getAllocationByteCount();
        }

        if (Utils.hasHoneycombMR1()) {
            return bitmap.getByteCount();
        }

        // Pre HC-MR1
        return bitmap.getRowBytes() * bitmap.getHeight();
    }

    /**
     * Check if external storage is built-in or removable.
     *
     * @return True if external storage is removable (like an SD card), false
     *         otherwise.
     */
    @TargetApi(VERSION_CODES.GINGERBREAD)
    public static boolean isExternalStorageRemovable() {
        if (Utils.hasGingerbread()) {
            return Environment.isExternalStorageRemovable();
        }
        // Pre-Gingerbread devices: assume removable (conservative default)
        return true;
    }

    /**
     * Get the external app cache directory.
     *
     * @param context The context to use
     * @return The external cache dir; may be null on Froyo+ when storage is unavailable
     */
    @TargetApi(VERSION_CODES.FROYO)
    public static File getExternalCacheDir(Context context) {
        if (Utils.hasFroyo()) {
            return context.getExternalCacheDir();
        }

        // Before Froyo we need to construct the external cache dir ourselves
        final String cacheDir = "/Android/data/" + context.getPackageName() + "/cache/";
        return new File(Environment.getExternalStorageDirectory().getPath() + cacheDir);
    }

    /**
     * Check how much usable space is available at a given path.
     *
     * @param path The path to check
     * @return The space available in bytes
     */
    @TargetApi(VERSION_CODES.GINGERBREAD)
    public static long getUsableSpace(File path) {
        if (Utils.hasGingerbread()) {
            return path.getUsableSpace();
        }
        final StatFs stats = new StatFs(path.getPath());
        return (long) stats.getBlockSize() * (long) stats.getAvailableBlocks();
    }

    /**
     * Locate an existing instance of this Fragment or if not found, create and
     * add it using FragmentManager.
     *
     * @param fm The FragmentManager manager to use.
     * @return The existing instance of the Fragment or the new instance if just
     *         created.
     */
    private static RetainFragment findOrCreateRetainFragment(FragmentManager fm) {
        //BEGIN_INCLUDE(find_create_retain_fragment)
        // Check to see if we have retained the worker fragment.
        RetainFragment mRetainFragment = (RetainFragment) fm.findFragmentByTag(TAG);

        // If not retained (or first time running), we need to create and add it.
        if (mRetainFragment == null) {
            mRetainFragment = new RetainFragment();
            fm.beginTransaction().add(mRetainFragment, TAG).commitAllowingStateLoss();
        }

        return mRetainFragment;
        //END_INCLUDE(find_create_retain_fragment)
    }

    /**
     * A simple non-UI Fragment that stores a single Object and is retained over configuration
     * changes. It will be used to retain the ImageCache object.
     */
    public static class RetainFragment extends Fragment {
        private Object mObject;

        /**
         * Empty constructor as per the Fragment documentation
         */
        public RetainFragment() {}

        @Override
        public void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);

            // Make sure this Fragment is retained over a configuration change
            setRetainInstance(true);
        }

        /**
         * Store a single object in this Fragment.
         *
         * @param object The object to store
         */
        public void setObject(Object object) {
            mObject = object;
        }

        /**
         * Get the stored object.
         *
         * @return The stored object
         */
        public Object getObject() {
            return mObject;
        }
    }
}
| |
/*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2019 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.big.data.kettle.plugins.pig;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.vfs2.FileObject;
import org.pentaho.hadoop.shim.api.HadoopClientServices;
import org.pentaho.hadoop.shim.api.cluster.NamedCluster;
import org.pentaho.hadoop.shim.api.cluster.NamedClusterService;
import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.ResultFile;
import org.pentaho.di.core.annotations.JobEntry;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobListener;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.hadoop.shim.api.pig.PigResult;
import org.pentaho.metastore.api.IMetaStore;
import org.pentaho.runtime.test.RuntimeTester;
import org.pentaho.runtime.test.action.RuntimeTestActionService;
import org.w3c.dom.Node;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Job entry that executes a Pig script either on a hadoop cluster or locally.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision$
*/
@JobEntry( id = "HadoopPigScriptExecutorPlugin", image = "PIG.svg", name = "HadoopPigScriptExecutorPlugin.Name",
description = "HadoopPigScriptExecutorPlugin.Description",
categoryDescription = "i18n:org.pentaho.di.job:JobCategory.Category.BigData",
i18nPackageName = "org.pentaho.di.job.entries.pig",
documentationUrl = "http://wiki.pentaho.com/display/EAI/Pig+Script+Executor" )
public class JobEntryPigScriptExecutor extends JobEntryBase implements Cloneable, JobEntryInterface {
public static final Class<?> PKG = JobEntryPigScriptExecutor.class; // for i18n purposes, needed by Translator2!!
public static final String CLUSTER_NAME = "cluster_name";
public static final String HDFS_HOSTNAME = "hdfs_hostname";
public static final String HDFS_PORT = "hdfs_port";
public static final String JOBTRACKER_HOSTNAME = "jobtracker_hostname";
public static final String JOBTRACKER_PORT = "jobtracker_port";
public static final String SCRIPT_FILE = "script_file";
public static final String ENABLE_BLOCKING = "enable_blocking";
public static final String LOCAL_EXECUTION = "local_execution";
public static final String JOB_ENTRY_PIG_SCRIPT_EXECUTOR_ERROR_NO_PIG_SCRIPT_SPECIFIED =
"JobEntryPigScriptExecutor.Error.NoPigScriptSpecified";
public static final String JOB_ENTRY_PIG_SCRIPT_EXECUTOR_WARNING_LOCAL_EXECUTION =
"JobEntryPigScriptExecutor.Warning.LocalExecution";
// $NON-NLS-1$
private final NamedClusterService namedClusterService;
private final RuntimeTestActionService runtimeTestActionService;
private final RuntimeTester runtimeTester;
private final NamedClusterServiceLocator namedClusterServiceLocator;
/**
* Hostname of the job tracker
*/
protected NamedCluster namedCluster;
/**
* URL to the pig script to execute
*/
protected String m_scriptFile = "";
/**
* True if the job entry should block until the script has executed
*/
protected boolean m_enableBlocking;
/**
* True if the script should execute locally, rather than on a hadoop cluster
*/
protected boolean m_localExecution;
/**
* Parameters for the script
*/
protected Map<String, String> m_params = new HashMap<String, String>();
  /**
   * Creates the job entry with its required collaborator services.
   *
   * @param namedClusterService        service used to look up and template named Hadoop clusters
   * @param runtimeTestActionService   service used to react to runtime test results
   * @param runtimeTester              tester used to validate cluster connectivity
   * @param namedClusterServiceLocator locator used to resolve shim services for a named cluster
   */
  public JobEntryPigScriptExecutor( NamedClusterService namedClusterService,
                                    RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester,
                                    NamedClusterServiceLocator namedClusterServiceLocator ) {
    this.namedClusterService = namedClusterService;
    this.runtimeTestActionService = runtimeTestActionService;
    this.runtimeTester = runtimeTester;
    this.namedClusterServiceLocator = namedClusterServiceLocator;
  }
private void loadClusterConfig( ObjectId id_jobentry, Repository rep, Node entrynode, IMetaStore metaStore ) {
boolean configLoaded = false;
try {
// attempt to load from named cluster
String clusterName = null;
if ( entrynode != null ) {
clusterName = XMLHandler.getTagValue( entrynode, CLUSTER_NAME ); //$NON-NLS-1$
} else if ( rep != null ) {
clusterName = rep.getJobEntryAttributeString( id_jobentry, CLUSTER_NAME ); //$NON-NLS-1$ //$NON-NLS-2$
}
// load from system first, then fall back to copy stored with job (AbstractMeta)
if ( !StringUtils.isEmpty( clusterName ) && namedClusterService.contains( clusterName, metaStore ) ) {
// pull config from NamedCluster
namedCluster = namedClusterService.read( clusterName, metaStore );
}
if ( namedCluster != null ) {
configLoaded = true;
}
} catch ( Throwable t ) {
logDebug( t.getMessage(), t );
}
if ( !configLoaded ) {
namedCluster = namedClusterService.getClusterTemplate();
if ( entrynode != null ) {
// load default values for cluster & legacy fallback
namedCluster.setName( XMLHandler.getTagValue( entrynode, CLUSTER_NAME ) );
namedCluster.setHdfsHost( XMLHandler.getTagValue( entrynode, HDFS_HOSTNAME ) ); //$NON-NLS-1$
namedCluster.setHdfsPort( XMLHandler.getTagValue( entrynode, HDFS_PORT ) ); //$NON-NLS-1$
namedCluster.setJobTrackerHost( XMLHandler.getTagValue( entrynode, JOBTRACKER_HOSTNAME ) ); //$NON-NLS-1$
namedCluster.setJobTrackerPort( XMLHandler.getTagValue( entrynode, JOBTRACKER_PORT ) ); //$NON-NLS-1$
} else if ( rep != null ) {
// load default values for cluster & legacy fallback
try {
namedCluster.setName( rep.getJobEntryAttributeString( id_jobentry, CLUSTER_NAME ) );
namedCluster.setHdfsHost( rep.getJobEntryAttributeString( id_jobentry, HDFS_HOSTNAME ) );
namedCluster.setHdfsPort( rep.getJobEntryAttributeString( id_jobentry, HDFS_PORT ) ); //$NON-NLS-1$
namedCluster
.setJobTrackerHost( rep.getJobEntryAttributeString( id_jobentry, JOBTRACKER_HOSTNAME ) ); //$NON-NLS-1$
namedCluster
.setJobTrackerPort( rep.getJobEntryAttributeString( id_jobentry, JOBTRACKER_PORT ) ); //$NON-NLS-1$
} catch ( KettleException ke ) {
logError( ke.getMessage(), ke );
}
}
}
}
/*
* (non-Javadoc)
*
* @see org.pentaho.di.job.entry.JobEntryBase#getXML()
*/
public String getXML() {
StringBuffer retval = new StringBuffer();
retval.append( super.getXML() );
if ( namedCluster != null ) {
String namedClusterName = namedCluster.getName();
if ( !StringUtils.isEmpty( namedClusterName ) ) {
retval.append( " " )
.append( XMLHandler.addTagValue( CLUSTER_NAME, namedClusterName ) ); //$NON-NLS-1$ //$NON-NLS-2$
}
retval.append( " " ).append( XMLHandler.addTagValue( HDFS_HOSTNAME, namedCluster.getHdfsHost() ) );
retval.append( " " ).append( XMLHandler.addTagValue( HDFS_PORT, namedCluster.getHdfsPort() ) );
retval.append( " " ).append(
XMLHandler.addTagValue( JOBTRACKER_HOSTNAME, namedCluster.getJobTrackerHost() ) );
retval.append( " " ).append( XMLHandler.addTagValue( JOBTRACKER_PORT, namedCluster.getJobTrackerPort() ) );
}
retval.append( " " ).append( XMLHandler.addTagValue( SCRIPT_FILE, m_scriptFile ) );
retval.append( " " ).append( XMLHandler.addTagValue( ENABLE_BLOCKING, m_enableBlocking ) );
retval.append( " " ).append( XMLHandler.addTagValue( LOCAL_EXECUTION, m_localExecution ) );
retval.append( " <script_parameters>" ).append( Const.CR );
if ( m_params != null ) {
for ( String name : m_params.keySet() ) {
String value = m_params.get( name );
if ( !Utils.isEmpty( name ) && !Utils.isEmpty( value ) ) {
retval.append( " <parameter>" ).append( Const.CR );
retval.append( " " ).append( XMLHandler.addTagValue( "name", name ) );
retval.append( " " ).append( XMLHandler.addTagValue( "value", value ) );
retval.append( " </parameter>" ).append( Const.CR );
}
}
}
retval.append( " </script_parameters>" ).append( Const.CR );
return retval.toString();
}
/*
* (non-Javadoc)
*
* @see org.pentaho.di.job.entry.JobEntryInterface#loadXML(org.w3c.dom.Node, java.util.List, java.util.List,
* org.pentaho.di.repository.Repository)
*/
@Override
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
Repository repository, IMetaStore metaStore ) throws KettleXMLException {
super.loadXML( entrynode, databases, slaveServers );
loadClusterConfig( null, rep, entrynode, metaStore );
setRepository( repository );
m_scriptFile = XMLHandler.getTagValue( entrynode, "script_file" );
m_enableBlocking = XMLHandler.getTagValue( entrynode, "enable_blocking" ).equalsIgnoreCase( "Y" );
m_localExecution = XMLHandler.getTagValue( entrynode, "local_execution" ).equalsIgnoreCase( "Y" );
// Script parameters
m_params = new HashMap<String, String>();
Node paramList = XMLHandler.getSubNode( entrynode, "script_parameters" );
if ( paramList != null ) {
int numParams = XMLHandler.countNodes( paramList, "parameter" );
for ( int i = 0; i < numParams; i++ ) {
Node paramNode = XMLHandler.getSubNodeByNr( paramList, "parameter", i );
String name = XMLHandler.getTagValue( paramNode, "name" );
String value = XMLHandler.getTagValue( paramNode, "value" );
m_params.put( name, value );
}
}
}
/*
 * (non-Javadoc)
 *
 * @see org.pentaho.di.job.entry.JobEntryBase#loadRep(org.pentaho.di.repository.Repository,
 * org.pentaho.di.repository.ObjectId, java.util.List, java.util.List)
 */
@Override
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  // Guard clause: without a repository there is nothing to read from.
  if ( rep == null ) {
    throw new KettleException( "Unable to load from a repository. The repository is null." );
  }
  super.loadRep( rep, metaStore, id_jobentry, databases, slaveServers );
  loadClusterConfig( id_jobentry, rep, null, metaStore );
  setRepository( rep );
  setScriptFilename( rep.getJobEntryAttributeString( id_jobentry, "script_file" ) );
  setEnableBlocking( rep.getJobEntryAttributeBoolean( id_jobentry, "enable_blocking" ) );
  setLocalExecution( rep.getJobEntryAttributeBoolean( id_jobentry, "local_execution" ) );
  // Rebuild the script parameter map from the indexed name/value attribute pairs.
  m_params = new HashMap<String, String>();
  final int paramCount = rep.countNrJobEntryAttributes( id_jobentry, "param_name" );
  for ( int idx = 0; idx < paramCount; idx++ ) {
    m_params.put( rep.getJobEntryAttributeString( id_jobentry, idx, "param_name" ),
      rep.getJobEntryAttributeString( id_jobentry, idx, "param_value" ) );
  }
}
/*
 * (non-Javadoc)
 *
 * @see org.pentaho.di.job.entry.JobEntryBase#saveRep(org.pentaho.di.repository.Repository,
 * org.pentaho.di.repository.ObjectId)
 */
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  if ( rep != null ) {
    super.saveRep( rep, metaStore, id_job );
    if ( namedCluster != null ) {
      String namedClusterName = namedCluster.getName();
      if ( !StringUtils.isEmpty( namedClusterName ) ) {
        rep.saveJobEntryAttribute( id_job, getObjectId(), "cluster_name", namedClusterName ); //$NON-NLS-1$
      }
      // Persist the cluster endpoints individually so the entry can be re-resolved on load.
      rep.saveJobEntryAttribute( id_job, getObjectId(), "hdfs_hostname", namedCluster.getHdfsHost() );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "hdfs_port", namedCluster.getHdfsPort() );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "jobtracker_hostname", namedCluster.getJobTrackerHost() );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "jobtracker_port", namedCluster.getJobTrackerPort() );
    }
    rep.saveJobEntryAttribute( id_job, getObjectId(), "script_file", m_scriptFile );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "enable_blocking", m_enableBlocking );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "local_execution", m_localExecution );
    if ( m_params != null ) {
      int i = 0;
      // Iterate entries directly instead of keySet()+get(): one traversal, no per-key lookup.
      for ( Map.Entry<String, String> entry : m_params.entrySet() ) {
        String name = entry.getKey();
        String value = entry.getValue();
        if ( !Utils.isEmpty( name ) && !Utils.isEmpty( value ) ) {
          rep.saveJobEntryAttribute( id_job, getObjectId(), i, "param_name", name );
          rep.saveJobEntryAttribute( id_job, getObjectId(), i, "param_value", value );
          i++;
        }
      }
    }
  } else {
    throw new KettleException( "Unable to save to a repository. The repository is null." );
  }
}
/*
 * (non-Javadoc)
 *
 * @see org.pentaho.di.job.entry.JobEntryBase#evaluates()
 */
@Override
public boolean evaluates() {
  // This job entry produces a boolean outcome that downstream hops can evaluate.
  return true;
}
/**
 * Get whether the job entry will block until the script finishes
 *
 * @return true if the job entry will block until the script finishes
 */
public boolean getEnableBlocking() {
  return m_enableBlocking;
}
/**
 * Set whether the job will block until the script finishes
 *
 * @param block true if the job entry is to block until the script finishes
 */
public void setEnableBlocking( boolean block ) {
  m_enableBlocking = block;
}
/**
 * Get whether the script is to run locally rather than on a hadoop cluster
 *
 * @return true if the script is to run locally
 */
public boolean getLocalExecution() {
  return m_localExecution;
}
/**
 * Set whether the script is to be run locally rather than on a hadoop cluster
 *
 * @param l true if the script is to run locally
 */
public void setLocalExecution( boolean l ) {
  m_localExecution = l;
}
/**
 * Get the URL to the pig script to run
 *
 * @return the URL to the pig script to run
 */
public String getScriptFilename() {
  return m_scriptFile;
}
/**
 * Set the URL to the pig script to run
 *
 * @param filename the URL to the pig script
 */
public void setScriptFilename( String filename ) {
  m_scriptFile = filename;
}
/**
 * Get the values of parameters to replace in the script
 *
 * @return a map from parameter names to values
 */
public Map<String, String> getScriptParameters() {
  return m_params;
}
/**
 * Set the values of parameters to replace in the script
 *
 * @param params a map from parameter names to values
 */
public void setScriptParameters( Map<String, String> params ) {
  m_params = params;
}
/**
 * Get the named Hadoop cluster configuration this script runs against.
 *
 * @return the named cluster; may be null when none has been selected
 */
public NamedCluster getNamedCluster() {
  return namedCluster;
}
/**
 * Set the named Hadoop cluster configuration this script runs against.
 *
 * @param namedCluster the named cluster to use
 */
public void setNamedCluster( NamedCluster namedCluster ) {
  this.namedCluster = namedCluster;
}
/**
 * @return the named cluster service held by this job entry
 */
public NamedClusterService getNamedClusterService() {
  return namedClusterService;
}
/**
 * @return the runtime test action service held by this job entry
 */
public RuntimeTestActionService getRuntimeTestActionService() {
  return runtimeTestActionService;
}
/**
 * @return the runtime tester held by this job entry
 */
public RuntimeTester getRuntimeTester() {
  return runtimeTester;
}
/*
 * (non-Javadoc)
 *
 * @see org.pentaho.di.job.entry.JobEntryInterface#execute(org.pentaho.di.core.Result, int)
 */
public Result execute( final Result result, int arg1 ) throws KettleException {
  result.setNrErrors( 0 );
  if ( Utils.isEmpty( m_scriptFile ) ) {
    throw new KettleException( BaseMessages.getString( PKG, JOB_ENTRY_PIG_SCRIPT_EXECUTOR_ERROR_NO_PIG_SCRIPT_SPECIFIED ) );
  }
  try {
    // Resolve environment variables in the script path before handing it to Pig.
    String scriptFileS = m_scriptFile;
    scriptFileS = environmentSubstitute( scriptFileS );
    HadoopClientServices hadoopClientServices = namedClusterServiceLocator.getService( namedCluster, HadoopClientServices.class );
    // transform the map type to list type which can been accepted by ParameterSubstitutionPreprocessor
    final List<String> paramList = new ArrayList<String>();
    if ( m_params != null ) {
      for ( Map.Entry<String, String> entry : m_params.entrySet() ) {
        String name = entry.getKey();
        name = environmentSubstitute( name ); // do environment variable substitution
        String value = entry.getValue();
        value = environmentSubstitute( value ); // do environment variable substitution
        paramList.add( name + "=" + value );
      }
    }
    final HadoopClientServices.PigExecutionMode execMode = ( m_localExecution ? HadoopClientServices.PigExecutionMode.LOCAL : HadoopClientServices.PigExecutionMode.MAPREDUCE );
    if ( m_enableBlocking ) {
      // Blocking mode: run on this thread and fold the outcome into the job result.
      PigResult pigResult = hadoopClientServices.runPig( scriptFileS, execMode, paramList, getName(), getLogChannel(), this, parentJob.getLogLevel() );
      processScriptExecutionResult( pigResult, result );
    } else {
      // Non-blocking mode: run on a background thread and return immediately.
      final String finalScriptFileS = scriptFileS;
      final Thread runThread = new Thread() {
        public void run() {
          PigResult pigResult =
            hadoopClientServices.runPig( finalScriptFileS, execMode, paramList, getName(), getLogChannel(),
              JobEntryPigScriptExecutor.this, parentJob.getLogLevel() );
          processScriptExecutionResult( pigResult, result );
        }
      };
      runThread.start();
      parentJob.addJobListener( new JobListener() {
        @Override
        public void jobStarted( Job job ) throws KettleException {
        }
        @Override
        public void jobFinished( Job job ) throws KettleException {
          // Warn (rather than interrupt) if the async script outlives the parent job.
          if ( runThread.isAlive() ) {
            logMinimal( BaseMessages.getString( PKG, "JobEntryPigScriptExecutor.Warning.AsynctaskStillRunning", getName(), job.getJobname() ) );
          }
        }
      } );
    }
  } catch ( Exception ex ) {
    // logError() already records the exception with its stack trace; the former
    // ex.printStackTrace() duplicated it on stderr, bypassing the Kettle log channel.
    result.setStopped( true );
    result.setNrErrors( 1 );
    result.setResult( false );
    logError( ex.getMessage(), ex );
  }
  return result;
}
/**
 * Folds the outcome of a Pig run into the Kettle job Result: a per-job status array or an
 * exception marks failure, and the Pig log file (when present) is attached as a result file.
 *
 * @param pigResult the outcome reported by the Pig execution
 * @param result the job result to update in place
 */
protected void processScriptExecutionResult( PigResult pigResult, Result result ) {
  final int[] jobStatus = pigResult.getResult();
  final Exception failure = pigResult.getException();
  if ( jobStatus != null && jobStatus.length > 0 ) {
    // Element 0 holds the completed-job count; element 1 (when present) the failed-job count.
    final int failedJobs = jobStatus.length > 1 ? jobStatus[ 1 ] : 0;
    logBasic( BaseMessages.getString( PKG, "JobEntryPigScriptExecutor.JobCompletionStatus",
      String.valueOf( jobStatus[ 0 ] ), String.valueOf( failedJobs ) ) );
    if ( failedJobs > 0 ) {
      result.setStopped( true );
      result.setNrErrors( failedJobs );
      result.setResult( false );
    }
  } else if ( failure != null ) {
    logError( failure.getMessage(), failure );
    result.setStopped( true );
    result.setNrErrors( 1 );
    result.setResult( false );
  }
  // Attach the Pig log file, when one was produced, to the job result.
  final FileObject logFile = pigResult.getLogFile();
  if ( logFile != null ) {
    final ResultFile resultFile = new ResultFile( ResultFile.FILE_TYPE_LOG, logFile, parentJob.getJobname(), getName() );
    result.getResultFiles().put( resultFile.getFile().toString(), resultFile );
  }
}
/**
 * Overrides the log channel used by this job entry; exposed only so tests can inject a mock.
 *
 * @param log the log channel to use
 */
@VisibleForTesting
void setLog( LogChannelInterface log ) {
  this.log = log;
}
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.engine;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.DebuggerContext;
import com.intellij.debugger.engine.evaluation.EvaluateException;
import com.intellij.debugger.engine.evaluation.EvaluateExceptionUtil;
import com.intellij.debugger.engine.evaluation.EvaluationContext;
import com.intellij.debugger.engine.evaluation.TextWithImports;
import com.intellij.execution.ExecutionException;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.LanguageFileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.LanguageLevelProjectExtension;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.StringBuilderSpinAllocator;
import com.sun.jdi.*;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;
import java.util.*;
public abstract class DebuggerUtils {
private static final Logger LOG = Logger.getInstance("#com.intellij.debugger.engine.DebuggerUtils");
// Per-debug-process cache key for the resolved java.lang.Object.toString() JDI method.
private static final Key<Method> TO_STRING_METHOD_KEY = new Key<Method>("CachedToStringMethod");
// Names of the eight Java primitive types; used by isPrimitiveType().
public static final Set<String> ourPrimitiveTypeNames = new HashSet<String>(Arrays.asList(
  "byte", "short", "int", "long", "float", "double", "boolean", "char"
));
/**
 * Clears the cached Object.toString() JDI method for a finished debug process so the
 * stale {@link Method} reference does not outlive its session.
 */
public static void cleanupAfterProcessFinish(DebugProcess debugProcess) {
  debugProcess.putUserData(TO_STRING_METHOD_KEY, null);
}
/**
 * Renders a JDI {@link Value} as a display string: strings verbatim, primitives via
 * String.valueOf, arrays as a bracketed comma-separated recursion, and any other object
 * reference by invoking its toString() inside the debuggee VM.
 *
 * @throws EvaluateException if toString() cannot be resolved or the value's type is
 *         unsupported; a collected object maps to OBJECT_WAS_COLLECTED
 */
@NonNls
public static String getValueAsString(final EvaluationContext evaluationContext, Value value) throws EvaluateException {
  try {
    if (value == null) {
      return "null";
    }
    if (value instanceof StringReference) {
      return ((StringReference)value).value();
    }
    if (isInteger(value)) {
      long v = ((PrimitiveValue)value).longValue();
      return String.valueOf(v);
    }
    if (isNumeric(value)) {
      double v = ((PrimitiveValue)value).doubleValue();
      return String.valueOf(v);
    }
    if (value instanceof BooleanValue) {
      boolean v = ((PrimitiveValue)value).booleanValue();
      return String.valueOf(v);
    }
    if (value instanceof CharValue) {
      char v = ((PrimitiveValue)value).charValue();
      return String.valueOf(v);
    }
    if (value instanceof ObjectReference) {
      if (value instanceof ArrayReference) {
        // Arrays: render each element recursively, producing e.g. [a,b,c].
        final StringBuilder builder = new StringBuilder();
        builder.append("[");
        for (Iterator<Value> iterator = ((ArrayReference)value).getValues().iterator(); iterator.hasNext();) {
          final Value element = iterator.next();
          builder.append(getValueAsString(evaluationContext, element));
          if (iterator.hasNext()) {
            builder.append(",");
          }
        }
        builder.append("]");
        return builder.toString();
      }
      final ObjectReference objRef = (ObjectReference)value;
      final DebugProcess debugProcess = evaluationContext.getDebugProcess();
      // Object.toString() is resolved once per debug process and cached in user data.
      Method toStringMethod = debugProcess.getUserData(TO_STRING_METHOD_KEY);
      if (toStringMethod == null) {
        try {
          ReferenceType refType = objRef.virtualMachine().classesByName(CommonClassNames.JAVA_LANG_OBJECT).get(0);
          toStringMethod = findMethod(refType, "toString", "()Ljava/lang/String;");
          debugProcess.putUserData(TO_STRING_METHOD_KEY, toStringMethod);
        }
        catch (Exception ignored) {
          throw EvaluateExceptionUtil.createEvaluateException(
            DebuggerBundle.message("evaluation.error.cannot.evaluate.tostring", objRef.referenceType().name()));
        }
      }
      // findMethod() may return null without throwing; report the same error in that case.
      if (toStringMethod == null) {
        throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.cannot.evaluate.tostring", objRef.referenceType().name()));
      }
      // while result must be of com.sun.jdi.StringReference type, it turns out that sometimes (jvm bugs?)
      // it is a plain com.sun.tools.jdi.ObjectReferenceImpl
      final Value result = debugProcess.invokeInstanceMethod(evaluationContext, objRef, toStringMethod, Collections.emptyList(), 0);
      if (result == null) {
        return "null";
      }
      return result instanceof StringReference ? ((StringReference)result).value() : result.toString();
    }
    throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.unsupported.expression.type"));
  }
  catch (ObjectCollectedException ignored) {
    throw EvaluateExceptionUtil.OBJECT_WAS_COLLECTED;
  }
}
public static final int MAX_DISPLAY_LABEL_LENGTH = 1024 * 5;

/**
 * Escapes a raw string value for display, truncating it to
 * {@link #MAX_DISPLAY_LABEL_LENGTH} characters and marking any truncation with "...".
 */
public static String convertToPresentationString(String str) {
  if (str.length() <= MAX_DISPLAY_LABEL_LENGTH) {
    return translateStringValue(str);
  }
  final String truncated = translateStringValue(str.substring(0, MAX_DISPLAY_LABEL_LENGTH));
  return truncated.endsWith("...") ? truncated : truncated + "...";
}
/**
 * Finds a method on the given JDI reference type by name and, optionally, JNI signature.
 * For array types the lookup is delegated to java.lang.Object, since method lookup on an
 * ArrayType always returns an empty list.
 *
 * @param methodSignature JNI-style signature, or null to match by name only
 * @return the method, or null if no match was found
 */
@Nullable
public static Method findMethod(ReferenceType refType, @NonNls String methodName, @NonNls String methodSignature) {
  if (refType instanceof ArrayType) {
    // for array types methodByName() in JDI always returns empty list
    final Method method = findMethod(refType.virtualMachine().classesByName(CommonClassNames.JAVA_LANG_OBJECT).get(0), methodName, methodSignature);
    if (method != null) {
      return method;
    }
  }
  Method method = null;
  if (methodSignature != null) {
    if (refType instanceof ClassType) {
      // Prefer the single concrete implementation when the type is a class.
      method = ((ClassType)refType).concreteMethodByName(methodName, methodSignature);
    }
    if (method == null) {
      final List<Method> methods = refType.methodsByName(methodName, methodSignature);
      if (methods.size() > 0) {
        method = methods.get(0);
      }
    }
  }
  else {
    List<Method> methods = null;
    if (refType instanceof ClassType) {
      methods = refType.methodsByName(methodName);
    }
    if (methods != null && methods.size() > 0) {
      method = methods.get(0);
    }
  }
  return method;
}
/**
 * Returns true if the value is any numeric primitive: integral or floating point.
 */
public static boolean isNumeric(Value value) {
  if (value == null) {
    return false;
  }
  return isInteger(value) || value instanceof FloatValue || value instanceof DoubleValue;
}
/**
 * Returns true if the value is an integral primitive (byte, short, int or long).
 */
public static boolean isInteger(Value value) {
  if (value == null) {
    return false;
  }
  return value instanceof ByteValue
         || value instanceof ShortValue
         || value instanceof LongValue
         || value instanceof IntegerValue;
}
/**
 * Escapes special characters in the given string for display via
 * StringUtil.escapeStringCharacters.
 */
public static String translateStringValue(final String str) {
  int length = str.length();
  final StringBuilder buffer = StringBuilderSpinAllocator.alloc();
  try {
    StringUtil.escapeStringCharacters(length, str, buffer);
    // NOTE(review): str.length() cannot change, so this condition is always false and the
    // "..." is never appended - presumably 'length' was meant to be a truncation limit; verify.
    if (str.length() > length) {
      buffer.append("...");
    }
    return buffer.toString();
  }
  finally {
    // The spin allocator requires explicit disposal of the borrowed builder.
    StringBuilderSpinAllocator.dispose(buffer);
  }
}
/**
 * Splits an array class name such as {@code "Foo[][]"} into its element class name and
 * dimension count, scanning "[]" pairs backwards from the end of the string.
 *
 * @return the parsed element name and dimension count; null when the name has no trailing
 *         bracket pairs (dims == 0) or an unmatched ']' is found
 */
protected static ArrayClass getArrayClass(String className) {
  boolean searchBracket = false;
  int dims = 0;
  int pos;
  for(pos = className.lastIndexOf(']'); pos >= 0; pos--){
    char c = className.charAt(pos);
    if (searchBracket) {
      // Inside a "]" looking for its matching '['; whitespace between brackets is tolerated.
      if (c == '[') {
        dims++;
        searchBracket = false;
      }
      else if (!Character.isWhitespace(c)) break;
    }
    else {
      if (c == ']') {
        searchBracket = true;
      }
      else if (!Character.isWhitespace(c)) break;
    }
  }
  // An opened ']' without a matching '[' means the name is not a well-formed array type.
  if (searchBracket) return null;
  if(dims == 0) return null;
  return new ArrayClass(className.substring(0, pos + 1), dims);
}
/**
 * PSI-based assignability check between two class names of equal array dimension;
 * without a project it degenerates to plain string equality.
 */
public static boolean instanceOf(String subType, String superType, Project project) {
  if(project == null) {
    return subType.equals(superType);
  }
  // NOTE(review): getArrayClass() returns null when dims == 0, so plain (non-array) class
  // names make both lookups null and this method returns false - confirm that is intended.
  ArrayClass nodeClass = getArrayClass(subType);
  ArrayClass rendererClass = getArrayClass(superType);
  if (nodeClass == null || rendererClass == null) return false;
  if (nodeClass.dims == rendererClass.dims) {
    GlobalSearchScope scope = GlobalSearchScope.allScope(project);
    PsiClass psiNodeClass = JavaPsiFacade.getInstance(project).findClass(nodeClass.className, scope);
    PsiClass psiRendererClass = JavaPsiFacade.getInstance(project).findClass(rendererClass.className, scope);
    return InheritanceUtil.isInheritorOrSelf(psiNodeClass, psiRendererClass, true);
  }
  else if (nodeClass.dims > rendererClass.dims) {
    // A deeper array is only assignable when the shallower element type is Object.
    return rendererClass.className.equals(CommonClassNames.JAVA_LANG_OBJECT);
  }
  return false;
}
/**
 * Walks the JDI type hierarchy of {@code subType} looking for a type named {@code superType}.
 *
 * @return the matching {@link Type}, or null when {@code subType} is not assignable to it
 */
public static Type getSuperType(Type subType, String superType) {
  if(CommonClassNames.JAVA_LANG_OBJECT.equals(superType)) {
    // Every reference type is assignable to Object; answer with the VM's Object class directly.
    // classesByName() is declared to return List<ReferenceType>, so the raw List and the
    // cast of the previous version are unnecessary.
    List<ReferenceType> list = subType.virtualMachine().classesByName(CommonClassNames.JAVA_LANG_OBJECT);
    if(list.size() > 0) {
      return list.get(0);
    }
    return null;
  }
  return getSuperTypeInt(subType, superType);
}
/**
 * Recursive worker for {@link #getSuperType(Type, String)}: searches the superclass chain
 * and implemented/extended interfaces of {@code subType} for a type named {@code superType}.
 * Typed for-each loops replace the previous raw {@code List} + cast iteration.
 */
private static Type getSuperTypeInt(Type subType, String superType) {
  Type result;
  if (subType == null) {
    return null;
  }
  if (subType.name().equals(superType)) {
    return subType;
  }
  if (subType instanceof ClassType) {
    result = getSuperType(((ClassType)subType).superclass(), superType);
    if (result != null) {
      return result;
    }
    // allInterfaces() covers interfaces inherited through the whole superclass chain.
    for (InterfaceType interfaceType : ((ClassType)subType).allInterfaces()) {
      if (interfaceType.name().equals(superType)) {
        return interfaceType;
      }
    }
    return null;
  }
  if (subType instanceof InterfaceType) {
    for (InterfaceType interfaceType : ((InterfaceType)subType).superinterfaces()) {
      result = getSuperType(interfaceType, superType);
      if (result != null) {
        return result;
      }
    }
  }
  else if (subType instanceof ArrayType) {
    if (superType.endsWith("[]")) {
      try {
        // An array type matches T[] when its component type is assignable to T.
        String superTypeItem = superType.substring(0, superType.length() - 2);
        Type subTypeItem = ((ArrayType)subType).componentType();
        return instanceOf(subTypeItem, superTypeItem) ? subType : null;
      }
      catch (ClassNotLoadedException e) {
        LOG.debug(e);
      }
    }
  }
  else if (subType instanceof PrimitiveType) {
    //noinspection HardCodedStringLiteral
    if(superType.equals("java.lang.Primitive")) {
      return subType;
    }
  }
  //only for interfaces and arrays
  if(CommonClassNames.JAVA_LANG_OBJECT.equals(superType)) {
    List<ReferenceType> list = subType.virtualMachine().classesByName(CommonClassNames.JAVA_LANG_OBJECT);
    if(list.size() > 0) {
      return list.get(0);
    }
  }
  return null;
}
/**
 * Returns true if the JDI type {@code subType} is assignable to the type named {@code superType}.
 */
public static boolean instanceOf(Type subType, String superType) {
  return getSuperType(subType, superType) != null;
}
/**
 * Resolves a runtime (JVM) class name to its PsiClass: array names map to the synthetic
 * array class, '$'-separated nested names are normalized to '.', and the search falls
 * back from the given scope to the whole-project scope.
 *
 * @return the class, or null when it cannot be found or the project is the default one
 */
@Nullable
public static PsiClass findClass(final String className, Project project, final GlobalSearchScope scope) {
  ApplicationManager.getApplication().assertReadAccessAllowed();
  final PsiManager psiManager = PsiManager.getInstance(project);
  final JavaPsiFacade javaPsiFacade = JavaPsiFacade.getInstance(psiManager.getProject());
  if (getArrayClass(className) != null) {
    return javaPsiFacade.getElementFactory().getArrayClass(LanguageLevelProjectExtension.getInstance(psiManager.getProject()).getLanguageLevel());
  }
  if(project.isDefault()) {
    return null;
  }
  // JVM binary names use '$' for nested classes; PSI lookup expects '.'.
  final String _className = className.replace('$', '.');
  PsiClass aClass = javaPsiFacade.findClass(_className, scope);
  if (aClass == null) {
    if (!_className.equals(className)) {
      // try original name if it differs from the normalized name
      aClass = javaPsiFacade.findClass(className, scope);
    }
  }
  if (aClass == null) {
    final GlobalSearchScope globalScope = GlobalSearchScope.allScope(project);
    if (!globalScope.equals(scope)) {
      aClass = javaPsiFacade.findClass(_className, globalScope);
      if (aClass == null) {
        if (!_className.equals(className)) {
          // try original name with global scope if the original differs from the normalized name
          aClass = javaPsiFacade.findClass(className, globalScope);
        }
      }
    }
  }
  return aClass;
}
/**
 * Resolves a runtime class name to a {@link PsiType}, supporting array notation.
 *
 * @return the resolved type, or null when the project is default, the class cannot be
 *         found, or the type text is invalid
 */
public static PsiType getType(String className, Project project) {
  ApplicationManager.getApplication().assertReadAccessAllowed();
  final PsiManager psiManager = PsiManager.getInstance(project);
  try {
    if (getArrayClass(className) != null) {
      return JavaPsiFacade.getInstance(psiManager.getProject()).getElementFactory().createTypeFromText(className, null);
    }
    if(project.isDefault()) {
      return null;
    }
    final PsiClass aClass =
      JavaPsiFacade.getInstance(psiManager.getProject()).findClass(className.replace('$', '.'), GlobalSearchScope.allScope(project));
    // findClass() may legitimately fail; previously a null result fell straight into
    // createType() instead of being reported as "not found".
    if (aClass == null) {
      return null;
    }
    return JavaPsiFacade.getInstance(psiManager.getProject()).getElementFactory().createType(aClass);
  }
  catch (IncorrectOperationException e) {
    LOG.error(e);
  }
  return null;
}
/**
 * Validates that the code fragment is non-empty and free of parse errors.
 *
 * @throws EvaluateException when the fragment is empty or contains a syntax error
 */
public static void checkSyntax(PsiCodeFragment codeFragment) throws EvaluateException {
  final PsiElement[] children = codeFragment.getChildren();
  if (children.length == 0) {
    throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.empty.code.fragment"));
  }
  for (final PsiElement child : children) {
    if (child instanceof PsiErrorElement) {
      throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.invalid.expression", child.getText()));
    }
  }
}
/**
 * Returns true if evaluating the element could change program state
 * (assignments, increments/decrements, or any method call).
 */
public static boolean hasSideEffects(PsiElement element) {
  return hasSideEffectsOrReferencesMissingVars(element, null);
}
/**
 * Walks the PSI tree and reports true when the element has potential side effects or,
 * if {@code visibleLocalVariables} is non-null, references a local variable outside that set.
 */
public static boolean hasSideEffectsOrReferencesMissingVars(PsiElement element, @Nullable final Set<String> visibleLocalVariables) {
  final Ref<Boolean> rv = new Ref<Boolean>(Boolean.FALSE);
  element.accept(new JavaRecursiveElementWalkingVisitor() {
    @Override
    public void visitPostfixExpression(final PsiPostfixExpression expression) {
      // x++ / x-- always mutate.
      rv.set(Boolean.TRUE);
    }
    @Override
    public void visitReferenceExpression(final PsiReferenceExpression expression) {
      final PsiElement psiElement = expression.resolve();
      if (psiElement instanceof PsiLocalVariable) {
        // A local that is not in the visible set cannot be evaluated safely here.
        if (visibleLocalVariables != null) {
          if (!visibleLocalVariables.contains(((PsiLocalVariable)psiElement).getName())) {
            rv.set(Boolean.TRUE);
          }
        }
      }
      else if (psiElement instanceof PsiMethod) {
        rv.set(Boolean.TRUE);
        //final PsiMethod method = (PsiMethod)psiElement;
        //if (!isSimpleGetter(method)) {
        //  rv.set(Boolean.TRUE);
        //}
      }
      // Stop descending as soon as a side effect has been found.
      if (!rv.get().booleanValue()) {
        super.visitReferenceExpression(expression);
      }
    }
    @Override
    public void visitPrefixExpression(final PsiPrefixExpression expression) {
      final IElementType op = expression.getOperationTokenType();
      if (JavaTokenType.PLUSPLUS.equals(op) || JavaTokenType.MINUSMINUS.equals(op)) {
        rv.set(Boolean.TRUE);
      }
      else {
        super.visitPrefixExpression(expression);
      }
    }
    @Override
    public void visitAssignmentExpression(final PsiAssignmentExpression expression) {
      rv.set(Boolean.TRUE);
    }
    @Override
    public void visitCallExpression(final PsiCallExpression callExpression) {
      // Any call is conservatively treated as side-effecting; the commented-out code
      // below shows a previously considered simple-getter exemption.
      rv.set(Boolean.TRUE);
      //final PsiMethod method = callExpression.resolveMethod();
      //if (method == null || !isSimpleGetter(method)) {
      //  rv.set(Boolean.TRUE);
      //}
      //else {
      //  super.visitCallExpression(callExpression);
      //}
    }
  });
  return rv.get().booleanValue();
}
// Picks a free transport address suitable for starting a debug listener.
public abstract String findAvailableDebugAddress(boolean useSockets) throws ExecutionException;
/**
 * Returns true if the type component is compiler-generated, guarded by the VM's
 * capability to report the synthetic attribute.
 */
public static boolean isSynthetic(TypeComponent typeComponent) {
  if (typeComponent == null) {
    return false;
  }
  VirtualMachine machine = typeComponent.virtualMachine();
  return machine != null && machine.canGetSyntheticAttribute() && typeComponent.isSynthetic();
}
/**
 * Returns true if the method body is exactly {@code return <field>;} (optionally
 * {@code this.<field>}) where the field is declared in the method's own class.
 */
public static boolean isSimpleGetter(PsiMethod method){
  final PsiCodeBlock body = method.getBody();
  if(body == null){
    return false;
  }
  final PsiStatement[] statements = body.getStatements();
  if(statements.length != 1){
    return false;
  }
  final PsiStatement statement = statements[0];
  if(!(statement instanceof PsiReturnStatement)){
    return false;
  }
  final PsiExpression value = ((PsiReturnStatement)statement).getReturnValue();
  if(!(value instanceof PsiReferenceExpression)){
    return false;
  }
  final PsiReferenceExpression reference = (PsiReferenceExpression)value;
  final PsiExpression qualifier = reference.getQualifierExpression();
  //noinspection HardCodedStringLiteral
  if(qualifier != null && !"this".equals(qualifier.getText())) {
    return false;
  }
  // instanceof is false for null, so this also covers the unresolved-reference case.
  final PsiElement referent = reference.resolve();
  if(!(referent instanceof PsiField)) {
    return false;
  }
  // getContainingClass() may return null; the previous unconditional equals() call
  // could throw a NullPointerException here.
  final PsiClass fieldClass = ((PsiField)referent).getContainingClass();
  return fieldClass != null && fieldClass.equals(method.getContainingClass());
}
/**
 * Returns true if {@code typeName} is one of the eight Java primitive type names.
 */
public static boolean isPrimitiveType(final String typeName) {
  return ourPrimitiveTypeNames.contains(typeName);
}
/**
 * Parsed form of an array class name: the element class name plus the dimension count.
 */
protected static class ArrayClass {
  // fully qualified element class name (without any "[]" suffixes)
  public String className;
  // number of array dimensions, e.g. 2 for String[][]
  public int dims;
  public ArrayClass(String className, int dims) {
    this.className = className;
    this.dims = dims;
  }
}
/**
 * Returns the application-level DebuggerUtils service implementation.
 */
public static DebuggerUtils getInstance() {
  return ServiceManager.getService(DebuggerUtils.class);
}
// Replaces occurrences of 'this' in the expression with the given substitute expression/value.
public abstract PsiExpression substituteThis(PsiExpression expressionWithThis, PsiExpression howToEvaluateThis, Value howToEvaluateThisValue, StackFrameContext context) throws EvaluateException;
// Extracts the current debugger context from an action's DataContext.
public abstract DebuggerContext getDebuggerContext (DataContext context);
// (De)serialization of expressions together with their import context.
public abstract Element writeTextWithImports(TextWithImports text);
public abstract TextWithImports readTextWithImports (Element element);
public abstract void writeTextWithImports(Element root, @NonNls String name, TextWithImports value);
public abstract TextWithImports readTextWithImports (Element root, @NonNls String name);
public abstract TextWithImports createExpressionWithImports(@NonNls String expression);
// Returns the PSI element at the execution point of the given stack frame context.
public abstract PsiElement getContextElement(final StackFrameContext context);
public abstract PsiClass chooseClassDialog(String title, Project project);
/**
 * Returns true if files of this type can be debugged on the JVM.
 */
public static boolean supportsJVMDebugging(FileType type) {
  return type instanceof LanguageFileType && ((LanguageFileType)type).isJVMDebuggingSupported();
}
/**
 * Returns true if any registered {@link JVMDebugProvider} extension claims JVM debugging
 * support for the given file.
 */
public static boolean supportsJVMDebugging(PsiFile file) {
  for (final JVMDebugProvider provider : Extensions.getExtensions(JVMDebugProvider.EP_NAME)) {
    if (provider.supportsJVMDebugging(file)) {
      return true;
    }
  }
  return false;
}
}
| |
/* This file was generated by SableCC (http://www.sablecc.org/). */
package org.acre.lang.node;
import java.util.*;
import org.acre.lang.analysis.*;
/**
 * AST node for the 'tmp' alternative of relational_expr, generated by SableCC.
 * Do not edit by hand - regenerate from the grammar instead.
 *
 * Each child setter maintains the parent-link invariant: the new child is detached from
 * any previous parent, the replaced child's parent link is cleared, and the field updated.
 */
public final class ATmpRelationalExpr extends PTmpRelationalExpr
{
    // Child nodes; any of them may be null (absent).
    private PTmpRelationalExpr _tmpRelationalExpr_;
    private PCompareToken _compareToken_;
    private PCompositePredicate _compositePredicate_;
    private PAdditiveExpr _additiveExpr_;
    // Creates an empty node; children are attached later via the setters.
    public ATmpRelationalExpr()
    {
    }
    // Creates a node and attaches all four children (parent links fixed up by the setters).
    public ATmpRelationalExpr(
        PTmpRelationalExpr _tmpRelationalExpr_,
        PCompareToken _compareToken_,
        PCompositePredicate _compositePredicate_,
        PAdditiveExpr _additiveExpr_)
    {
        setTmpRelationalExpr(_tmpRelationalExpr_);
        setCompareToken(_compareToken_);
        setCompositePredicate(_compositePredicate_);
        setAdditiveExpr(_additiveExpr_);
    }
    // Deep copy: children are cloned recursively via cloneNode().
    public Object clone()
    {
        return new ATmpRelationalExpr(
            (PTmpRelationalExpr) cloneNode(_tmpRelationalExpr_),
            (PCompareToken) cloneNode(_compareToken_),
            (PCompositePredicate) cloneNode(_compositePredicate_),
            (PAdditiveExpr) cloneNode(_additiveExpr_));
    }
    // Visitor dispatch (SableCC Switch/Analysis pattern).
    public void apply(Switch sw)
    {
        ((Analysis) sw).caseATmpRelationalExpr(this);
    }
    public PTmpRelationalExpr getTmpRelationalExpr()
    {
        return _tmpRelationalExpr_;
    }
    public void setTmpRelationalExpr(PTmpRelationalExpr node)
    {
        // Detach the node currently occupying this slot.
        if(_tmpRelationalExpr_ != null)
        {
            _tmpRelationalExpr_.parent(null);
        }
        // Re-home the incoming node under this parent.
        if(node != null)
        {
            if(node.parent() != null)
            {
                node.parent().removeChild(node);
            }
            node.parent(this);
        }
        _tmpRelationalExpr_ = node;
    }
    public PCompareToken getCompareToken()
    {
        return _compareToken_;
    }
    public void setCompareToken(PCompareToken node)
    {
        if(_compareToken_ != null)
        {
            _compareToken_.parent(null);
        }
        if(node != null)
        {
            if(node.parent() != null)
            {
                node.parent().removeChild(node);
            }
            node.parent(this);
        }
        _compareToken_ = node;
    }
    public PCompositePredicate getCompositePredicate()
    {
        return _compositePredicate_;
    }
    public void setCompositePredicate(PCompositePredicate node)
    {
        if(_compositePredicate_ != null)
        {
            _compositePredicate_.parent(null);
        }
        if(node != null)
        {
            if(node.parent() != null)
            {
                node.parent().removeChild(node);
            }
            node.parent(this);
        }
        _compositePredicate_ = node;
    }
    public PAdditiveExpr getAdditiveExpr()
    {
        return _additiveExpr_;
    }
    public void setAdditiveExpr(PAdditiveExpr node)
    {
        if(_additiveExpr_ != null)
        {
            _additiveExpr_.parent(null);
        }
        if(node != null)
        {
            if(node.parent() != null)
            {
                node.parent().removeChild(node);
            }
            node.parent(this);
        }
        _additiveExpr_ = node;
    }
    // Concatenation of the children's text, in grammar order; null children render as "".
    public String toString()
    {
        return ""
            + toString(_tmpRelationalExpr_)
            + toString(_compareToken_)
            + toString(_compositePredicate_)
            + toString(_additiveExpr_);
    }
    // Clears whichever child slot currently holds the given node.
    void removeChild(Node child)
    {
        if(_tmpRelationalExpr_ == child)
        {
            _tmpRelationalExpr_ = null;
            return;
        }
        if(_compareToken_ == child)
        {
            _compareToken_ = null;
            return;
        }
        if(_compositePredicate_ == child)
        {
            _compositePredicate_ = null;
            return;
        }
        if(_additiveExpr_ == child)
        {
            _additiveExpr_ = null;
            return;
        }
    }
    // Swaps oldChild for newChild in whichever slot matches, via the parent-maintaining setter.
    void replaceChild(Node oldChild, Node newChild)
    {
        if(_tmpRelationalExpr_ == oldChild)
        {
            setTmpRelationalExpr((PTmpRelationalExpr) newChild);
            return;
        }
        if(_compareToken_ == oldChild)
        {
            setCompareToken((PCompareToken) newChild);
            return;
        }
        if(_compositePredicate_ == oldChild)
        {
            setCompositePredicate((PCompositePredicate) newChild);
            return;
        }
        if(_additiveExpr_ == oldChild)
        {
            setAdditiveExpr((PAdditiveExpr) newChild);
            return;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.reef.runtime.common.client.api;
import org.apache.reef.runtime.common.files.FileResource;
import org.apache.reef.tang.Configuration;
import org.apache.reef.util.BuilderUtils;
import org.apache.reef.util.Optional;
import java.util.HashSet;
import java.util.Set;
/**
* Default POJO implementation of JobSubmissionEvent.
* Use newBuilder to construct an instance.
*/
public final class JobSubmissionEventImpl implements JobSubmissionEvent {
// All state is final; required fields are null-checked in the constructor,
// optional ones are modeled as Optional.
private final String identifier;
private final String remoteId;
private final Configuration configuration;
private final String userName;
private final Set<FileResource> globalFileSet;
private final Set<FileResource> localFileSet;
private final Optional<Integer> driverMemory;
private final Optional<Integer> priority;
private final Optional<String> queue;
private final Optional<Boolean> preserveEvaluators;
private final Optional<Integer> maxApplicationSubmissions;
/**
 * Builds the immutable event from the builder: required fields must be set
 * (BuilderUtils.notNull throws otherwise); unset optional fields become absent Optionals.
 */
private JobSubmissionEventImpl(final Builder builder) {
  this.identifier = BuilderUtils.notNull(builder.identifier);
  this.remoteId = BuilderUtils.notNull(builder.remoteId);
  this.configuration = BuilderUtils.notNull(builder.configuration);
  this.userName = BuilderUtils.notNull(builder.userName);
  this.globalFileSet = BuilderUtils.notNull(builder.globalFileSet);
  this.localFileSet = BuilderUtils.notNull(builder.localFileSet);
  this.driverMemory = Optional.ofNullable(builder.driverMemory);
  this.priority = Optional.ofNullable(builder.priority);
  this.preserveEvaluators = Optional.ofNullable(builder.preserveEvaluators);
  this.queue = Optional.ofNullable(builder.queue);
  this.maxApplicationSubmissions = Optional.ofNullable(builder.maxApplicationSubmissions);
}
// Plain accessors over the immutable fields populated above.
@Override
public String getIdentifier() {
  return identifier;
}
@Override
public String getRemoteId() {
  return remoteId;
}
@Override
public Configuration getConfiguration() {
  return configuration;
}
@Override
public String getUserName() {
  return userName;
}
@Override
public Set<FileResource> getGlobalFileSet() {
  return globalFileSet;
}
@Override
public Set<FileResource> getLocalFileSet() {
  return localFileSet;
}
@Override
public Optional<Integer> getDriverMemory() {
  return driverMemory;
}
@Override
public Optional<Integer> getPriority() {
  return priority;
}
@Override
public Optional<Boolean> getPreserveEvaluators() {
  return preserveEvaluators;
}
@Override
public Optional<Integer> getMaxApplicationSubmissions() {
  return maxApplicationSubmissions;
}
/**
 * @return a fresh builder for constructing a JobSubmissionEvent
 */
public static Builder newBuilder() {
  return new Builder();
}
/**
* Builder used to create JobSubmissionEvent instances.
*/
public static final class Builder implements org.apache.reef.util.Builder<JobSubmissionEvent> {
private String identifier;
private String remoteId;
private Configuration configuration;
private String userName;
private Set<FileResource> globalFileSet = new HashSet<>();
private Set<FileResource> localFileSet = new HashSet<>();
private Integer driverMemory;
private Integer priority;
private String queue;
private Boolean preserveEvaluators;
private Integer maxApplicationSubmissions;
/**
* @see JobSubmissionEvent#getIdentifier()
*/
public Builder setIdentifier(final String identifier) {
this.identifier = identifier;
return this;
}
/**
* @see JobSubmissionEvent#getRemoteId()
*/
public Builder setRemoteId(final String remoteId) {
this.remoteId = remoteId;
return this;
}
/**
* @see JobSubmissionEvent#getConfiguration()
*/
public Builder setConfiguration(final Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* @see JobSubmissionEvent#getUserName()
*/
public Builder setUserName(final String userName) {
this.userName = userName;
return this;
}
/**
* Add an entry to the globalFileSet.
* @see JobSubmissionEvent#getGlobalFileSet()
*/
public Builder addGlobalFile(final FileResource globalFile) {
this.globalFileSet.add(globalFile);
return this;
}
/**
* Add an entry to the localFileSet.
* @see JobSubmissionEvent#getLocalFileSet()
*/
public Builder addLocalFile(final FileResource localFile) {
this.localFileSet.add(localFile);
return this;
}
/**
* @see JobSubmissionEvent#getDriverMemory()
*/
public Builder setDriverMemory(final Integer driverMemory) {
this.driverMemory = driverMemory;
return this;
}
/**
* @see JobSubmissionEvent#getPriority()
*/
public Builder setPriority(final Integer priority) {
this.priority = priority;
return this;
}
/**
* @see JobSubmissionEvent#getPreserveEvaluators()
*/
public Builder setPreserveEvaluators(final Boolean preserveEvaluators) {
this.preserveEvaluators = preserveEvaluators;
return this;
}
/**
* @see JobSubmissionEvent#getMaxApplicationSubmissions()
*/
public Builder setMaxApplicationSubmissions(final Integer maxApplicationSubmissions) {
this.maxApplicationSubmissions = maxApplicationSubmissions;
return this;
}
@Override
public JobSubmissionEvent build() {
return new JobSubmissionEventImpl(this);
}
}
}
| |
// BSD License (http://lemurproject.org/galago-license)
package org.lemurproject.galago.core.retrieval;
import org.lemurproject.galago.core.index.stats.FieldStatistics;
import org.lemurproject.galago.core.index.stats.IndexPartStatistics;
import org.lemurproject.galago.core.index.stats.NodeStatistics;
import org.lemurproject.galago.core.parse.Document;
import org.lemurproject.galago.core.parse.Document.DocumentComponents;
import org.lemurproject.galago.core.retrieval.iterator.*;
import org.lemurproject.galago.core.retrieval.query.Node;
import org.lemurproject.galago.core.retrieval.query.NodeType;
import org.lemurproject.galago.core.retrieval.query.QueryType;
import org.lemurproject.galago.core.retrieval.query.StructuredQuery;
import org.lemurproject.galago.core.retrieval.traversal.Traversal;
import org.lemurproject.galago.utility.Parameters;
import java.io.IOException;
import java.util.*;
/**
* This class allows searching over a set of Retrievals.
*
* Although it is possible to list such objects as GroupRetrievals or other
* MultiRetrievals under a MultiRetrieval, it is not recommended, as this
* behavior has not been tested and is currently undefined.
*
* @author sjh
*/
public class MultiRetrieval implements Retrieval {

  /** The constituent retrievals that queries are fanned out to. */
  protected ArrayList<Retrieval> retrievals;
  protected FeatureFactory features;
  protected List<Traversal> defaultTraversals;
  protected Parameters globalParameters;
  /** Intersection of parts advertised by all constituents; built by initRetrieval(). */
  protected Parameters retrievalParts;
  protected HashMap<String, String> defaultIndexOperators = new HashMap<>();
  protected HashSet<String> knownIndexOperators = new HashSet<>();

  public MultiRetrieval(ArrayList<Retrieval> indexes, Parameters p) throws Exception {
    this.retrievals = indexes;
    this.globalParameters = p;
    initRetrieval();
    this.features = new FeatureFactory(this.globalParameters);
    defaultTraversals = features.getTraversals(this);
  }

  @Override
  public void close() throws IOException {
    for (Retrieval r : retrievals) {
      r.close();
    }
  }

  /**
   * Aggregates part statistics across all constituents.
   * Returns null if this MultiRetrieval has no constituents.
   */
  @Override
  public IndexPartStatistics getIndexPartStatistics(String partName) throws IOException {
    IndexPartStatistics aggregate = null;
    for (Retrieval r : retrievals) {
      IndexPartStatistics stats = r.getIndexPartStatistics(partName);
      if (aggregate == null) {
        aggregate = stats;
      } else {
        aggregate.add(stats);
      }
    }
    return aggregate;
  }

  @Override
  public Parameters getAvailableParts() throws IOException {
    return this.retrievalParts;
  }

  @Override
  public Parameters getGlobalParameters() {
    return this.globalParameters;
  }

  /**
   * Returns the first non-null document found among the constituents, or null.
   */
  @Override
  public Document getDocument(String identifier, DocumentComponents p) throws IOException {
    for (Retrieval r : this.retrievals) {
      Document d = r.getDocument(identifier, p);
      if (d != null) {
        return d;
      }
    }
    return null;
  }

  @Override
  public Map<String, Document> getDocuments(List<String> identifiers, DocumentComponents p) throws IOException {
    HashMap<String, Document> results = new HashMap<String, Document>();
    for (Retrieval r : this.retrievals) {
      results.putAll(r.getDocuments(identifiers, p));
    }
    return results;
  }

  /**
   * Runs a query across all retrieval objects
   *
   * @throws Exception
   */
  @Override
  public Results executeQuery(Node root) throws Exception {
    return executeQuery(root, Parameters.create());
  }

  // Based on the root of the tree, that dictates how we execute.
  @Override
  public Results executeQuery(Node queryTree, Parameters p) throws Exception {
    ScoredDocument[] rankedList = runRankedQuery(queryTree, p);
    Results results = new Results();
    results.inputQuery = queryTree;
    results.scoredDocuments = Arrays.asList(rankedList);
    return results;
  }

  /**
   * Fans the query out to each constituent on its own thread, merges the
   * result lists, sorts them, truncates to the requested count and re-ranks.
   * If any constituent fails, an empty array is returned (no partial results).
   */
  private ScoredDocument[] runRankedQuery(Node root, Parameters parameters) throws Exception {
    // Asynchronously run retrieval
    ArrayList<Thread> threads = new ArrayList<>();
    final List<ScoredDocument> queryResultCollector = Collections.synchronizedList(new ArrayList<ScoredDocument>());
    final List<String> errorCollector = Collections.synchronizedList(new ArrayList<String>());
    final Node queryTree = root;
    for (Retrieval retrieval : retrievals) {
      // each thread gets its own copy of the parameters to avoid shared mutation
      final Parameters shardParams = parameters.clone();
      final Retrieval r = retrieval;
      Thread t = new Thread() {
        @Override
        public void run() {
          try {
            List<ScoredDocument> results = r.executeQuery(queryTree, shardParams).scoredDocuments;
            if (results != null) {
              queryResultCollector.addAll(results);
            }
          } catch (Exception e) {
            errorCollector.add(e.getMessage());
          }
        }
      };
      threads.add(t);
      t.start();
    }
    // Wait for a finished list
    for (Thread t : threads) {
      t.join();
    }
    if (errorCollector.size() > 0) {
      System.err.println("Failed to run: " + root.toString());
      for (String e : errorCollector) {
        System.err.println(e);
      }
      // we do not want to return partial or erroneous results.
      return new ScoredDocument[0];
    }
    // sort the results and invert (sort is inverted)
    Collections.sort(queryResultCollector, Collections.reverseOrder());
    // get the best {requested} results
    int requested = (int) parameters.get("requested", 1000);
    // fix ranks
    List<ScoredDocument> scoredDocuments = queryResultCollector.subList(0, Math.min(queryResultCollector.size(), requested));
    ScoredDocument[] results = scoredDocuments.toArray(new ScoredDocument[scoredDocuments.size()]);
    int rank = 1;
    for (ScoredDocument r : results) {
      r.rank = rank;
      rank += 1;
    }
    return results;
  }

  @Override
  public Node transformQuery(Node root, Parameters qp) throws Exception {
    return transformQuery(defaultTraversals, root, qp);
  }

  // private functions
  private Node transformQuery(List<Traversal> traversals, Node queryTree, Parameters queryParams) throws Exception {
    for (Traversal traversal : traversals) {
      queryTree = traversal.traverse(queryTree, queryParams);
    }
    return queryTree;
  }

  private void initRetrieval() throws IOException {
    ArrayList<Parameters> parts = new ArrayList<Parameters>();
    for (Retrieval r : retrievals) {
      Parameters partSet = r.getAvailableParts();
      parts.add(partSet);
    }
    this.retrievalParts = mergeParts(parts);
  }

  // This takes the intersection of parts from constituent retrievals, and determines which
  // part/operator pairs are ok to search on given the current retrievalGroup. We assume that
  // a part is valid if it has at least one usable operator, and an operator is usable if the
  // iteratorClass that implements it is the same across all constituents under a given part.
  private Parameters mergeParts(List<Parameters> ps) {
    Parameters unifiedParts = Parameters.create();
    HashSet<String> operators = new HashSet<>();
    // Get *all* parts
    HashSet<String> allParts = new HashSet<>();
    for (Parameters j : ps) {
      allParts.addAll(j.getKeys());
    }
    // Now iterate over the keys, looking for matches
    for (String part : allParts) {
      Parameters unifiedPart = Parameters.create();
      // If one of the constituents doesn't have a part of this name, we skip
      // further processing of it
      boolean hasPart = true;
      operators.clear();
      for (Parameters retrievalParams : ps) {
        if (!retrievalParams.getKeys().contains(part)) {
          hasPart = false;
          break;
        } else {
          operators.addAll(retrievalParams.getMap(part).getKeys());
        }
      }
      if (!hasPart) {
        continue;
      }
      // All operators discovered for a given part. Go over those.
      for (String op : operators) {
        String iteratorClassName = null;
        boolean sharesIterator = true;
        for (Parameters retrievalParams : ps) {
          String partIterator = retrievalParams.getMap(part).getString(op);
          if (iteratorClassName == null) {
            iteratorClassName = partIterator;
          } else {
            if (!iteratorClassName.equals(partIterator)) {
              sharesIterator = false;
              break;
            }
          }
        }
        // If not all had the same iterator, skip adding it to that part's available operators
        if (!sharesIterator) {
          continue;
        }
        unifiedPart.set(op, iteratorClassName);
      }
      // the unified part is not empty, we have at least one viable operator for that part, so add it.
      if (!unifiedPart.isEmpty()) {
        unifiedParts.set(part, unifiedPart);
      }
    }
    return unifiedParts;
  }

  @Override
  public FieldStatistics getCollectionStatistics(String nodeString) throws Exception {
    Node root = StructuredQuery.parse(nodeString);
    return getCollectionStatistics(root);
  }

  /**
   * Collects field statistics from each constituent in parallel and sums them.
   * Throws IOException if any constituent fails.
   */
  @Override
  public FieldStatistics getCollectionStatistics(Node node) throws Exception {
    ArrayList<Thread> threads = new ArrayList<Thread>();
    final Node root = node;
    final List<FieldStatistics> stats = Collections.synchronizedList(new ArrayList<FieldStatistics>());
    final List<String> errors = Collections.synchronizedList(new ArrayList<String>());
    for (final Retrieval r : this.retrievals) {
      Thread t = new Thread() {
        @Override
        public void run() {
          try {
            FieldStatistics ns = r.getCollectionStatistics(root);
            stats.add(ns);
          } catch (Exception ex) {
            errors.add(ex.getMessage());
          }
        }
      };
      threads.add(t);
      t.start();
    }
    for (Thread t : threads) {
      t.join();
    }
    if (errors.size() > 0) {
      System.err.println("Failed to count: " + root.toString());
      for (String e : errors) {
        System.err.println(e);
      }
      throw new IOException("Unable to count " + node.toString());
    }
    FieldStatistics output = stats.remove(0);
    for (FieldStatistics s : stats) {
      output.add(s);
    }
    return output;
  }

  /**
   * Note that this assumes the retrieval objects involved in the group contain
   * mutually exclusive subcollections. If you're doing PAC-search or another
   * non-disjoint subset retrieval model, look out.
   */
  @Override
  public NodeStatistics getNodeStatistics(String nodeString) throws Exception {
    Node root = StructuredQuery.parse(nodeString);
    return getNodeStatistics(root);
  }

  @Override
  public NodeStatistics getNodeStatistics(Node node) throws Exception {
    ArrayList<Thread> threads = new ArrayList<Thread>();
    final Node root = node;
    final List<NodeStatistics> stats = Collections.synchronizedList(new ArrayList<NodeStatistics>());
    final List<String> errors = Collections.synchronizedList(new ArrayList<String>());
    for (final Retrieval r : this.retrievals) {
      Thread t = new Thread() {
        @Override
        public void run() {
          try {
            NodeStatistics ns = r.getNodeStatistics(root);
            stats.add(ns);
          } catch (Exception ex) {
            errors.add(ex.getMessage());
          }
        }
      };
      threads.add(t);
      t.start();
    }
    for (Thread t : threads) {
      t.join();
    }
    if (errors.size() > 0) {
      System.err.println("Failed to count: " + root.toString());
      for (String e : errors) {
        System.err.println(e);
      }
      throw new IOException("Unable to count " + node.toString());
    }
    NodeStatistics output = stats.remove(0);
    for (NodeStatistics s : stats) {
      output.add(s);
    }
    return output;
  }

  @Override
  public NodeType getNodeType(Node node) throws Exception {
    NodeType nodeType = getIndexNodeType(node);
    if (nodeType == null) {
      nodeType = features.getNodeType(node);
    }
    return nodeType;
  }

  /**
   * Resolves a node against the unified index parts. Returns null when the
   * node does not name a part (and is not a "field" operator), leaving
   * resolution to the FeatureFactory.
   */
  @SuppressWarnings("unchecked")
  private NodeType getIndexNodeType(Node node) throws Exception {
    if (node.getNodeParameters().containsKey("part") || node.getOperator().equals("field")) {
      Parameters parts = getAvailableParts();
      String partName = getPartName(node);
      if (node.getOperator().equals("field")) {
        partName = "fields";
      }
      if (!parts.containsKey(partName)) {
        throw new IOException("The index has no part named '" + partName + "'");
      }
      String operator = node.getOperator();
      Parameters partParams = parts.getMap(partName);
      if (!partParams.containsKey(operator)) {
        // fixed message: previously read "The index has part called iterator for the operator"
        throw new IOException("The index has no iterator for the operator '" + operator + "' in part '" + partName + "'");
      }
      String iteratorClass = partParams.getString(operator);
      // may need to do some checking here...
      return new NodeType((Class<? extends BaseIterator>) Class.forName(iteratorClass));
    }
    return null;
  }

  /**
   * Determines which index part a node refers to: an explicit "part" node
   * parameter wins; otherwise a known operator may map to a default part.
   * Returns null when neither applies.
   */
  public String getPartName(Node node) throws IOException {
    String operator = node.getOperator();
    String partName = null;
    Parameters parts = getAvailableParts();
    if (node.getNodeParameters().containsKey("part")) {
      partName = node.getNodeParameters().getString("part");
      if (!parts.containsKey(partName)) {
        throw new IOException("The index has no part named '" + partName + "'");
      }
    } else if (knownIndexOperators.contains(operator)) {
      if (!defaultIndexOperators.containsKey(operator)) {
        throw new IOException("More than one index part supplies the operator '"
                + operator + "', but no part name was specified.");
      } else {
        partName = defaultIndexOperators.get(operator);
      }
    }
    return partName;
  }

  @Override
  public QueryType getQueryType(Node node) throws Exception {
    if (node.getOperator().equals("text")) {
      return QueryType.UNKNOWN;
    }
    NodeType nodeType = getNodeType(node);
    Class<?> outputClass = nodeType.getIteratorClass();
    if (ScoreIterator.class.isAssignableFrom(outputClass)
            || ScoringFunctionIterator.class.isAssignableFrom(outputClass)) {
      return QueryType.RANKED;
    } else if (IndicatorIterator.class.isAssignableFrom(outputClass)) {
      return QueryType.BOOLEAN;
    } else if (CountIterator.class.isAssignableFrom(outputClass)) {
      return QueryType.COUNT;
    } else {
      return QueryType.RANKED;
    }
  }

  @Override
  public Integer getDocumentLength(Integer docid) throws IOException {
    throw new UnsupportedOperationException("Not supported.");
  }

  /**
   * Returns the first positive length reported by a constituent, or 0.
   * Null results are skipped (previously they caused an unboxing NPE).
   */
  @Override
  public Integer getDocumentLength(String docname) throws IOException {
    for (Retrieval r : this.retrievals) {
      Integer l = r.getDocumentLength(docname);
      if (l != null && l > 0) {
        return l;
      }
    }
    return 0;
  }

  @Override
  public String getDocumentName(Integer docid) throws IOException {
    for (Retrieval r : this.retrievals) {
      String id = r.getDocumentName(docid);
      if (id != null) {
        return id;
      }
    }
    return null;
  }

  @Override
  public Long getDocumentId(String docname) throws IOException {
    for (Retrieval r : this.retrievals) {
      Long id = r.getDocumentId(docname);
      if (id != null) {
        return id;
      }
    }
    return null;
  }

  @Override
  public void addNodeToCache(Node node) throws Exception {
    for (Retrieval r : this.retrievals) {
      r.addNodeToCache(node);
    }
  }

  @Override
  public void addAllNodesToCache(Node node) throws Exception {
    for (Retrieval r : this.retrievals) {
      r.addAllNodesToCache(node);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.tests.compatibility;
import java.io.File;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.ClassRule;
import org.junit.rules.TemporaryFolder;
import static org.apache.activemq.artemis.tests.compatibility.GroovyRun.SNAPSHOT;
public abstract class VersionedBaseTest {

   protected final String server;
   protected final String sender;
   protected final String receiver;
   protected ClassLoader serverClassloader;
   protected ClassLoader senderClassloader;
   protected ClassLoader receiverClassloader;
   // Cache of classloaders per classpath-property name, shared across tests.
   protected static Map<String, ClassLoader> loaderMap = new HashMap<>();

   /**
    * @param server version id for the server side; defaults to the sender's version when null
    * @param sender version id for the sending client
    * @param receiver version id for the receiving client
    */
   public VersionedBaseTest(String server, String sender, String receiver) throws Exception {
      if (server == null) {
         server = sender;
      }
      this.server = server;
      this.sender = sender;
      this.receiver = receiver;
      this.serverClassloader = getClasspathProperty(server);
      this.senderClassloader = getClasspathProperty(sender);
      this.receiverClassloader = getClasspathProperty(receiver);
   }

   // This is a test optimization..
   // if false it will span a new VM for each classLoader used.
   // this can be a bit faster
   public static final boolean USE_CLASSLOADER = true;

   // Names whose classpath diagnostics have already been printed once.
   private static HashSet<String> printed = new HashSet<>();

   @ClassRule
   public static TemporaryFolder serverFolder;

   static {
      File parent = new File("./target/tmp");
      parent.mkdirs();
      serverFolder = new TemporaryFolder(parent);
   }

   @AfterClass
   public static void cleanup() {
      loaderMap.clear();
   }

   /**
    * Evaluates a Groovy script through the GroovyRun class loaded in the given classloader.
    */
   protected static Object evaluate(ClassLoader loader, String script, String... arguments) throws Exception {
      return tclCall(loader, () -> {
         Class<?> clazz = loader.loadClass(GroovyRun.class.getName());
         Method method = clazz.getMethod("evaluate", String.class, String[].class);
         return method.invoke(null, script, arguments);
      });
   }

   /**
    * Sets a variable in the Groovy binding of the given classloader.
    */
   protected static void setVariable(ClassLoader loader, String name, Object object) throws Exception {
      tclCall(loader, () -> {
         Class<?> clazz = loader.loadClass(GroovyRun.class.getName());
         Method method = clazz.getMethod("setVariable", String.class, Object.class);
         method.invoke(null, name, object);
         return null;
      });
   }

   /**
    * Reads a variable from the Groovy binding of the given classloader.
    */
   protected static Object getVariable(ClassLoader loader, String name) throws Exception {
      return tclCall(loader, () -> {
         Class<?> clazz = loader.loadClass(GroovyRun.class.getName());
         Method method = clazz.getMethod("getVariable", String.class);
         return method.invoke(null, name);
      });
   }

   /**
    * @deprecated Misleadingly named: despite the name, this reads a variable
    *             (it reflectively invokes {@code GroovyRun.getVariable}).
    *             Use {@link #getVariable(ClassLoader, String)} instead.
    */
   @Deprecated
   protected static Object setVariable(ClassLoader loader, String name) throws Exception {
      return getVariable(loader, name);
   }

   /**
    * Executes a Groovy snippet through the GroovyRun class loaded in the given classloader.
    */
   protected static Object execute(ClassLoader loader, String script) throws Exception {
      return tclCall(loader, () -> {
         Class<?> clazz = loader.loadClass(GroovyRun.class.getName());
         Method method = clazz.getMethod("execute", String.class);
         return method.invoke(null, script);
      });
   }

   /**
    * Runs the callable with the given classloader installed as the thread
    * context classloader, always restoring the previous one.
    */
   protected static Object tclCall(ClassLoader loader, CallIt run) throws Exception {
      ClassLoader original = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(loader);
      try {
         return run.run();
      } finally {
         Thread.currentThread().setContextClassLoader(original);
      }
   }

   public interface CallIt {
      Object run() throws Exception;
   }

   /**
    * Builds an isolated (no-parent) URLClassLoader from a path-separated classpath string.
    */
   protected static ClassLoader defineClassLoader(String classPath) throws MalformedURLException {
      String[] classPathArray = classPath.split(File.pathSeparator);
      URL[] elements = new URL[classPathArray.length];
      for (int i = 0; i < classPathArray.length; i++) {
         elements[i] = new File(classPathArray[i]).toPath().toUri().toURL();
      }
      return new URLClassLoader(elements, null);
   }

   /**
    * Resolves (and caches) the classloader for a named version. SNAPSHOT uses
    * this class's own loader; other names read the classpath from the system
    * property of the same name, skipping the test when it is absent.
    */
   protected static ClassLoader getClasspathProperty(String name) throws Exception {
      if (name.equals(SNAPSHOT)) {
         return VersionedBaseTest.class.getClassLoader();
      }
      ClassLoader loader = loaderMap.get(name);
      if (loader != null) {
         return loader;
      }
      String value = System.getProperty(name);
      if (!printed.contains(name)) {
         boolean ok = value != null && !value.trim().isEmpty();
         if (!ok) {
            System.out.println("Add \"-D" + name + "=\'CLASSPATH\'\" into your VM settings");
         } else {
            printed.add(name);
            System.out.println("****************************************************************************");
            System.out.println("* If you want to debug this test, add this parameter to your IDE run settings...");
            System.out.println("****************************************************************************");
            System.out.println("-D" + name + "=\"" + value + "\"");
            System.out.println("****************************************************************************");
         }
         Assume.assumeTrue("Cannot run these tests, no classpath found", ok);
      }
      loader = defineClassLoader(value);
      loaderMap.put(name, loader);
      return loader;
   }

   /**
    * Cartesian product of the three version sides, as {root, left, right} triples
    * (used to build parameterized test combinations).
    */
   protected static List<Object[]> combinatory(Object[] rootSide, Object[] sideLeft, Object[] sideRight) {
      LinkedList<Object[]> combinations = new LinkedList<>();
      for (Object root : rootSide) {
         for (Object left : sideLeft) {
            for (Object right : sideRight) {
               combinations.add(new Object[]{root, left, right});
            }
         }
      }
      return combinations;
   }

   /**
    * Starts a server of the configured version inside the given folder, using
    * the Artemis or HornetQ groovy startup script as appropriate.
    */
   public void startServer(File folder, ClassLoader loader, String serverName) throws Throwable {
      folder.mkdirs();
      System.out.println("Folder::" + folder);
      String scriptToUse;
      if (server.startsWith("ARTEMIS")) {
         scriptToUse = "servers/artemisServer.groovy";
      } else {
         scriptToUse = "servers/hornetqServer.groovy";
      }
      evaluate(loader, scriptToUse, folder.getAbsolutePath(), serverName, server, sender, receiver);
   }

   public void stopServer(ClassLoader loader) throws Throwable {
      execute(loader, "server.stop()");
   }
}
| |
package org.ovirt.engine.core.bll.scheduling;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.math.NumberUtils;
import org.ovirt.engine.core.bll.network.host.NetworkDeviceHelper;
import org.ovirt.engine.core.bll.network.host.VfScheduler;
import org.ovirt.engine.core.bll.scheduling.external.ExternalSchedulerDiscovery;
import org.ovirt.engine.core.bll.scheduling.external.ExternalSchedulerFactory;
import org.ovirt.engine.core.bll.scheduling.pending.PendingCpuCores;
import org.ovirt.engine.core.bll.scheduling.pending.PendingMemory;
import org.ovirt.engine.core.bll.scheduling.pending.PendingOvercommitMemory;
import org.ovirt.engine.core.bll.scheduling.pending.PendingResourceManager;
import org.ovirt.engine.core.bll.scheduling.pending.PendingVM;
import org.ovirt.engine.core.common.AuditLogType;
import org.ovirt.engine.core.common.BackendService;
import org.ovirt.engine.core.common.businessentities.BusinessEntity;
import org.ovirt.engine.core.common.businessentities.Entities;
import org.ovirt.engine.core.common.businessentities.VDS;
import org.ovirt.engine.core.common.businessentities.VDSGroup;
import org.ovirt.engine.core.common.businessentities.VDSStatus;
import org.ovirt.engine.core.common.businessentities.VM;
import org.ovirt.engine.core.common.businessentities.VmStatic;
import org.ovirt.engine.core.common.config.Config;
import org.ovirt.engine.core.common.config.ConfigValues;
import org.ovirt.engine.core.common.errors.EngineMessage;
import org.ovirt.engine.core.common.scheduling.ClusterPolicy;
import org.ovirt.engine.core.common.scheduling.OptimizationType;
import org.ovirt.engine.core.common.scheduling.PerHostMessages;
import org.ovirt.engine.core.common.scheduling.PolicyUnit;
import org.ovirt.engine.core.common.utils.Pair;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.dal.dbbroker.DbFacade;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AlertDirector;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogDirector;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogableBase;
import org.ovirt.engine.core.dao.VdsDao;
import org.ovirt.engine.core.dao.VdsDynamicDao;
import org.ovirt.engine.core.dao.VdsGroupDao;
import org.ovirt.engine.core.dao.scheduling.ClusterPolicyDao;
import org.ovirt.engine.core.dao.scheduling.PolicyUnitDao;
import org.ovirt.engine.core.di.Injector;
import org.ovirt.engine.core.utils.threadpool.ThreadPoolUtil;
import org.ovirt.engine.core.utils.timer.OnTimerMethodAnnotation;
import org.ovirt.engine.core.utils.timer.SchedulerUtilQuartzImpl;
import org.ovirt.engine.core.vdsbroker.ResourceManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class SchedulingManager implements BackendService {
private static final Logger log = LoggerFactory.getLogger(SchedulingManager.class);
private static final String HIGH_UTILIZATION = "HighUtilization";
private static final String LOW_UTILIZATION = "LowUtilization";
@Inject
private AuditLogDirector auditLogDirector;
@Inject
private ResourceManager resourceManager;
@Inject
private MigrationHandler migrationHandler;
@Inject
private ExternalSchedulerDiscovery exSchedulerDiscovery;
@Inject
private DbFacade dbFacade;
private PendingResourceManager pendingResourceManager;
/**
* <policy id, policy> map
*/
private final ConcurrentHashMap<Guid, ClusterPolicy> policyMap;
/**
* <policy unit id, policy unit> map
*/
private volatile ConcurrentHashMap<Guid, PolicyUnitImpl> policyUnits;
private final Object policyUnitsLock = new Object();
private final ConcurrentHashMap<Guid, Semaphore> clusterLockMap = new ConcurrentHashMap<>();
private final VdsFreeMemoryChecker noWaitingMemoryChecker = new VdsFreeMemoryChecker(new NonWaitingDelayer());
private final Map<Guid, Boolean> clusterId2isHaReservationSafe = new HashMap<>();
private PendingResourceManager getPendingResourceManager() {
return pendingResourceManager;
}
@Inject
private SchedulingManager() {
policyMap = new ConcurrentHashMap<>();
policyUnits = new ConcurrentHashMap<>();
}
@PostConstruct
public void init() {
log.info("Initializing Scheduling manager");
initializePendingResourceManager();
loadPolicyUnits();
loadClusterPolicies();
loadExternalScheduler();
enableLoadBalancer();
enableHaReservationCheck();
log.info("Initialized Scheduling manager");
}
private void initializePendingResourceManager() {
pendingResourceManager = new PendingResourceManager(resourceManager);
}
protected void loadExternalScheduler() {
if (Config.<Boolean>getValue(ConfigValues.ExternalSchedulerEnabled)) {
log.info("Starting external scheduler discovery thread");
ThreadPoolUtil.execute(new Runnable() {
@Override
public void run() {
if (exSchedulerDiscovery.discover()) {
reloadPolicyUnits();
}
}
});
} else {
exSchedulerDiscovery.markAllExternalPoliciesAsDisabled();
log.info("External scheduler disabled, discovery skipped");
}
}
public void reloadPolicyUnits() {
synchronized (policyUnitsLock) {
policyUnits = new ConcurrentHashMap<>();
loadPolicyUnits();
}
}
public List<ClusterPolicy> getClusterPolicies() {
return new ArrayList<>(policyMap.values());
}
public ClusterPolicy getClusterPolicy(Guid clusterPolicyId) {
return policyMap.get(clusterPolicyId);
}
public ClusterPolicy getClusterPolicy(String name) {
if (name == null || name.isEmpty()) {
return getDefaultClusterPolicy();
}
for (ClusterPolicy clusterPolicy : policyMap.values()) {
if (clusterPolicy.getName().toLowerCase().equals(name.toLowerCase())) {
return clusterPolicy;
}
}
return null;
}
private ClusterPolicy getDefaultClusterPolicy() {
for (ClusterPolicy clusterPolicy : policyMap.values()) {
if (clusterPolicy.isDefaultPolicy()) {
return clusterPolicy;
}
}
return null;
}
public Map<Guid, PolicyUnitImpl> getPolicyUnitsMap() {
synchronized (policyUnitsLock) {
return policyUnits;
}
}
protected void loadClusterPolicies() {
List<ClusterPolicy> allClusterPolicies = getClusterPolicyDao().getAll();
for (ClusterPolicy clusterPolicy : allClusterPolicies) {
policyMap.put(clusterPolicy.getId(), clusterPolicy);
}
}
protected void loadPolicyUnits() {
List<PolicyUnit> allPolicyUnits = getPolicyUnitDao().getAll();
for (PolicyUnit policyUnit : allPolicyUnits) {
if (policyUnit.isInternal()) {
policyUnits.put(policyUnit.getId(), PolicyUnitImpl.getPolicyUnitImpl(policyUnit, getPendingResourceManager()));
} else {
policyUnits.put(policyUnit.getId(), new PolicyUnitImpl(policyUnit, getPendingResourceManager()));
}
}
}
private static class SchedulingResult {
Map<Guid, Pair<EngineMessage, String>> filteredOutReasons;
Map<Guid, String> hostNames;
PerHostMessages details;
String message;
Guid vdsSelected = null;
public SchedulingResult() {
filteredOutReasons = new HashMap<>();
hostNames = new HashMap<>();
details = new PerHostMessages();
}
public Guid getVdsSelected() {
return vdsSelected;
}
public void setVdsSelected(Guid vdsSelected) {
this.vdsSelected = vdsSelected;
}
public void addReason(Guid id, String hostName, EngineMessage filterType, String filterName) {
filteredOutReasons.put(id, new Pair<>(filterType, filterName));
hostNames.put(id, hostName);
}
public Set<Entry<Guid, Pair<EngineMessage, String>>> getReasons() {
return filteredOutReasons.entrySet();
}
public Collection<String> getReasonMessages() {
List<String> lines = new ArrayList<>();
for (Entry<Guid, Pair<EngineMessage, String>> line: filteredOutReasons.entrySet()) {
lines.add(line.getValue().getFirst().name());
lines.add(String.format("$%1$s %2$s", "hostName", hostNames.get(line.getKey())));
lines.add(String.format("$%1$s %2$s", "filterName", line.getValue().getSecond()));
final List<String> detailMessages = details.getMessages(line.getKey());
if (detailMessages == null || detailMessages.isEmpty()) {
lines.add(EngineMessage.SCHEDULING_HOST_FILTERED_REASON.name());
}
else {
lines.addAll(detailMessages);
lines.add(EngineMessage.SCHEDULING_HOST_FILTERED_REASON_WITH_DETAIL.name());
}
}
return lines;
}
private PerHostMessages getDetails() {
return details;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
}
/**
 * Schedules {@code vm} on one of the Up hosts of {@code cluster}: filters the candidate
 * hosts with the cluster policy, picks the best one, and books pending CPU/memory/VM
 * resources on it before returning.
 *
 * @param hostBlackList hosts to exclude from consideration (may be null/empty)
 * @param hostWhiteList when non-empty, only these hosts are considered
 * @param destHostIdList preferred destination hosts; overrides host ordering when they pass filtering
 * @param messages receives audit message lines when no host can be found
 * @param correlationId used only for log correlation
 * @return the id of the chosen host, or {@code null} when no host passed the filters
 *         or scheduling was interrupted
 */
public Guid schedule(VDSGroup cluster,
        VM vm,
        List<Guid> hostBlackList,
        List<Guid> hostWhiteList,
        List<Guid> destHostIdList,
        List<String> messages,
        VdsFreeMemoryChecker memoryChecker,
        String correlationId) {
    // one scheduling operation runs per cluster at a time (per-cluster semaphore)
    prepareClusterLock(cluster.getId());
    try {
        log.debug("Scheduling started, correlation Id: {}", correlationId);
        // may pre-release the lock to allow parallel scheduling when overbooking is allowed
        checkAllowOverbooking(cluster);
        lockCluster(cluster.getId());
        List<VDS> vdsList = getVdsDao()
                .getAllForVdsGroupWithStatus(cluster.getId(), VDSStatus.Up);
        // apply black list (remove listed) then white list (keep only listed)
        updateInitialHostList(vdsList, hostBlackList, true);
        updateInitialHostList(vdsList, hostWhiteList, false);
        // sync cached pending CPU/memory counters before filters/weights read them
        refreshCachedPendingValues(vdsList);
        ClusterPolicy policy = policyMap.get(cluster.getClusterPolicyId());
        Map<String, String> parameters = createClusterPolicyParameters(cluster);
        vdsList =
                runFilters(policy.getFilters(),
                        cluster,
                        vdsList,
                        vm,
                        parameters,
                        policy.getFilterPositionMap(),
                        messages,
                        memoryChecker,
                        true,
                        correlationId);
        if (vdsList == null || vdsList.isEmpty()) {
            return null;
        }
        Guid bestHost = selectBestHost(cluster, vm, destHostIdList, vdsList, policy, parameters);
        if (bestHost != null) {
            // book the VM's resources on the chosen host so concurrent scheduling sees them
            getPendingResourceManager().addPending(new PendingCpuCores(bestHost, vm, vm.getNumOfCpus()));
            VDS bestHostEntity = null;
            for (VDS host: vdsList) {
                if (host.getId().equals(bestHost)) {
                    bestHostEntity = host;
                    break;
                }
            }
            // Will never happen (bestHost was selected from vdsList so it is always present)
            assert bestHostEntity != null;
            getPendingResourceManager().addPending(new PendingMemory(bestHost, vm, bestHostEntity.getGuestOverhead()));
            getPendingResourceManager().addPending(new PendingOvercommitMemory(bestHost, vm, vm.getMemSizeMb()));
            getPendingResourceManager().addPending(new PendingVM(bestHost, vm));
            getPendingResourceManager().notifyHostManagers(bestHost);
            // reserve SR-IOV virtual functions for the VM's passthrough vNICs, if any
            VfScheduler vfScheduler = Injector.get(VfScheduler.class);
            Map<Guid, String> passthroughVnicToVfMap = vfScheduler.getVnicToVfMap(vm.getId(), bestHost);
            if (passthroughVnicToVfMap != null && !passthroughVnicToVfMap.isEmpty()) {
                markVfsAsUsedByVm(bestHost, vm.getId(), passthroughVnicToVfMap);
            }
        }
        return bestHost;
    } catch (InterruptedException e) {
        log.error("interrupted", e);
        return null;
    } finally {
        // always restore the cluster lock, even on failure/interrupt
        releaseCluster(cluster.getId());
        log.debug("Scheduling ended, correlation Id: {}", correlationId);
    }
}
/**
 * Releases the scheduling lock of the given cluster, resetting the semaphore
 * to exactly one available permit (overbooking may have released extra permits).
 */
private void releaseCluster(Guid cluster) {
    // ensuring setting the semaphore permits to 1
    synchronized (clusterLockMap.get(cluster)) {
        clusterLockMap.get(cluster).drainPermits();
        clusterLockMap.get(cluster).release();
    }
}

/** Blocks until the scheduling lock of the given cluster is acquired. */
private void lockCluster(Guid cluster) throws InterruptedException {
    clusterLockMap.get(cluster).acquire();
}

/** Lazily creates the per-cluster scheduling semaphore (a single permit). */
private void prepareClusterLock(Guid cluster) {
    clusterLockMap.putIfAbsent(cluster, new Semaphore(1));
}

/** Marks the given host VFs as used by the VM on the host's network devices. */
private void markVfsAsUsedByVm(Guid hostId, Guid vmId, Map<Guid, String> passthroughVnicToVfMap) {
    NetworkDeviceHelper networkDeviceHelper = Injector.get(NetworkDeviceHelper.class);
    networkDeviceHelper.setVmIdOnVfs(hostId, vmId, new HashSet<>(passthroughVnicToVfMap.values()));
}
/**
 * Refreshes each candidate host's cached pending CPU and memory fields with the
 * totals currently tracked by the pending resource manager.
 *
 * @param vdsList list of candidate hosts (updated in place)
 */
private void refreshCachedPendingValues(List<VDS> vdsList) {
    for (VDS host : vdsList) {
        host.setPendingVcpusCount(PendingCpuCores.collectForHost(getPendingResourceManager(), host.getId()));
        host.setPendingVmemSize(PendingOvercommitMemory.collectForHost(getPendingResourceManager(), host.getId()));
    }
}
/**
 * Picks the best host for the VM out of the hosts that passed filtering.
 *
 * @param destHostIdList used for RunAt preselection, overrides the ordering in availableVdsList
 * @param availableVdsList presorted list of hosts (better hosts first) that are available
 * @return the id of the chosen host, or {@code null} when no host is runnable
 */
private Guid selectBestHost(VDSGroup cluster,
        VM vm,
        List<Guid> destHostIdList,
        List<VDS> availableVdsList,
        ClusterPolicy policy,
        Map<String, String> parameters) {
    // in case a default destination host was specified and
    // it passed filters, return the first found
    List<VDS> runnableHosts = new LinkedList<>();
    if (!destHostIdList.isEmpty()) {
        // there are dedicated hosts:
        // intersect the dedicated host ids with the available list, preserving
        // the (quality) order of availableVdsList. A set lookup replaces the
        // former O(hosts * dedicated) scan and also guards against duplicate
        // ids in destHostIdList adding the same host twice.
        Set<Guid> destHostIds = new HashSet<>(destHostIdList);
        for (VDS vds : availableVdsList) {
            if (destHostIds.contains(vds.getId())) {
                runnableHosts.add(vds);
            }
        }
    }
    if (runnableHosts.isEmpty()) { // no dedicated hosts found
        runnableHosts = availableVdsList;
    }
    switch (runnableHosts.size()){
    case 0:
        // no runnable hosts found, nothing found
        return null;
    case 1:
        // found single available host, in available list return it
        return runnableHosts.get(0).getId();
    default:
        // select best runnable host with scoring functions (from policy)
        List<Pair<Guid, Integer>> functions = policy.getFunctions();
        if (functions != null && !functions.isEmpty()
                && shouldWeighClusterHosts(cluster, runnableHosts)) {
            Guid bestHostByFunctions = runFunctions(functions, cluster, runnableHosts, vm, parameters);
            if (bestHostByFunctions != null) {
                return bestHostByFunctions;
            }
        }
    }
    // failed select best runnable host using scoring functions, return the first
    return runnableHosts.get(0).getId();
}
/**
 * Checks whether scheduler should schedule several requests in parallel:
 * Conditions:
 * * config option SchedulerAllowOverBooking should be enabled.
 * * cluster optimization type flag should allow over-booking.
 * * more than X (config.SchedulerOverBookingThreshold) requests pending for scheduling.
 * In case all of the above conditions are met, we release all the pending scheduling
 * requests.
 */
protected void checkAllowOverbooking(VDSGroup cluster) {
    if (OptimizationType.ALLOW_OVERBOOKING == cluster.getOptimizationType()
            && Config.<Boolean>getValue(ConfigValues.SchedulerAllowOverBooking)
            && clusterLockMap.get(cluster.getId()).getQueueLength() >=
            Config.<Integer>getValue(ConfigValues.SchedulerOverBookingThreshold)) {
        log.info("Scheduler: cluster '{}' lock is skipped (cluster is allowed to overbook)",
                cluster.getName());
        // release pending threads (requests) and current one (+1)
        clusterLockMap.get(cluster.getId())
                .release(Config.<Integer>getValue(ConfigValues.SchedulerOverBookingThreshold) + 1);
    }
}
/**
 * Checks whether the scheduler should weigh the hosts, or skip weighing:
 * * More than one host (it's trivial to weigh a single host).
 * * Optimize-for-speed is enabled for the cluster, and there are fewer than a
 *   configurable number of requests pending (skip weighing in a loaded setup).
 *
 * @param cluster the cluster being scheduled
 * @param vdsList candidate hosts that passed filtering
 * @return {@code true} when the scoring functions should run on {@code vdsList}
 */
protected boolean shouldWeighClusterHosts(VDSGroup cluster, List<VDS> vdsList) {
    Integer threshold = Config.<Integer>getValue(ConfigValues.SpeedOptimizationSchedulingThreshold);
    // the threshold is crossed only when the cluster is configured to optimize for speed
    boolean crossedThreshold =
            OptimizationType.OPTIMIZE_FOR_SPEED == cluster.getOptimizationType()
                    && clusterLockMap.get(cluster.getId()).getQueueLength() >
                    threshold;
    if (crossedThreshold) {
        // fixed log message typo: "whinging" -> "weighing"
        log.info(
                "Scheduler: skipping weighing hosts in cluster '{}', since there are more than '{}' parallel requests",
                cluster.getName(),
                threshold);
    }
    return vdsList.size() > 1
            && !crossedThreshold;
}
/**
 * Dry-run check of whether {@code vm} could be scheduled on some Up host of the
 * cluster: runs the filter pipeline (external filters disabled — the flag passed
 * to runFilters is {@code false}) without taking the cluster lock or booking any
 * pending resources.
 *
 * @param messages receives audit message lines when no host passes
 * @return {@code true} when at least one host passes the filters
 */
public boolean canSchedule(VDSGroup cluster,
        VM vm,
        List<Guid> vdsBlackList,
        List<Guid> vdsWhiteList,
        List<Guid> destVdsIdList,
        List<String> messages) {
    List<VDS> vdsList = getVdsDao()
            .getAllForVdsGroupWithStatus(cluster.getId(), VDSStatus.Up);
    // apply black list (remove listed) then white list (keep only listed)
    updateInitialHostList(vdsList, vdsBlackList, true);
    updateInitialHostList(vdsList, vdsWhiteList, false);
    refreshCachedPendingValues(vdsList);
    ClusterPolicy policy = policyMap.get(cluster.getClusterPolicyId());
    Map<String, String> parameters = createClusterPolicyParameters(cluster);
    vdsList =
            runFilters(policy.getFilters(),
                    cluster,
                    vdsList,
                    vm,
                    parameters,
                    policy.getFilterPositionMap(),
                    messages,
                    noWaitingMemoryChecker,
                    false,
                    null);
    return vdsList != null && !vdsList.isEmpty();
}
/**
 * Extracts the ids of the given business entities.
 *
 * @param entities entities to read ids from
 * @return a new list with one id per entity, in iteration order
 */
static List<Guid> getEntityIds(List<? extends BusinessEntity<Guid>> entities) {
    // pre-size: the result contains exactly one id per entity
    ArrayList<Guid> ids = new ArrayList<>(entities.size());
    for (BusinessEntity<Guid> entity : entities) {
        ids.add(entity.getId());
    }
    return ids;
}
/**
 * Builds the parameter map handed to policy units: a mutable copy of the
 * cluster's custom policy properties, or an empty map when none are set.
 */
protected Map<String, String> createClusterPolicyParameters(VDSGroup cluster) {
    Map<String, String> clusterProperties = cluster.getClusterPolicyProperties();
    if (clusterProperties == null) {
        return new HashMap<>();
    }
    return new HashMap<>(clusterProperties);
}
/**
 * Trims the candidate host list in place against a black/white list of host ids.
 *
 * @param vdsList candidate hosts, modified in place
 * @param list host ids to match; {@code null} or empty leaves {@code vdsList} untouched
 * @param contains {@code true}: remove hosts present in {@code list} (black list);
 *                 {@code false}: remove hosts absent from it (white list)
 */
protected void updateInitialHostList(List<VDS> vdsList, List<Guid> list, boolean contains) {
    if (list == null || list.isEmpty()) {
        return;
    }
    Set<Guid> ids = new HashSet<>(list);
    List<VDS> rejected = new ArrayList<>();
    for (VDS host : vdsList) {
        if (ids.contains(host.getId()) == contains) {
            rejected.add(host);
        }
    }
    vdsList.removeAll(rejected);
}
/**
 * Runs the configured filter policy units over the candidate hosts and returns
 * only the hosts that passed all of them. Internal (java) filters run first, in
 * their configured order; external filters run afterwards in one batch, only when
 * requested and enabled.
 *
 * @param messages receives audit message lines when the host list ends up empty
 * @return the surviving hosts; may be {@code null}/empty when everything was filtered out
 */
private List<VDS> runFilters(ArrayList<Guid> filters,
        VDSGroup cluster,
        List<VDS> hostList,
        VM vm,
        Map<String, String> parameters,
        Map<Guid, Integer> filterPositionMap,
        List<String> messages,
        VdsFreeMemoryChecker memoryChecker,
        boolean shouldRunExternalFilters,
        String correlationId) {
    SchedulingResult result = new SchedulingResult();
    // split configured filters into internal and (enabled) external policy units
    ArrayList<PolicyUnitImpl> internalFilters = new ArrayList<>();
    ArrayList<PolicyUnitImpl> externalFilters = new ArrayList<>();
    // defensive copy: sortFilters() sorts in place and callers may share the list
    filters = (filters != null) ? new ArrayList<>(filters) : new ArrayList<Guid>();
    sortFilters(filters, filterPositionMap);
    for (Guid filter : filters) {
        PolicyUnitImpl filterPolicyUnit = policyUnits.get(filter);
        if (filterPolicyUnit.getPolicyUnit().isInternal()) {
            internalFilters.add(filterPolicyUnit);
        } else {
            if (filterPolicyUnit.getPolicyUnit().isEnabled()) {
                externalFilters.add(filterPolicyUnit);
            }
        }
    }
    /* Short circuit filters if there are no hosts at all */
    if (hostList == null || hostList.isEmpty()) {
        messages.add(EngineMessage.SCHEDULING_NO_HOSTS.name());
        messages.addAll(result.getReasonMessages());
        return hostList;
    }
    hostList =
            runInternalFilters(internalFilters, cluster, hostList, vm, parameters, filterPositionMap,
                    memoryChecker, correlationId, result);
    if (shouldRunExternalFilters
            && Config.<Boolean>getValue(ConfigValues.ExternalSchedulerEnabled)
            && !externalFilters.isEmpty()
            && hostList != null
            && !hostList.isEmpty()) {
        hostList = runExternalFilters(externalFilters, hostList, vm, parameters, messages, correlationId, result);
    }
    if (hostList == null || hostList.isEmpty()) {
        messages.add(EngineMessage.SCHEDULING_ALL_HOSTS_FILTERED_OUT.name());
        messages.addAll(result.getReasonMessages());
    }
    return hostList;
}
/**
 * Applies each internal filter policy unit in turn, logging and recording the
 * hosts each filter drops. Stops early once the host list becomes empty.
 *
 * @return the hosts that survived every internal filter
 */
private List<VDS> runInternalFilters(ArrayList<PolicyUnitImpl> filters,
        VDSGroup cluster,
        List<VDS> hostList,
        VM vm,
        Map<String, String> parameters,
        Map<Guid, Integer> filterPositionMap,
        VdsFreeMemoryChecker memoryChecker,
        String correlationId, SchedulingResult result) {
    if (filters == null) {
        return hostList;
    }
    for (PolicyUnitImpl filterPolicyUnit : filters) {
        if (hostList == null || hostList.isEmpty()) {
            break;
        }
        filterPolicyUnit.setMemoryChecker(memoryChecker);
        // snapshot before filtering so dropped hosts can be reported
        List<VDS> hostsBeforeFilter = new ArrayList<>(hostList);
        hostList = filterPolicyUnit.filter(cluster, hostList, vm, parameters, result.getDetails());
        logFilterActions(hostsBeforeFilter,
                toIdSet(hostList),
                EngineMessage.VAR__FILTERTYPE__INTERNAL,
                filterPolicyUnit.getPolicyUnit().getName(),
                result,
                correlationId);
    }
    return hostList;
}
/** Collects the ids of the given hosts into a set; a {@code null} list yields an empty set. */
private Set<Guid> toIdSet(List<VDS> hostList) {
    Set<Guid> ids = new HashSet<>();
    if (hostList == null) {
        return ids;
    }
    for (VDS host : hostList) {
        ids.add(host.getId());
    }
    return ids;
}
/**
 * For every host that was present before a filter ran but is absent afterwards,
 * records the rejection reason in {@code result} and logs it.
 */
private void logFilterActions(List<VDS> oldList,
        Set<Guid> newSet,
        EngineMessage actionName,
        String filterName,
        SchedulingResult result,
        String correlationId) {
    for (VDS host : oldList) {
        if (newSet.contains(host.getId())) {
            continue; // host survived this filter
        }
        result.addReason(host.getId(), host.getName(), actionName, filterName);
        log.info("Candidate host '{}' ('{}') was filtered out by '{}' filter '{}' (correlation id: {})",
                host.getName(),
                host.getId(),
                actionName.name(),
                filterName,
                correlationId);
    }
}
/**
 * Runs all external filters in one batch call to the external scheduler broker and
 * intersects the returned host ids with the current host list.
 *
 * @return the hosts whose ids the external scheduler returned; the input list
 *         unchanged when the broker returns no id list
 */
private List<VDS> runExternalFilters(ArrayList<PolicyUnitImpl> filters,
        List<VDS> hostList,
        VM vm,
        Map<String, String> parameters,
        List<String> messages,
        String correlationId, SchedulingResult result) {
    List<Guid> filteredIDs = null;
    if (filters != null) {
        // external filters are addressed by name, all in a single call
        List<String> filterNames = new ArrayList<>();
        for (PolicyUnitImpl filter : filters) {
            filterNames.add(filter.getPolicyUnit().getName());
        }
        List<Guid> hostIDs = new ArrayList<>();
        for (VDS host : hostList) {
            hostIDs.add(host.getId());
        }
        filteredIDs =
                ExternalSchedulerFactory.getInstance().runFilters(filterNames, hostIDs, vm.getId(), parameters);
        if (filteredIDs != null) {
            // dropped hosts are attributed to the whole batch (individual external
            // filters are not distinguishable here)
            logFilterActions(hostList,
                    new HashSet<>(filteredIDs),
                    EngineMessage.VAR__FILTERTYPE__EXTERNAL,
                    Arrays.toString(filterNames.toArray()),
                    result,
                    correlationId);
        }
    }
    return intersectHosts(hostList, filteredIDs);
}
/**
 * Intersects a host list with a list of host ids.
 *
 * @param hosts candidate hosts
 * @param ids ids to keep; {@code null} means "no filtering requested"
 * @return {@code hosts} itself when {@code ids} is {@code null}; otherwise a new list
 *         containing only the hosts whose id appears in {@code ids}, original order kept
 */
private List<VDS> intersectHosts(List<VDS> hosts, List<Guid> ids) {
    if (ids == null) {
        return hosts;
    }
    // set membership test: O(1) per host instead of List.contains()'s O(m)
    Set<Guid> idSet = new HashSet<>(ids);
    List<VDS> retList = new ArrayList<>();
    for (VDS vds : hosts) {
        if (idSet.contains(vds.getId())) {
            retList.add(vds);
        }
    }
    return retList;
}
/**
 * Sorts the filter ids in place by their configured position; filters without an
 * explicit position sort as position 0.
 *
 * @param filters filter ids, sorted in place
 * @param filterPositionMap optional id-to-position map; {@code null} leaves the order untouched
 */
private void sortFilters(ArrayList<Guid> filters, final Map<Guid, Integer> filterPositionMap) {
    if (filterPositionMap == null) {
        return;
    }
    Collections.sort(filters, new Comparator<Guid>() {
        @Override
        public int compare(Guid filter1, Guid filter2) {
            // Integer.compare avoids the overflow risk of subtracting positions
            return Integer.compare(getPosition(filterPositionMap.get(filter1)),
                    getPosition(filterPositionMap.get(filter2)));
        }

        // a missing position is treated as 0
        private int getPosition(Integer position) {
            return position == null ? 0 : position;
        }
    });
}
/**
 * Runs the weight (scoring) policy units over the host list and picks the host
 * with the lowest accumulated cost. Internal functions always run; external ones
 * run only when the external scheduler is enabled.
 *
 * @param functions pairs of (policy unit id, weight factor)
 * @return the id of the lowest-cost host, or {@code null} when no host was scored
 */
private Guid runFunctions(List<Pair<Guid, Integer>> functions,
        VDSGroup cluster,
        List<VDS> hostList,
        VM vm,
        Map<String, String> parameters) {
    // split configured functions into internal and (enabled) external, keeping weights
    List<Pair<PolicyUnitImpl, Integer>> internalScoreFunctions = new ArrayList<>();
    List<Pair<PolicyUnitImpl, Integer>> externalScoreFunctions = new ArrayList<>();
    for (Pair<Guid, Integer> pair : functions) {
        PolicyUnitImpl currentPolicy = policyUnits.get(pair.getFirst());
        if (currentPolicy.getPolicyUnit().isInternal()) {
            internalScoreFunctions.add(new Pair<>(currentPolicy, pair.getSecond()));
        } else {
            if (currentPolicy.getPolicyUnit().isEnabled()) {
                externalScoreFunctions.add(new Pair<>(currentPolicy, pair.getSecond()));
            }
        }
    }
    Map<Guid, Integer> hostCostTable = runInternalFunctions(internalScoreFunctions, cluster, hostList, vm,
            parameters);
    if (Config.<Boolean>getValue(ConfigValues.ExternalSchedulerEnabled) && !externalScoreFunctions.isEmpty()) {
        runExternalFunctions(externalScoreFunctions, hostList, vm, parameters, hostCostTable);
    }
    // lower accumulated cost wins
    Entry<Guid, Integer> bestHostEntry = null;
    for (Entry<Guid, Integer> entry : hostCostTable.entrySet()) {
        if (bestHostEntry == null || bestHostEntry.getValue() > entry.getValue()) {
            bestHostEntry = entry;
        }
    }
    if (bestHostEntry == null) {
        return null;
    }
    return bestHostEntry.getKey();
}
/**
 * Scores the hosts with every internal weight function and accumulates
 * {@code weight * score} per host into a cost table.
 *
 * @param functions pairs of (internal policy unit, weight factor)
 * @return map of host id to accumulated cost
 */
private Map<Guid, Integer> runInternalFunctions(List<Pair<PolicyUnitImpl, Integer>> functions,
        VDSGroup cluster,
        List<VDS> hostList,
        VM vm,
        Map<String, String> parameters) {
    Map<Guid, Integer> hostCostTable = new HashMap<>();
    for (Pair<PolicyUnitImpl, Integer> pair : functions) {
        List<Pair<Guid, Integer>> scoreResult = pair.getFirst().score(cluster, hostList, vm, parameters);
        for (Pair<Guid, Integer> result : scoreResult) {
            Guid hostId = result.getFirst();
            // accumulate with a single lookup instead of get/put-zero/get/put
            Integer currentCost = hostCostTable.get(hostId);
            int base = currentCost == null ? 0 : currentCost;
            hostCostTable.put(hostId, base + pair.getSecond() * result.getSecond());
        }
    }
    return hostCostTable;
}
/**
 * Asks the external scheduler broker to score the hosts with the external weight
 * functions and folds the returned scores into the shared cost table.
 */
private void runExternalFunctions(List<Pair<PolicyUnitImpl, Integer>> functions,
        List<VDS> hostList,
        VM vm,
        Map<String, String> parameters,
        Map<Guid, Integer> hostCostTable) {
    // external functions are addressed by (name, weight) pairs
    List<Pair<String, Integer>> scoreNameAndWeight = new ArrayList<>();
    for (Pair<PolicyUnitImpl, Integer> function : functions) {
        String unitName = function.getFirst().getPolicyUnit().getName();
        scoreNameAndWeight.add(new Pair<>(unitName, function.getSecond()));
    }
    List<Guid> hostIDs = new ArrayList<>();
    for (VDS host : hostList) {
        hostIDs.add(host.getId());
    }
    List<Pair<Guid, Integer>> externalScores = ExternalSchedulerFactory.getInstance()
            .runScores(scoreNameAndWeight, hostIDs, vm.getId(), parameters);
    if (externalScores != null) {
        sumScoreResults(hostCostTable, externalScores);
    }
}
/**
 * Adds the external scores to the accumulated per-host cost table.
 *
 * @param hostCostTable host id to accumulated cost, updated in place
 * @param externalScores scores returned by the external scheduler; {@code null} is
 *        tolerated (logged) and contributes nothing
 */
private void sumScoreResults(Map<Guid, Integer> hostCostTable, List<Pair<Guid, Integer>> externalScores) {
    if (externalScores == null) {
        // the external scheduler proxy may return null if error happens, in this case the external scores will
        // remain empty
        log.warn("External scheduler proxy returned null score");
        return;
    }
    for (Pair<Guid, Integer> pair : externalScores) {
        Guid hostId = pair.getFirst();
        // accumulate with a single lookup instead of get/put-zero/get/put
        Integer currentCost = hostCostTable.get(hostId);
        int base = currentCost == null ? 0 : currentCost;
        hostCostTable.put(hostId, base + pair.getSecond());
    }
}
/**
 * Collects the custom-property validation regexes of every policy unit referenced by
 * the given cluster policy (its filters, weight functions and balance unit).
 *
 * @return merged parameter-name to regex map of all referenced policy units
 */
public Map<String, String> getCustomPropertiesRegexMap(ClusterPolicy clusterPolicy) {
    Set<Guid> usedPolicyUnits = new HashSet<>();
    if (clusterPolicy.getFilters() != null) {
        usedPolicyUnits.addAll(clusterPolicy.getFilters());
    }
    if (clusterPolicy.getFunctions() != null) {
        for (Pair<Guid, Integer> pair : clusterPolicy.getFunctions()) {
            usedPolicyUnits.add(pair.getFirst());
        }
    }
    if (clusterPolicy.getBalance() != null) {
        usedPolicyUnits.add(clusterPolicy.getBalance());
    }
    Map<String, String> map = new LinkedHashMap<>();
    for (Guid policyUnitId : usedPolicyUnits) {
        // NOTE(review): assumes every referenced id is present in policyUnits;
        // a stale reference would NPE here -- confirm ids are validated upstream
        map.putAll(policyUnits.get(policyUnitId).getPolicyUnit().getParameterRegExMap());
    }
    return map;
}
/** Persists a new cluster policy and registers it in the in-memory policy map. */
public void addClusterPolicy(ClusterPolicy clusterPolicy) {
    getClusterPolicyDao().save(clusterPolicy);
    policyMap.put(clusterPolicy.getId(), clusterPolicy);
}

/** Persists an updated cluster policy and refreshes it in the in-memory policy map. */
public void editClusterPolicy(ClusterPolicy clusterPolicy) {
    getClusterPolicyDao().update(clusterPolicy);
    policyMap.put(clusterPolicy.getId(), clusterPolicy);
}

/** Deletes a cluster policy from the database and from the in-memory policy map. */
public void removeClusterPolicy(Guid clusterPolicyId) {
    getClusterPolicyDao().remove(clusterPolicyId);
    policyMap.remove(clusterPolicyId);
}
// DAO accessors (protected so tests/subclasses can override them with stubs)

/** @return the host (VDS) DAO */
protected VdsDao getVdsDao() {
    return dbFacade.getVdsDao();
}

/** @return the cluster (VDS group) DAO */
protected VdsGroupDao getVdsGroupDao() {
    return dbFacade.getVdsGroupDao();
}

/** @return the host dynamic-data DAO */
protected VdsDynamicDao getVdsDynamicDao() {
    return dbFacade.getVdsDynamicDao();
}

/** @return the policy unit DAO */
protected PolicyUnitDao getPolicyUnitDao() {
    return dbFacade.getPolicyUnitDao();
}

/** @return the cluster policy DAO */
protected ClusterPolicyDao getClusterPolicyDao() {
    return dbFacade.getClusterPolicyDao();
}
/**
 * Registers the periodic load-balancing job when VDS load balancing is enabled
 * in the engine configuration.
 */
public void enableLoadBalancer() {
    if (Config.<Boolean>getValue(ConfigValues.EnableVdsLoadBalancing)) {
        log.info("Start scheduling to enable vds load balancer");
        // the job-name string must match the @OnTimerMethodAnnotation of performLoadBalancing()
        Injector.get(SchedulerUtilQuartzImpl.class).scheduleAFixedDelayJob(
                this,
                "performLoadBalancing",
                new Class[] {},
                new Object[] {},
                Config.<Integer>getValue(ConfigValues.VdsLoadBalancingIntervalInMinutes),
                Config.<Integer>getValue(ConfigValues.VdsLoadBalancingIntervalInMinutes),
                TimeUnit.MINUTES);
        log.info("Finished scheduling to enable vds load balancer");
    }
}
/**
 * Registers the periodic HA-reservation check job when VDS load balancing is
 * enabled in the engine configuration.
 */
public void enableHaReservationCheck() {
    if (Config.<Boolean>getValue(ConfigValues.EnableVdsLoadBalancing)) {
        log.info("Start HA Reservation check");
        Integer interval = Config.<Integer> getValue(ConfigValues.VdsHaReservationIntervalInMinutes);
        // the job-name string must match the @OnTimerMethodAnnotation of performHaResevationCheck()
        Injector.get(SchedulerUtilQuartzImpl.class).scheduleAFixedDelayJob(
                this,
                "performHaResevationCheck",
                new Class[] {},
                new Object[] {},
                interval,
                interval,
                TimeUnit.MINUTES);
        log.info("Finished HA Reservation check");
    }
}
/**
 * Periodic HA-reservation check over all clusters that support it: raises an audit
 * alert for clusters failing the check, and another when a previously failing cluster
 * passes again. (The method name keeps its historical misspelling: the annotation
 * value and the job-name string in enableHaReservationCheck() must match it.)
 */
@OnTimerMethodAnnotation("performHaResevationCheck")
public void performHaResevationCheck() {
    log.debug("HA Reservation check timer entered.");
    List<VDSGroup> clusters = getVdsGroupDao().getAll();
    if (clusters != null) {
        HaReservationHandling haReservationHandling = new HaReservationHandling(getPendingResourceManager());
        for (VDSGroup cluster : clusters) {
            if (cluster.supportsHaReservation()) {
                List<VDS> returnedFailedHosts = new ArrayList<>();
                boolean clusterHaStatus =
                        haReservationHandling.checkHaReservationStatusForCluster(cluster, returnedFailedHosts);
                if (!clusterHaStatus) {
                    // create Alert using returnedFailedHosts
                    AuditLogableBase logable = new AuditLogableBase();
                    logable.setVdsGroupId(cluster.getId());
                    logable.addCustomValue("ClusterName", cluster.getName());
                    String failedHostsStr = StringUtils.join(Entities.objectNames(returnedFailedHosts), ", ");
                    logable.addCustomValue("Hosts", failedHostsStr);
                    AlertDirector.Alert(logable, AuditLogType.CLUSTER_ALERT_HA_RESERVATION, auditLogDirector);
                    log.info("Cluster '{}' fail to pass HA reservation check.", cluster.getName());
                }
                // previous cycle's status; clusters never seen before count as "safe"
                boolean clusterHaStatusFromPreviousCycle =
                        clusterId2isHaReservationSafe.containsKey(cluster.getId()) ? clusterId2isHaReservationSafe.get(cluster.getId())
                                : true;
                // Update the status map with the new status
                clusterId2isHaReservationSafe.put(cluster.getId(), clusterHaStatus);
                // Create Alert if the status was changed from false to true
                // NOTE(review): this fires CLUSTER_ALERT_HA_RESERVATION_DOWN on a
                // failing->passing (recovery) transition -- confirm the alert type is intended
                if (!clusterHaStatusFromPreviousCycle && clusterHaStatus) {
                    AuditLogableBase logable = new AuditLogableBase();
                    logable.setVdsGroupId(cluster.getId());
                    logable.addCustomValue("ClusterName", cluster.getName());
                    AlertDirector.Alert(logable, AuditLogType.CLUSTER_ALERT_HA_RESERVATION_DOWN, auditLogDirector);
                }
            }
        }
    }
    log.debug("HA Reservation check timer finished.");
}
/**
 * Periodic load-balancing pass: for each cluster, runs its enabled balance policy
 * unit (internal or, when the external scheduler is enabled, external) and triggers
 * a migration when the balancer proposes a result.
 */
@OnTimerMethodAnnotation("performLoadBalancing")
public void performLoadBalancing() {
    log.debug("Load Balancer timer entered.");
    List<VDSGroup> clusters = getVdsGroupDao().getAll();
    for (VDSGroup cluster : clusters) {
        ClusterPolicy policy = policyMap.get(cluster.getClusterPolicyId());
        PolicyUnitImpl policyUnit = policyUnits.get(policy.getBalance());
        Pair<List<Guid>, Guid> balanceResult = null;
        if (policyUnit.getPolicyUnit().isEnabled()) {
            // hosts already involved in a migration are excluded from balancing
            List<VDS> hosts = getVdsDao().getAllForVdsGroupWithoutMigrating(cluster.getId());
            if (policyUnit.getPolicyUnit().isInternal()) {
                balanceResult = internalRunBalance(policyUnit, cluster, hosts);
            } else if (Config.<Boolean> getValue(ConfigValues.ExternalSchedulerEnabled)) {
                balanceResult = externalRunBalance(policyUnit, cluster, hosts);
            }
        }
        // presumably first = candidate ids to move, second = destination -- see migrateVM
        if (balanceResult != null && balanceResult.getSecond() != null) {
            migrationHandler.migrateVM(balanceResult.getFirst(), balanceResult.getSecond());
        }
    }
}
/** Runs an internal (in-process) balance policy unit directly. */
private Pair<List<Guid>, Guid> internalRunBalance(PolicyUnitImpl policyUnit, VDSGroup cluster, List<VDS> hosts) {
    return policyUnit.balance(cluster,
            hosts,
            cluster.getClusterPolicyProperties(),
            new ArrayList<String>());
}

/** Runs an external balance policy unit through the external scheduler broker. */
private Pair<List<Guid>, Guid> externalRunBalance(PolicyUnitImpl policyUnit, VDSGroup cluster, List<VDS> hosts) {
    List<Guid> hostIDs = new ArrayList<>();
    for (VDS vds : hosts) {
        hostIDs.add(vds.getId());
    }
    return ExternalSchedulerFactory.getInstance()
            .runBalance(policyUnit.getPolicyUnit().getName(), hostIDs, cluster.getClusterPolicyProperties());
}
/**
 * Returns the names of all cluster policies that reference the given policy unit,
 * as a filter, a weight function or the balance unit (depending on the unit's type).
 *
 * @param policyUnitId id of the policy unit to look up
 * @return list of cluster policy names that use the referenced policyUnitId,
 *         or {@code null} if the policy unit is not available
 */
public List<String> getClusterPoliciesNamesByPolicyUnitId(Guid policyUnitId) {
    List<String> list = new ArrayList<>();
    final PolicyUnitImpl policyUnitImpl = policyUnits.get(policyUnitId);
    if (policyUnitImpl == null) {
        log.warn("Trying to find usages of non-existing policy unit '{}'", policyUnitId);
        return null;
    }
    PolicyUnit policyUnit = policyUnitImpl.getPolicyUnit();
    if (policyUnit != null) {
        for (ClusterPolicy clusterPolicy : policyMap.values()) {
            // a unit's type determines where a policy may reference it
            switch (policyUnit.getPolicyUnitType()) {
            case FILTER:
                Collection<Guid> filters = clusterPolicy.getFilters();
                if (filters != null && filters.contains(policyUnitId)) {
                    list.add(clusterPolicy.getName());
                }
                break;
            case WEIGHT:
                Collection<Pair<Guid, Integer>> functions = clusterPolicy.getFunctions();
                if (functions == null) {
                    break;
                }
                for (Pair<Guid, Integer> pair : functions) {
                    if (pair.getFirst().equals(policyUnitId)) {
                        list.add(clusterPolicy.getName());
                        break; // each policy is listed at most once
                    }
                }
                break;
            case LOAD_BALANCING:
                if (policyUnitId.equals(clusterPolicy.getBalance())) {
                    list.add(clusterPolicy.getName());
                }
                break;
            default:
                break;
            }
        }
    }
    return list;
}
/** Deletes an external policy unit from the database and the in-memory unit map. */
public void removeExternalPolicyUnit(Guid policyUnitId) {
    getPolicyUnitDao().remove(policyUnitId);
    policyUnits.remove(policyUnitId);
}
/**
 * update host scheduling statistics:
 * * CPU load duration interval over/under policy threshold
 * Stamps cpuOverCommitTimestamp when the host's CPU usage leaves the configured
 * [low, high] utilization band, and clears the stamp while usage is inside the band.
 *
 * @param vds the host to update (modified in place)
 */
public void updateHostSchedulingStats(VDS vds) {
    if (vds.getUsageCpuPercent() != null) {
        VDSGroup vdsGroup = getVdsGroupDao().get(vds.getVdsGroupId());
        // thresholds come from the cluster policy properties, with engine-wide defaults
        if (vds.getUsageCpuPercent() >= NumberUtils.toInt(vdsGroup.getClusterPolicyProperties()
                .get(HIGH_UTILIZATION),
                Config.<Integer> getValue(ConfigValues.HighUtilizationForEvenlyDistribute))
                || vds.getUsageCpuPercent() <= NumberUtils.toInt(vdsGroup.getClusterPolicyProperties()
                        .get(LOW_UTILIZATION),
                        Config.<Integer> getValue(ConfigValues.LowUtilizationForEvenlyDistribute))) {
            // only stamp the first time the band is left, to preserve the crossing moment
            if (vds.getCpuOverCommitTimestamp() == null) {
                vds.setCpuOverCommitTimestamp(new Date());
            }
        } else {
            vds.setCpuOverCommitTimestamp(null);
        }
    }
}
/**
 * Clear pending records for a VM.
 * This operation locks the cluster to make sure a possible scheduling operation is not under way.
 */
public void clearPendingVm(VmStatic vm) {
    prepareClusterLock(vm.getVdsGroupId());
    try {
        lockCluster(vm.getVdsGroupId());
        getPendingResourceManager().clearVm(vm);
    } catch (InterruptedException e) {
        // best effort: proceed without the lock rather than fail the caller
        log.warn("Interrupted.. pending counters can be out of sync");
    } finally {
        releaseCluster(vm.getVdsGroupId());
    }
}

/**
 * Clear pending records for a Host.
 * This operation locks the cluster to make sure a possible scheduling operation is not under way.
 */
public void clearPendingHost(VDS host) {
    prepareClusterLock(host.getVdsGroupId());
    try {
        lockCluster(host.getVdsGroupId());
        getPendingResourceManager().clearHost(host);
    } catch (InterruptedException e) {
        // best effort: proceed without the lock rather than fail the caller
        log.warn("Interrupted.. pending counters can be out of sync");
    } finally {
        releaseCluster(host.getVdsGroupId());
    }
}

/**
 * Get the host scheduled to receive a VM.
 * This is best effort only.
 */
public Guid getPendingHostForVm(VM vm) {
    return PendingVM.getScheduledHost(getPendingResourceManager(), vm);
}
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.util.network;
import alluxio.AlluxioURI;
import alluxio.Configuration;
import alluxio.MasterInquireClient;
import alluxio.PropertyKey;
import alluxio.exception.PreconditionMessage;
import alluxio.util.OSUtils;
import alluxio.wire.WorkerNetAddress;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import org.apache.thrift.transport.TServerSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.NetworkInterface;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import javax.annotation.concurrent.ThreadSafe;
/**
* Common network address related utilities shared by all components in Alluxio.
*/
@ThreadSafe
public final class NetworkAddressUtils {
private static final Logger LOG = LoggerFactory.getLogger(NetworkAddressUtils.class);

/** The wildcard (bind-to-all-interfaces) address. */
public static final String WILDCARD_ADDRESS = "0.0.0.0";

/**
 * Checks if the underlying OS is Windows.
 */
public static final boolean WINDOWS = OSUtils.isWindows();

// cached local host name / IP — presumably populated lazily by the resolution
// helpers later in this class (TODO confirm; their bodies are outside this view)
private static String sLocalHost;
private static String sLocalIP;

private NetworkAddressUtils() {} // utility class: prevent instantiation
/**
 * Different types of services that client uses to connect. These types also indicate the service
 * bind address.
 */
public enum ServiceType {
    /**
     * Master RPC service (Thrift).
     */
    MASTER_RPC("Alluxio Master RPC service", PropertyKey.MASTER_HOSTNAME,
            PropertyKey.MASTER_BIND_HOST, PropertyKey.MASTER_RPC_PORT),

    /**
     * Master web service (Jetty).
     */
    MASTER_WEB("Alluxio Master Web service", PropertyKey.MASTER_WEB_HOSTNAME,
            PropertyKey.MASTER_WEB_BIND_HOST, PropertyKey.MASTER_WEB_PORT),

    /**
     * Worker RPC service (Thrift).
     */
    WORKER_RPC("Alluxio Worker RPC service", PropertyKey.WORKER_HOSTNAME,
            PropertyKey.WORKER_BIND_HOST, PropertyKey.WORKER_RPC_PORT),

    /**
     * Worker data service (Netty).
     */
    WORKER_DATA("Alluxio Worker data service", PropertyKey.WORKER_DATA_HOSTNAME,
            PropertyKey.WORKER_DATA_BIND_HOST, PropertyKey.WORKER_DATA_PORT),

    /**
     * Worker web service (Jetty).
     */
    WORKER_WEB("Alluxio Worker Web service", PropertyKey.WORKER_WEB_HOSTNAME,
            PropertyKey.WORKER_WEB_BIND_HOST, PropertyKey.WORKER_WEB_PORT),

    /**
     * Proxy web service (Jetty).
     */
    PROXY_WEB("Alluxio Proxy Web service", PropertyKey.PROXY_WEB_HOSTNAME,
            PropertyKey.PROXY_WEB_BIND_HOST, PropertyKey.PROXY_WEB_PORT),
    ;

    // human-readable service name
    private final String mServiceName;

    // the property key of the externally resolvable connect hostname
    private final PropertyKey mHostNameKey;

    // the property key of the bind hostname
    private final PropertyKey mBindHostKey;

    // the property key of the service port
    private final PropertyKey mPortKey;

    ServiceType(String serviceName, PropertyKey hostNameKey, PropertyKey bindHostKey,
        PropertyKey portKey) {
        mServiceName = serviceName;
        mHostNameKey = hostNameKey;
        mBindHostKey = bindHostKey;
        mPortKey = portKey;
    }

    /**
     * Gets service name.
     *
     * @return service name
     */
    public String getServiceName() {
        return mServiceName;
    }

    /**
     * Gets the key of connect hostname.
     *
     * @return key of connect hostname
     */
    public PropertyKey getHostNameKey() {
        return mHostNameKey;
    }

    /**
     * Gets the key of bind hostname.
     *
     * @return key of bind hostname
     */
    public PropertyKey getBindHostKey() {
        return mBindHostKey;
    }

    /**
     * Gets the key of service port.
     *
     * @return key of service port
     */
    public PropertyKey getPortKey() {
        return mPortKey;
    }

    /**
     * Gets the default port number on service.
     * NOTE(review): assumes the port key declares a numeric default value;
     * Integer.parseInt would throw otherwise — confirm all port keys define defaults.
     *
     * @return default port
     */
    public int getDefaultPort() {
        return Integer.parseInt(mPortKey.getDefaultValue());
    }
}
/**
 * Checks if the given port is valid.
 *
 * @param port the port to check
 * @throws IllegalArgumentException when the port is negative or greater than 65535
 */
public static void assertValidPort(final int port) {
    if (port >= 65536) {
        throw new IllegalArgumentException("Port must be less than 65536");
    }
    if (port < 0) {
        throw new IllegalArgumentException("Port must be non-negative");
    }
}
/**
 * Checks if the given port in the address is valid.
 *
 * @param address the {@link InetSocketAddress} with the port to check
 * @throws IllegalArgumentException when the port is outside [0, 65535]
 */
public static void assertValidPort(final InetSocketAddress address) {
    // delegate to the int overload
    assertValidPort(address.getPort());
}

/**
 * Helper method to get the {@link InetSocketAddress} address for client to communicate with the
 * service.
 *
 * @param service the service name used to connect
 * @return the service address that a client (typically outside the service machine) uses to
 *         communicate with service.
 */
public static InetSocketAddress getConnectAddress(ServiceType service) {
    return new InetSocketAddress(getConnectHost(service), getPort(service));
}
/**
 * Provides an externally resolvable hostname for clients to reach the given service.
 * Resolution order:
 * <ol>
 * <li>the service's configured connect hostname, when set, non-empty and not the
 * wildcard address;</li>
 * <li>the service's configured bind host, under the same conditions;</li>
 * <li>otherwise, the local host name of this machine.</li>
 * </ol>
 *
 * @param service Service type used to connect
 * @return the externally resolvable hostname that the client can use to communicate with
 *         the service
 */
public static String getConnectHost(ServiceType service) {
    // try the explicit connect hostname first, then fall back to the bind host
    for (PropertyKey hostKey : new PropertyKey[] {service.mHostNameKey, service.mBindHostKey}) {
        if (Configuration.containsKey(hostKey)) {
            String host = Configuration.get(hostKey);
            if (!host.isEmpty() && !host.equals(WILDCARD_ADDRESS)) {
                return host;
            }
        }
    }
    // neither key yields a usable name — derive one from the local machine
    return getLocalHostName();
}
/**
 * Gets the port number on a given service type. If user defined port number is not explicitly
 * specified, Alluxio will use the default service port.
 *
 * @param service Service type used to connect
 * @return the service port number
 */
public static int getPort(ServiceType service) {
  // Defaults are presumably supplied by the Configuration layer when the key
  // is not explicitly set — TODO confirm against Configuration.getInt semantics.
  return Configuration.getInt(service.mPortKey);
}
/**
 * Helper method to get the {@link InetSocketAddress} bind address on a given service.
 * <p>
 * Host bind information searching order:
 * <ol>
 * <li>System properties or environment variables via alluxio-env.sh
 * <li>Default properties via alluxio-default.properties file
 * <li>A externally resolvable local hostname for the host this JVM is running on
 * </ol>
 *
 * @param service the service name used to connect
 * @return the InetSocketAddress the service will bind to
 */
public static InetSocketAddress getBindAddress(ServiceType service) {
  int servicePort = getPort(service);
  assertValidPort(servicePort);
  // Use the configured bind host when present and non-empty; otherwise fall back
  // to a locally determined hostname.
  String bindHost = Configuration.containsKey(service.mBindHostKey)
      ? Configuration.get(service.mBindHostKey) : "";
  if (bindHost.isEmpty()) {
    bindHost = getLocalHostName();
  }
  return new InetSocketAddress(bindHost, servicePort);
}
/**
 * Gets the local hostname to be used by the client. If this isn't configured, a non-loopback
 * local hostname will be looked up.
 *
 * @return the local hostname for the client
 */
public static String getClientHostName() {
  // An explicitly configured user hostname always wins over local lookup.
  return Configuration.containsKey(PropertyKey.USER_HOSTNAME)
      ? Configuration.get(PropertyKey.USER_HOSTNAME)
      : getLocalHostName();
}
/**
 * Gets a local hostname for the host this JVM is running on.
 *
 * @return the local host name, which is not based on a loopback ip address
 */
public static synchronized String getLocalHostName() {
  // Fast path: reuse the name cached by a previous resolution.
  if (sLocalHost != null) {
    return sLocalHost;
  }
  int hostResolutionTimeout =
      Configuration.getInt(PropertyKey.NETWORK_HOST_RESOLUTION_TIMEOUT_MS);
  // The timeout-based overload performs the lookup and populates the cache.
  return getLocalHostName(hostResolutionTimeout);
}
/**
 * Gets a local host name for the host this JVM is running on.
 *
 * @param timeoutMs Timeout in milliseconds to use for checking that a possible local host is
 *        reachable
 * @return the local host name, which is not based on a loopback ip address
 */
public static synchronized String getLocalHostName(int timeoutMs) {
  // Resolve and cache only on the first call; subsequent calls return the cached name.
  if (sLocalHost == null) {
    try {
      sLocalHost = InetAddress.getByName(getLocalIpAddress(timeoutMs)).getCanonicalHostName();
    } catch (UnknownHostException e) {
      throw Throwables.propagate(e);
    }
  }
  return sLocalHost;
}
/**
 * Gets a local IP address for the host this JVM is running on.
 *
 * @return the local ip address, which is not a loopback address and is reachable
 */
public static synchronized String getLocalIpAddress() {
  // Fast path: reuse the IP cached by a previous resolution.
  if (sLocalIP != null) {
    return sLocalIP;
  }
  int hostResolutionTimeout =
      Configuration.getInt(PropertyKey.NETWORK_HOST_RESOLUTION_TIMEOUT_MS);
  // The timeout-based overload performs the lookup and populates the cache.
  return getLocalIpAddress(hostResolutionTimeout);
}
/**
 * Gets a local IP address for the host this JVM is running on.
 *
 * @param timeoutMs Timeout in milliseconds to use for checking that a possible local IP is
 *        reachable
 * @return the local ip address, which is not a loopback address and is reachable
 */
public static synchronized String getLocalIpAddress(int timeoutMs) {
  // Fast path: return the cached address resolved by a previous call.
  if (sLocalIP != null) {
    return sLocalIP;
  }
  try {
    InetAddress address = InetAddress.getLocalHost();
    LOG.debug("address: {} isLoopbackAddress: {}, with host {} {}", address,
        address.isLoopbackAddress(), address.getHostAddress(), address.getHostName());
    // Make sure that the address is actually reachable since in some network configurations
    // it is possible for the InetAddress.getLocalHost() call to return a non-reachable
    // address e.g. a broadcast address
    if (!isValidAddress(address, timeoutMs)) {
      // Fall back to scanning all network interfaces for a valid candidate.
      Enumeration<NetworkInterface> networkInterfaces = NetworkInterface.getNetworkInterfaces();
      // Make getNetworkInterfaces have the same order of network interfaces as listed on
      // unix-like systems. This optimization can help avoid to get some special addresses, such
      // as loopback address"127.0.0.1", virtual bridge address "192.168.122.1" as far as
      // possible.
      if (!WINDOWS) {
        List<NetworkInterface> netIFs = Collections.list(networkInterfaces);
        Collections.reverse(netIFs);
        networkInterfaces = Collections.enumeration(netIFs);
      }
      while (networkInterfaces.hasMoreElements()) {
        NetworkInterface ni = networkInterfaces.nextElement();
        Enumeration<InetAddress> addresses = ni.getInetAddresses();
        while (addresses.hasMoreElements()) {
          address = addresses.nextElement();
          // Address must not be link local or loopback. And it must be reachable
          if (isValidAddress(address, timeoutMs)) {
            // First valid address wins; cache and return it.
            sLocalIP = address.getHostAddress();
            return sLocalIP;
          }
        }
      }
      LOG.warn("Your hostname, {} resolves to a loopback/non-reachable address: {}, "
          + "but we couldn't find any external IP address!",
          InetAddress.getLocalHost().getHostName(), address.getHostAddress());
    }
    // NOTE: if no valid external address was found above, this caches the last address
    // examined (possibly loopback/non-reachable) as a best-effort fallback.
    sLocalIP = address.getHostAddress();
    return sLocalIP;
  } catch (IOException e) {
    throw Throwables.propagate(e);
  }
}
/**
 * Checks whether a TCP connection can be established to the given host and port.
 *
 * @param host the host to try to connect to
 * @param port the port to try to connect on
 * @return whether a socket connection can be made to the given host on the given port;
 *         {@code false} for any out-of-range port
 */
public static boolean isServing(String host, int port) {
  // An out-of-range port can never be served. The original code only rejected
  // negative ports and let new Socket(...) throw IllegalArgumentException for
  // ports > 65535; treat both as "not serving" per the documented contract.
  if (port < 0 || port > 65535) {
    return false;
  }
  // try-with-resources guarantees the socket is closed even on unexpected errors.
  try (Socket socket = new Socket(host, port)) {
    return true;
  } catch (IOException e) {
    // Connection refused, unreachable host, unresolvable name, etc.
    return false;
  }
}
/**
 * Tests if the address is externally resolvable. Address must not be wildcard, link local,
 * loopback address, non-IPv4, or other unreachable addresses.
 *
 * @param address The testing address
 * @param timeoutMs Timeout in milliseconds to use for checking that a possible local IP is
 *        reachable
 * @return a {@code boolean} indicating if the given address is externally resolvable address
 * @throws IOException if the address resolution fails
 */
private static boolean isValidAddress(InetAddress address, int timeoutMs) throws IOException {
  // Reject the cheap-to-detect special addresses first; only then pay for the
  // (potentially slow) reachability probe. Evaluation order matches the original chain.
  if (address.isAnyLocalAddress() || address.isLinkLocalAddress()
      || address.isLoopbackAddress()) {
    return false;
  }
  return address.isReachable(timeoutMs) && (address instanceof Inet4Address);
}
/**
 * Replaces and resolves the hostname in a given address or path string.
 *
 * @param path an address or path string, e.g., "hdfs://host:port/dir", "file:///dir", "/dir"
 * @return an address or path string with hostname resolved, or the original path intact if no
 *         hostname is embedded, or null if the given path is null or empty.
 * @throws UnknownHostException if the hostname cannot be resolved
 */
public static AlluxioURI replaceHostName(AlluxioURI path) throws UnknownHostException {
  // Nothing to rewrite when there is no URI or no authority component;
  // returning the input covers both the null and the plain-path cases.
  if (path == null || !path.hasAuthority()) {
    return path;
  }
  String authority = resolveHostName(path.getHost());
  if (path.getPort() != -1) {
    // Preserve an explicit port in the rebuilt authority.
    authority += ":" + path.getPort();
  }
  return new AlluxioURI(path.getScheme(), authority, path.getPath(), path.getQueryMap());
}
/**
 * Resolves a given hostname by a canonical hostname. When a hostname alias (e.g., those specified
 * in /etc/hosts) is given, the alias may not be resolvable on other hosts in a cluster unless the
 * same alias is defined there. In this situation, loadufs would break.
 *
 * @param hostname the input hostname, which could be an alias
 * @return the canonical form of the hostname, or null if it is null or empty
 * @throws UnknownHostException if the given hostname cannot be resolved
 */
public static String resolveHostName(String hostname) throws UnknownHostException {
  // Null/empty input short-circuits to null; otherwise ask the resolver for the canonical name.
  return (hostname == null || hostname.isEmpty())
      ? null
      : InetAddress.getByName(hostname).getCanonicalHostName();
}
/**
 * Gets FQDN(Full Qualified Domain Name) from Java representations of network address, except
 * String representation which should be handled by {@link #resolveHostName(String)} which will
 * handle the situation where hostname is null.
 *
 * @param addr the input network address representation, can not be null
 * @return the resolved FQDN host name
 */
public static String getFqdnHost(InetSocketAddress addr) {
  // An unresolved InetSocketAddress has a null InetAddress; fail fast on that case.
  InetAddress resolved = addr.getAddress();
  Preconditions.checkNotNull(resolved, "the address of " + addr + " is invalid.");
  return resolved.getCanonicalHostName();
}
/**
 * Gets FQDN(Full Qualified Domain Name) from Alluxio representation of network address.
 *
 * @param addr the input network address representation
 * @return the resolved FQDN host name
 * @throws UnknownHostException if the host is not known
 */
public static String getFqdnHost(WorkerNetAddress addr) throws UnknownHostException {
  // Delegates to resolveHostName, which returns null for a null/empty host.
  return resolveHostName(addr.getHost());
}
/**
 * Gets the port for the underline socket. This function calls
 * {@link #getThriftSocket(org.apache.thrift.transport.TServerSocket)}, so reflection will be used
 * to get the port.
 *
 * @param thriftSocket the underline socket
 * @return the thrift port for the underline socket
 * @see #getThriftSocket(org.apache.thrift.transport.TServerSocket)
 */
public static int getThriftPort(TServerSocket thriftSocket) {
  // Reflection-based accessor; see getThriftSocket for the rationale.
  return getThriftSocket(thriftSocket).getLocalPort();
}
/**
 * Extracts the underlying {@link ServerSocket} from a thrift server socket. As of thrift 0.9,
 * the internal socket used is not exposed in the API, so this function uses reflection to get
 * access to it.
 *
 * @param thriftSocket the underline thrift socket
 * @return the server socket
 */
public static ServerSocket getThriftSocket(final TServerSocket thriftSocket) {
  try {
    // "serverSocket_" is the private field thrift stores the wrapped socket in.
    Field serverSocketField = TServerSocket.class.getDeclaredField("serverSocket_");
    serverSocketField.setAccessible(true);
    return (ServerSocket) serverSocketField.get(thriftSocket);
  } catch (NoSuchFieldException | IllegalAccessException e) {
    // A missing or inaccessible field means an incompatible thrift version; surface it.
    throw Throwables.propagate(e);
  }
}
/**
 * Parses {@link InetSocketAddress} from a String of the form "host:port".
 *
 * <p>The host may itself contain colons (e.g. a bracketed IPv6 literal such as
 * "[::1]:9000"); the port is taken after the last colon.
 *
 * @param address socket address to parse, may be null
 * @return InetSocketAddress of the String, or null if the input is null
 * @throws IOException if the socket address is invalid, including a non-numeric port
 */
public static InetSocketAddress parseInetSocketAddress(String address) throws IOException {
  if (address == null) {
    return null;
  }
  // Split on the LAST colon so hosts containing colons still parse; the original
  // split(":") rejected those and, worse, let a non-numeric port escape as an
  // unchecked NumberFormatException despite the documented IOException contract.
  int separator = address.lastIndexOf(':');
  if (separator < 0) {
    throw new IOException("Invalid InetSocketAddress " + address);
  }
  int port;
  try {
    port = Integer.parseInt(address.substring(separator + 1));
  } catch (NumberFormatException e) {
    throw new IOException("Invalid InetSocketAddress " + address, e);
  }
  return new InetSocketAddress(address.substring(0, separator), port);
}
/**
 * Extracts rpcPort InetSocketAddress from Alluxio representation of network address.
 *
 * @param netAddress the input network address representation
 * @return InetSocketAddress built from the host and RPC port
 */
public static InetSocketAddress getRpcPortSocketAddress(WorkerNetAddress netAddress) {
  // Single-expression form: host and RPC port straight from the worker address.
  return new InetSocketAddress(netAddress.getHost(), netAddress.getRpcPort());
}
/**
 * Extracts dataPort InetSocketAddress from Alluxio representation of network address.
 *
 * @param netAddress the input network address representation
 * @return InetSocketAddress built from the host and data port
 */
public static InetSocketAddress getDataPortSocketAddress(WorkerNetAddress netAddress) {
  // Single-expression form: host and data port straight from the worker address.
  return new InetSocketAddress(netAddress.getHost(), netAddress.getDataPort());
}
/**
 * Get the active master address from zookeeper for the fault tolerant Alluxio masters.
 *
 * @param zkLeaderPath the Zookeeper path containing the leader master address
 * @return InetSocketAddress the active master address retrieved from zookeeper
 * @throws IllegalStateException if the required zookeeper configuration keys are not set
 */
public static InetSocketAddress getLeaderAddressFromZK(String zkLeaderPath) {
  Preconditions.checkState(Configuration.containsKey(PropertyKey.ZOOKEEPER_ADDRESS),
      PreconditionMessage.ERR_ZK_ADDRESS_NOT_SET.toString(),
      PropertyKey.ZOOKEEPER_ADDRESS.toString());
  // NOTE(review): unlike the check above, this checkState passes the property key name as the
  // failure message itself rather than as a template argument — looks like a missing message
  // template; confirm intended error text.
  Preconditions.checkState(Configuration.containsKey(PropertyKey.ZOOKEEPER_ELECTION_PATH),
      PropertyKey.ZOOKEEPER_ELECTION_PATH.toString());
  MasterInquireClient masterInquireClient =
      MasterInquireClient.getClient(
          Configuration.get(PropertyKey.ZOOKEEPER_ADDRESS),
          Configuration.get(PropertyKey.ZOOKEEPER_ELECTION_PATH), zkLeaderPath);
  try {
    String temp = masterInquireClient.getLeaderAddress();
    return NetworkAddressUtils.parseInetSocketAddress(temp);
  } catch (IOException e) {
    // Logged here and rethrown unchecked; callers may log it again.
    LOG.error(e.getMessage(), e);
    throw Throwables.propagate(e);
  }
}
/**
 * @return InetSocketAddress the list of all master addresses from zookeeper
 * @throws IllegalStateException if the required zookeeper configuration keys are not set
 * @throws RuntimeException if the master addresses cannot be retrieved from zookeeper
 */
public static List<InetSocketAddress> getMasterAddressesFromZK() {
  // All three zookeeper keys must be configured before querying.
  Preconditions.checkState(Configuration.containsKey(PropertyKey.ZOOKEEPER_ADDRESS));
  Preconditions.checkState(Configuration.containsKey(PropertyKey.ZOOKEEPER_ELECTION_PATH));
  Preconditions.checkState(Configuration.containsKey(PropertyKey.ZOOKEEPER_LEADER_PATH));
  MasterInquireClient masterInquireClient = MasterInquireClient.getClient(
      Configuration.get(PropertyKey.ZOOKEEPER_ADDRESS),
      Configuration.get(PropertyKey.ZOOKEEPER_ELECTION_PATH),
      Configuration.get(PropertyKey.ZOOKEEPER_LEADER_PATH));
  List<String> addresses = masterInquireClient.getMasterAddresses();
  if (addresses == null) {
    throw new RuntimeException(String.format("Failed to get the master addresses from zookeeper, "
        + "zookeeper address: %s", Configuration.get(PropertyKey.ZOOKEEPER_ADDRESS)));
  }
  List<InetSocketAddress> ret = new ArrayList<>(addresses.size());
  try {
    // Parse every "host:port" entry; a single bad entry aborts the whole lookup.
    for (String address : addresses) {
      ret.add(NetworkAddressUtils.parseInetSocketAddress(address));
    }
    return ret;
  } catch (IOException e) {
    throw Throwables.propagate(e);
  }
}
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.ide.plugins.newui;
import com.intellij.execution.process.ProcessIOExecutorService;
import com.intellij.icons.AllIcons;
import com.intellij.ide.BrowserUtil;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.plugins.*;
import com.intellij.ide.plugins.marketplace.MarketplaceRequests;
import com.intellij.ide.plugins.marketplace.statistics.PluginManagerUsageCollector;
import com.intellij.ide.plugins.org.PluginManagerFilters;
import com.intellij.openapi.application.ApplicationInfo;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.IdeUrlTrackingParametersProvider;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ex.ApplicationInfoEx;
import com.intellij.openapi.extensions.PluginId;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.text.HtmlChunk;
import com.intellij.openapi.util.text.Strings;
import com.intellij.ui.BrowserHyperlinkListener;
import com.intellij.ui.ColorUtil;
import com.intellij.ui.JBColor;
import com.intellij.ui.LicensingFacade;
import com.intellij.ui.border.CustomLineBorder;
import com.intellij.ui.components.JBLabel;
import com.intellij.ui.components.JBPanelWithEmptyText;
import com.intellij.ui.components.JBScrollBar;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.ui.components.labels.LinkListener;
import com.intellij.ui.components.panels.NonOpaquePanel;
import com.intellij.ui.components.panels.OpaquePanel;
import com.intellij.ui.scale.JBUIScale;
import com.intellij.util.io.URLUtil;
import com.intellij.util.ui.*;
import com.intellij.util.ui.components.BorderLayoutPanel;
import com.intellij.xml.util.XmlStringUtil;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.text.View;
import javax.swing.text.html.HTMLEditorKit;
import javax.swing.text.html.ImageView;
import javax.swing.text.html.ParagraphView;
import javax.swing.text.html.StyleSheet;
import java.awt.*;
import java.awt.event.FocusAdapter;
import java.awt.event.FocusEvent;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Consumer;
/**
* @author Alexander Lobas
*/
public final class PluginDetailsPageComponent extends MultiPanel {
// Relative marketplace URL prefix used to build a plugin's page link.
private static final String MARKETPLACE_LINK = "/plugin/index?xmlId=";
// Model, search callback and tab kind, fixed at construction time.
private final MyPluginModel myPluginModel;
private final LinkListener<Object> mySearchListener;
private final boolean myMarketplace;
// Spinner shown on the empty panel while plugin details load.
@NotNull
private final AsyncProcessIcon myLoadingIcon = new AsyncProcessIcon.BigCentered(IdeBundle.message("progress.text.loading"));
// Panels: myEmptyPanel for the "nothing selected / loading" state (key 1),
// myRootPanel for the details view (key 0), myPanel for its content.
private JBPanelWithEmptyText myEmptyPanel;
private OpaquePanel myRootPanel;
private OpaquePanel myPanel;
// Header widgets: plugin icon, name and the name+buttons row.
private JLabel myIconLabel;
private final JEditorPane myNameComponent = createNameComponent();
private final BaselinePanel myNameAndButtons = new BaselinePanel();
// Action buttons (created in createButtons).
private JButton myRestartButton;
private InstallButton myInstallButton;
private JButton myUpdateButton;
private JComponent myGearButton;
// Error strip, only created for the Installed tab (see createCenterPanel).
private ErrorComponent myErrorComponent;
// Metrics row widgets: version (selectable text field), scope label, tags, date,
// rating/downloads/size (marketplace only), author link.
private JTextField myVersion;
private JLabel myEnabledForProject;
private JLabel myVersionSize;
private TagPanel myTagPanel;
private JLabel myDate;
private JLabel myRating;
private JLabel myDownloads;
private JLabel mySize;
private LinkPanel myAuthor;
// Banner shown when the plugin is disallowed by organization policy.
private BorderLayoutPanel myControlledByOrgNotification;
private final LicensePanel myLicensePanel = new LicensePanel(false);
private LinkPanel myHomePage;
// Bottom area: scrollable description and change notes.
private JBScrollPane myBottomScrollPane;
private JEditorPane myDescriptionComponent;
private String myDescription;
private ChangeNotesPanel myChangeNotesPanel;
// Progress indicator for an in-flight install/update, if any.
private OneLineProgressIndicator myIndicator;
// Currently shown plugin and its pending update descriptor, if any.
private @Nullable IdeaPluginDescriptor myPlugin;
private boolean myIsPluginAllowed;
private IdeaPluginDescriptor myUpdateDescriptor;
// List cell whose plugin is currently displayed; used to detect stale async loads.
private ListPluginComponent myShowComponent;
/**
 * Creates the details page and starts in the "nothing selected" empty state.
 *
 * @param pluginModel    model backing install/update/enable actions
 * @param searchListener callback used by tag/author links to trigger searches
 * @param marketplace    true for the Marketplace tab, false for the Installed tab
 */
public PluginDetailsPageComponent(@NotNull MyPluginModel pluginModel, @NotNull LinkListener<Object> searchListener, boolean marketplace) {
  myPluginModel = pluginModel;
  mySearchListener = searchListener;
  myMarketplace = marketplace;
  createPluginPanel();
  // Key 1 is the empty panel (see create); show it until a plugin is selected.
  select(1, true);
  setEmptyState(EmptyState.NONE_SELECTED);
}
/** @return the plugin currently shown on this page, or null if none */
@Nullable IdeaPluginDescriptor getPlugin() {
  return myPlugin;
}
/**
 * Sets the plugin shown on this page.
 *
 * @param plugin the new descriptor; a null argument is ignored, so this method
 *               cannot be used to clear the current plugin
 */
void setPlugin(@Nullable IdeaPluginDescriptor plugin) {
  if (plugin != null) {
    myPlugin = plugin;
  }
}
/**
 * @param pluginDescriptor descriptor to compare against the shown plugin
 * @return true if this page currently shows the plugin with the same id
 */
public boolean isShowingPlugin(@NotNull IdeaPluginDescriptor pluginDescriptor) {
  // Compare by PluginId, not by descriptor identity.
  return myPlugin != null && myPlugin.getPluginId().equals(pluginDescriptor.getPluginId());
}
/**
 * Supplies the card for the given key: 0 — the plugin details view,
 * 1 — the lazily created empty/loading panel, anything else — the superclass.
 */
@Override
protected JComponent create(Integer key) {
  if (key == 0) {
    return myRootPanel;
  }
  if (key != 1) {
    return super.create(key);
  }
  // Lazily build the empty panel the first time key 1 is requested.
  if (myEmptyPanel == null) {
    myEmptyPanel = new JBPanelWithEmptyText();
    myEmptyPanel.setBorder(new CustomLineBorder(PluginManagerConfigurable.SEARCH_FIELD_BORDER_COLOR, JBUI.insets(1, 0, 0, 0)));
    myEmptyPanel.setOpaque(true);
    myEmptyPanel.setBackground(PluginManagerConfigurable.MAIN_BG_COLOR);
    myLoadingIcon.setOpaque(true);
    myLoadingIcon.setPaintPassiveIcon(false);
    myEmptyPanel.add(myLoadingIcon);
  }
  return myEmptyPanel;
}
/**
 * Builds the details view: content panel (header + center + bottom) plus a
 * root panel that stacks the "controlled by organization" warning banner above it.
 */
private void createPluginPanel() {
  myPanel = new OpaquePanel(new BorderLayout(0, JBUIScale.scale(32)), PluginManagerConfigurable.MAIN_BG_COLOR);
  // Border subclass: draws a 1px top line but reports larger insets for padding.
  myPanel.setBorder(new CustomLineBorder(JBColor.border(), JBUI.insets(1, 0, 0, 0)) {
    @Override
    public Insets getBorderInsets(Component c) {
      return JBUI.insets(15, 20, 0, 0);
    }
  });
  createHeaderPanel().add(createCenterPanel());
  createBottomPanel();
  myRootPanel = new OpaquePanel(new BorderLayout());
  // Warning banner shown when org policy disallows the plugin; hidden by default.
  myControlledByOrgNotification = new BorderLayoutPanel();
  Border customLine = JBUI.Borders.customLine(JBColor.border(), 1, 0, 1, 0);
  myControlledByOrgNotification.setBorder(JBUI.Borders.merge(JBUI.Borders.empty(10), customLine, true));
  myControlledByOrgNotification.setBackground(JBUI.CurrentTheme.Notification.BACKGROUND);
  myControlledByOrgNotification.setForeground(JBUI.CurrentTheme.Notification.FOREGROUND);
  JBLabel notificationLabel = new JBLabel();
  notificationLabel.setIcon(AllIcons.General.Warning);
  notificationLabel.setVerticalTextPosition(SwingConstants.TOP);
  notificationLabel.setText(HtmlChunk.html().addText(IdeBundle.message("plugins.configurable.not.allowed")).toString());
  myControlledByOrgNotification.addToCenter(notificationLabel);
  myControlledByOrgNotification.setVisible(false);
  myRootPanel.add(myControlledByOrgNotification, BorderLayout.NORTH);
  myRootPanel.add(myPanel, BorderLayout.CENTER);
}
/**
 * Builds the header row (plugin icon on the left), attaches it to the top of the
 * content panel and returns it so the caller can add the center panel into it.
 */
@NotNull
private JPanel createHeaderPanel() {
  JPanel headerPanel = new NonOpaquePanel(new BorderLayout(JBUIScale.scale(15), 0));
  headerPanel.setBorder(JBUI.Borders.emptyRight(20));
  myPanel.add(headerPanel, BorderLayout.NORTH);

  myIconLabel = new JLabel();
  myIconLabel.setOpaque(false);
  myIconLabel.setVerticalAlignment(SwingConstants.TOP);
  myIconLabel.setBorder(JBUI.Borders.emptyTop(5));
  headerPanel.add(myIconLabel, BorderLayout.WEST);
  return headerPanel;
}
/**
 * Builds the center column of the header: name + action buttons row, an error
 * strip (Installed tab only), and the metrics rows (version, tags, author, ...).
 */
@NotNull
private JPanel createCenterPanel() {
  int offset = PluginManagerConfigurable.offset5();
  JPanel centerPanel = new NonOpaquePanel(new VerticalLayout(offset));
  myNameAndButtons.setYOffset(JBUIScale.scale(3));
  myNameAndButtons.add(myNameComponent);
  createButtons();
  centerPanel.add(myNameAndButtons, VerticalLayout.FILL_HORIZONTAL);
  // The error component only exists on the Installed tab; marketplace plugins
  // have no load-error state to show.
  if (!myMarketplace) {
    myErrorComponent = new ErrorComponent();
    centerPanel.add(myErrorComponent, VerticalLayout.FILL_HORIZONTAL);
  }
  createMetricsPanel(centerPanel);
  return centerPanel;
}
/**
 * Creates the plugin-name editor pane styled as a bold 18pt label.
 *
 * <p>The anonymous subclass fixes two JEditorPane quirks: it computes a label-like
 * baseline (via a hidden JLabel rendered with the same font/text) so the pane aligns
 * with the buttons row, and it falls back to the minimum height when the preferred
 * height is 0 (empty text).
 */
@NotNull
private static JEditorPane createNameComponent() {
  JEditorPane editorPane = new JEditorPane() {
    JLabel myBaselineComponent;
    @Override
    public int getBaseline(int width, int height) {
      // Delegate baseline computation to a JLabel carrying the same font and text.
      if (myBaselineComponent == null) {
        myBaselineComponent = new JLabel();
        myBaselineComponent.setFont(getFont());
      }
      myBaselineComponent.setText(getText());
      Dimension size = myBaselineComponent.getPreferredSize();
      return myBaselineComponent.getBaseline(size.width, size.height);
    }
    @Override
    public Dimension getPreferredSize() {
      Dimension size = super.getPreferredSize();
      if (size.height == 0) {
        // Empty text reports zero height; use the minimum size instead.
        size.height = getMinimumSize().height;
      }
      return size;
    }
    @Override
    public void updateUI() {
      // Reapply the bold 18pt font after a Look&Feel change.
      super.updateUI();
      setFont(StartupUiUtil.getLabelFont().deriveFont(Font.BOLD, 18));
    }
  };
  UIUtil.convertToLabel(editorPane);
  editorPane.setCaret(EmptyCaret.INSTANCE);
  editorPane.setFont(StartupUiUtil.getLabelFont().deriveFont(Font.BOLD, 18));
  // Set placeholder text once to capture a non-empty preferred size, pin it as the
  // minimum size, then clear the text.
  @NlsSafe String text = "<html><span>Foo</span></html>";
  editorPane.setText(text);
  editorPane.setMinimumSize(editorPane.getPreferredSize());
  editorPane.setText(null);
  return editorPane;
}
/**
 * Creates the restart/update/install/gear buttons on the name row and wires
 * update/install clicks to the plugin model. Button visibility is managed later
 * when a plugin is shown.
 */
private void createButtons() {
  myNameAndButtons.addButtonComponent(myRestartButton = new RestartButton(myPluginModel));
  myNameAndButtons.addButtonComponent(myUpdateButton = new UpdateButton());
  myUpdateButton.addActionListener(
    e -> myPluginModel.installOrUpdatePlugin(this, myPlugin, myUpdateDescriptor, ModalityState.stateForComponent(myUpdateButton)));
  myNameAndButtons.addButtonComponent(myInstallButton = new InstallButton(true));
  // Install passes a null update descriptor — a fresh install, not an update.
  myInstallButton
    .addActionListener(e -> myPluginModel.installOrUpdatePlugin(this, myPlugin, null, ModalityState.stateForComponent(myInstallButton)));
  myGearButton = SelectionBasedPluginModelAction.createGearButton(this::createEnableDisableAction,
                                                                  this::createUninstallAction);
  myGearButton.setOpaque(false);
  myNameAndButtons.addButtonComponent(myGearButton);
  for (Component component : myNameAndButtons.getButtonComponents()) {
    component.setBackground(PluginManagerConfigurable.MAIN_BG_COLOR);
  }
}
/**
 * Switches the page into update-only mode: strips the action buttons and the
 * "enabled for project" label, and removes the decorative borders.
 */
public void setOnlyUpdateMode() {
  myNameAndButtons.removeButtons();
  // The label may not be attached yet; only detach it when it has a parent.
  Container parent = myEnabledForProject.getParent();
  if (parent != null) {
    parent.remove(myEnabledForProject);
  }
  myPanel.setBorder(JBUI.Borders.empty(15, 20, 0, 0));
  myEmptyPanel.setBorder(null);
}
/**
 * Builds the two metric rows under the plugin name: row 1 holds downloads/rating
 * (marketplace only) and the author link; row 2 holds tags, version, scope label
 * and date. The version is a borderless read-only text field so it can be selected.
 *
 * @param centerPanel the header center column to add both rows to
 */
private void createMetricsPanel(@NotNull JPanel centerPanel) {
  // text field without horizontal margins
  myVersion = new JTextField() {
    @Override
    public void setBorder(Border border) {
      // Ignore any border the UI delegate tries to install.
      super.setBorder(null);
    }
    @Override
    public void updateUI() {
      super.updateUI();
      // Reapply the tiny label font after a Look&Feel change.
      if (myVersion != null) {
        PluginDetailsPageComponent.setFont(myVersion);
      }
      if (myVersionSize != null) {
        PluginDetailsPageComponent.setFont(myVersionSize);
      }
    }
  };
  myVersion.putClientProperty("TextFieldWithoutMargins", Boolean.TRUE);
  myVersion.setEditable(false);
  setFont(myVersion);
  myVersion.setBorder(null);
  myVersion.setOpaque(false);
  myVersion.setForeground(ListPluginComponent.GRAY_COLOR);
  myVersion.addFocusListener(new FocusAdapter() {
    @Override
    public void focusLost(FocusEvent e) {
      // Collapse any text selection when focus leaves the field.
      int caretPosition = myVersion.getCaretPosition();
      myVersion.setSelectionStart(caretPosition);
      myVersion.setSelectionEnd(caretPosition);
    }
  });
  // Hidden twin label used only to measure the version text (see showPlugin).
  myVersionSize = new JLabel();
  setFont(myVersionSize);
  int offset = JBUIScale.scale(10);
  JPanel panel1 = new NonOpaquePanel(new TextHorizontalLayout(offset));
  centerPanel.add(panel1);
  if (myMarketplace) {
    myDownloads =
      ListPluginComponent.createRatingLabel(panel1, null, "", AllIcons.Plugins.Downloads, ListPluginComponent.GRAY_COLOR, true);
    myRating =
      ListPluginComponent.createRatingLabel(panel1, null, "", AllIcons.Plugins.Rating, ListPluginComponent.GRAY_COLOR, true);
  }
  myAuthor = new LinkPanel(panel1, false, true, null, TextHorizontalLayout.FIX_LABEL);
  myEnabledForProject = new JLabel();
  myEnabledForProject.add(createDescriptionComponent(null));
  myEnabledForProject.setHorizontalTextPosition(SwingConstants.LEFT);
  myEnabledForProject.setForeground(ListPluginComponent.GRAY_COLOR);
  setFont(myEnabledForProject);
  // Marketplace layout additionally aligns version and date baselines to the tag panel.
  TextHorizontalLayout layout = myMarketplace ? new TextHorizontalLayout(offset) {
    @Override
    public void layoutContainer(Container parent) {
      super.layoutContainer(parent);
      if (myTagPanel != null && myTagPanel.isVisible()) {
        int baseline = myTagPanel.getBaseline(-1, -1);
        if (baseline != -1) {
          Rectangle versionBounds = myVersion.getBounds();
          Dimension versionSize = myVersion.getPreferredSize();
          int versionY = myTagPanel.getY() + baseline - myVersion.getBaseline(versionSize.width, versionSize.height);
          myVersion.setBounds(versionBounds.x, versionY, versionBounds.width, versionBounds.height);
          if (myDate.isVisible()) {
            Rectangle dateBounds = myDate.getBounds();
            Dimension dateSize = myDate.getPreferredSize();
            int dateY = myTagPanel.getY() + baseline - myDate.getBaseline(dateSize.width, dateSize.height);
            myDate.setBounds(dateBounds.x - JBUIScale.scale(4), dateY, dateBounds.width, dateBounds.height);
          }
        }
      }
    }
  } : new TextHorizontalLayout(JBUIScale.scale(7));
  JPanel panel2 = new NonOpaquePanel(layout);
  panel2.setBorder(JBUI.Borders.emptyTop(5));
  panel2.add(myTagPanel = new TagPanel(mySearchListener));
  // Version lives on row 2 for marketplace, row 1 for the Installed tab.
  (myMarketplace ? panel2 : panel1).add(myVersion);
  panel2.add(myEnabledForProject);
  myDate =
    ListPluginComponent.createRatingLabel(panel2, TextHorizontalLayout.FIX_LABEL, "", null, ListPluginComponent.GRAY_COLOR, true);
  centerPanel.add(panel2);
}
/**
 * Builds the scrollable bottom area: license panel, home page link, plugin
 * description, change notes and (marketplace only) the size label.
 */
private void createBottomPanel() {
  JPanel bottomPanel =
    new OpaquePanel(new VerticalLayout(PluginManagerConfigurable.offset5()), PluginManagerConfigurable.MAIN_BG_COLOR);
  bottomPanel.setBorder(JBUI.Borders.empty(0, 0, 15, 20));
  myBottomScrollPane = new JBScrollPane(bottomPanel);
  myBottomScrollPane.getVerticalScrollBar().setBackground(PluginManagerConfigurable.MAIN_BG_COLOR);
  myBottomScrollPane.setBorder(JBUI.Borders.empty());
  myPanel.add(myBottomScrollPane);
  bottomPanel.add(myLicensePanel);
  myLicensePanel.setBorder(JBUI.Borders.emptyBottom(20));
  // On marketplace the home page link sits above the description; on the
  // Installed tab it is created at the end instead (see below).
  if (myMarketplace) {
    myHomePage = new LinkPanel(bottomPanel, false);
    bottomPanel.add(new JLabel());
  }
  // Width constraint for the description and change-notes components.
  Object constraints = JBUIScale.scale(700);
  bottomPanel.add(myDescriptionComponent = createDescriptionComponent(view -> {
    // Enable horizontal scrolling when an embedded image is wider than the viewport.
    float width = view.getPreferredSpan(View.X_AXIS);
    if (width < 0 || width > myBottomScrollPane.getWidth()) {
      myBottomScrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_ALWAYS);
    }
  }), constraints);
  myChangeNotesPanel = new ChangeNotesPanel(bottomPanel, constraints, myDescriptionComponent);
  JLabel separator = new JLabel();
  separator.setBorder(JBUI.Borders.emptyTop(20));
  bottomPanel.add(separator);
  if (myMarketplace) {
    bottomPanel.add(mySize = new JLabel());
  }
  else {
    myHomePage = new LinkPanel(bottomPanel, false);
  }
}
/** Applies the standard label font and then shrinks the component to the "tiny" size. */
private static void setFont(@NotNull JComponent component) {
  component.setFont(StartupUiUtil.getLabelFont());
  PluginManagerConfigurable.setTinyFont(component);
}
/**
 * Creates a non-editable HTML pane for plugin descriptions with IDE-themed styles.
 *
 * @param imageViewHandler optional callback invoked for every HTML image view,
 *                         e.g. to react to oversized images; may be null
 * @return the configured editor pane
 */
@NotNull
public static JEditorPane createDescriptionComponent(@Nullable Consumer<? super View> imageViewHandler) {
  HTMLEditorKit kit = new HTMLEditorKitBuilder().withViewFactoryExtensions((e, view) -> {
    if (view instanceof ParagraphView) {
      // Replace paragraph views with a variant whose 0.3 line spacing cannot be
      // overridden later (setLineSpacing is a no-op).
      return new ParagraphView(e) {
        {
          super.setLineSpacing(0.3f);
        }
        @Override
        protected void setLineSpacing(float ls) {
        }
      };
    }
    if (imageViewHandler != null && view instanceof ImageView) {
      imageViewHandler.accept(view);
    }
    return view;
  }).build();
  // Theme-aware CSS: link color and heading sizes derived from the label font.
  StyleSheet sheet = kit.getStyleSheet();
  sheet.addRule("ul { margin-left-ltr: 30; margin-right-rtl: 30; }");
  sheet.addRule("a { color: " + ColorUtil.toHtmlColor(JBUI.CurrentTheme.Link.Foreground.ENABLED) + "; }");
  sheet.addRule("h4 { font-weight: bold; }");
  sheet.addRule("strong { font-weight: bold; }");
  sheet.addRule("p { margin-bottom: 6px; }");
  Font font = StartupUiUtil.getLabelFont();
  if (font != null) {
    int size = font.getSize();
    sheet.addRule("h3 { font-size: " + (size + 3) + "; font-weight: bold; }");
    sheet.addRule("h2 { font-size: " + (size + 5) + "; font-weight: bold; }");
    sheet.addRule("h1 { font-size: " + (size + 9) + "; font-weight: bold; }");
    sheet.addRule("h0 { font-size: " + (size + 12) + "; font-weight: bold; }");
  }
  JEditorPane editorPane = new JEditorPane();
  editorPane.setEditable(false);
  editorPane.setOpaque(false);
  editorPane.setBorder(null);
  editorPane.setContentType("text/html");
  editorPane.setEditorKit(kit);
  editorPane.addHyperlinkListener(BrowserHyperlinkListener.INSTANCE);
  return editorPane;
}
/**
 * Shows the details for a list selection: a single selected component is shown
 * directly; any other selection size shows the appropriate empty state.
 */
public void showPlugins(@NotNull List<? extends ListPluginComponent> selection) {
  if (selection.size() == 1) {
    showPlugin(selection.get(0), false);
  }
  else {
    // null component + multiSelection flag drives the empty-state message.
    showPlugin(null, selection.size() > 1);
  }
}
/** Shows the given component's plugin (or the "none selected" state when null). */
public void showPlugin(@Nullable ListPluginComponent component) {
  showPlugin(component, false);
}
/**
 * Shows a plugin from a list cell, loading marketplace details asynchronously
 * when they are not available yet.
 *
 * @param component      the selected list cell, or null when nothing is selected
 * @param multiSelection true when multiple cells are selected (affects the empty state)
 */
private void showPlugin(@Nullable ListPluginComponent component, boolean multiSelection) {
  // No-op when the same component (with the same pending update) is already shown.
  if (myShowComponent == component && (component == null || myUpdateDescriptor == component.myUpdateDescriptor)) {
    return;
  }
  myShowComponent = component;
  // Detach any install/update progress bound to the previously shown plugin.
  if (myIndicator != null) {
    MyPluginModel.removeProgress(myPlugin, myIndicator);
    hideProgress(false, false);
  }
  if (component == null) {
    myPlugin = myUpdateDescriptor = null;
    select(1, true);
    setEmptyState(multiSelection ? EmptyState.MULTI_SELECT : EmptyState.NONE_SELECTED);
  }
  else {
    boolean syncLoading = true;
    IdeaPluginDescriptor descriptor = component.getPluginDescriptor();
    if (descriptor instanceof PluginNode) {
      PluginNode node = (PluginNode)descriptor;
      if (!node.detailsLoaded()) {
        // Marketplace details not loaded yet: show the spinner and fetch them
        // on a background thread.
        syncLoading = false;
        startLoading();
        ProcessIOExecutorService.INSTANCE.execute(() -> {
          PluginNode pluginNode = MarketplaceRequests.getInstance().loadPluginDetails(node);
          if (pluginNode == null) {
            return;
          }
          component.setPluginDescriptor(pluginNode);
          ApplicationManager.getApplication().invokeLater(() -> {
            // Guard against a stale result: only apply it if this component is
            // still the one being shown.
            if (myShowComponent == component) {
              stopLoading();
              showPluginImpl(component.getPluginDescriptor(), component.myUpdateDescriptor);
              PluginManagerUsageCollector.pluginCardOpened(component.getPluginDescriptor(), component.getGroup());
            }
          }, ModalityState.stateForComponent(component));
        });
      }
    }
    if (syncLoading) {
      showPluginImpl(component.getPluginDescriptor(), component.myUpdateDescriptor);
      PluginManagerUsageCollector.pluginCardOpened(component.getPluginDescriptor(), component.getGroup());
    }
  }
}
/**
 * Applies a plugin (and optional update) to the page and switches to the details card.
 *
 * @param pluginDescriptor the plugin to display
 * @param updateDescriptor an available update, or null; dropped if org policy disallows it
 */
public void showPluginImpl(@NotNull IdeaPluginDescriptor pluginDescriptor, @Nullable IdeaPluginDescriptor updateDescriptor) {
  myPlugin = pluginDescriptor;
  PluginManagerFilters org = PluginManagerFilters.getInstance();
  // Only keep the update if org policy allows it; also record whether the plugin
  // itself is allowed (drives the warning banner).
  myUpdateDescriptor = updateDescriptor != null && org.isPluginAllowed(!myMarketplace, updateDescriptor) ? updateDescriptor : null;
  myIsPluginAllowed = org.isPluginAllowed(!myMarketplace, pluginDescriptor);
  showPlugin();
  select(0, true);
}
/** States of the empty panel shown when no plugin details are displayed. */
private enum EmptyState {
  // No plugin selected in the list.
  NONE_SELECTED,
  // More than one plugin selected.
  MULTI_SELECT,
  // Plugin details are being loaded; the spinner is shown.
  PROGRESS
}
/**
 * Resets the empty panel and renders the placeholder matching the given state
 * (hint text, or the animated loading icon for {@link EmptyState#PROGRESS}).
 */
private void setEmptyState(EmptyState emptyState) {
  StatusText statusText = myEmptyPanel.getEmptyText();
  statusText.clear();
  myLoadingIcon.setVisible(false);
  myLoadingIcon.suspend();
  switch (emptyState) {
    case NONE_SELECTED:
      statusText.setText(IdeBundle.message("plugins.configurable.plugin.details"));
      break;
    case MULTI_SELECT:
      statusText.setText(IdeBundle.message("plugins.configurable.several.plugins"));
      statusText.appendSecondaryText(IdeBundle.message("plugins.configurable.one.plugin.details"), StatusText.DEFAULT_ATTRIBUTES, null);
      break;
    case PROGRESS:
      myLoadingIcon.setVisible(true);
      myLoadingIcon.resume();
      break;
  }
}
/**
 * Renders the currently selected plugin ({@code myPlugin}) into the details page:
 * name, icon, buttons, version, tags, marketplace stats, vendor, license, home page,
 * date, description and change notes.
 */
private void showPlugin() {
@NlsSafe String text = "<html><span>" + myPlugin.getName() + "</span></html>";
myNameComponent.setText(text);
// Banner shown when the plugin is blocked by organization policy.
myControlledByOrgNotification.setVisible(!myIsPluginAllowed);
updateIcon();
updateButtons();
String version = myPlugin.getVersion();
if (myPlugin.isBundled() && !myPlugin.allowBundledUpdate()) {
version = IdeBundle.message("plugin.version.bundled") + (Strings.isEmptyOrSpaces(version) ? "" : " " + version);
}
if (myUpdateDescriptor != null) {
// When an update is pending, show the combined current/new version text.
version = NewUiUtil.getVersion(myPlugin, myUpdateDescriptor);
}
myVersion.setText(version);
myVersionSize.setText(version);
// Reserve a slightly wider preferred size than the measured text to avoid clipping.
myVersion
.setPreferredSize(
new Dimension(myVersionSize.getPreferredSize().width + JBUIScale.scale(4), myVersionSize.getPreferredSize().height));
myVersion.setVisible(!Strings.isEmptyOrSpaces(version));
myTagPanel.setTags(PluginManagerConfigurable.getTags(myPlugin));
if (myMarketplace) {
// Rating/downloads/size are only available for marketplace PluginNode descriptors.
String rating = null;
String downloads = null;
String size = null;
if (myPlugin instanceof PluginNode) {
PluginNode pluginNode = (PluginNode)myPlugin;
rating = pluginNode.getPresentableRating();
downloads = pluginNode.getPresentableDownloads();
size = pluginNode.getPresentableSize();
}
myRating.setText(rating);
myRating.setVisible(rating != null);
myDownloads.setText(downloads);
myDownloads.setVisible(downloads != null);
mySize.setText(IdeBundle.message("plugins.configurable.size.0", size));
mySize.setVisible(size != null);
}
else {
updateEnabledForProject();
}
// Bundled plugins do not show vendor information.
String vendor = myPlugin.isBundled() ? null : Strings.trim(myPlugin.getVendor());
String organization = myPlugin.isBundled() ? null : Strings.trim(myPlugin.getOrganization());
if (Strings.isEmptyOrSpaces(vendor)) {
myAuthor.hide();
}
else {
if (Strings.isEmptyOrSpaces(organization)) {
myAuthor.show(vendor, null);
}
else {
// Organization names become a clickable search link; names with spaces are quoted.
myAuthor.show(organization, () -> mySearchListener.linkSelected(
null,
SearchWords.ORGANIZATION.getValue() +
(organization.indexOf(' ') == -1 ? organization : '\"' + organization + "\"")
));
}
}
showLicensePanel();
// The home-page link only applies to plugins actually hosted on the marketplace.
if (myPlugin.isBundled() && !myPlugin.allowBundledUpdate() || !isPluginFromMarketplace()) {
myHomePage.hide();
}
else {
myHomePage.show(IdeBundle.message(
"plugins.configurable.plugin.homepage.link"),
() -> {
String url = ((ApplicationInfoEx)ApplicationInfo.getInstance()).getPluginManagerUrl() +
MARKETPLACE_LINK +
URLUtil.encodeURIComponent(myPlugin.getPluginId().getIdString());
BrowserUtil.browse(IdeUrlTrackingParametersProvider.getInstance().augmentUrl(url));
});
}
// Prefer the pending update's date when available.
IdeaPluginDescriptor pluginNode = myUpdateDescriptor != null ? myUpdateDescriptor : myPlugin;
String date = pluginNode instanceof PluginNode ?
((PluginNode)pluginNode).getPresentableDate() :
null;
myDate.setText(date);
myDate.setVisible(date != null);
myBottomScrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
String description = getDescription();
// Only re-render the description HTML when it actually changed (resets the caret to top).
if (description != null && !description.equals(myDescription)) {
myDescription = description;
myDescriptionComponent.setText(XmlStringUtil.wrapInHtml(description));
if (myDescriptionComponent.getCaret() != null) {
myDescriptionComponent.setCaretPosition(0);
}
}
myDescriptionComponent.setVisible(description != null);
myChangeNotesPanel.show(getChangeNotes());
// Scroll the details pane back to the top after the layout settles.
ApplicationManager.getApplication().invokeLater(() -> {
IdeEventQueue.getInstance().flushQueue();
((JBScrollBar)myBottomScrollPane.getVerticalScrollBar()).setCurrentValue(0);
}, ModalityState.any());
if (MyPluginModel.isInstallingOrUpdate(myPlugin)) {
showProgress();
}
else {
fullRepaint();
}
}
/**
 * Tells whether the current plugin is known to the Marketplace. Falls back to
 * {@code true} while the cached id set has not been loaded yet.
 */
private boolean isPluginFromMarketplace() {
  assert myPlugin != null;
  PluginInfoProvider provider = PluginInfoProvider.getInstance();
  Set<PluginId> cachedIds = provider.loadCachedPlugins();
  if (cachedIds == null) {
    // Cache miss: trigger a load for next time and optimistically show the title anyway.
    provider.loadPlugins();
    return true;
  }
  return cachedIds.contains(myPlugin.getPluginId());
}
/**
 * Shows or hides the license panel depending on whether the plugin (or its pending
 * update) is paid/freemium, and on the local license state for installed paid plugins.
 */
private void showLicensePanel() {
String productCode = myPlugin.getProductCode();
// Bundled plugins and EA2 products never show licensing UI.
if (myPlugin.isBundled() || LicensePanel.isEA2Product(productCode)) {
myLicensePanel.hideWithChildren();
return;
}
if (productCode == null) {
// Free plugin whose pending update becomes paid/freemium: warn and offer purchase.
if (myUpdateDescriptor != null && myUpdateDescriptor.getProductCode() != null &&
!LicensePanel.isEA2Product(myUpdateDescriptor.getProductCode())) {
String message;
if (myUpdateDescriptor instanceof PluginNode && ((PluginNode)myUpdateDescriptor).getTags().contains(Tags.Freemium.name())) {
message = IdeBundle.message("label.next.plugin.version.is.freemium");
} else {
message = IdeBundle.message("label.next.plugin.version.is.paid.use.the.trial.for.up.to.30.days.or");
}
myLicensePanel.setText(message, true, false);
myLicensePanel.showBuyPlugin(() -> myUpdateDescriptor);
myLicensePanel.setVisible(true);
}
else {
myLicensePanel.hideWithChildren();
}
}
else if (myMarketplace) {
// Paid/freemium plugin on the Marketplace tab: advertise the trial or limited free mode.
String message;
if (myPlugin instanceof PluginNode && ((PluginNode)myPlugin).getTags().contains(Tags.Freemium.name())) {
message = IdeBundle.message("label.install.a.limited.functionality.for.free");
} else {
message = IdeBundle.message("label.use.the.trial.for.up.to.30.days.or");
}
myLicensePanel.setText(message, false, false);
myLicensePanel.showBuyPlugin(() -> myPlugin);
myLicensePanel.setVisible(true);
}
else {
// Installed paid plugin: reflect the actual local license/confirmation state.
LicensingFacade instance = LicensingFacade.getInstance();
if (instance == null) {
myLicensePanel.hideWithChildren();
return;
}
String stamp = instance.getConfirmationStamp(productCode);
if (stamp == null) {
// EAP builds do not require a license; just annotate the tag tooltip.
if (ApplicationManager.getApplication().isEAP()) {
myTagPanel.setFirstTagTooltip(IdeBundle.message("tooltip.license.not.required.for.eap.version"));
myLicensePanel.hideWithChildren();
return;
}
myLicensePanel.setText(IdeBundle.message("label.text.plugin.no.license"), true, false);
}
else {
myLicensePanel.setTextFromStamp(stamp, instance.getExpirationDate(productCode));
}
myTagPanel.setFirstTagTooltip(myLicensePanel.getMessage());
//myLicensePanel.setLink("Manage licenses", () -> { XXX }, false);
myLicensePanel.setVisible(true);
}
}
/**
 * Recomputes visibility/enabled state of the Restart/Install/Update/gear buttons from
 * the plugin's installation state and the organization policy.
 */
public void updateButtons() {
// Plugins blocked by organization policy expose no actions at all.
if (!myIsPluginAllowed) {
myRestartButton.setVisible(false);
myInstallButton.setVisible(false);
myUpdateButton.setVisible(false);
myGearButton.setVisible(false);
return;
}
boolean installedWithoutRestart = InstalledPluginsState.getInstance().wasInstalledWithoutRestart(myPlugin.getPluginId());
if (myMarketplace) {
boolean installed = InstalledPluginsState.getInstance().wasInstalled(myPlugin.getPluginId());
myRestartButton.setVisible(installed);
myInstallButton.setEnabled(PluginManagerCore.getPlugin(myPlugin.getPluginId()) == null && !installedWithoutRestart,
IdeBundle.message("plugins.configurable.installed"));
myInstallButton.setVisible(!installed);
myUpdateButton.setVisible(false);
myGearButton.setVisible(false);
}
else {
myInstallButton.setVisible(false);
boolean uninstalled = myPlugin instanceof IdeaPluginDescriptorImpl && ((IdeaPluginDescriptorImpl)myPlugin).isDeleted();
boolean uninstalledWithoutRestart = InstalledPluginsState.getInstance().wasUninstalledWithoutRestart(myPlugin.getPluginId());
if (!uninstalled) {
// A pending install or update also requires a restart, same as an uninstall.
InstalledPluginsState pluginsState = InstalledPluginsState.getInstance();
PluginId id = myPlugin.getPluginId();
uninstalled = pluginsState.wasInstalled(id) || pluginsState.wasUpdated(id);
}
if (uninstalled) {
if (uninstalledWithoutRestart) {
myRestartButton.setVisible(false);
myInstallButton.setVisible(true);
myInstallButton.setEnabled(false, IdeBundle.message("plugins.configurable.uninstalled"));
}
else {
myRestartButton.setVisible(true);
}
myUpdateButton.setVisible(false);
}
else {
myRestartButton.setVisible(false);
updateEnabledForProject();
myUpdateButton.setVisible(myUpdateDescriptor != null && !installedWithoutRestart);
}
myGearButton.setVisible(!uninstalled);
updateEnableForNameAndIcon();
updateErrors();
}
}
/** Refreshes the plugin icon using the current error list from the model. */
private void updateIcon() {
updateIcon(myPluginModel.getErrors(myPlugin));
}
/**
 * Refreshes the plugin icon, adding the error decoration on the Installed tab when the
 * given error list is non-empty.
 */
private void updateIcon(@NotNull List<? extends HtmlChunk> errors) {
  boolean showErrorMark = !myMarketplace && !errors.isEmpty();
  boolean iconEnabled = myMarketplace || myPluginModel.isEnabled(myPlugin);
  myIconLabel.setEnabled(iconEnabled);
  myIconLabel.setIcon(myPluginModel.getIcon(myPlugin, true, showErrorMark, false));
  myIconLabel.setDisabledIcon(myPluginModel.getIcon(myPlugin, true, showErrorMark, true));
}
/** Recomputes the plugin's error list and pushes it to the icon and the error component. */
private void updateErrors() {
  List<? extends HtmlChunk> currentErrors = myPluginModel.getErrors(myPlugin);
  updateIcon(currentErrors);
  myErrorComponent.setErrors(currentErrors, this::handleErrors);
}
/** Link handler for the error component: enables required plugins and refreshes the page. */
private void handleErrors() {
myPluginModel.enableRequiredPlugins(myPlugin);
updateIcon();
updateEnabledState();
fullRepaint();
}
/** Installs a one-line progress indicator next to the buttons for an install/update. */
public void showProgress() {
myIndicator = new OneLineProgressIndicator(false);
// Cancelling the indicator aborts the install (finishInstall with success == false).
myIndicator.setCancelRunnable(() -> myPluginModel.finishInstall(myPlugin, null, false, false, true));
myNameAndButtons.setProgressComponent(null, myIndicator.createBaselineWrapper());
MyPluginModel.addProgress(myPlugin, myIndicator);
fullRepaint();
}
/** Re-lays out, revalidates and repaints the whole details page. */
private void fullRepaint() {
doLayout();
revalidate();
repaint();
}
/** Hides the install progress indicator and repaints; on success also refreshes the buttons. */
public void hideProgress(boolean success) {
hideProgress(success, true);
}
/**
 * Drops the progress indicator from the buttons row.
 *
 * @param success whether the install finished successfully (triggers a buttons refresh)
 * @param repaint whether to repaint the page afterwards
 */
private void hideProgress(boolean success, boolean repaint) {
myIndicator = null;
myNameAndButtons.removeProgressComponent();
if (success) {
updateButtons();
}
if (repaint) {
fullRepaint();
}
}
/** Grays out the plugin name and icon when the plugin is disabled in the model. */
private void updateEnableForNameAndIcon() {
  boolean disabled = !myPluginModel.isEnabled(myPlugin);
  myNameComponent.setForeground(disabled ? ListPluginComponent.DisabledColor : null);
  myIconLabel.setEnabled(!disabled);
}
/** Refreshes name/icon/errors/per-project state after the enabled state changed (Installed tab only). */
public void updateEnabledState() {
if (myMarketplace || myPlugin == null) {
return;
}
updateEnableForNameAndIcon();
updateErrors();
updateEnabledForProject();
myUpdateButton.setVisible(myUpdateDescriptor != null);
fullRepaint();
}
/** Updates the per-project enable/disable label from the model's project-dependent state. */
private void updateEnabledForProject() {
  IdeaPluginDescriptor plugin = Objects.requireNonNull(myPlugin);
  ProjectDependentPluginEnabledState state = myPluginModel.getProjectDependentState(plugin);
  myEnabledForProject.setText(state.toString());
  myEnabledForProject.setIcon(state.getIcon());
}
/** Switches to the empty panel and shows the animated loading state. */
public void startLoading() {
select(1, true);
setEmptyState(EmptyState.PROGRESS);
fullRepaint();
}
/** Stops and hides the loading animation. */
public void stopLoading() {
myLoadingIcon.suspend();
myLoadingIcon.setVisible(false);
fullRepaint();
}
@Override
public void doLayout() {
super.doLayout();
// Keep the centered loading icon positioned after layout changes.
updateIconLocation();
}
@Override
public void paint(Graphics g) {
super.paint(g);
// Keep the centered loading icon positioned while painting.
updateIconLocation();
}
/** Repositions the loading icon within this component; no-op while it is hidden. */
private void updateIconLocation() {
  if (!myLoadingIcon.isVisible()) {
    return;
  }
  myLoadingIcon.updateLocation(this);
}
/** Returns the plugin description, or {@code null} when it is empty or whitespace-only. */
@Nullable
private @Nls String getDescription() {
  String descriptionText = myPlugin.getDescription();
  if (Strings.isEmptyOrSpaces(descriptionText)) {
    return null;
  }
  return descriptionText;
}
/**
 * Returns the change notes to display: the pending update's notes when present,
 * otherwise the installed plugin's notes; {@code null} when both are blank.
 */
@Nullable
@NlsSafe
private String getChangeNotes() {
  if (myUpdateDescriptor != null) {
    String updateNotes = myUpdateDescriptor.getChangeNotes();
    if (!Strings.isEmptyOrSpaces(updateNotes)) {
      return updateNotes;
    }
  }
  String installedNotes = myPlugin.getChangeNotes();
  return Strings.isEmptyOrSpaces(installedNotes) ? null : installedNotes;
}
/** Creates an enable/disable action targeting only this details page's plugin. */
private @NotNull SelectionBasedPluginModelAction.EnableDisableAction<PluginDetailsPageComponent> createEnableDisableAction(@NotNull PluginEnableDisableAction action) {
return new SelectionBasedPluginModelAction.EnableDisableAction<>(myPluginModel,
action,
false,
List.of(this),
PluginDetailsPageComponent::getPlugin);
}
/** Creates an uninstall action targeting only this details page's plugin. */
private @NotNull SelectionBasedPluginModelAction.UninstallAction<PluginDetailsPageComponent> createUninstallAction() {
return new SelectionBasedPluginModelAction.UninstallAction<>(myPluginModel,
false,
this,
List.of(this),
PluginDetailsPageComponent::getPlugin);
}
}
| |
package eu.kingconquest.conquest.core;
import eu.kingconquest.conquest.event.ObjectiveCreateEvent;
import eu.kingconquest.conquest.event.ObjectiveDeleteEvent;
import eu.kingconquest.conquest.util.*;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.entity.Player;
import java.util.ArrayList;
import java.util.UUID;
import static org.bukkit.Material.*;
/**
 * A capturable town objective bound to a {@link Kingdom}, optionally owning child
 * {@link Village} outposts. Instances register themselves in the static town registry
 * and on the map marker layer during construction.
 */
public class Town extends Objective{
/** Creates a town with a generated unique id (delegates with {@code uniqueID == null}). */
public Town(String name, Location loc, Location spawn, Kingdom owner){
this(name
,null
,loc
,spawn
,owner);
}
/**
 * Creates a town. A {@code null} owner falls back to the world's Neutral kingdom.
 * The new town is added to the registry and its map marker is created/updated.
 */
public Town(String name, String uniqueID, Location loc, Location spawn, Kingdom owner){
super(name, loc, spawn, uniqueID);
if (Validate.notNull(owner))
setOwner(owner);
else
setOwner(Kingdom.getNeutral(getWorld()));
addTown(this);
Marker.update(this);
Marker.setDescription(this);
}
//Getters
/**
 * Returns the child villages (outposts) bound to this town (live list, not a copy).
 */
public ArrayList<Village> getChildren(){
return children;
}
//Setters
/**
 * Resets this town's owner to the Neutral kingdom and refreshes the glass and map marker.
 */
// NOTE(review): the constructor resolves neutral via Kingdom.getNeutral(world) while this
// uses Kingdom.getKingdom("Neutral", world) — confirm both resolve to the same kingdom.
public void setNeutral(){
setOwner(Kingdom.getKingdom("Neutral", getWorld()));
updateGlass();
Marker.update(this);
}
// Global registry of all towns across worlds.
private static ArrayList<Town> towns = new ArrayList<>();
// Villages (outposts) bound to this town.
private ArrayList<Village> children = new ArrayList<>();
/** Returns all registered towns located in the given world. */
public static ArrayList<Town> getTowns(World world) {
ArrayList<Town> towns = new ArrayList<>();
Town.getTowns().stream()
.filter(town->town.getWorld().equals(world))
.forEach(towns::add);
return towns;
}
/**
 * Looks a town up by UUID string and world; returns {@code null} when not found.
 */
// NOTE(review): despite the parameter name, this expects a UUID string; UUID.fromString
// throws IllegalArgumentException for a plain town name — confirm callers pass UUIDs.
public static Town getTown(String name, World world) {
for (Town town : getTowns())
if (town.getUUID().equals(UUID.fromString(name))
&& town.getWorld().equals(world))
return town;
return null;
}
/** Returns all towns with the given display name in the given world. */
public static ArrayList<Town> getTowns(String name, World world) {
ArrayList<Town> townect = new ArrayList<>();
for (Town town : getTowns())
if (town.getName().equals(name)
&& town.getWorld().equals(world))
townect.add(town);
return townect;
}
/** Adds a batch of towns to the registry. */
public static void addTowns(ArrayList<Town> tempTowns) {
towns.addAll(tempTowns);
}
/** Returns the global town registry (live list, not a copy). */
public static ArrayList<Town> getTowns() {
return towns;
}
/** Removes a batch of towns from the registry. */
public static void removeTowns(ArrayList<Town> tempTowns) {
towns.removeAll(tempTowns);
}
/** Looks a town up by UUID within a world; returns {@code null} when not found. */
public static Town getTown(UUID ID, World world) {
for (Town town : getTowns(world))
if (town.getUUID().equals(ID)
&& town.getWorld().equals(world))
return town;
return null;
}
/** Whether any towns are registered. */
public static boolean hasTowns() {
return towns.size() != 0;
}
/** Clears every town's child list and empties the registry. */
public static void clear() {
Town.getTowns().forEach(town -> town.children.clear());
towns.clear();
}
/** Registers a single town. */
public static void addTown(Town town) {
towns.add(town);
}
/**
 * Whether this town has any child villages.
 */
public boolean hasChildren() {
return children.size() != 0;
}
/**
 * Binds a village (outpost) to this town.
 */
public void addChild(Village village) {
children.add(village);
}
/** Unregisters a single town. */
public static void removeTown(Town town) {
towns.remove(town);
}
/**
 * Binds a list of villages to this town.
 */
// NOTE(review): this REPLACES the current child list rather than appending — existing
// children are dropped; confirm that is intended given the method name.
public void addChildren(ArrayList<Village> villages) {
children = villages;
}
/**
 * Unbinds a village (outpost) from this town.
 */
public void removeChild(Village village){
children.remove(village);
}
/**
 * Builds the town beacon structure at the player's location after validating the distance
 * to other objectives and the name's uniqueness. Returns {@code false} (and unregisters
 * this town) when the location is too close to another objective or on unexpected errors.
 */
@Override
public boolean create(Player player){
try{
for (Objective objective : Objective.getObjectives(getWorld())){
if (objective.equals(this))
continue;
// Enforce a 20-block exclusion zone around existing objectives.
if (Validate.isWithinArea(player.getLocation(), objective.getLocation(), 20.0d, 20.0d, 20.0d)){
new Message(player, MessageType.CHAT, "{ToClose}");
removeTown(this);
return false;
}
}
// > 1 because this town was already registered by the constructor.
if (getTowns(getName(), getWorld()).size() > 1)
new Message(player, MessageType.CHAT, "{AlreadyExists}");
setOwner(Kingdom.getKingdom("Neutral", getWorld()));
Location loc = player.getLocation().clone();
int rows = 5;
loc.setY(loc.getY() - 3);
// NOTE(review): rows / 2 is integer division (5 / 2 == 2), so Math.ceil is a no-op and the
// offset is always -(2 + 1) = -3. If ceil(rows / 2.0) was intended the base would shift by
// one block; delete() uses the same formula, so the two currently stay consistent.
loc.setX(loc.getX() - Math.ceil((rows / 2)) - 1);
loc.setZ(loc.getZ() - Math.ceil((rows / 2)) - 1);
//Set Iron Blocks! 5x5 area
setBeaconBase(rows, loc.clone(), IRON_BLOCK);
loc = player.getLocation().clone();
loc.setY(loc.getY() - 1);
loc.setX(loc.getX() - Math.ceil((rows / 2)) - 1);
loc.setZ(loc.getZ() - Math.ceil((rows / 2)) - 1);
//Set Upper Blocks! 5x5 area
setBeaconBase(rows, loc.clone(), Material.QUARTZ_SLAB);
updateGlass();
Bukkit.getPluginManager().callEvent(new ObjectiveCreateEvent(player, this));
Cach.StaticTown = this;
new Message(player, MessageType.CHAT, "{TownCreated}");
return true;
}catch (Exception e){
e.printStackTrace();
return false;
}
}
/**
 * Removes the town's beacon structure (restores the blocks to AIR), fires the delete
 * event and unregisters the town and its map marker.
 */
@Override
public boolean delete(Player player){
try{
Location loc = getLocation().clone();
// Clear the two plus-shaped layers built at/under the objective location.
for (int y = 0; y <= 1; y++){
loc.setY(loc.getY() - 1);
setBlock(loc, AIR);
loc.setX(loc.getX() - 1);
setBlock(loc, AIR);
loc.setX(loc.getX() + 1);
loc.setZ(loc.getZ() - 1);
setBlock(loc, AIR);
loc.setZ(loc.getZ() + 1);
loc.setZ(loc.getZ() + 1);
setBlock(loc, AIR);
loc.setZ(loc.getZ() - 1);
loc.setX(loc.getX() + 1);
setBlock(loc, AIR);
loc.setX(loc.getX() - 1);
}
setBase(loc, AIR);
int rows = 5;
loc = getLocation().clone();
loc.setY(loc.getY() - 4);
// NOTE(review): same integer-division ceil no-op as in create(); keep the two in sync.
loc.setX(loc.getX() - Math.ceil((rows / 2)) - 1);
loc.setZ(loc.getZ() - Math.ceil((rows / 2)) - 1);
//Set Iron Blocks! 5x5 area
setBeaconBase(rows, loc, AIR);
loc = getLocation().clone();
loc.setY(loc.getY() - 1);
loc.setX(loc.getX() - Math.ceil((rows / 2)) - 1);
loc.setZ(loc.getZ() - Math.ceil((rows / 2)) - 1);
//Set Upper Blocks! 5x5 area
setBeaconBase(rows, loc, AIR);
Bukkit.getPluginManager().callEvent(new ObjectiveDeleteEvent(player, this));
new Message(player, MessageType.CHAT, "{TownDeleted}");
removeTown(this);
Marker.remove(this);
return true;
}catch (Exception e){
e.printStackTrace();
return false;
}
}
/**
 * Rebuilds the plus-shaped white glass layer one block below the objective location and
 * places the beacon base beneath it.
 */
@Override
public void updateGlass() {
Location loc = getLocation().clone();
loc.setY(loc.getY() - 1);
setBlock(loc, Material.WHITE_STAINED_GLASS);
loc.setX(loc.getX() - 1);
setBlock(loc, Material.WHITE_STAINED_GLASS);
loc.setX(loc.getX() + 1);
loc.setZ(loc.getZ() - 1);
setBlock(loc, Material.WHITE_STAINED_GLASS);
loc.setZ(loc.getZ() + 1);
loc.setZ(loc.getZ() + 1);
setBlock(loc, Material.WHITE_STAINED_GLASS);
loc.setZ(loc.getZ() - 1);
loc.setX(loc.getX() + 1);
setBlock(loc, Material.WHITE_STAINED_GLASS);
loc.setX(loc.getX() - 1);
setBase(loc, BEACON);
}
/**
 * Places a plus-shaped layer of the given material one block below {@code loc} (center plus
 * the four cardinal neighbours). Mutates {@code loc}: X/Z are restored afterwards, Y is not.
 */
private void setBase(Location loc, Material block){
loc.setY(loc.getY() - 1);
setBlock(loc, block);
loc.setX(loc.getX() - 1);
setBlock(loc, block);
loc.setX(loc.getX() + 1);
loc.setZ(loc.getZ() - 1);
setBlock(loc, block);
loc.setZ(loc.getZ() + 1);
loc.setZ(loc.getZ() + 1);
setBlock(loc, block);
loc.setZ(loc.getZ() - 1);
loc.setX(loc.getX() + 1);
setBlock(loc, block);
loc.setX(loc.getX() - 1);
}
}
| |
package de.mpii.gsm.lash.encoders;
import java.io.IOException;
import de.mpii.gsm.taxonomy.Taxonomy;
/**
 * Base class for gap encoders: breaks an input transaction into subsequences separated
 * by gaps larger than gamma, generalizing items through a {@link Taxonomy} and optionally
 * dropping items that cannot reach a pivot within lambda hops. Concrete output handling
 * (items, gaps, commit/rollback) is delegated to subclasses.
 */
public abstract class BaseGapEncoder {

    // -- variables ------------------------------------------------------------

    /** Maximum gap allowed within a subsequence. */
    protected int gamma;

    /** Maximum length of frequent subsequences. */
    protected int lambda;

    /**
     * Left reachability: minimum number of hops needed to reach a pivot to the left
     * of the item at each position.
     */
    protected int[] leftHops;

    /**
     * Right reachability: minimum number of hops needed to reach a pivot to the right
     * of the item at each position.
     */
    protected int[] rightHops;

    // Auxiliary state shared by successive updateHops() calls:
    /** Distance from the pivot of the last reachable item. */
    protected int lastPdist = -1;
    /** Number of hops needed to reach that item. */
    protected int lastHops = -1;
    /** Distance from the pivot of the last reachable item with smaller distance. */
    protected int prevPdist = -1;

    /** Minimum allowed subsequence length. */
    protected final int MINIMUM_LENGTH = 2;

    /** Whether unreachable items are removed. */
    protected boolean removeUnreachable = true;

    /**
     * Whether gaps are compressed (multiple consecutive gaps grouped together,
     * leading and trailing gaps removed).
     */
    protected boolean compressGaps = true;

    /** Taxonomy used to generalize items to ancestors within the pivot range. */
    Taxonomy taxonomy;

    // -- construction ---------------------------------------------------------

    /**
     * Creates a new gap encoder.
     *
     * @param gamma maximum gap allowed within a subsequence
     * @param lambda maximum length of frequent subsequences
     * @param compressGaps whether consecutive gaps are merged and boundary gaps dropped
     * @param removeUnreachable whether items that cannot reach a pivot are dropped
     * @throws IllegalStateException when removeUnreachable is requested without gap compression
     */
    public BaseGapEncoder(int gamma, int lambda, boolean compressGaps,
            boolean removeUnreachable) {
        this.gamma = gamma;
        this.lambda = lambda;
        this.compressGaps = compressGaps;
        this.removeUnreachable = removeUnreachable;
        // Removing unreachable items only works on compressed-gap output.
        if (this.removeUnreachable && !this.compressGaps) {
            throw new IllegalStateException();
        }
        // Reachability arrays start small and are grown on demand in encode().
        if (this.removeUnreachable) {
            leftHops = new int[100];
            rightHops = new int[100];
        }
    }

    /** Creates a new gap encoder with gap compression and unreachable-item removal enabled. */
    public BaseGapEncoder(int gamma, int lambda) {
        this(gamma, lambda, true, true);
    }

    /** Sets the taxonomy used to generalize items. */
    public void setTaxonomy(Taxonomy taxonomy) {
        this.taxonomy = taxonomy;
    }

    // -- encoding -------------------------------------------------------------

    /**
     * Breaks the input transaction into multiple subsequences separated by a gap larger
     * than gamma. All items larger than endItem (after taxonomy generalization) are
     * treated as gaps of length 1. Multiple consecutive gaps can be grouped together,
     * and leading and trailing gaps of a subsequence are not encoded. Every committed
     * subsequence is guaranteed to contain at least {@link #MINIMUM_LENGTH} items, at
     * least one of which is a pivot (i.e., in [beginItem, endItem]). For long
     * transactions, minPivotOffset and maxPivotOffset limit the scanned window.
     *
     * A call to this method appends to the current output; use {@code finalize()} after
     * processing an entire transaction.
     *
     * @param transaction long input transaction (negative entries encode pre-compressed gaps)
     * @param minPivotOffset the smallest offset of a pivot item (default 0)
     * @param maxPivotOffset the largest offset of a pivot item (default transaction.length - 1)
     * @param beginItem items in [beginItem, endItem] are pivot items
     * @param endItem items > endItem are irrelevant (i.e., treated as gaps)
     * @param append whether to append to an already-open partition (skips the partition id)
     * @param finalize whether to call {@code finalize()} after encoding
     *
     * @throws IOException on output errors from the subclass writers
     */
    public void encode(int[] transaction, int minPivotOffset,
            int maxPivotOffset, int beginItem, int endItem, boolean append,
            boolean finalize) throws IOException {
        // Whether prevPdist/lastPdist must be reset because a new hop computation starts.
        boolean reset = true;

        // A reachable item can be at most (lambda - 1) * (gamma + 1) positions from a pivot.
        int maxHops = (lambda - 1) * (gamma + 1);
        int len = transaction.length;

        // Only the window [leftMostReachablePos, rightMostReachablePos] can contribute output.
        int leftMostReachablePos = (minPivotOffset - maxHops >= 0) ? minPivotOffset
                - maxHops
                : 0;
        int rightMostReachablePos = (maxPivotOffset + maxHops < len) ? maxPivotOffset
                + maxHops
                : len - 1;

        if (removeUnreachable) {
            // Grow the hop arrays when the transaction does not fit.
            if (leftHops.length <= len) {
                increaseHopsLength(len);
            }

            // Scan right-to-left first to compute right reachability.
            int pdist = Integer.MAX_VALUE; // distance from the pivot to the right (not hops)
            for (int pos = rightMostReachablePos; pos >= leftMostReachablePos
                    && pos >= 0; pos--) {
                int item = transaction[pos];

                // An item is relevant when it, or one of its taxonomy ancestors, lies in
                // (0, endItem]; the item is generalized to that ancestor as a side effect.
                boolean isRelevant = false;
                if (item > 0) {
                    if (item <= endItem) {
                        isRelevant = true;
                    }
                    else {
                        while (taxonomy.hasParent(item)) {
                            item = taxonomy.getParent(item);
                            if (item <= endItem) {
                                isRelevant = true;
                                break;
                            }
                        }
                    }
                }
                boolean isPivot = isRelevant && item >= beginItem;

                updateHops(pos, pdist, isPivot, isRelevant, reset, rightHops);
                reset = false;

                // Update the pivot distance for the next item; negative items encode
                // pre-compressed gaps of length -item.
                if (item < 0) {
                    pdist += -item;
                } else if (isPivot) {
                    pdist = 1;
                } else {
                    pdist++;
                }
            }

            // Reset again: the left-to-right hop computation starts in the main loop.
            reset = true;
        }

        // Auxiliary state for the main loop.
        int gap = 0;                      // current size of the gap in the input
        int uncommittedItems = 0;         // how many items are uncommitted?
        boolean uncommittedPivot = false; // is there an uncommitted pivot?

        // First write the partition identifier (unless appending to an open partition).
        if (!append)
            writePartitionId();

        // Main loop: write items until the end of a subsequence (a gap > gamma);
        // commit when the output conditions hold, otherwise roll back.
        int pdist = Integer.MAX_VALUE;
        for (int position = leftMostReachablePos; position <= rightMostReachablePos; position++) {
            int item = transaction[position];

            // Same relevance/generalization logic as in the right-to-left scan above.
            // (Kept inline rather than extracted: the taxonomy walk replaces `item` with
            // the matched ancestor, which the code below relies on.)
            boolean isRelevant = false;
            if (item > 0) {
                if (item <= endItem) {
                    isRelevant = true;
                }
                else {
                    while (taxonomy.hasParent(item)) {
                        item = taxonomy.getParent(item);
                        if (item <= endItem) {
                            isRelevant = true;
                            break;
                        }
                    }
                }
            }
            boolean isPivot = isRelevant && item >= beginItem;

            boolean isReachable = true;
            if (removeUnreachable) {
                // Compute left hops while scanning forward.
                updateHops(position, pdist, isPivot, isRelevant, reset,
                        leftHops);
                reset = false;

                if (item < 0) {
                    pdist += -item;
                } else if (isPivot) {
                    pdist = 1;
                } else {
                    pdist++;
                }

                // Reachable iff some pivot is fewer than lambda hops away on either side.
                isReachable = Math.min(leftHops[position], rightHops[position]) < lambda;
            }

            if (!isRelevant) { // irrelevant item, i.e. a gap
                // Skip gaps at the beginning of a subsequence (when compressing) and
                // gaps at unreachable positions.
                if ((uncommittedItems == 0 && compressGaps) || !isReachable) {
                    continue;
                }

                // The gap grows by 1, or by the encoded length for input gaps.
                if (item < 0) {
                    gap += -item;
                } else {
                    gap++;
                }

                // A gap larger than gamma ends the current subsequence.
                if (gap > gamma && removeUnreachable) {
                    if (uncommittedPivot && uncommittedItems > 1) {
                        commit();
                    } else {
                        rollback();
                    }

                    // Start a new subsequence.
                    gap = 0;
                    pdist = Integer.MAX_VALUE;
                    uncommittedPivot = false;
                    uncommittedItems = 0;
                }
            } else { // relevant item
                // Drop unreachable items.
                if (!isReachable) {
                    continue;
                }

                // Flush any pending gap before the item.
                if (gap > 0) {
                    if (compressGaps) {
                        writeGap(gap);
                    } else {
                        // No compression: each unit gap is written separately.
                        writeGapUncompressed(gap);
                    }
                    gap = 0;
                }

                // Append the item together with its pivot flag.
                uncommittedPivot |= isPivot;
                writeItem(item, isPivot);
                uncommittedItems++;
            }
        }

        // Commit or roll back the trailing subsequence.
        if (!compressGaps) {
            if (gap > 0) {
                writeGapUncompressed(gap);
                commit();
            } else if (uncommittedItems > 0) {
                commit();
            }
        } else if (!removeUnreachable) {
            if (uncommittedItems > 0) {
                commit();
            } else {
                rollback();
            }
        } else { // we compress gaps and remove unreachable items
            if (uncommittedPivot && uncommittedItems >= MINIMUM_LENGTH) {
                commit();
            } else {
                rollback();
            }
        }

        if (finalize)
            finalize();
    }

    // -- methods for computing reachability metric ----------------------------

    /**
     * Computes the minimum number of hops from position {@code pos} to the closest
     * pivot (direction depends on the hops array passed in).
     *
     * @param pos position of the current item in the sequence
     * @param pdist distance of the current item from the last pivot (counting gaps);
     *        ignored when {@code isPivot} is true
     * @param isPivot whether the current item is a pivot
     * @param isRelevant whether the current item is a relevant item
     * @param reset whether {@link #prevPdist} and {@link #lastPdist} should be reset
     * @param hops the hops array to update
     */
    private void updateHops(int pos, int pdist, boolean isPivot,
            boolean isRelevant, boolean reset, int[] hops) {
        if (reset) {
            prevPdist = -1; // last reachable item at lower distance
            lastPdist = -1; // last reachable item
        }
        if (isPivot) { // pivot item: zero hops, restart distances
            hops[pos] = 0;
            lastPdist = 0;
            prevPdist = 0;
            lastHops = 1;
        } else if (lastPdist == -1 || pdist - lastPdist > gamma + 1) {
            // Not reachable from the last reachable item.
            hops[pos] = Integer.MAX_VALUE;
        } else if (!isRelevant) {
            // Irrelevant items still carry a hop count so later items can build on it.
            hops[pos] = lastHops;
            if (pdist - prevPdist > gamma + 1) {
                hops[pos]++;
            }
        } else {
            if (pdist - prevPdist > gamma + 1) {
                prevPdist = lastPdist;
                lastHops++;
            }
            lastPdist = pdist;
            hops[pos] = lastHops;
        }
    }

    /** Increases the length of the arrays storing left/right hops. */
    private void increaseHopsLength(int length) {
        // The caller grows when leftHops.length <= len, so equality is a legal input here
        // (the previous assert used '>' and tripped under -ea when length == leftHops.length).
        assert length >= leftHops.length;
        int[] oldLeftHops = leftHops;
        int[] oldRightHops = rightHops;
        // Grow both arrays by the same 1.5x factor (the right array previously grew by 2x,
        // which was inconsistent with the left one and wasted memory).
        leftHops = new int[(length * 3) / 2];
        rightHops = new int[(length * 3) / 2];
        System.arraycopy(oldLeftHops, 0, leftHops, 0, oldLeftHops.length);
        System.arraycopy(oldRightHops, 0, rightHops, 0, oldRightHops.length);
    }

    /** Returns the maximum-gap parameter gamma. */
    public int gamma() {
        return gamma;
    }

    /** Returns the maximum-length parameter lambda. */
    public int lambda() {
        return lambda;
    }

    // -- abstract I/O methods -------------------------------------------------

    /** Append an identifier to the current subsequence (uncommitted write). */
    public abstract void writePartitionId() throws IOException;

    /** Append a gap (compressed) to the current subsequence (uncommitted write). */
    public abstract void writeGap(int gap) throws IOException;

    /**
     * Append {@code gap} unit gaps (uncompressed) to the current subsequence
     * (uncommitted write).
     */
    public void writeGapUncompressed(int gap) throws IOException {
        assert gap > 0;
        // Without compression each unit gap is written separately (gap integers total).
        for (int i = 0; i < gap; i++) {
            writeGap(1);
        }
    }

    /** Append an item to the current subsequence (uncommitted write). */
    public abstract void writeItem(int item, boolean isPivot)
            throws IOException;

    /** Commit the current subsequence and start a new one. */
    public abstract void commit() throws IOException;

    /** Rollback the current subsequence. */
    public abstract void rollback() throws IOException;

    /**
     * Finalize after all subsequences have been written. Guaranteed to be called
     * directly after commit() or rollback().
     *
     * NOTE(review): this overrides {@link Object#finalize()} (deprecated in recent
     * JDKs); renaming would break existing subclasses, so the name is kept.
     */
    public abstract void finalize() throws IOException;

    /** Start from scratch. */
    public abstract void clear() throws IOException;
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.query.continuous;
import java.nio.ByteBuffer;
import javax.cache.event.EventType;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.GridDirectTransient;
import org.apache.ignite.internal.managers.deployment.GridDeploymentInfo;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.CacheObject;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheDeployable;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.util.GridLongList;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.jetbrains.annotations.Nullable;
/**
* Continuous query entry.
*/
/**
 * Continuous query entry.
 * <p>
 * Carries a single cache update event (key, new/old value, event type, partition and
 * per-partition update counter) from the node where the update happened to the node that
 * registered the continuous query. Implements {@link Message}, so instances are serialized
 * field-by-field by the communication layer; the wire format is defined by
 * {@link #writeTo(ByteBuffer, MessageWriter)} / {@link #readFrom(ByteBuffer, MessageReader)}
 * and the field order there must stay stable across versions.
 */
public class CacheContinuousQueryEntry implements GridCacheDeployable, Message {
    /** */
    private static final long serialVersionUID = 0L;
    /** Flag bit: entry was produced on a backup node (see {@link #markBackup()}). */
    private static final byte BACKUP_ENTRY = 0b0001;
    /** Flag bit: entry was rejected by the remote filter and its payload stripped (see {@link #markFiltered()}). */
    private static final byte FILTERED_ENTRY = 0b0010;
    /** Cached {@link EventType#values()} to avoid allocating a new array on every ordinal lookup. */
    private static final EventType[] EVT_TYPE_VALS = EventType.values();
    /**
     * Converts a serialized ordinal back to an event type.
     *
     * @param ord Event type ordinal value.
     * @return Event type, or {@code null} if the ordinal is out of range (e.g. the
     *      {@code -1} written for a {@code null} event type).
     */
    @Nullable public static EventType eventTypeFromOrdinal(int ord) {
        return ord >= 0 && ord < EVT_TYPE_VALS.length ? EVT_TYPE_VALS[ord] : null;
    }
    /** Event type; serialized as its ordinal byte, {@code -1} when {@code null}. */
    private EventType evtType;
    /** Key. */
    @GridToStringInclude
    private KeyCacheObject key;
    /** New value. */
    @GridToStringInclude
    private CacheObject newVal;
    /** Old value. */
    @GridToStringInclude
    private CacheObject oldVal;
    /** Cache name. */
    private int cacheId;
    /** Deployment info. Transient: not sent over the wire. */
    @GridToStringExclude
    @GridDirectTransient
    private GridDeploymentInfo depInfo;
    /** Partition. */
    private int part;
    /** Update counter. */
    private long updateCntr;
    /** Flags. Bitmask of {@link #BACKUP_ENTRY} / {@link #FILTERED_ENTRY}. */
    private byte flags;
    /** Topology version at the time of the update, if applicable. */
    @GridToStringInclude
    private AffinityTopologyVersion topVer;
    /** Filtered events. Update counters of preceding entries that were filtered out. */
    private GridLongList filteredEvts;
    /** Keep binary. */
    private boolean keepBinary;
    /**
     * Required by {@link Message}.
     */
    public CacheContinuousQueryEntry() {
        // No-op.
    }
    /**
     * @param cacheId Cache ID.
     * @param evtType Event type.
     * @param key Key.
     * @param newVal New value.
     * @param oldVal Old value.
     * @param keepBinary Keep binary flag.
     * @param part Partition.
     * @param updateCntr Update partition counter.
     * @param topVer Topology version if applicable.
     */
    CacheContinuousQueryEntry(
        int cacheId,
        EventType evtType,
        KeyCacheObject key,
        @Nullable CacheObject newVal,
        @Nullable CacheObject oldVal,
        boolean keepBinary,
        int part,
        long updateCntr,
        @Nullable AffinityTopologyVersion topVer) {
        this.cacheId = cacheId;
        this.evtType = evtType;
        this.key = key;
        this.newVal = newVal;
        this.oldVal = oldVal;
        this.part = part;
        this.updateCntr = updateCntr;
        this.topVer = topVer;
        this.keepBinary = keepBinary;
    }
    /**
     * @return Topology version if applicable.
     */
    @Nullable AffinityTopologyVersion topologyVersion() {
        return topVer;
    }
    /**
     * @return Cache ID.
     */
    int cacheId() {
        return cacheId;
    }
    /**
     * @return Event type.
     */
    EventType eventType() {
        return evtType;
    }
    /**
     * @return Partition.
     */
    int partition() {
        return part;
    }
    /**
     * @return Update counter.
     */
    long updateCounter() {
        return updateCntr;
    }
    /**
     * Mark that entry create on backup.
     */
    void markBackup() {
        flags |= BACKUP_ENTRY;
    }
    /**
     * Mark that entry filtered. Also drops the payload (key, values, deployment info)
     * since a filtered entry only needs to carry its counter, saving network traffic.
     */
    void markFiltered() {
        flags |= FILTERED_ENTRY;
        newVal = null;
        oldVal = null;
        key = null;
        depInfo = null;
    }
    /**
     * @return {@code True} if entry sent by backup node.
     */
    boolean isBackup() {
        return (flags & BACKUP_ENTRY) != 0;
    }
    /**
     * @return {@code True} if entry was filtered.
     */
    boolean isFiltered() {
        return (flags & FILTERED_ENTRY) != 0;
    }
    /**
     * @return Keep binary flag.
     */
    boolean isKeepBinary() {
        return keepBinary;
    }
    /**
     * @param cntrs Filtered events.
     */
    void filteredEvents(GridLongList cntrs) {
        filteredEvts = cntrs;
    }
    /**
     * @return Previous filtered events as update counters, or {@code null} if none were recorded.
     */
    long[] filteredEvents() {
        return filteredEvts == null ? null : filteredEvts.array();
    }
    /**
     * Prepares key and values for marshalling before the entry is sent.
     *
     * @param cctx Cache context.
     * @throws IgniteCheckedException In case of error.
     */
    void prepareMarshal(GridCacheContext cctx) throws IgniteCheckedException {
        assert key != null;
        key.prepareMarshal(cctx.cacheObjectContext());
        if (newVal != null)
            newVal.prepareMarshal(cctx.cacheObjectContext());
        if (oldVal != null)
            oldVal.prepareMarshal(cctx.cacheObjectContext());
    }
    /**
     * Finishes unmarshalling of key and values after the entry is received.
     * Filtered entries carry no payload (see {@link #markFiltered()}), so there is nothing to unmarshal.
     *
     * @param cctx Cache context.
     * @param ldr Class loader.
     * @throws IgniteCheckedException In case of error.
     */
    void unmarshal(GridCacheContext cctx, @Nullable ClassLoader ldr) throws IgniteCheckedException {
        if (!isFiltered()) {
            key.finishUnmarshal(cctx.cacheObjectContext(), ldr);
            if (newVal != null)
                newVal.finishUnmarshal(cctx.cacheObjectContext(), ldr);
            if (oldVal != null)
                oldVal.finishUnmarshal(cctx.cacheObjectContext(), ldr);
        }
    }
    /**
     * @return Key.
     */
    KeyCacheObject key() {
        return key;
    }
    /**
     * @return New value.
     */
    CacheObject value() {
        return newVal;
    }
    /**
     * @return Old value.
     */
    CacheObject oldValue() {
        return oldVal;
    }
    /** {@inheritDoc} */
    @Override public void prepare(GridDeploymentInfo depInfo) {
        this.depInfo = depInfo;
    }
    /** {@inheritDoc} */
    @Override public GridDeploymentInfo deployInfo() {
        return depInfo;
    }
    /** {@inheritDoc} */
    @Override public byte directType() {
        // Unique direct-marshalling type code of this message; must not collide with other messages.
        return 96;
    }
    /** {@inheritDoc} */
    @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
        writer.setBuffer(buf);
        if (!writer.isHeaderWritten()) {
            if (!writer.writeHeader(directType(), fieldsCount()))
                return false;
            writer.onHeaderWritten();
        }
        // Deliberate fall-through: writer.state() resumes at the field where a previous
        // (buffer-full) attempt stopped, then continues writing the remaining fields in order.
        switch (writer.state()) {
            case 0:
                if (!writer.writeInt("cacheId", cacheId))
                    return false;
                writer.incrementState();
            case 1:
                // Event type is encoded as its ordinal; -1 stands for null.
                if (!writer.writeByte("evtType", evtType != null ? (byte)evtType.ordinal() : -1))
                    return false;
                writer.incrementState();
            case 2:
                if (!writer.writeMessage("filteredEvts", filteredEvts))
                    return false;
                writer.incrementState();
            case 3:
                if (!writer.writeByte("flags", flags))
                    return false;
                writer.incrementState();
            case 4:
                if (!writer.writeBoolean("keepBinary", keepBinary))
                    return false;
                writer.incrementState();
            case 5:
                if (!writer.writeMessage("key", key))
                    return false;
                writer.incrementState();
            case 6:
                if (!writer.writeMessage("newVal", newVal))
                    return false;
                writer.incrementState();
            case 7:
                if (!writer.writeMessage("oldVal", oldVal))
                    return false;
                writer.incrementState();
            case 8:
                if (!writer.writeInt("part", part))
                    return false;
                writer.incrementState();
            case 9:
                if (!writer.writeMessage("topVer", topVer))
                    return false;
                writer.incrementState();
            case 10:
                if (!writer.writeLong("updateCntr", updateCntr))
                    return false;
                writer.incrementState();
        }
        return true;
    }
    /** {@inheritDoc} */
    @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
        reader.setBuffer(buf);
        if (!reader.beforeMessageRead())
            return false;
        // Deliberate fall-through, mirroring writeTo(): fields must be read in exactly the
        // order they were written; reader.state() resumes after a partial read.
        switch (reader.state()) {
            case 0:
                cacheId = reader.readInt("cacheId");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 1:
                byte evtTypeOrd;
                evtTypeOrd = reader.readByte("evtType");
                if (!reader.isLastRead())
                    return false;
                // -1 (or any out-of-range ordinal) maps back to null.
                evtType = eventTypeFromOrdinal(evtTypeOrd);
                reader.incrementState();
            case 2:
                filteredEvts = reader.readMessage("filteredEvts");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 3:
                flags = reader.readByte("flags");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 4:
                keepBinary = reader.readBoolean("keepBinary");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 5:
                key = reader.readMessage("key");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 6:
                newVal = reader.readMessage("newVal");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 7:
                oldVal = reader.readMessage("oldVal");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 8:
                part = reader.readInt("part");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 9:
                topVer = reader.readMessage("topVer");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 10:
                updateCntr = reader.readLong("updateCntr");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
        }
        return reader.afterMessageRead(CacheContinuousQueryEntry.class);
    }
    /** {@inheritDoc} */
    @Override public byte fieldsCount() {
        return 11;
    }
    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(CacheContinuousQueryEntry.class, this);
    }
}
| |
package com.chimerapps.niddler.urlconnection;
import com.chimerapps.niddler.core.Niddler;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.ProtocolException;
import java.net.Proxy;
import java.net.URL;
import java.security.Permission;
import java.security.Principal;
import java.security.cert.Certificate;
import java.util.List;
import java.util.Map;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSocketFactory;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
/**
* @author Nicola Verbeeck
*/
/**
 * An {@link HttpsURLConnection} wrapper used by Niddler to observe HTTPS traffic.
 * <p>
 * TLS-specific calls (cipher suite, certificates, principals, hostname verifier,
 * socket factory) are forwarded directly to the wrapped {@link HttpsURLConnection};
 * everything else goes through a {@link DelegatingHttpUrlConnection}, which performs
 * the actual interception.
 *
 * @author Nicola Verbeeck
 */
class DelegatingHttpsUrlConnection extends HttpsURLConnection {

	/** Wrapped connection, used directly for TLS-level queries. */
	private final HttpsURLConnection delegate;
	/** HTTP-level delegate that performs the Niddler interception. */
	private final DelegatingHttpUrlConnection httpDelegate;

	public DelegatingHttpsUrlConnection(@NonNull final URL url,
			@NonNull final HttpsURLConnection delegate,
			@NonNull final Niddler niddler,
			@NonNull final NiddlerUrlConnectionHandler connectionHandler) {
		super(url);
		this.httpDelegate = new DelegatingHttpUrlConnection(url, delegate, niddler, connectionHandler);
		this.delegate = delegate;
	}

	public DelegatingHttpsUrlConnection(@NonNull final URL url,
			@NonNull final Niddler niddler,
			@NonNull final NiddlerUrlConnectionHandler connectionHandler) throws IOException {
		this(url, (HttpsURLConnection) url.openConnection(), niddler, connectionHandler);
	}

	public DelegatingHttpsUrlConnection(@NonNull final URL url,
			@NonNull final Proxy proxy,
			@NonNull final Niddler niddler,
			@NonNull final NiddlerUrlConnectionHandler connectionHandler) throws IOException {
		this(url, (HttpsURLConnection) url.openConnection(proxy), niddler, connectionHandler);
	}

	// --- TLS-level calls, forwarded to the raw HTTPS connection ---

	@Override
	public String getCipherSuite() {
		return delegate.getCipherSuite();
	}

	@Override
	public Certificate[] getLocalCertificates() {
		return delegate.getLocalCertificates();
	}

	@Override
	public Certificate[] getServerCertificates() throws SSLPeerUnverifiedException {
		return delegate.getServerCertificates();
	}

	@Override
	public Principal getPeerPrincipal() throws SSLPeerUnverifiedException {
		return delegate.getPeerPrincipal();
	}

	@Override
	public Principal getLocalPrincipal() {
		return delegate.getLocalPrincipal();
	}

	@Override
	public void setHostnameVerifier(final HostnameVerifier verifier) {
		delegate.setHostnameVerifier(verifier);
	}

	@Override
	public HostnameVerifier getHostnameVerifier() {
		return delegate.getHostnameVerifier();
	}

	@Override
	public void setSSLSocketFactory(final SSLSocketFactory socketFactory) {
		delegate.setSSLSocketFactory(socketFactory);
	}

	@Override
	public SSLSocketFactory getSSLSocketFactory() {
		return delegate.getSSLSocketFactory();
	}

	// --- HTTP-level calls, routed through the intercepting delegate ---

	@Override
	public void disconnect() {
		httpDelegate.disconnect();
	}

	@Override
	public boolean usingProxy() {
		return httpDelegate.usingProxy();
	}

	@Override
	public void connect() throws IOException {
		httpDelegate.connect();
	}

	@Override
	public String getHeaderFieldKey(final int index) {
		return httpDelegate.getHeaderFieldKey(index);
	}

	@Override
	public void setFixedLengthStreamingMode(final int contentLength) {
		httpDelegate.setFixedLengthStreamingMode(contentLength);
	}

	@Override
	public void setFixedLengthStreamingMode(final long contentLength) {
		httpDelegate.setFixedLengthStreamingMode(contentLength);
	}

	@Override
	public void setChunkedStreamingMode(final int chunkLength) {
		httpDelegate.setChunkedStreamingMode(chunkLength);
	}

	@Override
	public String getHeaderField(final int index) {
		return httpDelegate.getHeaderField(index);
	}

	@Override
	public void setInstanceFollowRedirects(final boolean followRedirects) {
		httpDelegate.setInstanceFollowRedirects(followRedirects);
	}

	@Override
	public boolean getInstanceFollowRedirects() {
		return httpDelegate.getInstanceFollowRedirects();
	}

	@Override
	public void setRequestMethod(final String method) throws ProtocolException {
		httpDelegate.setRequestMethod(method);
	}

	@Override
	public String getRequestMethod() {
		return httpDelegate.getRequestMethod();
	}

	@Override
	public int getResponseCode() throws IOException {
		return httpDelegate.getResponseCode();
	}

	@Override
	public String getResponseMessage() throws IOException {
		return httpDelegate.getResponseMessage();
	}

	@Override
	public long getHeaderFieldDate(final String name, final long defaultValue) {
		return httpDelegate.getHeaderFieldDate(name, defaultValue);
	}

	@Override
	public Permission getPermission() throws IOException {
		return httpDelegate.getPermission();
	}

	@Override
	public InputStream getErrorStream() {
		return httpDelegate.getErrorStream();
	}

	@Override
	public void setConnectTimeout(final int timeoutMillis) {
		httpDelegate.setConnectTimeout(timeoutMillis);
	}

	@Override
	public int getConnectTimeout() {
		return httpDelegate.getConnectTimeout();
	}

	@Override
	public void setReadTimeout(final int timeoutMillis) {
		httpDelegate.setReadTimeout(timeoutMillis);
	}

	@Override
	public int getReadTimeout() {
		return httpDelegate.getReadTimeout();
	}

	@Override
	public URL getURL() {
		return httpDelegate.getURL();
	}

	@Override
	public int getContentLength() {
		return httpDelegate.getContentLength();
	}

	@RequiresApi(api = 24)
	@Override
	public long getContentLengthLong() {
		return httpDelegate.getContentLengthLong();
	}

	@Override
	public String getContentType() {
		return httpDelegate.getContentType();
	}

	@Override
	public String getContentEncoding() {
		return httpDelegate.getContentEncoding();
	}

	@Override
	public long getExpiration() {
		return httpDelegate.getExpiration();
	}

	@Override
	public long getDate() {
		return httpDelegate.getDate();
	}

	@Override
	public long getLastModified() {
		return httpDelegate.getLastModified();
	}

	@Override
	public String getHeaderField(final String name) {
		return httpDelegate.getHeaderField(name);
	}

	@Override
	public Map<String, List<String>> getHeaderFields() {
		return httpDelegate.getHeaderFields();
	}

	@Override
	public int getHeaderFieldInt(final String name, final int defaultValue) {
		return httpDelegate.getHeaderFieldInt(name, defaultValue);
	}

	@RequiresApi(api = 24)
	@Override
	public long getHeaderFieldLong(final String name, final long defaultValue) {
		return httpDelegate.getHeaderFieldLong(name, defaultValue);
	}

	@Override
	public Object getContent() throws IOException {
		return httpDelegate.getContent();
	}

	@Override
	public Object getContent(final Class[] types) throws IOException {
		return httpDelegate.getContent(types);
	}

	@Override
	public InputStream getInputStream() throws IOException {
		return httpDelegate.getInputStream();
	}

	@Override
	public OutputStream getOutputStream() throws IOException {
		return httpDelegate.getOutputStream();
	}

	@Override
	public String toString() {
		return httpDelegate.toString();
	}

	@Override
	public void setDoInput(final boolean doInput) {
		httpDelegate.setDoInput(doInput);
	}

	@Override
	public boolean getDoInput() {
		return httpDelegate.getDoInput();
	}

	@Override
	public void setDoOutput(final boolean doOutput) {
		httpDelegate.setDoOutput(doOutput);
	}

	@Override
	public boolean getDoOutput() {
		return httpDelegate.getDoOutput();
	}

	@Override
	public void setAllowUserInteraction(final boolean allow) {
		httpDelegate.setAllowUserInteraction(allow);
	}

	@Override
	public boolean getAllowUserInteraction() {
		return httpDelegate.getAllowUserInteraction();
	}

	@Override
	public void setUseCaches(final boolean useCaches) {
		httpDelegate.setUseCaches(useCaches);
	}

	@Override
	public boolean getUseCaches() {
		return httpDelegate.getUseCaches();
	}

	@Override
	public void setIfModifiedSince(final long ifModifiedSince) {
		httpDelegate.setIfModifiedSince(ifModifiedSince);
	}

	@Override
	public long getIfModifiedSince() {
		return httpDelegate.getIfModifiedSince();
	}

	@Override
	public boolean getDefaultUseCaches() {
		return httpDelegate.getDefaultUseCaches();
	}

	@Override
	public void setDefaultUseCaches(final boolean defaultUseCaches) {
		httpDelegate.setDefaultUseCaches(defaultUseCaches);
	}

	@Override
	public void setRequestProperty(final String name, final String value) {
		httpDelegate.setRequestProperty(name, value);
	}

	@Override
	public void addRequestProperty(final String name, final String value) {
		httpDelegate.addRequestProperty(name, value);
	}

	@Override
	public String getRequestProperty(final String name) {
		return httpDelegate.getRequestProperty(name);
	}

	@Override
	public Map<String, List<String>> getRequestProperties() {
		return httpDelegate.getRequestProperties();
	}
}
| |
/*
* Copyright (c) 2010-2015 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.certification.test;
import com.evolveum.icf.dummy.resource.DummyResource;
import com.evolveum.midpoint.certification.api.CertificationManager;
import com.evolveum.midpoint.certification.impl.AccCertGeneralHelper;
import com.evolveum.midpoint.model.test.AbstractModelIntegrationTest;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.xml.XmlTypeConverter;
import com.evolveum.midpoint.schema.constants.MidPointConstants;
import com.evolveum.midpoint.schema.constants.ObjectTypes;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.util.CertCampaignTypeUtil;
import com.evolveum.midpoint.schema.util.ObjectTypeUtil;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.test.DummyResourceContoller;
import com.evolveum.midpoint.util.exception.CommunicationException;
import com.evolveum.midpoint.util.exception.ConfigurationException;
import com.evolveum.midpoint.util.exception.ObjectAlreadyExistsException;
import com.evolveum.midpoint.util.exception.ObjectNotFoundException;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.util.exception.SecurityViolationException;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AbstractRoleType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AccessCertificationAssignmentCaseType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AccessCertificationCampaignStateType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AccessCertificationCampaignType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AccessCertificationCaseType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AccessCertificationDecisionType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AccessCertificationDefinitionType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AccessCertificationResponseType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AssignmentType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.FocusType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectReferenceType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.RoleType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.SystemConfigurationType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.SystemObjectsType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType;
import org.springframework.beans.factory.annotation.Autowired;
import javax.xml.datatype.XMLGregorianCalendar;
import java.io.File;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import static org.testng.AssertJUnit.fail;
/**
* @author mederly
*
*/
public class AbstractCertificationTest extends AbstractModelIntegrationTest {
public static final File SYSTEM_CONFIGURATION_FILE = new File(COMMON_DIR, "system-configuration.xml");
public static final String SYSTEM_CONFIGURATION_OID = SystemObjectsType.SYSTEM_CONFIGURATION.value();
protected static final File ORGS_AND_USERS_FILE = new File(COMMON_DIR, "orgs-and-users.xml");
protected static final String ORG_GOVERNOR_OFFICE_OID = "00000000-8888-6666-0000-100000000001";
protected static final String ORG_SCUMM_BAR_OID = "00000000-8888-6666-0000-100000000006";
protected static final String ORG_MINISTRY_OF_OFFENSE_OID = "00000000-8888-6666-0000-100000000003";
protected static final String ORG_MINISTRY_OF_DEFENSE_OID = "00000000-8888-6666-0000-100000000002";
protected static final String ORG_MINISTRY_OF_RUM_OID = "00000000-8888-6666-0000-100000000004";
protected static final String ORG_SWASHBUCKLER_SECTION_OID = "00000000-8888-6666-0000-100000000005";
protected static final String ORG_PROJECT_ROOT_OID = "00000000-8888-6666-0000-200000000000";
protected static final String ORG_SAVE_ELAINE_OID = "00000000-8888-6666-0000-200000000001";
protected static final String ORG_EROOT_OID = "00000000-8888-6666-0000-300000000000";
protected static final String USER_ELAINE_OID = "c0c010c0-d34d-b33f-f00d-11111111111e";
protected static final String USER_GUYBRUSH_OID = "c0c010c0-d34d-b33f-f00d-111111111116";
protected static final String USER_LECHUCK_OID = "c0c010c0-d34d-b33f-f00d-1c1c11cc11c2";
protected static final String USER_CHEESE_OID = "c0c010c0-d34d-b33f-f00d-111111111130";
protected static final String USER_CHEF_OID = "c0c010c0-d34d-b33f-f00d-111111111131";
protected static final String USER_BARKEEPER_OID = "c0c010c0-d34d-b33f-f00d-111111111132";
protected static final String USER_CARLA_OID = "c0c010c0-d34d-b33f-f00d-111111111133";
protected static final String USER_BOB_OID = "c0c010c0-d34d-b33f-f00d-111111111134";
public static final File USER_ADMINISTRATOR_FILE = new File(COMMON_DIR, "user-administrator.xml");
protected static final String USER_ADMINISTRATOR_OID = "00000000-0000-0000-0000-000000000002";
protected static final String USER_ADMINISTRATOR_NAME = "administrator";
protected static final File USER_JACK_FILE = new File(COMMON_DIR, "user-jack.xml");
protected static final String USER_JACK_OID = "c0c010c0-d34d-b33f-f00d-111111111111";
protected static final String USER_JACK_USERNAME = "jack";
public static final File ROLE_SUPERUSER_FILE = new File(COMMON_DIR, "role-superuser.xml");
protected static final String ROLE_SUPERUSER_OID = "00000000-0000-0000-0000-000000000004";
public static final File METAROLE_CXO_FILE = new File(COMMON_DIR, "metarole-cxo.xml");
protected static final String METAROLE_CXO_OID = "00000000-d34d-b33f-f00d-444444444444";
public static final File ROLE_CEO_FILE = new File(COMMON_DIR, "role-ceo.xml");
protected static final String ROLE_CEO_OID = "00000000-d34d-b33f-f00d-000000000001";
public static final File ROLE_COO_FILE = new File(COMMON_DIR, "role-coo.xml");
protected static final String ROLE_COO_OID = "00000000-d34d-b33f-f00d-000000000002";
protected DummyResource dummyResource;
protected DummyResourceContoller dummyResourceCtl;
protected ResourceType resourceDummyType;
protected PrismObject<ResourceType> resourceDummy;
protected DummyResource dummyResourceBlack;
protected DummyResourceContoller dummyResourceCtlBlack;
protected ResourceType resourceDummyBlackType;
protected PrismObject<ResourceType> resourceDummyBlack;
protected static final File RESOURCE_DUMMY_FILE = new File(COMMON_DIR, "resource-dummy.xml");
protected static final String RESOURCE_DUMMY_OID = "10000000-0000-0000-0000-000000000004";
protected static final String RESOURCE_DUMMY_NAMESPACE = "http://midpoint.evolveum.com/xml/ns/public/resource/instance/10000000-0000-0000-0000-000000000004";
protected static final String DUMMY_ACCOUNT_ATTRIBUTE_SEA_NAME = "sea";
protected static final String RESOURCE_DUMMY_BLACK_FILENAME = COMMON_DIR + "/resource-dummy-black.xml";
protected static final String RESOURCE_DUMMY_BLACK_OID = "10000000-0000-0000-0000-000000000305";
protected static final String RESOURCE_DUMMY_BLACK_NAME = "black";
protected static final String RESOURCE_DUMMY_BLACK_NAMESPACE = MidPointConstants.NS_RI;
protected static final Trace LOGGER = TraceManager.getTrace(AbstractModelIntegrationTest.class);
@Autowired
protected CertificationManager certificationManager;
@Autowired
protected AccCertGeneralHelper helper;
protected RoleType roleCeo;
protected RoleType roleCoo;
protected RoleType roleSuperuser;
protected UserType userAdministrator;
protected UserType userJack;
protected UserType userElaine;
protected UserType userGuybrush;
@Override
public void initSystem(Task initTask, OperationResult initResult) throws Exception {
LOGGER.trace("initSystem");
super.initSystem(initTask, initResult);
modelService.postInit(initResult);
// System Configuration
try {
repoAddObjectFromFile(SYSTEM_CONFIGURATION_FILE, SystemConfigurationType.class, initResult);
} catch (ObjectAlreadyExistsException e) {
throw new ObjectAlreadyExistsException("System configuration already exists in repository;" +
"looks like the previous test haven't cleaned it up", e);
}
repoAddObjectsFromFile(ORGS_AND_USERS_FILE, RoleType.class, initResult);
// roles
repoAddObjectFromFile(METAROLE_CXO_FILE, RoleType.class, initResult);
roleSuperuser = repoAddObjectFromFile(ROLE_SUPERUSER_FILE, RoleType.class, initResult).asObjectable();
roleCeo = repoAddObjectFromFile(ROLE_CEO_FILE, RoleType.class, initResult).asObjectable();
roleCoo = repoAddObjectFromFile(ROLE_COO_FILE, RoleType.class, initResult).asObjectable();
// Administrator
userAdministrator = repoAddObjectFromFile(USER_ADMINISTRATOR_FILE, UserType.class, initResult).asObjectable();
login(userAdministrator.asPrismObject());
// Users
userJack = repoAddObjectFromFile(USER_JACK_FILE, UserType.class, initResult).asObjectable();
userElaine = getUser(USER_ELAINE_OID).asObjectable();
userGuybrush = getUser(USER_GUYBRUSH_OID).asObjectable();
// Resources
dummyResourceCtl = DummyResourceContoller.create(null);
dummyResourceCtl.extendSchemaPirate();
dummyResource = dummyResourceCtl.getDummyResource();
dummyResourceCtl.addAttrDef(dummyResource.getAccountObjectClass(),
DUMMY_ACCOUNT_ATTRIBUTE_SEA_NAME, String.class, false, false);
resourceDummy = importAndGetObjectFromFile(ResourceType.class, RESOURCE_DUMMY_FILE, RESOURCE_DUMMY_OID, initTask, initResult);
resourceDummyType = resourceDummy.asObjectable();
dummyResourceCtl.setResource(resourceDummy);
dummyResourceCtlBlack = DummyResourceContoller.create(RESOURCE_DUMMY_BLACK_NAME, resourceDummyBlack);
dummyResourceCtlBlack.extendSchemaPirate();
dummyResourceBlack = dummyResourceCtlBlack.getDummyResource();
resourceDummyBlack = importAndGetObjectFromFile(ResourceType.class, RESOURCE_DUMMY_BLACK_FILENAME, RESOURCE_DUMMY_BLACK_OID, initTask, initResult);
resourceDummyBlackType = resourceDummyBlack.asObjectable();
dummyResourceCtlBlack.setResource(resourceDummyBlack);
}
protected AccessCertificationCaseType checkCase(Collection<AccessCertificationCaseType> caseList, String subjectOid, String targetOid, FocusType focus) {
AccessCertificationCaseType ccase = findCase(caseList, subjectOid, targetOid);
assertNotNull("Certification case for " + subjectOid + ":" + targetOid + " was not found", ccase);
assertNotNull("reviewRequestedTimestamp", ccase.getReviewRequestedTimestamp());
assertNotNull("deadline", ccase.getReviewDeadline());
assertNull("remediedTimestamp", ccase.getRemediedTimestamp());
return checkSpecificCase(ccase, focus);
}
protected AccessCertificationCaseType checkSpecificCase(AccessCertificationCaseType ccase, FocusType focus) {
assertEquals("Wrong class for case", AccessCertificationAssignmentCaseType.class, ccase.getClass());
AccessCertificationAssignmentCaseType acase = (AccessCertificationAssignmentCaseType) ccase;
long id = acase.getAssignment().getId();
List<AssignmentType> assignmentList;
if (Boolean.TRUE.equals(acase.isIsInducement())) {
assignmentList = ((AbstractRoleType) focus).getInducement();
} else {
assignmentList = focus.getAssignment();
}
for (AssignmentType assignment : assignmentList) {
if (id == assignment.getId()) {
assertEquals("Wrong assignment in certification case", assignment, acase.getAssignment());
return ccase;
}
}
fail("Assignment with ID " + id + " not found among assignments of " + focus);
return null; // won't come here
}
protected AccessCertificationCaseType findCase(Collection<AccessCertificationCaseType> caseList, String subjectOid, String targetOid) {
for (AccessCertificationCaseType acase : caseList) {
if (acase.getTargetRef() != null && acase.getTargetRef().getOid().equals(targetOid) &&
acase.getObjectRef() != null && acase.getObjectRef().getOid().equals(subjectOid)) {
return acase;
}
}
return null;
}
protected void assertApproximateTime(String itemName, Date expected, XMLGregorianCalendar actual) {
assertNotNull("missing " + itemName, actual);
Date actualAsDate = XmlTypeConverter.toDate(actual);
assertTrue(itemName + " out of range; expected " + expected + ", found " + actualAsDate,
Math.abs(actualAsDate.getTime() - expected.getTime()) < 600000); // 10 minutes
}
protected void assertStateAndStage(AccessCertificationCampaignType campaign, AccessCertificationCampaignStateType state, int stage) {
assertEquals("Unexpected campaign state", state, campaign.getState());
assertEquals("Unexpected stage number", stage, campaign.getCurrentStageNumber());
}
protected void assertDefinitionAndOwner(AccessCertificationCampaignType campaign, AccessCertificationDefinitionType certificationDefinition) {
assertEquals("Unexpected ownerRef", ObjectTypeUtil.createObjectRef(USER_ADMINISTRATOR_OID, ObjectTypes.USER), campaign.getOwnerRef());
assertEquals("Unexpected definitionRef",
ObjectTypeUtil.createObjectRef(certificationDefinition),
campaign.getDefinitionRef());
}
protected void assertCaseReviewers(AccessCertificationCaseType _case, AccessCertificationResponseType currentResponse,
boolean enabled, int currentResponseStage, List<String> reviewerOidList) {
assertEquals("wrong current response", currentResponse, _case.getCurrentResponse());
assertEquals("wrong enabled", (Boolean) enabled, _case.isEnabled());
assertEquals("wrong current response stage number", (Integer) currentResponseStage, _case.getCurrentResponseStage());
Set<String> realReviewerOids = new HashSet<>();
for (ObjectReferenceType ref : _case.getReviewerRef()) {
realReviewerOids.add(ref.getOid());
}
assertEquals("wrong reviewer oids", new HashSet<>(reviewerOidList), realReviewerOids);
}
protected void recordDecision(String campaignOid, AccessCertificationCaseType _case, AccessCertificationResponseType response, String comment,
int stageNumber, String reviewerOid, Task task, OperationResult result)
throws CommunicationException, ObjectNotFoundException, ObjectAlreadyExistsException, SchemaException, SecurityViolationException, ConfigurationException {
AccessCertificationDecisionType decision = new AccessCertificationDecisionType(prismContext);
decision.setResponse(response);
decision.setComment(comment);
decision.setStageNumber(stageNumber);
if (reviewerOid != null) {
ObjectReferenceType reviewerRef = ObjectTypeUtil.createObjectRef(reviewerOid, ObjectTypes.USER);
decision.setReviewerRef(reviewerRef);
}
long id = _case.asPrismContainerValue().getId();
certificationManager.recordDecision(campaignOid, id, decision, task, result);
}
protected void assertDecision(AccessCertificationCaseType _case, AccessCertificationResponseType response, String comment, int stageNumber, String reviewerOid, AccessCertificationResponseType aggregatedResponse, boolean enabled) {
assertEquals("wrong # of decisions", 1, _case.getDecision().size());
AccessCertificationDecisionType storedDecision = _case.getDecision().get(0);
assertEquals("wrong response", response, storedDecision.getResponse());
assertEquals("wrong comment", comment, storedDecision.getComment());
assertEquals("wrong reviewerRef", ObjectTypeUtil.createObjectRef(reviewerOid, ObjectTypes.USER), storedDecision.getReviewerRef());
assertEquals("wrong stage number", stageNumber, storedDecision.getStageNumber());
assertApproximateTime("timestamp", new Date(), storedDecision.getTimestamp());
assertEquals("wrong current response", aggregatedResponse, _case.getCurrentResponse());
assertEquals("wrong enabled", (Boolean) enabled, _case.isEnabled());
}
    /**
     * Asserts that the case holds no decisions, while its aggregated (current) response and
     * enabled flag have the expected values.
     */
    protected void assertNoDecision(AccessCertificationCaseType _case, AccessCertificationResponseType aggregatedResponse, boolean enabled) {
        assertEquals("wrong # of decisions", 0, _case.getDecision().size());
        assertEquals("wrong current response", aggregatedResponse, _case.getCurrentResponse());
        assertEquals("wrong enabled", (Boolean) enabled, _case.isEnabled());
    }
    /**
     * Asserts the case's aggregated (current) response, the stage number that response belongs to,
     * and the enabled flag.
     */
    protected void assertCurrentState(AccessCertificationCaseType _case, AccessCertificationResponseType aggregatedResponse, int currentResponseStage, boolean enabled) {
        assertEquals("wrong current response", aggregatedResponse, _case.getCurrentResponse());
        assertEquals("wrong current response stage number", (Integer) currentResponseStage, _case.getCurrentResponseStage());
        assertEquals("wrong enabled", (Boolean) enabled, _case.isEnabled());
    }
protected void assertDecisions(AccessCertificationCaseType _case, int count) {
assertEquals("Wrong # of decisions", count, _case.getDecision().size());
}
protected void assertDecision2(AccessCertificationCaseType _case, AccessCertificationResponseType response, String comment,
int stageNumber, String reviewerOid, AccessCertificationResponseType aggregatedResponse, boolean enabled) {
AccessCertificationDecisionType decision = CertCampaignTypeUtil.findDecision(_case, stageNumber, reviewerOid);
assertNotNull("decision does not exist", decision);
assertEquals("wrong response", response, decision.getResponse());
assertEquals("wrong comment", comment, decision.getComment());
assertApproximateTime("timestamp", new Date(), decision.getTimestamp());
assertEquals("wrong current response", aggregatedResponse, _case.getCurrentResponse());
assertEquals("wrong enabled", (Boolean) enabled, _case.isEnabled());
}
}
| |
/*
* An XML document type.
* Localname: release
* Namespace: http://schema.mytestbed.net/omf/6.0/protocol
* Java type: net.mytestbed.schema.omf.x60.protocol.ReleaseDocument
*
* Automatically generated - do not modify.
*/
package net.mytestbed.schema.omf.x60.protocol;
/**
* A document containing one release(@http://schema.mytestbed.net/omf/6.0/protocol) element.
*
* This is a complex type.
*/
public interface ReleaseDocument extends org.apache.xmlbeans.XmlObject
{
public static final org.apache.xmlbeans.SchemaType type = (org.apache.xmlbeans.SchemaType)
org.apache.xmlbeans.XmlBeans.typeSystemForClassLoader(ReleaseDocument.class.getClassLoader(), "schemaorg_apache_xmlbeans.system.sD09C80FBB114BA180EC1CB4846999213").resolveHandle("releasea320doctype");
/**
* Gets the "release" element
*/
net.mytestbed.schema.omf.x60.protocol.ReleaseDocument.Release getRelease();
/**
* Sets the "release" element
*/
void setRelease(net.mytestbed.schema.omf.x60.protocol.ReleaseDocument.Release release);
/**
* Appends and returns a new empty "release" element
*/
net.mytestbed.schema.omf.x60.protocol.ReleaseDocument.Release addNewRelease();
/**
* An XML release(@http://schema.mytestbed.net/omf/6.0/protocol).
*
* This is a complex type.
*/
public interface Release extends org.apache.xmlbeans.XmlObject
{
public static final org.apache.xmlbeans.SchemaType type = (org.apache.xmlbeans.SchemaType)
org.apache.xmlbeans.XmlBeans.typeSystemForClassLoader(Release.class.getClassLoader(), "schemaorg_apache_xmlbeans.system.sD09C80FBB114BA180EC1CB4846999213").resolveHandle("release994delemtype");
/**
* Gets a List of "ts" elements
*/
java.util.List<java.lang.String> getTsList();
/**
* Gets array of all "ts" elements
* @deprecated
*/
java.lang.String[] getTsArray();
/**
* Gets ith "ts" element
*/
java.lang.String getTsArray(int i);
/**
* Gets (as xml) a List of "ts" elements
*/
java.util.List<org.apache.xmlbeans.XmlString> xgetTsList();
/**
* Gets (as xml) array of all "ts" elements
* @deprecated
*/
org.apache.xmlbeans.XmlString[] xgetTsArray();
/**
* Gets (as xml) ith "ts" element
*/
org.apache.xmlbeans.XmlString xgetTsArray(int i);
/**
* Returns number of "ts" element
*/
int sizeOfTsArray();
/**
* Sets array of all "ts" element
*/
void setTsArray(java.lang.String[] tsArray);
/**
* Sets ith "ts" element
*/
void setTsArray(int i, java.lang.String ts);
/**
* Sets (as xml) array of all "ts" element
*/
void xsetTsArray(org.apache.xmlbeans.XmlString[] tsArray);
/**
* Sets (as xml) ith "ts" element
*/
void xsetTsArray(int i, org.apache.xmlbeans.XmlString ts);
/**
* Inserts the value as the ith "ts" element
*/
void insertTs(int i, java.lang.String ts);
/**
* Appends the value as the last "ts" element
*/
void addTs(java.lang.String ts);
/**
* Inserts and returns a new empty value (as xml) as the ith "ts" element
*/
org.apache.xmlbeans.XmlString insertNewTs(int i);
/**
* Appends and returns a new empty value (as xml) as the last "ts" element
*/
org.apache.xmlbeans.XmlString addNewTs();
/**
* Removes the ith "ts" element
*/
void removeTs(int i);
/**
* Gets a List of "src" elements
*/
java.util.List<java.lang.String> getSrcList();
/**
* Gets array of all "src" elements
* @deprecated
*/
java.lang.String[] getSrcArray();
/**
* Gets ith "src" element
*/
java.lang.String getSrcArray(int i);
/**
* Gets (as xml) a List of "src" elements
*/
java.util.List<org.apache.xmlbeans.XmlString> xgetSrcList();
/**
* Gets (as xml) array of all "src" elements
* @deprecated
*/
org.apache.xmlbeans.XmlString[] xgetSrcArray();
/**
* Gets (as xml) ith "src" element
*/
org.apache.xmlbeans.XmlString xgetSrcArray(int i);
/**
* Returns number of "src" element
*/
int sizeOfSrcArray();
/**
* Sets array of all "src" element
*/
void setSrcArray(java.lang.String[] srcArray);
/**
* Sets ith "src" element
*/
void setSrcArray(int i, java.lang.String src);
/**
* Sets (as xml) array of all "src" element
*/
void xsetSrcArray(org.apache.xmlbeans.XmlString[] srcArray);
/**
* Sets (as xml) ith "src" element
*/
void xsetSrcArray(int i, org.apache.xmlbeans.XmlString src);
/**
* Inserts the value as the ith "src" element
*/
void insertSrc(int i, java.lang.String src);
/**
* Appends the value as the last "src" element
*/
void addSrc(java.lang.String src);
/**
* Inserts and returns a new empty value (as xml) as the ith "src" element
*/
org.apache.xmlbeans.XmlString insertNewSrc(int i);
/**
* Appends and returns a new empty value (as xml) as the last "src" element
*/
org.apache.xmlbeans.XmlString addNewSrc();
/**
* Removes the ith "src" element
*/
void removeSrc(int i);
/**
* Gets a List of "replyto" elements
*/
java.util.List<java.lang.String> getReplytoList();
/**
* Gets array of all "replyto" elements
* @deprecated
*/
java.lang.String[] getReplytoArray();
/**
* Gets ith "replyto" element
*/
java.lang.String getReplytoArray(int i);
/**
* Gets (as xml) a List of "replyto" elements
*/
java.util.List<org.apache.xmlbeans.XmlString> xgetReplytoList();
/**
* Gets (as xml) array of all "replyto" elements
* @deprecated
*/
org.apache.xmlbeans.XmlString[] xgetReplytoArray();
/**
* Gets (as xml) ith "replyto" element
*/
org.apache.xmlbeans.XmlString xgetReplytoArray(int i);
/**
* Returns number of "replyto" element
*/
int sizeOfReplytoArray();
/**
* Sets array of all "replyto" element
*/
void setReplytoArray(java.lang.String[] replytoArray);
/**
* Sets ith "replyto" element
*/
void setReplytoArray(int i, java.lang.String replyto);
/**
* Sets (as xml) array of all "replyto" element
*/
void xsetReplytoArray(org.apache.xmlbeans.XmlString[] replytoArray);
/**
* Sets (as xml) ith "replyto" element
*/
void xsetReplytoArray(int i, org.apache.xmlbeans.XmlString replyto);
/**
* Inserts the value as the ith "replyto" element
*/
void insertReplyto(int i, java.lang.String replyto);
/**
* Appends the value as the last "replyto" element
*/
void addReplyto(java.lang.String replyto);
/**
* Inserts and returns a new empty value (as xml) as the ith "replyto" element
*/
org.apache.xmlbeans.XmlString insertNewReplyto(int i);
/**
* Appends and returns a new empty value (as xml) as the last "replyto" element
*/
org.apache.xmlbeans.XmlString addNewReplyto();
/**
* Removes the ith "replyto" element
*/
void removeReplyto(int i);
/**
* Gets a List of "props" elements
*/
java.util.List<net.mytestbed.schema.omf.x60.protocol.PropsDocument.Props> getPropsList();
/**
* Gets array of all "props" elements
* @deprecated
*/
net.mytestbed.schema.omf.x60.protocol.PropsDocument.Props[] getPropsArray();
/**
* Gets ith "props" element
*/
net.mytestbed.schema.omf.x60.protocol.PropsDocument.Props getPropsArray(int i);
/**
* Returns number of "props" element
*/
int sizeOfPropsArray();
/**
* Sets array of all "props" element
*/
void setPropsArray(net.mytestbed.schema.omf.x60.protocol.PropsDocument.Props[] propsArray);
/**
* Sets ith "props" element
*/
void setPropsArray(int i, net.mytestbed.schema.omf.x60.protocol.PropsDocument.Props props);
/**
* Inserts and returns a new empty value (as xml) as the ith "props" element
*/
net.mytestbed.schema.omf.x60.protocol.PropsDocument.Props insertNewProps(int i);
/**
* Appends and returns a new empty value (as xml) as the last "props" element
*/
net.mytestbed.schema.omf.x60.protocol.PropsDocument.Props addNewProps();
/**
* Removes the ith "props" element
*/
void removeProps(int i);
/**
* Gets a List of "guard" elements
*/
java.util.List<net.mytestbed.schema.omf.x60.protocol.GuardDocument.Guard> getGuardList();
/**
* Gets array of all "guard" elements
* @deprecated
*/
net.mytestbed.schema.omf.x60.protocol.GuardDocument.Guard[] getGuardArray();
/**
* Gets ith "guard" element
*/
net.mytestbed.schema.omf.x60.protocol.GuardDocument.Guard getGuardArray(int i);
/**
* Returns number of "guard" element
*/
int sizeOfGuardArray();
/**
* Sets array of all "guard" element
*/
void setGuardArray(net.mytestbed.schema.omf.x60.protocol.GuardDocument.Guard[] guardArray);
/**
* Sets ith "guard" element
*/
void setGuardArray(int i, net.mytestbed.schema.omf.x60.protocol.GuardDocument.Guard guard);
/**
* Inserts and returns a new empty value (as xml) as the ith "guard" element
*/
net.mytestbed.schema.omf.x60.protocol.GuardDocument.Guard insertNewGuard(int i);
/**
* Appends and returns a new empty value (as xml) as the last "guard" element
*/
net.mytestbed.schema.omf.x60.protocol.GuardDocument.Guard addNewGuard();
/**
* Removes the ith "guard" element
*/
void removeGuard(int i);
/**
* Gets a List of "res_id" elements
*/
java.util.List<java.lang.String> getResIdList();
/**
* Gets array of all "res_id" elements
* @deprecated
*/
java.lang.String[] getResIdArray();
/**
* Gets ith "res_id" element
*/
java.lang.String getResIdArray(int i);
/**
* Gets (as xml) a List of "res_id" elements
*/
java.util.List<org.apache.xmlbeans.XmlString> xgetResIdList();
/**
* Gets (as xml) array of all "res_id" elements
* @deprecated
*/
org.apache.xmlbeans.XmlString[] xgetResIdArray();
/**
* Gets (as xml) ith "res_id" element
*/
org.apache.xmlbeans.XmlString xgetResIdArray(int i);
/**
* Returns number of "res_id" element
*/
int sizeOfResIdArray();
/**
* Sets array of all "res_id" element
*/
void setResIdArray(java.lang.String[] resIdArray);
/**
* Sets ith "res_id" element
*/
void setResIdArray(int i, java.lang.String resId);
/**
* Sets (as xml) array of all "res_id" element
*/
void xsetResIdArray(org.apache.xmlbeans.XmlString[] resIdArray);
/**
* Sets (as xml) ith "res_id" element
*/
void xsetResIdArray(int i, org.apache.xmlbeans.XmlString resId);
/**
* Inserts the value as the ith "res_id" element
*/
void insertResId(int i, java.lang.String resId);
/**
* Appends the value as the last "res_id" element
*/
void addResId(java.lang.String resId);
/**
* Inserts and returns a new empty value (as xml) as the ith "res_id" element
*/
org.apache.xmlbeans.XmlString insertNewResId(int i);
/**
* Appends and returns a new empty value (as xml) as the last "res_id" element
*/
org.apache.xmlbeans.XmlString addNewResId();
/**
* Removes the ith "res_id" element
*/
void removeResId(int i);
/**
* Gets the "mid" attribute
*/
java.lang.String getMid();
/**
* Gets (as xml) the "mid" attribute
*/
org.apache.xmlbeans.XmlString xgetMid();
/**
* True if has "mid" attribute
*/
boolean isSetMid();
/**
* Sets the "mid" attribute
*/
void setMid(java.lang.String mid);
/**
* Sets (as xml) the "mid" attribute
*/
void xsetMid(org.apache.xmlbeans.XmlString mid);
/**
* Unsets the "mid" attribute
*/
void unsetMid();
/**
* A factory class with static methods for creating instances
* of this type.
*/
public static final class Factory
{
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument.Release newInstance() {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument.Release) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, null ); }
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument.Release newInstance(org.apache.xmlbeans.XmlOptions options) {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument.Release) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, options ); }
private Factory() { } // No instance of this class allowed
}
}
/**
* A factory class with static methods for creating instances
* of this type.
*/
public static final class Factory
{
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument newInstance() {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, null ); }
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument newInstance(org.apache.xmlbeans.XmlOptions options) {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, options ); }
/** @param xmlAsString the string value to parse */
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(java.lang.String xmlAsString) throws org.apache.xmlbeans.XmlException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, null ); }
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(java.lang.String xmlAsString, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, options ); }
/** @param file the file from which to load an xml document */
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(java.io.File file) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, null ); }
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(java.io.File file, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, options ); }
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(java.net.URL u) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, null ); }
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(java.net.URL u, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, options ); }
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(java.io.InputStream is) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, null ); }
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(java.io.InputStream is, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, options ); }
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(java.io.Reader r) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, null ); }
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(java.io.Reader r, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, options ); }
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(javax.xml.stream.XMLStreamReader sr) throws org.apache.xmlbeans.XmlException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, null ); }
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(javax.xml.stream.XMLStreamReader sr, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, options ); }
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(org.w3c.dom.Node node) throws org.apache.xmlbeans.XmlException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, null ); }
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(org.w3c.dom.Node node, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, options ); }
/** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, null ); }
/** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
public static net.mytestbed.schema.omf.x60.protocol.ReleaseDocument parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return (net.mytestbed.schema.omf.x60.protocol.ReleaseDocument) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, options ); }
/** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, null ); }
/** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, options ); }
private Factory() { } // No instance of this class allowed
}
}
| |
package org.terasology.config;
import org.lwjgl.opengl.DisplayMode;
import org.lwjgl.opengl.PixelFormat;
import org.terasology.game.CoreRegistry;
import org.terasology.rendering.world.WorldRenderer;
/**
* @author Immortius
*/
/**
 * Client rendering settings: display mode, view distances, and toggles for graphical effects.
 * <p>
 * This is a plain configuration bean. With the exception of
 * {@link #setActiveViewDistanceMode(int)}, which also propagates the new view distance to the
 * active {@link WorldRenderer}, setters simply store the given value.
 *
 * @author Immortius
 */
public class RenderingConfig {
    private PixelFormat pixelFormat = new PixelFormat().withDepthBits(24);
    private int windowWidth = 1280;
    private int windowHeight = 720;
    private boolean fullscreen = false;
    private int viewDistanceNear = 8;
    private int viewDistanceModerate = 16;
    private int viewDistanceFar = 32;
    private int viewDistanceUltra = 48;
    // Selects which of the four view distances is active: 0 = near, 1 = moderate, 2 = far, 3 = ultra.
    private int activeViewDistanceMode = 0;
    private int maxChunkVBOs = 512;
    private boolean flickeringLight = false;
    private boolean animateGrass = false;
    private boolean animateWater = false;
    private float fieldOfView = 90;
    private boolean cameraBobbing = true;
    private boolean renderPlacingBox = true;
    private int blurIntensity = 0;
    private boolean reflectiveWater = false;
    private boolean vignette = true;
    private boolean motionBlur = false;
    private boolean ssao = false;
    private boolean filmGrain = true;
    private boolean outline = true;
    private boolean lightShafts = false;
    private boolean eyeAdaptation = true;
    private boolean bloom = false;
    private boolean dynamicShadows = false;

    /**
     * @return the blur radius derived from the blur intensity; never less than 1
     */
    public int getBlurRadius() {
        return Math.max(1, blurIntensity);
    }

    public PixelFormat getPixelFormat() {
        return pixelFormat;
    }

    public void setPixelFormat(PixelFormat pixelFormat) {
        this.pixelFormat = pixelFormat;
    }

    public int getWindowWidth() {
        return windowWidth;
    }

    public void setWindowWidth(int windowWidth) {
        this.windowWidth = windowWidth;
    }

    public int getWindowHeight() {
        return windowHeight;
    }

    public void setWindowHeight(int windowHeight) {
        this.windowHeight = windowHeight;
    }

    /**
     * @return a display mode matching the configured window width and height
     */
    public DisplayMode getDisplayMode() {
        return new DisplayMode(windowWidth, windowHeight);
    }

    public boolean isFullscreen() {
        return fullscreen;
    }

    public void setFullscreen(boolean fullscreen) {
        this.fullscreen = fullscreen;
    }

    public int getViewDistanceNear() {
        return viewDistanceNear;
    }

    public void setViewDistanceNear(int viewDistanceNear) {
        this.viewDistanceNear = viewDistanceNear;
    }

    public int getViewDistanceModerate() {
        return viewDistanceModerate;
    }

    public void setViewDistanceModerate(int viewDistanceModerate) {
        this.viewDistanceModerate = viewDistanceModerate;
    }

    public int getViewDistanceFar() {
        return viewDistanceFar;
    }

    public void setViewDistanceFar(int viewDistanceFar) {
        this.viewDistanceFar = viewDistanceFar;
    }

    public int getViewDistanceUltra() {
        return viewDistanceUltra;
    }

    public void setViewDistanceUltra(int viewDistanceUltra) {
        this.viewDistanceUltra = viewDistanceUltra;
    }

    public int getActiveViewDistanceMode() {
        return activeViewDistanceMode;
    }

    /**
     * Sets the active view distance mode (0 = near, 1 = moderate, 2 = far, 3 = ultra).
     * <p>
     * Side effects: rescales the chunk VBO budget and, if a {@link WorldRenderer} is registered,
     * pushes the new view distance to it.
     */
    public void setActiveViewDistanceMode(int activeViewDistanceMode) {
        this.activeViewDistanceMode = activeViewDistanceMode;
        // TODO: Remove this, switch to a property change listener
        // Budget one VBO per chunk in the (square) viewing area, capped at 512.
        int chunksToLoad = getActiveViewingDistance() * getActiveViewingDistance();
        setMaxChunkVBOs(Math.min(512, chunksToLoad));
        WorldRenderer worldRenderer = CoreRegistry.get(WorldRenderer.class);
        if (worldRenderer != null) {
            worldRenderer.changeViewDistance(getActiveViewingDistance());
        }
    }

    public int getMaxChunkVBOs() {
        return maxChunkVBOs;
    }

    public void setMaxChunkVBOs(int maxChunkVBOs) {
        this.maxChunkVBOs = maxChunkVBOs;
    }

    public boolean isFlickeringLight() {
        return flickeringLight;
    }

    public void setFlickeringLight(boolean flickeringLight) {
        this.flickeringLight = flickeringLight;
    }

    public boolean isAnimateGrass() {
        return animateGrass;
    }

    public void setAnimateGrass(boolean animateGrass) {
        this.animateGrass = animateGrass;
    }

    public boolean isAnimateWater() {
        return animateWater;
    }

    public void setAnimateWater(boolean animateWater) {
        this.animateWater = animateWater;
    }

    public boolean isDynamicShadows() {
        return dynamicShadows;
    }

    public void setDynamicShadows(boolean dynamicShadows) {
        this.dynamicShadows = dynamicShadows;
    }

    public float getFieldOfView() {
        return fieldOfView;
    }

    public void setFieldOfView(float fieldOfView) {
        this.fieldOfView = fieldOfView;
    }

    public boolean isCameraBobbing() {
        return cameraBobbing;
    }

    public void setCameraBobbing(boolean cameraBobbing) {
        this.cameraBobbing = cameraBobbing;
    }

    public boolean isRenderPlacingBox() {
        return renderPlacingBox;
    }

    public void setRenderPlacingBox(boolean renderPlacingBox) {
        this.renderPlacingBox = renderPlacingBox;
    }

    public int getBlurIntensity() {
        return blurIntensity;
    }

    public void setBlurIntensity(int blurIntensity) {
        this.blurIntensity = blurIntensity;
    }

    public boolean isReflectiveWater() {
        return reflectiveWater;
    }

    public void setReflectiveWater(boolean reflectiveWater) {
        this.reflectiveWater = reflectiveWater;
    }

    public boolean isVignette() {
        return vignette;
    }

    public void setVignette(boolean vignette) {
        this.vignette = vignette;
    }

    public boolean isMotionBlur() {
        return motionBlur;
    }

    public void setMotionBlur(boolean motionBlur) {
        this.motionBlur = motionBlur;
    }

    public boolean isSsao() {
        return ssao;
    }

    public void setSsao(boolean ssao) {
        this.ssao = ssao;
    }

    public boolean isFilmGrain() {
        return filmGrain;
    }

    public void setFilmGrain(boolean filmGrain) {
        this.filmGrain = filmGrain;
    }

    public boolean isOutline() {
        return outline;
    }

    public void setOutline(boolean outline) {
        this.outline = outline;
    }

    public boolean isLightShafts() {
        return lightShafts;
    }

    public void setLightShafts(boolean lightShafts) {
        this.lightShafts = lightShafts;
    }

    public boolean isEyeAdaptation() {
        return eyeAdaptation;
    }

    public void setEyeAdaptation(boolean eyeAdaptation) {
        // Parameter renamed from "eyeAdapting" for consistency with the field and getter.
        this.eyeAdaptation = eyeAdaptation;
    }

    public boolean isBloom() {
        return bloom;
    }

    public void setBloom(boolean bloom) {
        this.bloom = bloom;
    }

    /**
     * @return the view distance for the currently active mode
     *         (0 = near, 1 = moderate, 2 = far, 3 = ultra); any other mode falls back to near
     */
    public int getActiveViewingDistance() {
        switch (activeViewDistanceMode) {
            case 1:
                return viewDistanceModerate;
            case 2:
                return viewDistanceFar;
            case 3:
                return viewDistanceUltra;
            default:
                return viewDistanceNear;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.daemon.supervisor;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Writer;
import java.lang.ProcessBuilder.Redirect;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.storm.Config;
import org.apache.storm.DaemonConfig;
import org.apache.storm.container.ResourceIsolationInterface;
import org.apache.storm.generated.LSWorkerHeartbeat;
import org.apache.storm.generated.LocalAssignment;
import org.apache.storm.generated.ProfileRequest;
import org.apache.storm.generated.WorkerMetricList;
import org.apache.storm.generated.WorkerMetricPoint;
import org.apache.storm.generated.WorkerMetrics;
import org.apache.storm.metric.StormMetricsRegistry;
import org.apache.storm.metricstore.MetricException;
import org.apache.storm.metricstore.WorkerMetricsProcessor;
import org.apache.storm.utils.ConfigUtils;
import org.apache.storm.utils.LocalState;
import org.apache.storm.utils.ServerConfigUtils;
import org.apache.storm.utils.ServerUtils;
import org.apache.storm.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;
/**
* Represents a container that a worker will run in.
*/
public abstract class Container implements Killable {
    private static final Logger LOG = LoggerFactory.getLogger(Container.class);
    private static final String MEMORY_USED_METRIC = "UsedMemory";
    private static final String SYSTEM_COMPONENT_ID = "System";
    private static final String INVALID_EXECUTOR_ID = "-1";
    private static final String INVALID_STREAM_ID = "None";
    // Supervisor-wide memory accounting in MB (presumably keyed by worker port — confirm);
    // shared across all containers and exposed via the gauges registered below.
    private static final ConcurrentHashMap<Integer, TopoAndMemory> _usedMemory =
        new ConcurrentHashMap<>();
    private static final ConcurrentHashMap<Integer, TopoAndMemory> _reservedMemory =
        new ConcurrentHashMap<>();
static {
StormMetricsRegistry.registerGauge(
"supervisor:current-used-memory-mb",
() -> {
Long val =
_usedMemory.values().stream().mapToLong((topoAndMem) -> topoAndMem.memory).sum();
int ret = val.intValue();
if (val > Integer.MAX_VALUE) { // Would only happen at 2 PB so we are OK for now
ret = Integer.MAX_VALUE;
}
return ret;
});
StormMetricsRegistry.registerGauge(
"supervisor:current-reserved-memory-mb",
() -> {
Long val =
_reservedMemory.values().stream().mapToLong((topoAndMem) -> topoAndMem.memory).sum();
int ret = val.intValue();
if (val > Integer.MAX_VALUE) { // Would only happen at 2 PB so we are OK for now
ret = Integer.MAX_VALUE;
}
return ret;
});
}
    // Supervisor daemon configuration.
    protected final Map<String, Object> _conf;
    protected final Map<String, Object> _topoConf; //Not set if RECOVER_PARTIAL
    protected final String _topologyId; //Not set if RECOVER_PARTIAL
    protected final String _supervisorId;
    protected final int _supervisorPort;
    protected final int _port; //Not set if RECOVER_PARTIAL
    protected final LocalAssignment _assignment; //Not set if RECOVER_PARTIAL
    // File system operations abstraction (swappable for testing).
    protected final AdvancedFSOps _ops;
    // May be null if no resource isolation is used.
    protected final ResourceIsolationInterface _resourceIsolationManager;
    // Mirrors Config.DISABLE_SYMLINKS from the supervisor config.
    protected final boolean _symlinksDisabled;
    protected String _workerId;
    protected ContainerType _type;
    // Timestamp of the last worker-metric processing pass, in ms.
    private long lastMetricProcessTime = 0L;
/**
* Create a new Container.
*
* @param type the type of container being made.
* @param conf the supervisor config
* @param supervisorId the ID of the supervisor this is a part of.
* @param supervisorPort the thrift server port of the supervisor this is a part of.
* @param port the port the container is on. Should be <= 0 if only a partial recovery
* @param assignment the assignment for this container. Should be null if only a partial recovery.
* @param resourceIsolationManager used to isolate resources for a container can be null if no isolation is used.
* @param workerId the id of the worker to use. Must not be null if doing a partial recovery.
* @param topoConf the config of the topology (mostly for testing) if null and not a partial recovery the real conf is
* read.
* @param ops file system operations (mostly for testing) if null a new one is made
* @throws IOException on any error.
*/
    protected Container(ContainerType type, Map<String, Object> conf, String supervisorId, int supervisorPort,
                        int port, LocalAssignment assignment, ResourceIsolationInterface resourceIsolationManager,
                        String workerId, Map<String, Object> topoConf, AdvancedFSOps ops) throws IOException {
        assert (type != null);
        assert (conf != null);
        assert (supervisorId != null);

        _symlinksDisabled = (boolean) conf.getOrDefault(Config.DISABLE_SYMLINKS, false);

        // A null ops means "use the real file system"; tests inject their own.
        if (ops == null) {
            ops = AdvancedFSOps.make(conf);
        }

        _workerId = workerId;
        _type = type;
        _port = port;
        _ops = ops;
        _conf = conf;
        _supervisorId = supervisorId;
        _supervisorPort = supervisorPort;
        _resourceIsolationManager = resourceIsolationManager;
        _assignment = assignment;

        if (_type.isOnlyKillable()) {
            // Partial recovery: we only know enough to kill the worker, so the
            // topology-specific fields stay unset (see the field comments above).
            assert (_assignment == null);
            assert (_port <= 0);
            assert (_workerId != null);
            _topologyId = null;
            _topoConf = null;
        } else {
            // Full container: assignment and port are required, and the topology's
            // code/config files must already be present on this supervisor.
            assert (assignment != null);
            assert (port > 0);
            _topologyId = assignment.get_topology_id();
            if (!_ops.doRequiredTopoFilesExist(_conf, _topologyId)) {
                LOG.info(
                    "Missing topology storm code, so can't launch worker with assignment {} for this supervisor {} on port {} with id {}",
                    _assignment,
                    _supervisorId, _port, _workerId);
                throw new ContainerRecoveryException("Missing required topology files...");
            }
            if (topoConf == null) {
                _topoConf = readTopoConf();
            } else {
                //For testing...
                _topoConf = topoConf;
            }
        }
    }
@Override
public String toString() {
return "topo:" + _topologyId + " worker:" + _workerId;
}
    /**
     * Read the topology's configuration from the supervisor's local copy of the topology files.
     * Requires that the topology id has already been resolved (i.e. not an only-killable container).
     *
     * @return the topology configuration map
     * @throws IOException on any error reading the config from disk
     */
    protected Map<String, Object> readTopoConf() throws IOException {
        assert (_topologyId != null);
        return ConfigUtils.readSupervisorStormConf(_conf, _topologyId);
    }
    /**
     * Kill a given process with SIGTERM, giving it a chance to shut down cleanly.
     *
     * @param pid the id of the process to kill
     * @throws IOException on any error running the kill command
     */
    protected void kill(long pid) throws IOException {
        ServerUtils.killProcessWithSigTerm(String.valueOf(pid));
    }
    /**
     * Forcefully kill a given process (SIGKILL); the process gets no chance to clean up.
     *
     * @param pid the id of the process to kill
     * @throws IOException on any error running the kill command
     */
    protected void forceKill(long pid) throws IOException {
        ServerUtils.forceKillProcess(String.valueOf(pid));
    }
@Override
public void kill() throws IOException {
LOG.info("Killing {}:{}", _supervisorId, _workerId);
Set<Long> pids = getAllPids();
for (Long pid : pids) {
kill(pid);
}
}
@Override
public void forceKill() throws IOException {
LOG.info("Force Killing {}:{}", _supervisorId, _workerId);
Set<Long> pids = getAllPids();
for (Long pid : pids) {
forceKill(pid);
}
}
    /**
     * Read the heartbeat for the current container from the worker's local state on disk.
     *
     * @return the last heartbeat the worker wrote; may be null if none was recorded yet
     *
     * @throws IOException on any error reading local state
     */
    public LSWorkerHeartbeat readHeartbeat() throws IOException {
        LocalState localState = ConfigUtils.workerState(_conf, _workerId);
        LSWorkerHeartbeat hb = localState.getWorkerHeartBeat();
        LOG.trace("{}: Reading heartbeat {}", _workerId, hb);
        return hb;
    }
/**
* Is a process alive and running?.
*
* @param pid the PID of the running process
* @param user the user that is expected to own that process
* @return true if it is, else false
*
* @throws IOException on any error
*/
protected boolean isProcessAlive(long pid, String user) throws IOException {
if (ServerUtils.IS_ON_WINDOWS) {
return isWindowsProcessAlive(pid, user);
}
return isPosixProcessAlive(pid, user);
}
    /**
     * Windows implementation of the liveness check: runs
     * {@code tasklist /fo list /fi "pid eq <pid>" /v} and scans its output for the
     * "User Name:" line to compare the process owner against the expected user.
     * Returns false if the process does not exist or is owned by someone else.
     */
    private boolean isWindowsProcessAlive(long pid, String user) throws IOException {
        boolean ret = false;
        ProcessBuilder pb = new ProcessBuilder("tasklist", "/fo", "list", "/fi", "pid eq " + pid, "/v");
        pb.redirectError(Redirect.INHERIT);
        Process p = pb.start();
        try (BufferedReader in = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
            String read;
            while ((read = in.readLine()) != null) {
                if (read.contains("User Name:")) { //Check for : in case someone called their user "User Name"
                    //This line contains the user name for the pid we're looking up
                    //Example line: "User Name: exampleDomain\exampleUser"
                    List<String> userNameLineSplitOnWhitespace = Arrays.asList(read.split(":"));
                    if (userNameLineSplitOnWhitespace.size() == 2) {
                        // Strip an optional "domain\" prefix and keep only the bare user name.
                        List<String> userAndMaybeDomain = Arrays.asList(userNameLineSplitOnWhitespace.get(1).trim().split("\\\\"));
                        String processUser = userAndMaybeDomain.size() == 2 ? userAndMaybeDomain.get(1) : userAndMaybeDomain.get(0);
                        if (user.equals(processUser)) {
                            ret = true;
                        } else {
                            LOG.info("Found {} running as {}, but expected it to be {}", pid, processUser, user);
                        }
                    } else {
                        // A user name containing ':' would split into more than two parts; treat as unexpected.
                        LOG.error("Received unexpected output from tasklist command. Expected one colon in user name line. Line was {}",
                                  read);
                    }
                    break;
                }
            }
        }
        return ret;
    }
    /**
     * POSIX implementation of the liveness check: runs {@code ps -o user -p <pid>} and
     * compares each output line against the expected owner. If the process is gone,
     * ps prints only the header and this returns false.
     * NOTE(review): assumes ps emits the owner without padding; a padded column would
     * fail the equals() comparison — confirm against target platforms.
     */
    private boolean isPosixProcessAlive(long pid, String user) throws IOException {
        boolean ret = false;
        ProcessBuilder pb = new ProcessBuilder("ps", "-o", "user", "-p", String.valueOf(pid));
        pb.redirectError(Redirect.INHERIT);
        Process p = pb.start();
        try (BufferedReader in = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
            // First line is the column header printed by ps.
            String first = in.readLine();
            assert ("USER".equals(first));
            String processUser;
            while ((processUser = in.readLine()) != null) {
                if (user.equals(processUser)) {
                    ret = true;
                    break;
                } else {
                    LOG.info("Found {} running as {}, but expected it to be {}", pid, processUser, user);
                }
            }
        }
        return ret;
    }
@Override
public boolean areAllProcessesDead() throws IOException {
Set<Long> pids = getAllPids();
String user = getRunWorkerAsUser();
boolean allDead = true;
for (Long pid: pids) {
LOG.debug("Checking if pid {} owner {} is alive", pid, user);
if (!isProcessAlive(pid, user)) {
LOG.debug("{}: PID {} is dead", _workerId, pid);
} else {
allDead = false;
break;
}
}
return allDead;
}
    /**
     * Clean up this container completely: drop its memory accounting for this port,
     * then delete its on-disk state via {@link #cleanUpForRestart()}.
     */
    @Override
    public void cleanUp() throws IOException {
        _usedMemory.remove(_port);
        _reservedMemory.remove(_port);
        cleanUpForRestart();
    }
    /**
     * Setup the container to run. By default this creates the needed directories/links in the local file system PREREQUISITE: All needed
     * blobs and topology, jars/configs have been downloaded and placed in the appropriate locations
     *
     * @throws IOException on any error
     */
    protected void setup() throws IOException {
        // Only valid for full (non-killable-only) containers.
        _type.assertFull();
        if (!_ops.doRequiredTopoFilesExist(_conf, _topologyId)) {
            LOG.info("Missing topology storm code, so can't launch worker with assignment {} for this supervisor {} on port {} with id {}",
                     _assignment,
                     _supervisorId, _port, _workerId);
            throw new IllegalStateException("Not all needed files are here!!!!");
        }
        LOG.info("Setting up {}:{}", _supervisorId, _workerId);
        // Worker-scoped scratch directories.
        _ops.forceMkdir(new File(ConfigUtils.workerPidsRoot(_conf, _workerId)));
        _ops.forceMkdir(new File(ConfigUtils.workerTmpRoot(_conf, _workerId)));
        _ops.forceMkdir(new File(ConfigUtils.workerHeartbeatsRoot(_conf, _workerId)));
        // Artifacts dir is keyed by topology + port and owned by the topology owner.
        File workerArtifacts = new File(ConfigUtils.workerArtifactsRoot(_conf, _topologyId, _port));
        if (!_ops.fileExists(workerArtifacts)) {
            _ops.forceMkdir(workerArtifacts);
            _ops.setupWorkerArtifactsDir(_assignment.get_owner(), workerArtifacts);
        }
        String user = getWorkerUser();
        writeLogMetadata(user);
        saveWorkerUser(user);
        createArtifactsLink();
        createBlobstoreLinks();
    }
/**
* Write out the file used by the log viewer to allow/reject log access.
*
* @param user the user this is going to run as
* @throws IOException on any error
*/
@SuppressWarnings("unchecked")
protected void writeLogMetadata(String user) throws IOException {
_type.assertFull();
Map<String, Object> data = new HashMap<>();
data.put(Config.TOPOLOGY_SUBMITTER_USER, user);
data.put("worker-id", _workerId);
Set<String> logsGroups = new HashSet<>();
if (_topoConf.get(DaemonConfig.LOGS_GROUPS) != null) {
List<String> groups = (List<String>) _topoConf.get(DaemonConfig.LOGS_GROUPS);
for (String group : groups) {
logsGroups.add(group);
}
}
if (_topoConf.get(Config.TOPOLOGY_GROUPS) != null) {
List<String> topGroups = (List<String>) _topoConf.get(Config.TOPOLOGY_GROUPS);
logsGroups.addAll(topGroups);
}
data.put(DaemonConfig.LOGS_GROUPS, logsGroups.toArray());
Set<String> logsUsers = new HashSet<>();
if (_topoConf.get(DaemonConfig.LOGS_USERS) != null) {
List<String> logUsers = (List<String>) _topoConf.get(DaemonConfig.LOGS_USERS);
for (String logUser : logUsers) {
logsUsers.add(logUser);
}
}
if (_topoConf.get(Config.TOPOLOGY_USERS) != null) {
List<String> topUsers = (List<String>) _topoConf.get(Config.TOPOLOGY_USERS);
for (String logUser : topUsers) {
logsUsers.add(logUser);
}
}
data.put(DaemonConfig.LOGS_USERS, logsUsers.toArray());
File file = ServerConfigUtils.getLogMetaDataFile(_conf, _topologyId, _port);
Yaml yaml = new Yaml();
try (Writer writer = _ops.getWriter(file)) {
yaml.dump(data, writer);
}
}
    /**
     * Create symlink from the containers directory/artifacts to the artifacts directory.
     * No-op when symlinks are disabled via {@code Config.DISABLE_SYMLINKS} or the worker
     * directory does not exist yet.
     *
     * @throws IOException on any error
     */
    protected void createArtifactsLink() throws IOException {
        _type.assertFull();
        if (!_symlinksDisabled) {
            File workerDir = new File(ConfigUtils.workerRoot(_conf, _workerId));
            File topoDir = new File(ConfigUtils.workerArtifactsRoot(_conf, _topologyId, _port));
            if (_ops.fileExists(workerDir)) {
                LOG.debug("Creating symlinks for worker-id: {} topology-id: {} to its port artifacts directory", _workerId, _topologyId);
                _ops.createSymlink(new File(workerDir, "artifacts"), topoDir);
            }
        }
    }
/**
* Create symlinks for each of the blobs from the container's directory to corresponding links in the storm dist directory.
*
* @throws IOException on any error.
*/
protected void createBlobstoreLinks() throws IOException {
_type.assertFull();
String stormRoot = ConfigUtils.supervisorStormDistRoot(_conf, _topologyId);
String workerRoot = ConfigUtils.workerRoot(_conf, _workerId);
@SuppressWarnings("unchecked")
Map<String, Map<String, Object>> blobstoreMap = (Map<String, Map<String, Object>>) _topoConf.get(Config.TOPOLOGY_BLOBSTORE_MAP);
List<String> blobFileNames = new ArrayList<>();
if (blobstoreMap != null) {
for (Map.Entry<String, Map<String, Object>> entry : blobstoreMap.entrySet()) {
String key = entry.getKey();
Map<String, Object> blobInfo = entry.getValue();
String ret = null;
if (blobInfo != null && blobInfo.containsKey("localname")) {
ret = (String) blobInfo.get("localname");
} else {
ret = key;
}
blobFileNames.add(ret);
}
}
File targetResourcesDir = new File(stormRoot, ServerConfigUtils.RESOURCES_SUBDIR);
List<String> resourceFileNames = new ArrayList<>();
if (targetResourcesDir.exists()) {
resourceFileNames.add(ServerConfigUtils.RESOURCES_SUBDIR);
}
resourceFileNames.addAll(blobFileNames);
if (!_symlinksDisabled) {
LOG.info("Creating symlinks for worker-id: {} storm-id: {} for files({}): {}", _workerId, _topologyId, resourceFileNames.size(),
resourceFileNames);
if (targetResourcesDir.exists()) {
_ops.createSymlink(new File(workerRoot, ServerConfigUtils.RESOURCES_SUBDIR), targetResourcesDir);
} else {
LOG.info("Topology jar for worker-id: {} storm-id: {} does not contain re sources directory {}.", _workerId, _topologyId,
targetResourcesDir.toString());
}
for (String fileName : blobFileNames) {
_ops.createSymlink(new File(workerRoot, fileName),
new File(stormRoot, fileName));
}
} else if (blobFileNames.size() > 0) {
LOG.warn("Symlinks are disabled, no symlinks created for blobs {}", blobFileNames);
}
}
/**
* @return all of the pids that are a part of this container.
*/
protected Set<Long> getAllPids() throws IOException {
Set<Long> ret = new HashSet<>();
for (String listing : ConfigUtils.readDirContents(ConfigUtils.workerPidsRoot(_conf, _workerId))) {
ret.add(Long.valueOf(listing));
}
if (_resourceIsolationManager != null) {
Set<Long> morePids = _resourceIsolationManager.getRunningPids(_workerId);
assert (morePids != null);
ret.addAll(morePids);
}
return ret;
}
    /**
     * Determine the user that some operations should be done as, trying in order:
     * the saved worker-user file, the assignment's owner, the local JVM user (local
     * mode only), and finally the owner of the worker artifacts root directory.
     *
     * @return the resolved user name
     *
     * @throws IOException on any error
     * @throws IllegalStateException if no source can supply the user
     */
    protected String getWorkerUser() throws IOException {
        LOG.info("GET worker-user for {}", _workerId);
        File file = new File(ConfigUtils.workerUserFile(_conf, _workerId));
        if (_ops.fileExists(file)) {
            return _ops.slurpString(file).trim();
        } else if (_assignment != null && _assignment.is_set_owner()) {
            return _assignment.get_owner();
        }
        if (ConfigUtils.isLocalMode(_conf)) {
            return System.getProperty("user.name");
        } else {
            // Last resort: recover the user from file-system ownership of the artifacts root.
            File f = new File(ConfigUtils.workerArtifactsRoot(_conf));
            if (f.exists()) {
                return Files.getOwner(f.toPath()).getName();
            }
            throw new IllegalStateException("Could not recover the user for " + _workerId);
        }
    }
    /**
     * Returns the user that the worker process is running as.
     *
     * The default behavior is to launch the worker as the user supervisor is running as (e.g. 'storm').
     * Subclasses that launch workers under a different account should override this.
     *
     * @return the user that the worker process is running as.
     */
    protected String getRunWorkerAsUser() {
        return System.getProperty("user.name");
    }
    /**
     * Persist the worker's user to the worker-user file so it can be recovered later.
     *
     * @param user the user to record for this worker
     * @throws IOException on any error writing the file
     */
    protected void saveWorkerUser(String user) throws IOException {
        _type.assertFull();
        LOG.info("SET worker-user {} {}", _workerId, user);
        _ops.dump(new File(ConfigUtils.workerUserFile(_conf, _workerId)), user);
    }
    /**
     * Remove the saved worker-user file, if present.
     *
     * @throws IOException on any error deleting the file
     */
    protected void deleteSavedWorkerUser() throws IOException {
        LOG.info("REMOVE worker-user {}", _workerId);
        _ops.deleteIfExists(new File(ConfigUtils.workerUserFile(_conf, _workerId)));
    }
    /**
     * Clean up the container partly preparing for restart. By default delete all of the temp directories we are going to get a new
     * worker_id anyways. POST CONDITION: the workerId will be set to null
     *
     * @throws IOException on any error
     */
    public void cleanUpForRestart() throws IOException {
        LOG.info("Cleaning up {}:{}", _supervisorId, _workerId);
        Set<Long> pids = getAllPids();
        String user = getWorkerUser();
        // Remove per-pid files first; the directories below cannot be removed while they exist.
        for (Long pid : pids) {
            File path = new File(ConfigUtils.workerPidPath(_conf, _workerId, pid));
            _ops.deleteIfExists(path, user, _workerId);
        }
        //clean up for resource isolation if enabled
        if (_resourceIsolationManager != null) {
            _resourceIsolationManager.releaseResourcesForWorker(_workerId);
        }
        //Always make sure to clean up everything else before worker directory
        //is removed since that is what is going to trigger the retry for cleanup
        _ops.deleteIfExists(new File(ConfigUtils.workerHeartbeatsRoot(_conf, _workerId)), user, _workerId);
        _ops.deleteIfExists(new File(ConfigUtils.workerPidsRoot(_conf, _workerId)), user, _workerId);
        _ops.deleteIfExists(new File(ConfigUtils.workerTmpRoot(_conf, _workerId)), user, _workerId);
        _ops.deleteIfExists(new File(ConfigUtils.workerRoot(_conf, _workerId)), user, _workerId);
        deleteSavedWorkerUser();
        // Post-condition: this container no longer has a worker id.
        _workerId = null;
    }
    /**
     * Check if the container is over its memory limit AND needs to be killed. This does not necessarily mean that it just went over the
     * limit. The base implementation only refreshes the memory accounting and never
     * reports a violation; subclasses that enforce limits override this.
     *
     * @param withUpdatedLimits the assignment carrying any updated memory limits (unused here)
     * @return false in this base implementation
     * @throws IOException on any error
     */
    public boolean isMemoryLimitViolated(LocalAssignment withUpdatedLimits) throws IOException {
        updateMemoryAccounting();
        return false;
    }
protected void updateMemoryAccounting() {
_type.assertFull();
long used = getMemoryUsageMb();
long reserved = getMemoryReservationMb();
_usedMemory.put(_port, new TopoAndMemory(_topologyId, used));
_reservedMemory.put(_port, new TopoAndMemory(_topologyId, reserved));
}
/**
* Get the total memory used (on and off heap).
*/
public long getTotalTopologyMemoryUsed() {
updateMemoryAccounting();
return _usedMemory
.values()
.stream()
.filter((topoAndMem) -> _topologyId.equals(topoAndMem.topoId))
.mapToLong((topoAndMem) -> topoAndMem.memory)
.sum();
}
/**
* Get the total memory reserved.
*
* @param withUpdatedLimits the local assignment with shared memory
* @return the total memory reserved.
*/
public long getTotalTopologyMemoryReserved(LocalAssignment withUpdatedLimits) {
updateMemoryAccounting();
long ret =
_reservedMemory
.values()
.stream()
.filter((topoAndMem) -> _topologyId.equals(topoAndMem.topoId))
.mapToLong((topoAndMem) -> topoAndMem.memory)
.sum();
if (withUpdatedLimits.is_set_total_node_shared()) {
ret += withUpdatedLimits.get_total_node_shared();
}
return ret;
}
/**
* Get the number of workers for this topology.
*/
public long getTotalWorkersForThisTopology() {
return _usedMemory
.values()
.stream()
.filter((topoAndMem) -> _topologyId.equals(topoAndMem.topoId))
.count();
}
    /**
     * Get the current memory usage of this container in MB.
     * The base implementation cannot measure usage and returns 0; subclasses override.
     */
    public long getMemoryUsageMb() {
        return 0;
    }
    /**
     * Get the current memory reservation of this container in MB.
     * The base implementation cannot measure reservations and returns 0; subclasses override.
     */
    public long getMemoryReservationMb() {
        return 0;
    }
    /**
     * Launch the process for the first time. PREREQUISITE: setup has run and passed
     *
     * @throws IOException on any error
     */
    public abstract void launch() throws IOException;
    /**
     * Restart the processes in this container. PREREQUISITE: cleanUpForRestart has run and passed
     *
     * @throws IOException on any error
     */
    public abstract void relaunch() throws IOException;
    /**
     * Return true if the main process exited, else false. This is just best effort return false if unknown.
     */
    public abstract boolean didMainProcessExit();
    /**
     * Run a profiling request against the worker in this container.
     *
     * @param request the request to run
     * @param stop is this a stop request?
     * @return true if it succeeded, else false
     *
     * @throws IOException on any error
     * @throws InterruptedException if running the command is interrupted.
     */
    public abstract boolean runProfiling(ProfileRequest request, boolean stop) throws IOException, InterruptedException;
    /**
     * Get the id of the container or null if there is no worker id right now
     * (e.g. after {@link #cleanUpForRestart()} has run).
     */
    public String getWorkerId() {
        return _workerId;
    }
    /**
     * Send worker memory metrics to Nimbus via the given processor, rate-limited to at
     * most once per minute. Any failure is logged and swallowed so metric reporting can
     * never take the supervisor down.
     */
    void processMetrics(OnlyLatestExecutor<Integer> exec, WorkerMetricsProcessor processor) {
        try {
            // Only report when there is memory accounting for this port.
            if (_usedMemory.get(_port) != null) {
                // Make sure we don't process too frequently.
                long nextMetricProcessTime = this.lastMetricProcessTime + 60L * 1000L;
                long currentTimeMsec = System.currentTimeMillis();
                if (currentTimeMsec < nextMetricProcessTime) {
                    return;
                }
                String hostname = Utils.hostname();
                // create metric for memory
                long timestamp = System.currentTimeMillis();
                double value = _usedMemory.get(_port).memory;
                WorkerMetricPoint workerMetric = new WorkerMetricPoint(MEMORY_USED_METRIC, timestamp, value, SYSTEM_COMPONENT_ID,
                                                                      INVALID_EXECUTOR_ID, INVALID_STREAM_ID);
                WorkerMetricList metricList = new WorkerMetricList();
                metricList.add_to_metrics(workerMetric);
                WorkerMetrics metrics = new WorkerMetrics(_topologyId, _port, hostname, metricList);
                // Hand off asynchronously; the executor keeps only the latest task per port.
                exec.execute(_port, () -> {
                    try {
                        processor.processWorkerMetrics(_conf, metrics);
                    } catch (MetricException e) {
                        LOG.error("Failed to process metrics", e);
                    }
                });
            }
        } catch (Exception e) {
            LOG.error("Failed to process metrics", e);
        } finally {
            // Record the attempt time even on failure so we keep the once-a-minute cadence.
            this.lastMetricProcessTime = System.currentTimeMillis();
        }
    }
public static enum ContainerType {
LAUNCH(false, false),
RECOVER_FULL(true, false),
RECOVER_PARTIAL(true, true);
private final boolean _recovery;
private final boolean _onlyKillable;
ContainerType(boolean recovery, boolean onlyKillable) {
_recovery = recovery;
_onlyKillable = onlyKillable;
}
public boolean isRecovery() {
return _recovery;
}
public void assertFull() {
if (_onlyKillable) {
throw new IllegalStateException("Container is only Killable.");
}
}
public boolean isOnlyKillable() {
return _onlyKillable;
}
}
private static class TopoAndMemory {
public final String topoId;
public final long memory;
public TopoAndMemory(String id, long mem) {
topoId = id;
memory = mem;
}
@Override
public String toString() {
return "{TOPO: " + topoId + " at " + memory + " MB}";
}
}
}
| |
/*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehcache.internal.store.disk;
import org.ehcache.Cache;
import org.ehcache.CacheConfigurationChangeEvent;
import org.ehcache.CacheConfigurationChangeListener;
import org.ehcache.CacheConfigurationProperty;
import org.ehcache.config.Eviction;
import org.ehcache.config.EvictionPrioritizer;
import org.ehcache.config.ResourcePool;
import org.ehcache.config.ResourcePools;
import org.ehcache.config.ResourceType;
import org.ehcache.config.units.EntryUnit;
import org.ehcache.events.StoreEventListener;
import org.ehcache.exceptions.CacheAccessException;
import org.ehcache.exceptions.CachePersistenceException;
import org.ehcache.expiry.Duration;
import org.ehcache.expiry.Expiry;
import org.ehcache.function.BiFunction;
import org.ehcache.function.Function;
import org.ehcache.function.NullaryFunction;
import org.ehcache.function.Predicate;
import org.ehcache.function.Predicates;
import org.ehcache.internal.SystemTimeSource;
import org.ehcache.internal.TimeSource;
import org.ehcache.internal.TimeSourceConfiguration;
import org.ehcache.spi.cache.CacheStoreHelper;
import org.ehcache.spi.cache.tiering.AuthoritativeTier;
import org.ehcache.internal.store.disk.DiskStorageFactory.Element;
import org.ehcache.spi.ServiceProvider;
import org.ehcache.spi.cache.Store;
import org.ehcache.spi.serialization.SerializationProvider;
import org.ehcache.spi.serialization.Serializer;
import org.ehcache.spi.service.FileBasedPersistenceContext;
import org.ehcache.spi.service.LocalPersistenceService;
import org.ehcache.spi.service.ServiceConfiguration;
import org.ehcache.spi.service.SupplementaryService;
import org.ehcache.statistics.StoreOperationOutcomes;
import org.ehcache.util.ConcurrentWeakIdentityHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terracotta.statistics.observer.OperationObserver;
import java.io.FileNotFoundException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import static org.ehcache.spi.ServiceLocator.findSingletonAmongst;
import static org.terracotta.statistics.StatisticBuilder.operation;
/**
 * Implements a persistent-to-disk store.
 * <p>
 * All new elements are automatically scheduled for writing to disk.
 *
 * @author Ludovic Orban
 */
public class DiskStore<K, V> implements AuthoritativeTier<K, V> {
    private static final Logger LOG = LoggerFactory.getLogger(DiskStore.class);
    // Eviction sampling tuning knobs (candidates attempted per eviction pass).
    private static final int ATTEMPT_RATIO = 4;
    private static final int EVICTION_RATIO = 2;
    private static final int DEFAULT_SEGMENT_COUNT = 16;
    private static final int DEFAULT_QUEUE_CAPACITY = 16;
    private static final int DEFAULT_EXPIRY_THREAD_INTERVAL = 30000;
    private final Class<K> keyType;
    private final Class<V> valueType;
    private final TimeSource timeSource;
    private final Expiry<? super K, ? super V> expiry;
    private final Serializer<Element> elementSerializer;
    private final Serializer<Serializable> indexSerializer;
    private final FileBasedPersistenceContext persistenceContext;
    // Maximum number of entries; volatile because the config listener below can update it at runtime.
    private volatile long capacity;
    private final Predicate<DiskStorageFactory.DiskSubstitute<K, V>> evictionVeto;
    private final Comparator<DiskStorageFactory.DiskSubstitute<K, V>> evictionPrioritizer;
    private final Random random = new Random();
    private final AtomicLong idSequence = new AtomicLong(Long.MIN_VALUE);
    private volatile DiskStorageFactory<K, V> diskStorageFactory;
    private volatile Segment<K, V>[] segments;
    private volatile int segmentShift;
    // Applies runtime disk-pool size changes from the cache configuration to this store's capacity.
    private final CacheConfigurationChangeListener configurationListener = new CacheConfigurationChangeListener() {
        @Override
        public void cacheConfigurationChange(CacheConfigurationChangeEvent event) {
            if(event.getProperty().equals(CacheConfigurationProperty.UPDATESIZE)) {
                ResourcePools updatedPools = (ResourcePools)event.getNewValue();
                ResourcePools configuredPools = (ResourcePools)event.getOldValue();
                if(updatedPools.getPoolForResource(ResourceType.Core.DISK).getSize() !=
                   configuredPools.getPoolForResource(ResourceType.Core.DISK).getSize()) {
                    LOG.info("Setting size: " + updatedPools.getPoolForResource(ResourceType.Core.DISK).getSize());
                    capacity = updatedPools.getPoolForResource(ResourceType.Core.DISK).getSize();
                }
            }
        }
    };
    // Statistics observer for eviction outcomes, tagged for the disk tier.
    private final OperationObserver<StoreOperationOutcomes.EvictionOutcome> evictionObserver = operation(StoreOperationOutcomes.EvictionOutcome.class).named("eviction").of(this).tag("disk-store").build();
    /**
     * Create a disk store from the given configuration.
     * Requires a 'disk' resource pool sized in entries; falls back to LRU prioritization
     * when the configuration supplies none.
     */
    public DiskStore(final Configuration<K, V> config, FileBasedPersistenceContext persistenceContext, TimeSource timeSource, Serializer<Element> elementSerializer, Serializer<Serializable> indexSerializer) {
        this.persistenceContext = persistenceContext;
        ResourcePool diskPool = config.getResourcePools().getPoolForResource(ResourceType.Core.DISK);
        if (diskPool == null) {
            throw new IllegalArgumentException("Disk store must be configured with a resource of type 'disk'");
        }
        if (!diskPool.getUnit().equals(EntryUnit.ENTRIES)) {
            throw new IllegalArgumentException("Disk store only handles resource unit 'entries'");
        }
        this.capacity = diskPool.getSize();
        EvictionPrioritizer<? super K, ? super V> prioritizer = config.getEvictionPrioritizer();
        if (prioritizer == null) {
            prioritizer = Eviction.Prioritizer.LRU;
        }
        // Raw-type casts adapt the Cache.Entry-based veto/prioritizer to DiskSubstitute form.
        this.evictionVeto = wrap((Predicate) config.getEvictionVeto());
        this.evictionPrioritizer = (Comparator) wrap((Comparator) prioritizer);
        this.keyType = config.getKeyType();
        this.valueType = config.getValueType();
        this.timeSource = timeSource;
        this.expiry = config.getExpiry();
        this.elementSerializer = elementSerializer;
        this.indexSerializer = indexSerializer;
    }
private Predicate<DiskStorageFactory.DiskSubstitute<K, V>> wrap(final Predicate<Cache.Entry<K, V>> predicate) {
if (predicate == null) {
return Predicates.none();
} else {
return new Predicate<DiskStorageFactory.DiskSubstitute<K, V>>() {
@Override
public boolean test(DiskStorageFactory.DiskSubstitute<K, V> argument) {
return predicate.test(wrap(argument));
}
};
}
}
    /**
     * Materialize a DiskSubstitute into a Cache.Entry view by re-reading the key's
     * current element from its segment (without faulting). The entry's value may be
     * null if the mapping has disappeared in the meantime.
     */
    private Cache.Entry<K, V> wrap(final DiskStorageFactory.DiskSubstitute<K, V> value) {
        K key = value.getKey();
        int hash = hash(key.hashCode());
        DiskStorageFactory.Element<K, V> element = segmentFor(hash).get(key, hash, false);
        final ValueHolder<V> vh = element == null ? null : element.getValueHolder();
        return CacheStoreHelper.cacheEntry(key, vh);
    }
    /**
     * Adapt a Cache.Entry comparator (the eviction prioritizer) into a comparator over
     * DiskSubstitutes by materializing each side into an entry view first.
     */
    private Comparator<DiskStorageFactory.DiskSubstitute<K, V>> wrap(final Comparator<Cache.Entry<K, V>> comparator) {
        return new Comparator<DiskStorageFactory.DiskSubstitute<K, V>>() {
            @Override
            public int compare(DiskStorageFactory.DiskSubstitute<K, V> t, DiskStorageFactory.DiskSubstitute<K, V> u) {
                return comparator.compare(wrap(t), wrap(u));
            }
        };
    }
private void checkKey(K keyObject) {
if (keyObject == null) {
throw new NullPointerException();
}
if (!keyType.isAssignableFrom(keyObject.getClass())) {
throw new ClassCastException("Invalid key type, expected : " + keyType.getName() + " but was : " + keyObject.getClass().getName());
}
}
private void checkValue(V valueObject) {
if (valueObject == null) {
throw new NullPointerException();
}
if (!valueType.isAssignableFrom(valueObject.getClass())) {
throw new ClassCastException("Invalid value type, expected : " + valueType.getName() + " but was : " + valueObject.getClass().getName());
}
}
private int size() {
int size = 0;
for (Segment<K, V> segment : segments) {
size += segment.count;
}
return size;
}
private static int hash(int hash) {
int spread = hash;
spread += (spread << 15 ^ 0xFFFFCD7D);
spread ^= spread >>> 10;
spread += (spread << 3);
spread ^= spread >>> 6;
spread += (spread << 2) + (spread << 14);
return (spread ^ spread >>> 16);
}
    // Select the segment for a spread hash using its high-order bits.
    private Segment<K, V> segmentFor(int hash) {
        return segments[hash >>> segmentShift];
    }
    /** Look up a mapping without marking it as faulted into a caching tier. */
    @Override
    public ValueHolder<V> get(K key) throws CacheAccessException {
        return internalGetAndFault(key, false);
    }
    /**
     * Shared implementation of get()/getAndFault(): atomically (per-segment) look up the
     * mapping, drop it if it has expired, otherwise refresh its access time/expiry and
     * optionally mark it faulted so the segment pins it for the caching tier.
     *
     * @param key the key to look up
     * @param markFaulted whether to flag the mapping as faulted into a higher tier
     * @return the value holder, or null when absent or expired
     */
    ValueHolder<V> internalGetAndFault(final K key, boolean markFaulted) throws CacheAccessException {
        checkKey(key);
        int hash = hash(key.hashCode());
        DiskStorageFactory.Element<K, V> existingElement = segmentFor(hash).compute(key, hash, new BiFunction<K, DiskStorageFactory.Element<K, V>, DiskStorageFactory.Element<K, V>>() {
            @Override
            public DiskStorageFactory.Element<K, V> apply(K mappedKey, DiskStorageFactory.Element<K, V> mappedValue) {
                final long now = timeSource.getTimeMillis();
                // Returning null from the compute function removes the expired mapping.
                if (mappedValue.isExpired(now)) {
                    return null;
                }
                setAccessTimeAndExpiry(key, mappedValue, now);
                return mappedValue;
            }
        }, Segment.Compute.IF_PRESENT, true, markFaulted);
        return existingElement == null ? null : existingElement.getValueHolder();
    }
@Override
public boolean containsKey(K key) throws CacheAccessException {
checkKey(key);
int hash = hash(key.hashCode());
return segmentFor(hash).containsKey(key, hash);
}
    /**
     * Create or replace the mapping for a key. An expired existing mapping is treated
     * as absent. When the put actually adds a new entry, capacity is enforced and may
     * trigger evictions.
     */
    @Override
    public void put(final K key, final V value) throws CacheAccessException {
        checkKey(key);
        checkValue(value);
        int hash = hash(key.hashCode());
        final long now = timeSource.getTimeMillis();
        final AtomicBoolean entryActuallyAdded = new AtomicBoolean();
        segmentFor(hash).compute(key, hash, new BiFunction<K, DiskStorageFactory.Element<K, V>, DiskStorageFactory.Element<K, V>>() {
            @Override
            public DiskStorageFactory.Element<K, V> apply(K mappedKey, DiskStorageFactory.Element<K, V> mappedValue) {
                entryActuallyAdded.set(mappedValue == null);
                if (mappedValue != null && mappedValue.isExpired(now)) {
                    mappedValue = null;
                }
                if (mappedValue == null) {
                    return newCreateValueHolder(key, value, now);
                } else {
                    return newUpdateValueHolder(key, mappedValue, value, now);
                }
            }
        }, Segment.Compute.ALWAYS, false, false);
        if (entryActuallyAdded.get()) {
            enforceCapacity(1);
        }
    }
    /**
     * Install a mapping only if none exists (expired mappings count as absent).
     * Returns the previously mapped value holder, or null if the put won and the new
     * mapping was installed.
     */
    @Override
    public ValueHolder<V> putIfAbsent(final K key, final V value) throws CacheAccessException {
        checkKey(key);
        checkValue(value);
        int hash = hash(key.hashCode());
        final long now = timeSource.getTimeMillis();
        final AtomicReference<ValueHolder<V>> returnValue = new AtomicReference<ValueHolder<V>>(null);
        segmentFor(hash).compute(key, hash, new BiFunction<K, DiskStorageFactory.Element<K, V>, DiskStorageFactory.Element<K, V>>() {
            @Override
            public DiskStorageFactory.Element<K, V> apply(K mappedKey, DiskStorageFactory.Element<K, V> mappedValue) {
                if (mappedValue == null || mappedValue.isExpired(now)) {
                    return newCreateValueHolder(key, value, now);
                }
                // Existing live mapping: report it and refresh its access metadata.
                returnValue.set(mappedValue.getValueHolder());
                setAccessTimeAndExpiry(key, mappedValue, now);
                return mappedValue;
            }
        }, Segment.Compute.ALWAYS, false, false);
        return returnValue.get();
    }
@Override
public void remove(K key) throws CacheAccessException {
checkKey(key);
int hash = hash(key.hashCode());
segmentFor(hash).remove(key, hash, null);
}
    /**
     * Conditionally remove the mapping only when it currently maps to {@code value}.
     * An expired mapping is simply dropped and the removal is reported as unsuccessful.
     *
     * @return true if the (key, value) pair was removed
     */
    @Override
    public boolean remove(final K key, final V value) throws CacheAccessException {
        checkKey(key);
        checkValue(value);
        int hash = hash(key.hashCode());
        final AtomicBoolean removed = new AtomicBoolean(false);
        segmentFor(hash).compute(key, hash, new BiFunction<K, DiskStorageFactory.Element<K, V>, DiskStorageFactory.Element<K, V>>() {
            @Override
            public DiskStorageFactory.Element<K, V> apply(K mappedKey, DiskStorageFactory.Element<K, V> mappedValue) {
                final long now = timeSource.getTimeMillis();
                if (mappedValue.isExpired(now)) {
                    return null;
                } else if (value.equals(mappedValue.getValueHolder().value())) {
                    removed.set(true);
                    return null;
                } else {
                    // Value mismatch: keep the mapping but refresh its access metadata.
                    setAccessTimeAndExpiry(key, mappedValue, now);
                    return mappedValue;
                }
            }
        }, Segment.Compute.IF_PRESENT, false, false);
        return removed.get();
    }
    /**
     * Replace the mapping for a key only if one currently exists and is not expired.
     *
     * @return the previous value holder, or null if there was nothing live to replace
     */
    @Override
    public ValueHolder<V> replace(final K key, final V value) throws CacheAccessException {
        checkKey(key);
        checkValue(value);
        int hash = hash(key.hashCode());
        final AtomicReference<ValueHolder<V>> returnValue = new AtomicReference<ValueHolder<V>>(null);
        segmentFor(hash).compute(key, hash, new BiFunction<K, DiskStorageFactory.Element<K, V>, DiskStorageFactory.Element<K, V>>() {
            @Override
            public DiskStorageFactory.Element<K, V> apply(K mappedKey, DiskStorageFactory.Element<K, V> mappedValue) {
                final long now = timeSource.getTimeMillis();
                if (mappedValue.isExpired(now)) {
                    return null;
                } else {
                    returnValue.set(mappedValue.getValueHolder());
                    return newUpdateValueHolder(key, mappedValue, value, now);
                }
            }
        }, Segment.Compute.IF_PRESENT, false, false);
        return returnValue.get();
    }
    /**
     * Replace the mapping for a key only when it currently maps to {@code oldValue}.
     * An expired mapping is dropped and the replace reported as unsuccessful.
     *
     * @return true if the value was replaced
     */
    @Override
    public boolean replace(final K key, final V oldValue, final V newValue) throws CacheAccessException {
        checkKey(key);
        checkValue(oldValue);
        checkValue(newValue);
        int hash = hash(key.hashCode());
        final AtomicBoolean returnValue = new AtomicBoolean(false);
        segmentFor(hash).compute(key, hash, new BiFunction<K, DiskStorageFactory.Element<K, V>, DiskStorageFactory.Element<K, V>>() {
            @Override
            public DiskStorageFactory.Element<K, V> apply(K mappedKey, DiskStorageFactory.Element<K, V> mappedValue) {
                final long now = timeSource.getTimeMillis();
                if (mappedValue.isExpired(now)) {
                    return null;
                } else if (oldValue.equals(mappedValue.getValueHolder().value())) {
                    returnValue.set(true);
                    return newUpdateValueHolder(key, mappedValue, newValue, now);
                } else {
                    // Old-value mismatch: keep the mapping but refresh its access metadata.
                    setAccessTimeAndExpiry(key, mappedValue, now);
                    return mappedValue;
                }
            }
        }, Segment.Compute.IF_PRESENT, false, false);
        return returnValue.get();
    }
    /** Remove every mapping in the store. */
    @Override
    public void clear() throws CacheAccessException {
        internalClear();
    }
void internalClear() {
if (segments != null) {
for (Segment s : segments) {
s.clear();
}
}
}
    /** Event notification is not implemented for the disk tier yet. */
    @Override
    public void enableStoreEventNotifications(StoreEventListener<K, V> listener) {
        //todo: events are missing
    }
    /** Event notification is not implemented for the disk tier yet. */
    @Override
    public void disableStoreEventNotifications() {
        //todo: events are missing
    }
@Override
// Returns a fresh iterator over the live (non-expired) entries of this store.
public Iterator<Cache.Entry<K, ValueHolder<V>>> iterator() throws CacheAccessException {
    return new DiskStoreIterator();
}
@Override
// Like get(), but marks the entry as faulted (pinned) in its segment.
public ValueHolder<V> getAndFault(K key) throws CacheAccessException {
    return internalGetAndFault(key, true);
}
@Override
// Like computeIfAbsent(), but faults (pins) the resulting entry in its segment.
public ValueHolder<V> computeIfAbsentAndFault(K key, Function<? super K, ? extends V> mappingFunction) throws CacheAccessException {
    return internalComputeIfAbsent(key, mappingFunction, true);
}
@Override
// Flushes a value holder (handed back by a caching tier) for the given key.
// NOTE(review): DiskValueHolder instances originate in this authoritative tier, so
// receiving one here means the caller passed the wrong holder — hence the rejection.
public boolean flush(K key, ValueHolder<V> valueHolder) {
    if (valueHolder instanceof DiskStorageFactory.DiskValueHolder) {
        throw new IllegalArgumentException("Value holder must be of a class coming from the caching tier");
    }
    int hash = hash(key.hashCode());
    return segmentFor(hash).flush(key, hash, valueHolder);
}
/**
 * Iterator over the store's live entries. Wraps a {@link DiskSubstituteIterator}
 * and, for each substitute, re-reads the element through its segment so only
 * entries that are still present (and decodable) are surfaced. Uses one-element
 * look-ahead: {@code next} always holds the element to return next, or null at end.
 */
class DiskStoreIterator implements Iterator<Cache.Entry<K, ValueHolder<V>>> {
    private final DiskSubstituteIterator diskSubstituteIterator = new DiskSubstituteIterator();
    private DiskStorageFactory.Element<K, V> next;
    DiskStoreIterator() {
        advance();
    }
    // Advances the look-ahead to the next substitute whose segment lookup still succeeds.
    private void advance() {
        next = null;
        while (diskSubstituteIterator.hasNext()) {
            DiskStorageFactory.DiskSubstitute<K, V> nextSubstitute = diskSubstituteIterator.next();
            final K key = nextSubstitute.getKey();
            int hash = hash(key.hashCode());
            // 'false' presumably means: do not fault the entry in — TODO confirm against Segment.get.
            next = segmentFor(hash).get(key, hash, false);
            if (next != null) {
                break;
            }
        }
    }
    @Override
    public boolean hasNext() throws CacheAccessException {
        return next != null;
    }
    @Override
    public Cache.Entry<K, ValueHolder<V>> next() throws CacheAccessException {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }
        DiskStorageFactory.Element<K, V> element = next;
        advance();
        // Snapshot key/holder so the returned entry is stable even after further iteration.
        final K key = element.getKey();
        final ValueHolder<V> valueHolder = element.getValueHolder();
        return new Cache.Entry<K, ValueHolder<V>>() {
            @Override
            public K getKey() {
                return key;
            }
            @Override
            public ValueHolder<V> getValue() {
                return valueHolder;
            }
            @Override
            public long getCreationTime(TimeUnit unit) {
                return valueHolder == null ? 0 : valueHolder.creationTime(unit);
            }
            @Override
            public long getLastAccessTime(TimeUnit unit) {
                return valueHolder == null ? 0 : valueHolder.lastAccessTime(unit);
            }
            @Override
            public float getHitRate(TimeUnit unit) {
                return valueHolder == null ? 0 : valueHolder.hitRate(unit);
            }
        };
    }
}
// Shared constant-true predicate: the default replaceEqual policy, meaning
// "always write back even when the computed value equals the existing one".
private static final NullaryFunction<Boolean> REPLACE_EQUALS_TRUE = new NullaryFunction<Boolean>() {
    @Override
    public Boolean apply() {
        return Boolean.TRUE;
    }
};
/**
 * Null-safe equality check: true when both references are identical (including
 * both null), or when {@code o1.equals(o2)}.
 */
private static boolean eq(Object o1, Object o2) {
    if (o1 == o2) {
        return true;
    }
    return o1 != null && o1.equals(o2);
}
// Records an access on the element's value holder and re-evaluates its
// expiry using the access-based expiry policy.
private void setAccessTimeAndExpiry(K key, DiskStorageFactory.Element<K, V> element, long now) {
    DiskStorageFactory.DiskValueHolder<V> valueHolder = element.getValueHolder();
    valueHolder.accessed(now, expiry.getExpiryForAccess(key, valueHolder.value()));
}
/**
 * Computes {@code now + duration} in the disk value holder's time unit,
 * saturating at {@link Long#MAX_VALUE} when the duration is unbounded or the
 * addition overflows.
 */
private static long safeExpireTime(long now, Duration duration) {
    final long millis = DiskStorageFactory.DiskValueHolder.TIME_UNIT.convert(duration.getAmount(), duration.getTimeUnit());
    if (millis == Long.MAX_VALUE) {
        // Unbounded duration: never expires.
        return Long.MAX_VALUE;
    }
    final long expiry = now + millis;
    // A negative sum of two non-negative longs means overflow; saturate instead of wrapping.
    return expiry < 0 ? Long.MAX_VALUE : expiry;
}
/**
 * Builds the replacement element for an update of an existing mapping, applying
 * the update-expiry policy.
 *
 * Returns null (i.e. "remove the mapping") when the policy yields Duration.ZERO;
 * a null duration means "keep the previous expiration time" and a forever
 * duration means "never expire".
 */
private DiskStorageFactory.Element<K, V> newUpdateValueHolder(K key, DiskStorageFactory.Element<K, V> oldValue, V newValue, long now) {
    if (oldValue == null || newValue == null) {
        throw new NullPointerException();
    }
    Duration duration = expiry.getExpiryForUpdate(key, oldValue.getValueHolder().value(), newValue);
    if (Duration.ZERO.equals(duration)) {
        // Immediate expiry: the update removes the mapping.
        return null;
    }
    if (duration == null) {
        // No opinion from the policy: carry over the old expiration time.
        return new DiskStorageFactory.ElementImpl<K, V>(idSequence.getAndIncrement(), key, newValue, now, oldValue.getValueHolder().expirationTime(DiskStorageFactory.DiskValueHolder.TIME_UNIT));
    } else {
        if (duration.isForever()) {
            return new DiskStorageFactory.ElementImpl<K, V>(idSequence.getAndIncrement(), key, newValue, now, DiskStorageFactory.DiskValueHolder.NO_EXPIRE);
        } else {
            return new DiskStorageFactory.ElementImpl<K, V>(idSequence.getAndIncrement(), key, newValue, now, safeExpireTime(now, duration));
        }
    }
}
/**
 * Builds the element for a brand-new mapping, applying the creation-expiry policy.
 * Returns null (i.e. "do not create the mapping") when the policy yields Duration.ZERO.
 *
 * NOTE(review): unlike newUpdateValueHolder, a null duration is not handled here and
 * would NPE at duration.isForever() — presumably getExpiryForCreation never returns
 * null; confirm against the expiry policy contract.
 */
private DiskStorageFactory.Element<K, V> newCreateValueHolder(K key, V value, long now) {
    if (value == null) {
        throw new NullPointerException();
    }
    Duration duration = expiry.getExpiryForCreation(key, value);
    if (Duration.ZERO.equals(duration)) {
        return null;
    }
    if (duration.isForever()) {
        return new DiskStorageFactory.ElementImpl<K, V>(idSequence.getAndIncrement(), key, value, now, DiskStorageFactory.DiskValueHolder.NO_EXPIRE);
    } else {
        return new DiskStorageFactory.ElementImpl<K, V>(idSequence.getAndIncrement(), key, value, now, safeExpireTime(now, duration));
    }
}
/**
 * Runs a capacity-enforcement pass when the compute actually produced a mapping,
 * then unwraps the element's value holder ({@code null} when nothing was mapped).
 */
ValueHolder<V> enforceCapacityIfValueNotNull(final DiskStorageFactory.Element<K, V> computeResult) {
    if (computeResult == null) {
        return null;
    }
    enforceCapacity(1);
    return computeResult.getValueHolder();
}
// Best-effort eviction toward capacity after 'delta' insertions. The loop is
// bounded both by attempts (ATTEMPT_RATIO * delta) and by successful evictions
// (EVICTION_RATIO * delta) so a single caller never evicts unboundedly, and it
// stops early once size() is back within capacity.
void enforceCapacity(int delta) {
    for (int attempts = 0, evicted = 0; attempts < ATTEMPT_RATIO * delta && evicted < EVICTION_RATIO * delta
            && capacity < size(); attempts++) {
        evicted += diskStorageFactory.evict(1);
    }
}
// Eviction callback from the storage factory; removal logic is shared with expire().
DiskStorageFactory.Element<K, V> evict(K key, DiskStorageFactory.DiskSubstitute<K, V> diskSubstitute) {
    return evictElement(key, diskSubstitute);
}
// Expiry callback from the storage factory; removal logic is shared with evict().
DiskStorageFactory.Element<K, V> expire(K key, DiskStorageFactory.DiskSubstitute<K, V> diskSubstitute) {
    return evictElement(key, diskSubstitute);
}
// Removes the given substitute from its segment; returns the evicted element, if any.
private DiskStorageFactory.Element<K, V> evictElement(K key, DiskStorageFactory.DiskSubstitute<K, V> diskSubstitute) {
    int hash = hash(key.hashCode());
    return segmentFor(hash).evict(key, hash, diskSubstitute);
}
@Override
// Two-arg compute: delegates with the default "always replace equal values" policy.
public ValueHolder<V> compute(K key, BiFunction<? super K, ? super V, ? extends V> mappingFunction) throws CacheAccessException {
    return compute(key, mappingFunction, REPLACE_EQUALS_TRUE);
}
@Override
/**
 * Computes a new mapping for {@code key} from the existing (live) value. A null
 * computed value removes the mapping; when the computed value equals the existing
 * one and {@code replaceEqual} says not to rewrite, the entry is kept and only
 * its access time/expiry is refreshed.
 */
public ValueHolder<V> compute(final K key, final BiFunction<? super K, ? super V, ? extends V> mappingFunction, final NullaryFunction<Boolean> replaceEqual) throws CacheAccessException {
    checkKey(key);
    int hash = hash(key.hashCode());
    final long now = timeSource.getTimeMillis();
    BiFunction<K, DiskStorageFactory.Element<K, V>, DiskStorageFactory.Element<K, V>> biFunction = new BiFunction<K, DiskStorageFactory.Element<K, V>, DiskStorageFactory.Element<K, V>>() {
        @Override
        public DiskStorageFactory.Element<K, V> apply(K mappedKey, DiskStorageFactory.Element<K, V> mappedElement) {
            // An expired element is treated exactly like an absent one.
            if (mappedElement != null && mappedElement.isExpired(now)) {
                mappedElement = null;
            }
            V existingValue = mappedElement == null ? null : mappedElement.getValueHolder().value();
            V computedValue = mappingFunction.apply(mappedKey, existingValue);
            if (computedValue == null) {
                // Null result removes the mapping.
                return null;
            } else if ((eq(existingValue, computedValue)) && (!replaceEqual.apply())) {
                // Same value and caller opted out of rewriting: keep entry, refresh access.
                if (mappedElement != null) {
                    setAccessTimeAndExpiry(key, mappedElement, now);
                }
                return mappedElement;
            }
            checkValue(computedValue);
            if (mappedElement != null) {
                return newUpdateValueHolder(key, mappedElement, computedValue, now);
            } else {
                return newCreateValueHolder(key, computedValue, now);
            }
        }
    };
    DiskStorageFactory.Element<K, V> computedElement = segmentFor(hash).compute(key, hash, biFunction, Segment.Compute.ALWAYS, false, false);
    // A new mapping may push us over capacity — enforce before returning.
    return enforceCapacityIfValueNotNull(computedElement);
}
@Override
// Public computeIfAbsent: delegates without faulting the resulting entry.
public ValueHolder<V> computeIfAbsent(final K key, final Function<? super K, ? extends V> mappingFunction) throws CacheAccessException {
    return internalComputeIfAbsent(key, mappingFunction, false);
}
/**
 * Shared implementation of computeIfAbsent. When no live mapping exists (absent
 * or expired) the mapping function is consulted; a null result leaves the store
 * unchanged. An existing live mapping is returned with refreshed access metadata.
 *
 * @param fault whether the resulting entry should be faulted (pinned) in its segment
 */
private ValueHolder<V> internalComputeIfAbsent(final K key, final Function<? super K, ? extends V> mappingFunction, boolean fault) throws CacheAccessException {
    checkKey(key);
    int hash = hash(key.hashCode());
    final long now = timeSource.getTimeMillis();
    BiFunction<K, DiskStorageFactory.Element<K, V>, DiskStorageFactory.Element<K, V>> biFunction = new BiFunction<K, DiskStorageFactory.Element<K, V>, DiskStorageFactory.Element<K, V>>() {
        @Override
        public DiskStorageFactory.Element<K, V> apply(K mappedKey, DiskStorageFactory.Element<K, V> mappedElement) {
            if (mappedElement == null || mappedElement.isExpired(now)) {
                V computedValue = mappingFunction.apply(mappedKey);
                if (computedValue == null) {
                    return null;
                }
                checkValue(computedValue);
                return newCreateValueHolder(key, computedValue, now);
            } else {
                setAccessTimeAndExpiry(key, mappedElement, now);
                return mappedElement;
            }
        }
    };
    DiskStorageFactory.Element<K, V> computedElement = segmentFor(hash).compute(key, hash, biFunction, Segment.Compute.IF_ABSENT, false, fault);
    return enforceCapacityIfValueNotNull(computedElement);
}
@Override
// Two-arg computeIfPresent: delegates with the default "always replace equal values" policy.
public ValueHolder<V> computeIfPresent(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) throws CacheAccessException {
    return computeIfPresent(key, remappingFunction, REPLACE_EQUALS_TRUE);
}
@Override
/**
 * Remaps an existing live mapping. Expired entries are removed untouched; a null
 * remapped value removes the mapping; an unchanged value (per replaceEqual) only
 * refreshes access metadata. No capacity enforcement: this path never grows the store.
 */
public ValueHolder<V> computeIfPresent(final K key, final BiFunction<? super K, ? super V, ? extends V> remappingFunction, final NullaryFunction<Boolean> replaceEqual) throws CacheAccessException {
    checkKey(key);
    int hash = hash(key.hashCode());
    BiFunction<K, DiskStorageFactory.Element<K, V>, DiskStorageFactory.Element<K, V>> biFunction = new BiFunction<K, DiskStorageFactory.Element<K, V>, DiskStorageFactory.Element<K, V>>() {
        @Override
        public DiskStorageFactory.Element<K, V> apply(K mappedKey, DiskStorageFactory.Element<K, V> mappedElement) {
            final long now = timeSource.getTimeMillis();
            if (mappedElement != null && mappedElement.isExpired(now)) {
                return null;
            }
            // NOTE(review): below this point mappedElement is assumed non-null (IF_PRESENT
            // only fires for existing mappings) — setAccessTimeAndExpiry would NPE otherwise.
            V existingValue = mappedElement == null ? null : mappedElement.getValueHolder().value();
            V computedValue = remappingFunction.apply(mappedKey, existingValue);
            if (computedValue == null) {
                return null;
            }
            if ((eq(existingValue, computedValue)) && (!replaceEqual.apply())) {
                setAccessTimeAndExpiry(key, mappedElement, now);
                return mappedElement;
            }
            checkValue(computedValue);
            return newUpdateValueHolder(key, mappedElement, computedValue, now);
        }
    };
    DiskStorageFactory.Element<K, V> computedElement = segmentFor(hash).compute(key, hash, biFunction, Segment.Compute.IF_PRESENT, false, false);
    return computedElement == null ? null : computedElement.getValueHolder();
}
@Override
// Two-arg bulkCompute: delegates with the default "always replace equal values" policy.
public Map<K, ValueHolder<V>> bulkCompute(Set<? extends K> keys, Function<Iterable<? extends Map.Entry<? extends K, ? extends V>>, Iterable<? extends Map.Entry<? extends K, ? extends V>>> remappingFunction) throws CacheAccessException {
    return bulkCompute(keys, remappingFunction, REPLACE_EQUALS_TRUE);
}
@Override
/**
 * Bulk compute implemented as a key-by-key loop: each key is wrapped in a
 * singleton entry, passed through the caller's remapping function, and the first
 * entry of the result is applied via the single-key compute(). Not atomic across
 * keys — each key is computed independently.
 */
public Map<K, ValueHolder<V>> bulkCompute(Set<? extends K> keys, final Function<Iterable<? extends Map.Entry<? extends K, ? extends V>>, Iterable<? extends Map.Entry<? extends K, ? extends V>>> remappingFunction, NullaryFunction<Boolean> replaceEqual) throws CacheAccessException {
    Map<K, ValueHolder<V>> result = new HashMap<K, ValueHolder<V>>();
    for (K key : keys) {
        checkKey(key);
        BiFunction<K, V, V> biFunction = new BiFunction<K, V, V>() {
            @Override
            public V apply(final K k, final V v) {
                // Immutable view of the (key, current value) pair handed to the bulk function.
                Map.Entry<K, V> entry = new Map.Entry<K, V>() {
                    @Override
                    public K getKey() {
                        return k;
                    }
                    @Override
                    public V getValue() {
                        return v;
                    }
                    @Override
                    public V setValue(V value) {
                        throw new UnsupportedOperationException();
                    }
                };
                // NOTE(review): iterator.next() throws NoSuchElementException if the
                // remapping function returns an empty iterable — assumed to always yield
                // one entry per input entry; confirm against the Store SPI contract.
                java.util.Iterator<? extends Map.Entry<? extends K, ? extends V>> iterator = remappingFunction.apply(Collections.singleton(entry)).iterator();
                Map.Entry<? extends K, ? extends V> result = iterator.next();
                if (result != null) {
                    checkKey(result.getKey());
                    return result.getValue();
                } else {
                    return null;
                }
            }
        };
        ValueHolder<V> computed = compute(key, biFunction, replaceEqual);
        result.put(key, computed);
    }
    return result;
}
@Override
/**
 * Bulk computeIfAbsent implemented key-by-key: each absent key is passed (as a
 * singleton) through the caller's mapping function and the first resulting entry
 * is applied via the single-key computeIfAbsent(). Not atomic across keys.
 */
public Map<K, ValueHolder<V>> bulkComputeIfAbsent(Set<? extends K> keys, final Function<Iterable<? extends K>, Iterable<? extends Map.Entry<? extends K, ? extends V>>> mappingFunction) throws CacheAccessException {
    Map<K, ValueHolder<V>> result = new HashMap<K, ValueHolder<V>>();
    for (K key : keys) {
        checkKey(key);
        Function<K, V> function = new Function<K, V>() {
            @Override
            public V apply(K k) {
                // NOTE(review): assumes the mapping function yields exactly one entry per key;
                // an empty iterable would throw NoSuchElementException here — confirm.
                java.util.Iterator<? extends Map.Entry<? extends K, ? extends V>> iterator = mappingFunction.apply(Collections.singleton(k)).iterator();
                Map.Entry<? extends K, ? extends V> result = iterator.next();
                if (result != null) {
                    checkKey(result.getKey());
                    return result.getValue();
                } else {
                    return null;
                }
            }
        };
        ValueHolder<V> computed = computeIfAbsent(key, function);
        result.put(key, computed);
    }
    return result;
}
/**
 * Exposes this store's configuration-change listener so cache-level configuration
 * updates can be propagated to it. A fresh mutable list is returned on every call.
 */
@Override
public List<CacheConfigurationChangeListener> getConfigurationChangeListeners() {
    List<CacheConfigurationChangeListener> listeners = new ArrayList<CacheConfigurationChangeListener>(1);
    listeners.add(configurationListener);
    return listeners;
}
// Blocks until the storage factory has flushed pending writes to disk, then
// trims the on-disk data back to the configured size.
public void flushToDisk() throws ExecutionException, InterruptedException {
    diskStorageFactory.flush().get();
    diskStorageFactory.evictToSize();
}
// Atomically swaps a placeholder for its written disk marker in the owning segment.
boolean fault(K key, DiskStorageFactory.Placeholder<K, V> expect, DiskStorageFactory.DiskMarker<K, V> fault) {
    int hash = hash(key.hashCode());
    return segmentFor(hash).fault(key, hash, expect, fault, false);
}
/**
 * Looks up the on-disk substitute for {@code key} without retrieving (faulting)
 * the actual value.
 *
 * @param key the key to look up; {@code null} yields {@code null}
 * @return the disk substitute currently mapped to the key, or {@code null} when absent
 */
DiskStorageFactory.DiskSubstitute<K, V> unretrievedGet(K key) {
    if (key == null) {
        return null;
    }
    int hash = hash(key.hashCode());
    // Return directly — the intermediate local in the previous version served no purpose.
    return segmentFor(hash).unretrievedGet(key, hash);
}
// Raw iterator over all disk substitutes, without decoding values or filtering expiry.
java.util.Iterator<DiskStorageFactory.DiskSubstitute<K, V>> diskSubstituteIterator() {
    return new DiskSubstituteIterator();
}
// Installs an already-encoded disk marker for the key, only if no mapping exists yet.
boolean putRawIfAbsent(K key, DiskStorageFactory.DiskMarker<K, V> encoded) {
    int hash = hash(key.hashCode());
    return segmentFor(hash).putRawIfAbsent(key, hash, encoded);
}
/**
 * Select a random sample of elements generated by the supplied factory.
 *
 * @param factory generator of the given type
 * @param sampleSize minimum number of elements to return
 * @param keyHint a key on which we are currently working, or {@code null} for a random start
 * @return list of sampled elements/element substitute
 */
public List<DiskStorageFactory.DiskSubstitute<K, V>> getRandomSample(ElementSubstituteFilter factory, int sampleSize, Object keyHint) {
    ArrayList<DiskStorageFactory.DiskSubstitute<K, V>> sampled = new ArrayList<DiskStorageFactory.DiskSubstitute<K, V>>(sampleSize);
    // pick a random starting point in the map
    int randomHash = random.nextInt();
    final int segmentStart;
    if (keyHint == null) {
        segmentStart = (randomHash >>> segmentShift);
    } else {
        // Start in the segment that owns the hinted key.
        segmentStart = (hash(keyHint.hashCode()) >>> segmentShift);
    }
    // Walk segments circularly from the start point until enough samples are gathered
    // or every segment has been visited once.
    int segmentIndex = segmentStart;
    do {
        segments[segmentIndex].addRandomSample(factory, sampleSize, sampled, randomHash);
        if (sampled.size() >= sampleSize) {
            break;
        }
        // move to next segment (segments.length is a power of two, so mask wraps around)
        segmentIndex = (segmentIndex + 1) & (segments.length - 1);
    } while (segmentIndex != segmentStart);
    return sampled;
}
/**
 * Base iterator over all hash entries of the store. Walks the segments array from
 * the last index down to 0, chaining each segment's own hash iterator. Subclasses
 * call {@link #nextEntry()} and adapt the entry to their element type.
 */
abstract class HashIterator {
    // Index of the NEXT segment to descend into (counts down to 0).
    private int segmentIndex;
    // Iterator over the segment currently being traversed; null only when the
    // store had zero segments at construction time.
    private java.util.Iterator<HashEntry<K, V>> currentIterator;
    /**
     * Constructs a new HashIterator, positioning it on the first (highest-index)
     * segment that has at least one entry.
     */
    HashIterator() {
        segmentIndex = segments.length;
        while (segmentIndex > 0) {
            segmentIndex--;
            currentIterator = segments[segmentIndex].hashIterator();
            if (currentIterator.hasNext()) {
                return;
            }
        }
    }
    /**
     * {@inheritDoc}
     */
    public boolean hasNext() {
        if (this.currentIterator == null) {
            return false;
        }
        if (this.currentIterator.hasNext()) {
            return true;
        } else {
            // Current segment exhausted: advance through remaining segments.
            while (segmentIndex > 0) {
                segmentIndex--;
                currentIterator = segments[segmentIndex].hashIterator();
                if (currentIterator.hasNext()) {
                    return true;
                }
            }
        }
        return false;
    }
    /**
     * Returns the next hash-entry - called by subclasses
     *
     * @return next HashEntry, or {@code null} when the iteration is exhausted
     */
    protected HashEntry<K, V> nextEntry() {
        if (currentIterator == null) {
            return null;
        }
        if (currentIterator.hasNext()) {
            return currentIterator.next();
        } else {
            while (segmentIndex > 0) {
                segmentIndex--;
                currentIterator = segments[segmentIndex].hashIterator();
                if (currentIterator.hasNext()) {
                    return currentIterator.next();
                }
            }
        }
        return null;
    }
    /**
     * {@inheritDoc}
     */
    public void remove() {
        // NOTE(review): NPEs when called before any iteration on an empty store
        // (currentIterator may be null) — confirm callers never do that.
        currentIterator.remove();
    }
}
/**
 * Iterator over raw disk substitutes, adapting {@link HashIterator}'s entries.
 */
class DiskSubstituteIterator extends HashIterator implements java.util.Iterator<DiskStorageFactory.DiskSubstitute<K, V>> {
    /**
     * {@inheritDoc}
     *
     * @throws NoSuchElementException when the iteration is exhausted, per the
     *         {@link java.util.Iterator} contract (the previous implementation
     *         dereferenced a null entry and threw NullPointerException instead)
     */
    public DiskStorageFactory.DiskSubstitute<K, V> next() {
        HashEntry<K, V> entry = nextEntry();
        if (entry == null) {
            throw new NoSuchElementException();
        }
        return entry.element;
    }
}
/**
 * Service-provider for {@link DiskStore} instances, serving both as a plain
 * {@code Store.Provider} and as an {@code AuthoritativeTier.Provider} (the
 * authoritative-tier methods simply delegate to the store ones). Tracks every
 * store it created in a weak-identity set so release/init can reject foreign stores.
 */
@SupplementaryService
public static class Provider implements Store.Provider, AuthoritativeTier.Provider {
    private volatile ServiceProvider serviceProvider;
    // Weak-identity set: created stores are tracked without preventing their GC.
    private final Set<Store<?, ?>> createdStores = Collections.newSetFromMap(new ConcurrentWeakIdentityHashMap<Store<?, ?>, Boolean>());
    @Override
    public <K, V> DiskStore<K, V> createStore(final Configuration<K, V> storeConfig, final ServiceConfiguration<?>... serviceConfigs) {
        if (serviceProvider == null) {
            throw new RuntimeException("ServiceProvider is null.");
        }
        // Optional time-source override; defaults to the system clock.
        TimeSourceConfiguration timeSourceConfig = findSingletonAmongst(TimeSourceConfiguration.class, (Object[]) serviceConfigs);
        TimeSource timeSource = timeSourceConfig != null ? timeSourceConfig.getTimeSource() : SystemTimeSource.INSTANCE;
        SerializationProvider serializationProvider = serviceProvider.findService(SerializationProvider.class);
        Serializer<Element> elementSerializer = serializationProvider.createKeySerializer(Element.class, storeConfig.getClassLoader(), serviceConfigs);
        Serializer<Serializable> objectSerializer = serializationProvider.createValueSerializer(Serializable.class, storeConfig.getClassLoader(), serviceConfigs);
        PersistentStoreConfiguration persistentStoreConfiguration;
        if (!(storeConfig instanceof PersistentStoreConfiguration<?, ?, ?>)) {
            throw new IllegalArgumentException("Store.Configuration for DiskStore should implement Store.PersistentStoreConfiguration");
        }
        persistentStoreConfiguration = (PersistentStoreConfiguration) storeConfig;
        LocalPersistenceService localPersistenceService = serviceProvider.findService(LocalPersistenceService.class);
        try {
            FileBasedPersistenceContext persistenceContext = localPersistenceService.createPersistenceContext(persistentStoreConfiguration.getIdentifier(),
                    persistentStoreConfiguration);
            DiskStore<K, V> diskStore = new DiskStore<K, V>(storeConfig, persistenceContext, timeSource, elementSerializer, objectSerializer);
            createdStores.add(diskStore);
            return diskStore;
        } catch (CachePersistenceException e) {
            throw new RuntimeException("Unable to create persistence context for " + persistentStoreConfiguration.getIdentifier(), e);
        }
    }
    @Override
    public void releaseStore(final Store<?, ?> resource) {
        if (!createdStores.contains(resource)) {
            throw new IllegalArgumentException("Given store is not managed by this provider : " + resource);
        }
        close((DiskStore)resource);
    }
    // Unbinds the storage factory and nulls out the store's state; idempotent
    // (a second call just logs a warning).
    static void close(final DiskStore resource) {
        if (resource.diskStorageFactory == null) {
            LOG.warn("disk store already closed");
            return;
        }
        resource.diskStorageFactory.unbind();
        resource.diskStorageFactory = null;
        resource.segments = null;
    }
    @Override
    public void initStore(Store<?, ?> resource) {
        if (!createdStores.contains(resource)) {
            throw new IllegalArgumentException("Given store is not managed by this provider : " + resource);
        }
        init((DiskStore)resource);
    }
    // Builds the storage factory and segment array for a freshly created store.
    static void init(final DiskStore resource) {
        try {
            resource.diskStorageFactory = new DiskStorageFactory<Object, Object>(resource.capacity, resource.evictionVeto, resource.evictionPrioritizer,
                    resource.timeSource, resource.elementSerializer, resource.indexSerializer, resource.persistenceContext,
                    DEFAULT_SEGMENT_COUNT, DEFAULT_QUEUE_CAPACITY, DEFAULT_EXPIRY_THREAD_INTERVAL);
        } catch (FileNotFoundException fnfe) {
            throw new IllegalStateException(fnfe);
        }
        resource.segments = new Segment[DEFAULT_SEGMENT_COUNT];
        for (int i = 0; i < resource.segments.length; i++) {
            resource.segments[i] = new Segment<Object, Object>(resource.diskStorageFactory, resource.timeSource, resource);
        }
        // With a power-of-two segment count, this shift maps a hash's top bits to a segment index.
        resource.segmentShift = Integer.numberOfLeadingZeros(resource.segments.length - 1);
        resource.diskStorageFactory.bind(resource);
    }
    @Override
    public void start(final ServiceConfiguration<?> config, final ServiceProvider serviceProvider) {
        this.serviceProvider = serviceProvider;
    }
    @Override
    public void stop() {
        this.serviceProvider = null;
        createdStores.clear();
    }
    @Override
    public <K, V> AuthoritativeTier<K, V> createAuthoritativeTier(Configuration<K, V> storeConfig, ServiceConfiguration<?>... serviceConfigs) {
        return createStore(storeConfig, serviceConfigs);
    }
    @Override
    public void releaseAuthoritativeTier(AuthoritativeTier<?, ?> resource) {
        releaseStore(resource);
    }
    @Override
    public void initAuthoritativeTier(AuthoritativeTier<?, ?> resource) {
        initStore(resource);
    }
}
}
| |
/*******************************************************************************
* Copyright (c) 2006, 2013 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.internal.junit.buildpath;
import org.eclipse.jdt.junit.JUnitCore;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Text;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IWorkspaceRoot;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.jface.layout.PixelConverter;
import org.eclipse.jdt.core.IClasspathContainer;
import org.eclipse.jdt.core.IClasspathEntry;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.jdt.internal.junit.BasicElementLabels;
import org.eclipse.jdt.internal.junit.ui.JUnitMessages;
import org.eclipse.jdt.internal.junit.util.ExceptionHandler;
import org.eclipse.jdt.internal.junit.util.JUnitStatus;
import org.eclipse.jdt.internal.junit.util.JUnitStubUtility;
import org.eclipse.jdt.ui.JavaElementLabels;
import org.eclipse.jdt.ui.wizards.IClasspathContainerPage;
import org.eclipse.jdt.ui.wizards.IClasspathContainerPageExtension;
import org.eclipse.jdt.ui.wizards.NewElementWizardPage;
import org.eclipse.jdt.internal.ui.JavaPluginImages;
import org.eclipse.jdt.internal.ui.util.SWTUtil;
/**
 * Classpath-container wizard page letting the user pick a JUnit version
 * (JUnit 3 or JUnit 4) and previewing the resolved library and source paths.
 * Used both when adding the JUnit container to a project and when editing an
 * existing container entry.
 */
public class JUnitContainerWizardPage extends NewElementWizardPage implements IClasspathContainerPage, IClasspathContainerPageExtension {
    // Project being configured; may stay null when no project context was given.
    private IJavaProject fProject;
    // The container entry this page produces; defaults to the JUnit 3 container.
    private IClasspathEntry fContainerEntryResult;
    private Combo fVersionCombo;
    private Text fResolvedPath;
    private Text fResolvedSourcePath;
    public JUnitContainerWizardPage() {
        super("JUnitContainerPage"); //$NON-NLS-1$
        setTitle(JUnitMessages.JUnitContainerWizardPage_wizard_title);
        setDescription(JUnitMessages.JUnitContainerWizardPage_wizard_description);
        setImageDescriptor(JavaPluginImages.DESC_WIZBAN_ADD_LIBRARY);
        fContainerEntryResult= JavaCore.newContainerEntry(JUnitCore.JUNIT3_CONTAINER_PATH);
    }
    /**
     * Returns a handle to a Java project that is guaranteed not to exist in the
     * workspace, for use as a placeholder when a container must be resolved
     * without a real project context. Appends '1' until the name is unused.
     */
    public static IJavaProject getPlaceholderProject() {
        String name= "####internal"; //$NON-NLS-1$
        IWorkspaceRoot root= ResourcesPlugin.getWorkspace().getRoot();
        while (true) {
            IProject project= root.getProject(name);
            if (!project.exists()) {
                return JavaCore.create(project);
            }
            name += '1';
        }
    }
    // Applies the chosen container: registering it (with a null container and the
    // placeholder project) triggers the container initializer to resolve it.
    public boolean finish() {
        try {
            IJavaProject[] javaProjects= new IJavaProject[] { getPlaceholderProject() };
            IClasspathContainer[] containers= { null };
            JavaCore.setClasspathContainer(fContainerEntryResult.getPath(), javaProjects, containers, null);
        } catch (JavaModelException e) {
            ExceptionHandler.handle(e, getShell(), JUnitMessages.JUnitContainerWizardPage_error_title, JUnitMessages.JUnitContainerWizardPage_error_problem_configuring_container);
            return false;
        }
        return true;
    }
    public IClasspathEntry getSelection() {
        return fContainerEntryResult;
    }
    public void setSelection(IClasspathEntry containerEntry) {
        fContainerEntryResult= containerEntry;
    }
    // Builds the page UI: version combo plus read-only previews of the resolved
    // library path and its source attachment.
    public void createControl(Composite parent) {
        PixelConverter converter= new PixelConverter(parent);
        Composite composite= new Composite(parent, SWT.NONE);
        composite.setFont(parent.getFont());
        composite.setLayout(new GridLayout(2, false));
        Label label= new Label(composite, SWT.NONE);
        label.setFont(composite.getFont());
        label.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, false, false, 1, 1));
        label.setText(JUnitMessages.JUnitContainerWizardPage_combo_label);
        fVersionCombo= new Combo(composite, SWT.READ_ONLY);
        fVersionCombo.setItems(new String[] {
            JUnitMessages.JUnitContainerWizardPage_option_junit3,
            JUnitMessages.JUnitContainerWizardPage_option_junit4
        });
        fVersionCombo.setFont(composite.getFont());
        GridData data= new GridData(GridData.BEGINNING, GridData.CENTER, false, false, 1, 1);
        data.widthHint= converter.convertWidthInCharsToPixels(15);
        fVersionCombo.setLayoutData(data);
        // Pre-select the combo from the incoming entry; anything but JUnit 3 shows JUnit 4.
        if (fContainerEntryResult != null && JUnitCore.JUNIT3_CONTAINER_PATH.equals(fContainerEntryResult.getPath())) {
            fVersionCombo.select(0);
        } else {
            fVersionCombo.select(1);
        }
        fVersionCombo.addModifyListener(new ModifyListener() {
            public void modifyText(ModifyEvent e) {
                doSelectionChanged();
            }
        });
        label= new Label(composite, SWT.NONE);
        label.setFont(composite.getFont());
        label.setText(JUnitMessages.JUnitContainerWizardPage_resolved_label);
        label.setLayoutData(new GridData(GridData.FILL, GridData.BEGINNING, false, false, 1, 1));
        fResolvedPath= new Text(composite, SWT.READ_ONLY | SWT.WRAP);
        SWTUtil.fixReadonlyTextBackground(fResolvedPath);
        data= new GridData(GridData.FILL, GridData.FILL, true, false, 1, 1);
        data.widthHint= converter.convertWidthInCharsToPixels(60);
        fResolvedPath.setFont(composite.getFont());
        fResolvedPath.setLayoutData(data);
        label= new Label(composite, SWT.NONE);
        label.setFont(composite.getFont());
        label.setText(JUnitMessages.JUnitContainerWizardPage_source_location_label);
        label.setLayoutData(new GridData(GridData.FILL, GridData.BEGINNING, false, false, 1, 1));
        fResolvedSourcePath= new Text(composite, SWT.READ_ONLY | SWT.WRAP);
        SWTUtil.fixReadonlyTextBackground(fResolvedSourcePath);
        data= new GridData(GridData.FILL, GridData.FILL, true, false, 1, 1);
        data.widthHint= converter.convertWidthInCharsToPixels(60);
        fResolvedSourcePath.setFont(composite.getFont());
        fResolvedSourcePath.setLayoutData(data);
        doSelectionChanged();
        setControl(composite);
    }
    @Override
    public void setVisible(boolean visible) {
        super.setVisible(visible);
        if (visible) {
            fVersionCombo.setFocus();
        }
    }
    // Recomputes the container entry and path previews after a combo change, and
    // reports availability/Java-5 warnings through the page status.
    protected void doSelectionChanged() {
        JUnitStatus status= new JUnitStatus();
        IClasspathEntry libEntry;
        IPath containerPath;
        if (fVersionCombo != null && fVersionCombo.getSelectionIndex() == 1) {
            containerPath= JUnitCore.JUNIT4_CONTAINER_PATH;
            libEntry= BuildPathSupport.getJUnit4LibraryEntry();
        } else {
            containerPath= JUnitCore.JUNIT3_CONTAINER_PATH;
            libEntry= BuildPathSupport.getJUnit3LibraryEntry();
            if (libEntry == null)
                libEntry= BuildPathSupport.getJUnit4as3LibraryEntry(); // JUnit 4 includes most of JUnit 3, so let's cheat
        }
        if (libEntry == null) {
            status.setError(JUnitMessages.JUnitContainerWizardPage_error_version_not_available);
        } else if (JUnitCore.JUNIT4_CONTAINER_PATH.equals(containerPath)) {
            // JUnit 4 needs a Java 5+ project; warn (not error) if the project is older.
            if (fProject != null && !JUnitStubUtility.is50OrHigher(fProject)) {
                status.setWarning(JUnitMessages.JUnitContainerWizardPage_warning_java5_required);
            }
        }
        fContainerEntryResult= JavaCore.newContainerEntry(containerPath);
        if (fResolvedPath != null && !fResolvedPath.isDisposed()) {
            if (libEntry != null) {
                fResolvedPath.setText(getPathLabel(libEntry.getPath()));
            } else {
                fResolvedPath.setText(JUnitMessages.JUnitContainerWizardPage_lib_not_found);
            }
        }
        if (fResolvedSourcePath != null && !fResolvedSourcePath.isDisposed()) {
            if (libEntry != null && libEntry.getSourceAttachmentPath() != null) {
                fResolvedSourcePath.setText(getPathLabel(libEntry.getSourceAttachmentPath()));
            } else {
                fResolvedSourcePath.setText(JUnitMessages.JUnitContainerWizardPage_source_not_found);
            }
        }
        updateStatus(status);
    }
    // Formats a path as "filename - parent/dir" for display in the previews.
    private String getPathLabel(IPath path) {
        StringBuffer buf= new StringBuffer(BasicElementLabels.getResourceName(path.lastSegment()));
        buf.append(JavaElementLabels.CONCAT_STRING);
        buf.append(BasicElementLabels.getPathLabel(path.removeLastSegments(1), true));
        return buf.toString();
    }
    /* (non-Javadoc)
     * @see org.eclipse.jdt.ui.wizards.IClasspathContainerPageExtension#initialize(org.eclipse.jdt.core.IJavaProject, org.eclipse.jdt.core.IClasspathEntry[])
     */
    public void initialize(IJavaProject project, IClasspathEntry[] currentEntries) {
        fProject= project;
    }
}
| |
/**
* Copyright (C) 2011-2020 Red Hat, Inc. (https://github.com/Commonjava/indy)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.commonjava.indy.subsys.template.fixture;
import org.commonjava.indy.IndyWorkflowException;
import org.commonjava.indy.content.DownloadManager;
import org.commonjava.indy.content.StoreResource;
import org.commonjava.indy.model.core.ArtifactStore;
import org.commonjava.indy.model.core.StoreKey;
import org.commonjava.maven.galley.event.EventMetadata;
import org.commonjava.maven.galley.model.Transfer;
import org.commonjava.maven.galley.model.TransferOperation;
import javax.enterprise.inject.Alternative;
import java.io.InputStream;
import java.util.List;
/**
* Created by gli on 1/5/17.
*/
@Alternative
public class MockDownloadManager implements DownloadManager
{
@Override
public Transfer retrieveFirst( List<? extends ArtifactStore> stores, String path )
throws IndyWorkflowException
{
return null;
}
@Override
public Transfer retrieveFirst( List<? extends ArtifactStore> stores, String path, EventMetadata eventMetadata )
throws IndyWorkflowException
{
return null;
}
@Override
public List<Transfer> retrieveAll( List<? extends ArtifactStore> stores, String path )
throws IndyWorkflowException
{
return null;
}
@Override
public List<Transfer> retrieveAll( List<? extends ArtifactStore> stores, String path, EventMetadata eventMetadata )
throws IndyWorkflowException
{
return null;
}
@Override
public Transfer retrieve( ArtifactStore store, String path )
throws IndyWorkflowException
{
return null;
}
@Override
public Transfer retrieve( ArtifactStore store, String path, EventMetadata eventMetadata )
throws IndyWorkflowException
{
return null;
}
@Override
public Transfer store( ArtifactStore store, String path, InputStream stream, TransferOperation op )
throws IndyWorkflowException
{
return null;
}
@Override
public Transfer store( ArtifactStore store, String path, InputStream stream, TransferOperation op,
EventMetadata eventMetadata )
throws IndyWorkflowException
{
return null;
}
@Override
public Transfer store( List<? extends ArtifactStore> stores, String path, InputStream stream, TransferOperation op )
throws IndyWorkflowException
{
return null;
}
@Override
public Transfer store( List<? extends ArtifactStore> stores, String path, InputStream stream, TransferOperation op,
EventMetadata eventMetadata )
throws IndyWorkflowException
{
return null;
}
@Override
public boolean delete( ArtifactStore store, String path )
throws IndyWorkflowException
{
return false;
}
@Override
public boolean delete( ArtifactStore store, String path, EventMetadata eventMetadata )
throws IndyWorkflowException
{
return false;
}
@Override
public boolean deleteAll( List<? extends ArtifactStore> stores, String path )
throws IndyWorkflowException
{
return false;
}
// Test stub: rescan is a no-op.
@Override
public void rescan( ArtifactStore store )
        throws IndyWorkflowException
{
}
// Test stub: rescan with metadata is a no-op.
@Override
public void rescan( ArtifactStore store, EventMetadata eventMetadata )
        throws IndyWorkflowException
{
}
// Test stub: multi-store rescan is a no-op.
@Override
public void rescanAll( List<? extends ArtifactStore> stores )
        throws IndyWorkflowException
{
}
// Test stub: multi-store rescan with metadata is a no-op.
@Override
public void rescanAll( List<? extends ArtifactStore> stores, EventMetadata eventMetadata )
        throws IndyWorkflowException
{
}
// Test stub: listing is not supported; always returns null.
@Override
public List<StoreResource> list( ArtifactStore store, String path )
        throws IndyWorkflowException
{
    return null;
}
// Test stub: ignores the event metadata; always returns null.
@Override
public List<StoreResource> list( ArtifactStore store, String path, EventMetadata eventMetadata )
        throws IndyWorkflowException
{
    return null;
}
// Test stub: multi-store listing is not supported; always returns null.
@Override
public List<StoreResource> list( List<? extends ArtifactStore> stores, String path )
        throws IndyWorkflowException
{
    return null;
}
// Test stub: multi-store listing with metadata; always returns null.
@Override
public List<StoreResource> list( List<? extends ArtifactStore> stores, String path, EventMetadata eventMetadata )
        throws IndyWorkflowException
{
    return null;
}
// Test stub: no storage root is resolved for the key; always returns null.
@Override
public Transfer getStoreRootDirectory( StoreKey key )
        throws IndyWorkflowException
{
    return null;
}
// Test stub: no storage root is resolved for the store; always returns null.
@Override
public Transfer getStoreRootDirectory( ArtifactStore store )
{
    return null;
}
// Test stub: no storage reference is ever resolved; always returns null.
@Override
public Transfer getStorageReference( StoreKey key, String... path )
        throws IndyWorkflowException
{
    return null;
}
// Test stub: no storage reference is ever resolved; always returns null.
@Override
public Transfer getStorageReference( ArtifactStore store, String... path )
{
    return null;
}
// Test stub: recursive listing is not supported; always returns null.
@Override
public List<Transfer> listRecursively( StoreKey src, String startPath )
        throws IndyWorkflowException
{
    return null;
}
// Test stub: multi-store resolution with a transfer operation; always returns null.
@Override
public Transfer getStorageReference( List<ArtifactStore> stores, String path, TransferOperation op )
        throws IndyWorkflowException
{
    return null;
}
// Test stub: single-store resolution with a transfer operation; always returns null.
@Override
public Transfer getStorageReference( ArtifactStore store, String path, TransferOperation op )
        throws IndyWorkflowException
{
    return null;
}
// Test stub: content is never reported as present; always returns false.
@Override
public boolean exists( ArtifactStore store, String path )
        throws IndyWorkflowException
{
    return false;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.yarn.server.federation.policies;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.server.federation.policies.exceptions.FederationPolicyException;
import org.apache.hadoop.yarn.server.federation.policies.exceptions.FederationPolicyInitializationException;
import org.apache.hadoop.yarn.server.federation.policies.manager.FederationPolicyManager;
import org.apache.hadoop.yarn.server.federation.policies.router.FederationRouterPolicy;
import org.apache.hadoop.yarn.server.federation.resolver.SubClusterResolver;
import org.apache.hadoop.yarn.server.federation.store.records.SubClusterId;
import org.apache.hadoop.yarn.server.federation.store.records.SubClusterPolicyConfiguration;
import org.apache.hadoop.yarn.server.federation.utils.FederationStateStoreFacade;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
/**
* This class provides a facade to the policy subsystem, and handles the
* lifecycle of policies (e.g., refresh from remote, default behaviors etc.).
*/
public class RouterPolicyFacade {

    private static final Logger LOG =
        LoggerFactory.getLogger(RouterPolicyFacade.class);

    // Resolver passed into every policy initialization context.
    private final SubClusterResolver subClusterResolver;
    // Facade over the federation state store; source of per-queue policy configurations.
    private final FederationStateStoreFacade federationFacade;
    // Cache of the last-seen policy configuration per queue. Replaced wholesale
    // by reset(); readers take a local reference to guard against mid-call swaps.
    private Map<String, SubClusterPolicyConfiguration> globalConfMap;
    // Cache of the instantiated router policy per queue; kept in lock-step with
    // globalConfMap (the two maps are updated together under "this" lock).
    @VisibleForTesting
    Map<String, FederationRouterPolicy> globalPolicyMap;

    /**
     * Creates the facade and eagerly loads the fallback (default) policy:
     * first from the state store, and if none is stored there, from the XML
     * configuration. The fallback is cached under
     * {@code YarnConfiguration.DEFAULT_FEDERATION_POLICY_KEY}.
     *
     * @param conf the configuration supplying the XML fallback policy settings.
     * @param facade the facade to the federation state store.
     * @param resolver the resolver mapping nodes/racks to sub-clusters.
     * @param homeSubcluster the identifier of the home sub-cluster.
     *
     * @throws FederationPolicyInitializationException if the fallback policy
     *           manager cannot be instantiated or initialized.
     */
    public RouterPolicyFacade(Configuration conf,
        FederationStateStoreFacade facade, SubClusterResolver resolver,
        SubClusterId homeSubcluster)
        throws FederationPolicyInitializationException {
        this.federationFacade = facade;
        this.subClusterResolver = resolver;
        this.globalConfMap = new ConcurrentHashMap<>();
        this.globalPolicyMap = new ConcurrentHashMap<>();
        // load default behavior from store if possible
        String defaultKey = YarnConfiguration.DEFAULT_FEDERATION_POLICY_KEY;
        SubClusterPolicyConfiguration configuration = null;
        try {
            configuration = federationFacade.getPolicyConfiguration(defaultKey);
        } catch (YarnException e) {
            LOG.warn("No fallback behavior defined in store, defaulting to XML "
                + "configuration fallback behavior.");
        }
        // or from XML conf otherwise.
        if (configuration == null) {
            String defaultFederationPolicyManager =
                conf.get(YarnConfiguration.FEDERATION_POLICY_MANAGER,
                    YarnConfiguration.DEFAULT_FEDERATION_POLICY_MANAGER);
            String defaultPolicyParamString =
                conf.get(YarnConfiguration.FEDERATION_POLICY_MANAGER_PARAMS,
                    YarnConfiguration.DEFAULT_FEDERATION_POLICY_MANAGER_PARAMS);
            ByteBuffer defaultPolicyParam = ByteBuffer
                .wrap(defaultPolicyParamString.getBytes(StandardCharsets.UTF_8));
            configuration = SubClusterPolicyConfiguration.newInstance(defaultKey,
                defaultFederationPolicyManager, defaultPolicyParam);
        }
        // construct the required policy manager
        FederationPolicyInitializationContext fallbackContext =
            new FederationPolicyInitializationContext(configuration,
                subClusterResolver, federationFacade, homeSubcluster);
        FederationPolicyManager fallbackPolicyManager =
            FederationPolicyUtils.instantiatePolicyManager(configuration.getType());
        fallbackPolicyManager.setQueue(defaultKey);
        // add to the cache the fallback behavior
        globalConfMap.put(defaultKey,
            fallbackContext.getSubClusterPolicyConfiguration());
        globalPolicyMap.put(defaultKey,
            fallbackPolicyManager.getRouterPolicy(fallbackContext, null));
    }

    /**
     * This method provides a wrapper of all policy functionalities for routing.
     * Internally it manages configuration changes, and policy init/reinit.
     *
     * @param appSubmissionContext the {@link ApplicationSubmissionContext} that
     *          has to be routed to an appropriate subCluster for execution.
     *
     * @param blackListSubClusters the list of subClusters as identified by
     *          {@link SubClusterId} to blackList from the selection of the home
     *          subCluster.
     *
     * @return the {@link SubClusterId} that will be the "home" for this
     *         application.
     *
     * @throws YarnException if there are issues initializing policies, or no
     *           valid sub-cluster id could be found for this app.
     */
    public SubClusterId getHomeSubcluster(
        ApplicationSubmissionContext appSubmissionContext,
        List<SubClusterId> blackListSubClusters) throws YarnException {
        // the maps are concurrent, but we need to protect from reset()
        // reinitialization mid-execution by creating a new reference local to this
        // method.
        Map<String, SubClusterPolicyConfiguration> cachedConfs = globalConfMap;
        Map<String, FederationRouterPolicy> policyMap = globalPolicyMap;
        if (appSubmissionContext == null) {
            throw new FederationPolicyException(
                "The ApplicationSubmissionContext " + "cannot be null.");
        }
        String queue = appSubmissionContext.getQueue();
        // respecting YARN behavior we assume default queue if the queue is not
        // specified. This also ensures that "null" can be used as a key to get the
        // default behavior.
        if (queue == null) {
            queue = YarnConfiguration.DEFAULT_QUEUE_NAME;
        }
        // the facade might cache this request, based on its parameterization
        SubClusterPolicyConfiguration configuration = null;
        try {
            configuration = federationFacade.getPolicyConfiguration(queue);
        } catch (YarnException e) {
            String errMsg = "There is no policy configured for the queue: " + queue
                + ", falling back to defaults.";
            LOG.warn(errMsg, e);
        }
        // If there is no policy configured for this queue, fallback to the baseline
        // policy that is configured either in the store or via XML config (and
        // cached)
        if (configuration == null) {
            LOG.warn("There is no policies configured for queue: " + queue + " we"
                + " fallback to default policy for: "
                + YarnConfiguration.DEFAULT_FEDERATION_POLICY_KEY);
            queue = YarnConfiguration.DEFAULT_FEDERATION_POLICY_KEY;
            try {
                configuration = federationFacade.getPolicyConfiguration(queue);
            } catch (YarnException e) {
                String errMsg = "Cannot retrieve policy configured for the queue: "
                    + queue + ", falling back to defaults.";
                LOG.warn(errMsg, e);
            }
        }
        // the fallback is not configure via store, but via XML, using
        // previously loaded configuration.
        if (configuration == null) {
            configuration =
                cachedConfs.get(YarnConfiguration.DEFAULT_FEDERATION_POLICY_KEY);
        }
        // if the configuration has changed since last loaded, reinit the policy
        // based on current configuration
        if (!cachedConfs.containsKey(queue)
            || !cachedConfs.get(queue).equals(configuration)) {
            singlePolicyReinit(policyMap, cachedConfs, queue, configuration);
        }
        FederationRouterPolicy policy = policyMap.get(queue);
        if (policy == null) {
            // this should never happen, as the two maps are updated together
            throw new FederationPolicyException("No FederationRouterPolicy found "
                + "for queue: " + appSubmissionContext.getQueue() + " (for "
                + "application: " + appSubmissionContext.getApplicationId() + ") "
                + "and no default specified.");
        }
        return policy.getHomeSubcluster(appSubmissionContext, blackListSubClusters);
    }

    /**
     * This method reinitializes a policy and loads it in the policyMap.
     *
     * @param policyMap the (possibly locally captured) queue-to-policy map to
     *          update.
     * @param cachedConfs the (possibly locally captured) queue-to-configuration
     *          map to update in lock-step with {@code policyMap}.
     * @param queue the queue to initialize a policy for.
     * @param conf the configuration to use for initialization.
     *
     * @throws FederationPolicyInitializationException if initialization fails.
     */
    private void singlePolicyReinit(Map<String, FederationRouterPolicy> policyMap,
        Map<String, SubClusterPolicyConfiguration> cachedConfs, String queue,
        SubClusterPolicyConfiguration conf)
        throws FederationPolicyInitializationException {
        FederationPolicyInitializationContext context =
            new FederationPolicyInitializationContext(conf, subClusterResolver,
                federationFacade, null);
        String newType = context.getSubClusterPolicyConfiguration().getType();
        FederationRouterPolicy routerPolicy = policyMap.get(queue);
        FederationPolicyManager federationPolicyManager =
            FederationPolicyUtils.instantiatePolicyManager(newType);
        // set queue, reinit policy if required (implementation lazily check
        // content of conf), and cache it
        federationPolicyManager.setQueue(queue);
        routerPolicy =
            federationPolicyManager.getRouterPolicy(context, routerPolicy);
        // we need the two put to be atomic (across multiple threads invoking
        // this and reset operations)
        synchronized (this) {
            policyMap.put(queue, routerPolicy);
            cachedConfs.put(queue, conf);
        }
    }

    /**
     * This method flushes all cached configurations and policies. This should be
     * invoked if the facade remains active after a very large churn of queues in
     * the system.
     */
    public synchronized void reset() {
        // remember the fallBack
        SubClusterPolicyConfiguration conf =
            globalConfMap.get(YarnConfiguration.DEFAULT_FEDERATION_POLICY_KEY);
        FederationRouterPolicy policy =
            globalPolicyMap.get(YarnConfiguration.DEFAULT_FEDERATION_POLICY_KEY);
        globalConfMap = new ConcurrentHashMap<>();
        globalPolicyMap = new ConcurrentHashMap<>();
        // add to the cache a fallback with keyword null
        globalConfMap.put(YarnConfiguration.DEFAULT_FEDERATION_POLICY_KEY, conf);
        globalPolicyMap.put(YarnConfiguration.DEFAULT_FEDERATION_POLICY_KEY,
            policy);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package flink.graphs.spargel;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.Validate;
import org.apache.flink.api.common.aggregators.Aggregator;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.operators.DeltaIteration;
import org.apache.flink.api.common.functions.RichCoGroupFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.operators.CoGroupOperator;
import org.apache.flink.api.java.operators.CustomUnaryOperation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.ResultTypeQueryable;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.api.java.typeutils.TypeExtractor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.util.Collector;
import flink.graphs.Edge;
import flink.graphs.Vertex;
/**
* This class represents iterative graph computations, programmed in a vertex-centric perspective.
* It is a special case of <i>Bulk Synchronous Parallel</i> computation. The paradigm has also been
* implemented by Google's <i>Pregel</i> system and by <i>Apache Giraph</i>.
* <p>
* Vertex centric algorithms operate on graphs, which are defined through vertices and edges. The
* algorithms send messages along the edges and update the state of vertices based on
* the old state and the incoming messages. All vertices have an initial state.
* The computation terminates once no vertex updates its state any more.
* Additionally, a maximum number of iterations (supersteps) may be specified.
* <p>
* The computation is here represented by two functions:
* <ul>
* <li>The {@link VertexUpdateFunction} receives incoming messages and may update the state for
* the vertex. If a state is updated, messages are sent from this vertex. Initially, all vertices are
* considered updated.</li>
* <li>The {@link MessagingFunction} takes the new vertex state and sends messages along the outgoing
* edges of the vertex. The outgoing edges may optionally have an associated value, such as a weight.</li>
* </ul>
* <p>
* Vertex-centric graph iterations are instantiated by the
* {@link #withEdges(DataSet, VertexUpdateFunction, MessagingFunction, int)} method, for graphs
* whose edges carry an associated value.
*
* @param <VertexKey> The type of the vertex key (the vertex identifier).
* @param <VertexValue> The type of the vertex value (the state of the vertex).
* @param <Message> The type of the message sent between vertices along the edges.
* @param <EdgeValue> The type of the values that are associated with the edges.
*/
public class VertexCentricIteration<VertexKey extends Comparable<VertexKey> & Serializable, VertexValue extends Serializable,
        Message, EdgeValue extends Serializable>
    implements CustomUnaryOperation<Vertex<VertexKey, VertexValue>, Vertex<VertexKey, VertexValue>>
{
    // User function consuming messages and producing updated vertex state.
    private final VertexUpdateFunction<VertexKey, VertexValue, Message> updateFunction;

    // User function emitting messages along the outgoing edges of updated vertices.
    private final MessagingFunction<VertexKey, VertexValue, Message, EdgeValue> messagingFunction;

    private final DataSet<Edge<VertexKey, EdgeValue>> edgesWithValue;

    // Aggregators to register on the delta iteration (name -> aggregator).
    private final Map<String, Aggregator<?>> aggregators;

    private final int maximumNumberOfIterations;

    // Broadcast sets (name, data set) for the update and messaging functions, respectively.
    private final List<Tuple2<String, DataSet<?>>> bcVarsUpdate = new ArrayList<Tuple2<String,DataSet<?>>>(4);

    private final List<Tuple2<String, DataSet<?>>> bcVarsMessaging = new ArrayList<Tuple2<String,DataSet<?>>>(4);

    // Type information for Message, extracted from the messaging function's type arguments.
    private final TypeInformation<Message> messageType;

    // Input vertices with their initial state; set via setInput(...).
    private DataSet<Vertex<VertexKey, VertexValue>> initialVertices;

    private String name;

    // -1 means "use the default degree of parallelism".
    private int parallelism = -1;

    private boolean unmanagedSolutionSet;

    // ----------------------------------------------------------------------------------

    /**
     * Private constructor; instances are created via {@link #withEdges}.
     * Validates all arguments eagerly so misconfiguration fails at plan
     * construction time rather than at execution time.
     */
    private VertexCentricIteration(VertexUpdateFunction<VertexKey, VertexValue, Message> uf,
            MessagingFunction<VertexKey, VertexValue, Message, EdgeValue> mf,
            DataSet<Edge<VertexKey, EdgeValue>> edgesWithValue,
            int maximumNumberOfIterations, boolean edgeHasValueMarker)
    {
        Validate.notNull(uf);
        Validate.notNull(mf);
        Validate.notNull(edgesWithValue);
        Validate.isTrue(maximumNumberOfIterations > 0, "The maximum number of iterations must be at least one.");
        this.updateFunction = uf;
        this.messagingFunction = mf;
        this.edgesWithValue = edgesWithValue;
        this.maximumNumberOfIterations = maximumNumberOfIterations;
        this.aggregators = new HashMap<String, Aggregator<?>>();
        this.messageType = getMessageType(mf);
    }

    private TypeInformation<Message> getMessageType(MessagingFunction<VertexKey, VertexValue, Message, EdgeValue> mf) {
        // Message is the third generic parameter (index 2) of MessagingFunction.
        return TypeExtractor.createTypeInfo(MessagingFunction.class, mf.getClass(), 2, null, null);
    }

    /**
     * Registers a new aggregator. Aggregators registered here are available during the execution of the vertex updates
     * via {@link VertexUpdateFunction#getIterationAggregator(String)} and
     * {@link VertexUpdateFunction#getPreviousIterationAggregate(String)}.
     *
     * @param name The name of the aggregator, used to retrieve it and its aggregates during execution.
     * @param aggregator The aggregator.
     */
    public void registerAggregator(String name, Aggregator<?> aggregator) {
        this.aggregators.put(name, aggregator);
    }

    /**
     * Adds a data set as a broadcast set to the messaging function.
     *
     * @param name The name under which the broadcast data is available in the messaging function.
     * @param data The data set to be broadcasted.
     */
    public void addBroadcastSetForMessagingFunction(String name, DataSet<?> data) {
        this.bcVarsMessaging.add(new Tuple2<String, DataSet<?>>(name, data));
    }

    /**
     * Adds a data set as a broadcast set to the vertex update function.
     *
     * @param name The name under which the broadcast data is available in the vertex update function.
     * @param data The data set to be broadcasted.
     */
    public void addBroadcastSetForUpdateFunction(String name, DataSet<?> data) {
        this.bcVarsUpdate.add(new Tuple2<String, DataSet<?>>(name, data));
    }

    /**
     * Sets the name for the vertex-centric iteration. The name is displayed in logs and messages.
     *
     * @param name The name for the iteration.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Gets the name from this vertex-centric iteration.
     *
     * @return The name of the iteration.
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the degree of parallelism for the iteration.
     *
     * @param parallelism The degree of parallelism.
     */
    public void setParallelism(int parallelism) {
        Validate.isTrue(parallelism > 0 || parallelism == -1, "The degree of parallelism must be positive, or -1 (use default).");
        this.parallelism = parallelism;
    }

    /**
     * Gets the iteration's degree of parallelism.
     *
     * @return The iterations parallelism, or -1, if not set.
     */
    public int getParallelism() {
        return parallelism;
    }

    /**
     * Defines whether the solution set is kept in managed memory (Flink's internal way of keeping object
     * in serialized form) or as a simple object map.
     * By default, the solution set runs in managed memory.
     *
     * @param unmanaged True, to keep the solution set in unmanaged memory, false otherwise.
     */
    public void setSolutionSetUnmanagedMemory(boolean unmanaged) {
        this.unmanagedSolutionSet = unmanaged;
    }

    /**
     * Gets whether the solution set is kept in managed memory (Flink's internal way of keeping object
     * in serialized form) or as a simple object map.
     * By default, the solution set runs in managed memory.
     *
     * @return True, if the solution set is in unmanaged memory, false otherwise.
     */
    public boolean isSolutionSetUnmanagedMemory() {
        return this.unmanagedSolutionSet;
    }

    // --------------------------------------------------------------------------------------------
    //  Custom Operator behavior
    // --------------------------------------------------------------------------------------------

    /**
     * Sets the input data set for this operator. In the case of this operator this input data set represents
     * the set of vertices with their initial state.
     *
     * @param inputData The input data set, which in the case of this operator represents the set of
     *                  vertices with their initial state.
     *
     * @see org.apache.flink.api.java.operators.CustomUnaryOperation#setInput(org.apache.flink.api.java.DataSet)
     */
    @Override
    public void setInput(DataSet<Vertex<VertexKey, VertexValue>> inputData) {
        this.initialVertices = inputData;
    }

    /**
     * Creates the operator that represents this vertex-centric graph computation.
     *
     * @return The operator that represents this vertex-centric graph computation.
     */
    @Override
    public DataSet<Vertex<VertexKey, VertexValue>> createResult() {
        if (this.initialVertices == null) {
            throw new IllegalStateException("The input data set has not been set.");
        }
        // prepare some type information
        TypeInformation<Vertex<VertexKey, VertexValue>> vertexTypes = initialVertices.getType();
        // the vertex key is field 0 of the Vertex tuple type
        TypeInformation<VertexKey> keyType = ((TupleTypeInfo<?>) initialVertices.getType()).getTypeAt(0);
        TypeInformation<Tuple2<VertexKey, Message>> messageTypeInfo = new TupleTypeInfo<Tuple2<VertexKey,Message>>(keyType, messageType);
        // set up the iteration operator
        final String name = (this.name != null) ? this.name :
            "Vertex-centric iteration (" + updateFunction + " | " + messagingFunction + ")";
        final int[] zeroKeyPos = new int[] {0};
        final DeltaIteration<Vertex<VertexKey, VertexValue>, Vertex<VertexKey, VertexValue>> iteration =
            this.initialVertices.iterateDelta(this.initialVertices, this.maximumNumberOfIterations, zeroKeyPos);
        iteration.name(name);
        iteration.parallelism(parallelism);
        iteration.setSolutionSetUnManaged(unmanagedSolutionSet);
        // register all aggregators
        for (Map.Entry<String, Aggregator<?>> entry : this.aggregators.entrySet()) {
            iteration.registerAggregator(entry.getKey(), entry.getValue());
        }
        // build the messaging function (co group)
        CoGroupOperator<?, ?, Tuple2<VertexKey, Message>> messages;
        MessagingUdfWithEdgeValues<VertexKey, VertexValue, Message, EdgeValue> messenger = new MessagingUdfWithEdgeValues<VertexKey, VertexValue, Message, EdgeValue>(messagingFunction, messageTypeInfo);
        // co-group edges (by source id) with the workset (changed vertices, by id)
        messages = this.edgesWithValue.coGroup(iteration.getWorkset()).where(0).equalTo(0).with(messenger);
        // configure coGroup message function with name and broadcast variables
        messages = messages.name("Messaging");
        for (Tuple2<String, DataSet<?>> e : this.bcVarsMessaging) {
            messages = messages.withBroadcastSet(e.f1, e.f0);
        }
        VertexUpdateUdf<VertexKey, VertexValue, Message> updateUdf = new VertexUpdateUdf<VertexKey, VertexValue, Message>(updateFunction, vertexTypes);
        // build the update function (co group)
        CoGroupOperator<?, ?, Vertex<VertexKey, VertexValue>> updates =
            messages.coGroup(iteration.getSolutionSet()).where(0).equalTo(0).with(updateUdf);
        // configure coGroup update function with name and broadcast variables
        updates = updates.name("Vertex State Updates");
        for (Tuple2<String, DataSet<?>> e : this.bcVarsUpdate) {
            updates = updates.withBroadcastSet(e.f1, e.f0);
        }
        // let the operator know that we preserve the key field
        updates.withConstantSetFirst("0").withConstantSetSecond("0");
        // updated vertices feed both the next workset and the solution set delta
        return iteration.closeWith(updates, updates);
    }

    /**
     * Creates a new vertex-centric iteration operator for graphs where the edges are associated with a value (such as
     * a weight or distance).
     *
     * @param edgesWithValue The data set containing edges.
     * @param uf The function that updates the state of the vertices from the incoming messages.
     * @param mf The function that turns changed vertex states into messages along the edges.
     * @param maximumNumberOfIterations The maximum number of supersteps to run; must be at least one.
     *
     * @param <VertexKey> The type of the vertex key (the vertex identifier).
     * @param <VertexValue> The type of the vertex value (the state of the vertex).
     * @param <Message> The type of the message sent between vertices along the edges.
     * @param <EdgeValue> The type of the values that are associated with the edges.
     *
     * @return An instance of the vertex-centric graph computation operator.
     */
    public static final <VertexKey extends Comparable<VertexKey> & Serializable, VertexValue extends Serializable,
            Message, EdgeValue extends Serializable>
        VertexCentricIteration<VertexKey, VertexValue, Message, EdgeValue> withEdges(
            DataSet<Edge<VertexKey, EdgeValue>> edgesWithValue,
            VertexUpdateFunction<VertexKey, VertexValue, Message> uf,
            MessagingFunction<VertexKey, VertexValue, Message, EdgeValue> mf,
            int maximumNumberOfIterations)
    {
        return new VertexCentricIteration<VertexKey, VertexValue, Message, EdgeValue>(uf, mf, edgesWithValue, maximumNumberOfIterations, true);
    }

    // --------------------------------------------------------------------------------------------
    //  Wrapping UDFs
    // --------------------------------------------------------------------------------------------

    /*
     * UDF that co-groups incoming messages with the solution-set vertex and invokes the
     * user's vertex update function.
     */
    private static final class VertexUpdateUdf<VertexKey extends Comparable<VertexKey> & Serializable,
            VertexValue extends Serializable, Message>
        extends RichCoGroupFunction<Tuple2<VertexKey, Message>, Vertex<VertexKey, VertexValue>, Vertex<VertexKey, VertexValue>>
        implements ResultTypeQueryable<Vertex<VertexKey, VertexValue>>
    {
        private static final long serialVersionUID = 1L;

        private final VertexUpdateFunction<VertexKey, VertexValue, Message> vertexUpdateFunction;

        // Reused across invocations to avoid per-record allocation.
        private final MessageIterator<Message> messageIter = new MessageIterator<Message>();

        private transient TypeInformation<Vertex<VertexKey, VertexValue>> resultType;

        private VertexUpdateUdf(VertexUpdateFunction<VertexKey, VertexValue, Message> vertexUpdateFunction,
                TypeInformation<Vertex<VertexKey, VertexValue>> resultType)
        {
            this.vertexUpdateFunction = vertexUpdateFunction;
            this.resultType = resultType;
        }

        @Override
        public void coGroup(Iterable<Tuple2<VertexKey, Message>> messages, Iterable<Vertex<VertexKey, VertexValue>> vertex,
                Collector<Vertex<VertexKey, VertexValue>> out)
            throws Exception
        {
            final Iterator<Vertex<VertexKey, VertexValue>> vertexIter = vertex.iterator();
            if (vertexIter.hasNext()) {
                Vertex<VertexKey, VertexValue> vertexState = vertexIter.next();
                @SuppressWarnings("unchecked")
                Iterator<Tuple2<?, Message>> downcastIter = (Iterator<Tuple2<?, Message>>) (Iterator<?>) messages.iterator();
                messageIter.setSource(downcastIter);
                vertexUpdateFunction.setOutput(vertexState, out);
                vertexUpdateFunction.updateVertex(vertexState.f0, vertexState.f1, messageIter);
            }
            else {
                // No vertex for this key: a message targeted a non-existent vertex.
                final Iterator<Tuple2<VertexKey, Message>> messageIter = messages.iterator();
                if (messageIter.hasNext()) {
                    String message = "Target vertex does not exist!.";
                    try {
                        Tuple2<VertexKey, Message> next = messageIter.next();
                        message = "Target vertex '" + next.f0 + "' does not exist!.";
                    } catch (Throwable t) {}
                    throw new Exception(message);
                } else {
                    // NOTE(review): thrown without a message — consider adding one
                    // describing the empty co-group; confirm this branch is reachable.
                    throw new Exception();
                }
            }
        }

        @Override
        public void open(Configuration parameters) throws Exception {
            // Initialize the user function exactly once, on the first superstep.
            if (getIterationRuntimeContext().getSuperstepNumber() == 1) {
                this.vertexUpdateFunction.init(getIterationRuntimeContext());
            }
            this.vertexUpdateFunction.preSuperstep();
        }

        @Override
        public void close() throws Exception {
            this.vertexUpdateFunction.postSuperstep();
        }

        @Override
        public TypeInformation<Vertex<VertexKey, VertexValue>> getProducedType() {
            return this.resultType;
        }
    }

    /*
     * UDF that encapsulates the message sending function for graphs where the edges have an associated value.
     */
    private static final class MessagingUdfWithEdgeValues<VertexKey extends Comparable<VertexKey> & Serializable,
            VertexValue extends Serializable, Message, EdgeValue extends Serializable>
        extends RichCoGroupFunction<Edge<VertexKey, EdgeValue>, Vertex<VertexKey, VertexValue>, Tuple2<VertexKey, Message>>
        implements ResultTypeQueryable<Tuple2<VertexKey, Message>>
    {
        private static final long serialVersionUID = 1L;

        private final MessagingFunction<VertexKey, VertexValue, Message, EdgeValue> messagingFunction;

        private transient TypeInformation<Tuple2<VertexKey, Message>> resultType;

        private MessagingUdfWithEdgeValues(MessagingFunction<VertexKey, VertexValue, Message, EdgeValue> messagingFunction,
                TypeInformation<Tuple2<VertexKey, Message>> resultType)
        {
            this.messagingFunction = messagingFunction;
            this.resultType = resultType;
        }

        @Override
        public void coGroup(Iterable<Edge<VertexKey, EdgeValue>> edges,
                Iterable<Vertex<VertexKey, VertexValue>> state, Collector<Tuple2<VertexKey, Message>> out)
            throws Exception
        {
            final Iterator<Vertex<VertexKey, VertexValue>> stateIter = state.iterator();
            // Only vertices present in the workset (i.e., updated last superstep) send messages.
            if (stateIter.hasNext()) {
                Vertex<VertexKey, VertexValue> newVertexState = stateIter.next();
                messagingFunction.set((Iterator<?>) edges.iterator(), out);
                messagingFunction.sendMessages(newVertexState.f0, newVertexState.f1);
            }
        }

        @Override
        public void open(Configuration parameters) throws Exception {
            // Initialize the user function exactly once, on the first superstep.
            if (getIterationRuntimeContext().getSuperstepNumber() == 1) {
                this.messagingFunction.init(getIterationRuntimeContext());
            }
            this.messagingFunction.preSuperstep();
        }

        @Override
        public void close() throws Exception {
            this.messagingFunction.postSuperstep();
        }

        @Override
        public TypeInformation<Tuple2<VertexKey, Message>> getProducedType() {
            return this.resultType;
        }
    }
}
| |
/*
Copyright 2012 Selenium committers
Copyright 2012 Software Freedom Conservancy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium.remote;
import com.google.common.base.Function;
import com.google.common.base.Predicates;
import com.google.common.base.Throwables;
import com.google.common.collect.Iterables;
import org.openqa.selenium.UnhandledAlertException;
import org.openqa.selenium.WebDriverException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.List;
import java.util.Map;
import static org.openqa.selenium.remote.ErrorCodes.SUCCESS;
/**
* Maps exceptions to status codes for sending over the wire.
*
* @author jmleyba@gmail.com (Jason Leyba)
*/
public class ErrorHandler {
// Keys looked up in the error payload map returned by the remote end
// (presumably the JSON Wire Protocol error response — TODO confirm for the
// fields not referenced in this class's visible code).
private static final String MESSAGE = "message";
private static final String SCREEN_SHOT = "screen";
private static final String CLASS = "class";
private static final String STACK_TRACE = "stackTrace";
private static final String LINE_NUMBER = "lineNumber";
private static final String METHOD_NAME = "methodName";
private static final String CLASS_NAME = "className";
private static final String FILE_NAME = "fileName";
// Placeholders substituted when a server stack frame omits the corresponding field.
private static final String UNKNOWN_CLASS = "<anonymous class>";
private static final String UNKNOWN_METHOD = "<anonymous method>";
private static final String UNKNOWN_FILE = null;
// Maps wire status codes to WebDriverException subclasses (see throwIfResponseFailed).
private final ErrorCodes errorCodes = new ErrorCodes();
// Whether server-side details are attached to exceptions thrown to the caller.
private boolean includeServerErrors;
/** Creates a handler that includes server-side error details by default. */
public ErrorHandler() {
    this(true);
}
/**
 * @param includeServerErrors Whether to include server-side details in thrown exceptions if the
 *        information is available.
 */
public ErrorHandler(boolean includeServerErrors) {
    this.includeServerErrors = includeServerErrors;
}
public boolean isIncludeServerErrors() {
return includeServerErrors;
}
public void setIncludeServerErrors(boolean includeServerErrors) {
this.includeServerErrors = includeServerErrors;
}
@SuppressWarnings("unchecked")
public Response throwIfResponseFailed(Response response, long duration) throws RuntimeException {
if (response.getStatus() == SUCCESS) {
return response;
}
if (response.getValue() instanceof Throwable) {
throw Throwables.propagate((Throwable) response.getValue());
}
Class<? extends WebDriverException> outerErrorType =
errorCodes.getExceptionType(response.getStatus());
Object value = response.getValue();
String message = null;
Throwable cause = null;
if (value instanceof Map) {
Map<String, Object> rawErrorData = (Map<String, Object>) value;
try {
message = (String) rawErrorData.get(MESSAGE);
} catch (ClassCastException e) {
// Ok, try to recover gracefully.
message = String.valueOf(e);
}
Throwable serverError = rebuildServerError(rawErrorData);
// If serverError is null, then the server did not provide a className (only expected if
// the server is a Java process) or a stack trace. The lack of a className is OK, but
// not having a stacktrace really hurts our ability to debug problems.
if (serverError == null) {
if (includeServerErrors) {
// TODO: this should probably link to a wiki article with more info.
message += " (WARNING: The server did not provide any stacktrace information)";
}
} else if (!includeServerErrors) {
// TODO: wiki article with more info.
message += " (WARNING: The client has suppressed server-side stacktraces)";
} else {
cause = serverError;
}
if (rawErrorData.get(SCREEN_SHOT) != null) {
cause = new ScreenshotException(String.valueOf(rawErrorData.get(SCREEN_SHOT)), cause);
}
} else if (value != null) {
message = String.valueOf(value);
}
String duration1 = duration(duration);
if (message != null && message.indexOf(duration1) == -1) {
message = message + duration1;
}
WebDriverException toThrow = null;
if (outerErrorType.equals(UnhandledAlertException.class)
&& value instanceof Map) {
toThrow = createUnhandledAlertException(value);
}
if (toThrow == null) {
toThrow = createThrowable(outerErrorType,
new Class<?>[] {String.class, Throwable.class},
new Object[] {message, cause});
}
if (toThrow == null) {
toThrow = createThrowable(outerErrorType,
new Class<?>[] {String.class},
new Object[] {message});
}
if (toThrow == null) {
toThrow = new WebDriverException(message, cause);
}
throw toThrow;
}
@SuppressWarnings("unchecked")
private UnhandledAlertException createUnhandledAlertException(Object value) {
Map<String, Object> rawErrorData = (Map<String, Object>) value;
if (rawErrorData.containsKey("alert") || rawErrorData.containsKey("alertText")) {
Object alertText = rawErrorData.get("alertText");
if (alertText == null) {
Map<String, Object> alert = (Map<String, Object>) rawErrorData.get("alert");
if (alert != null) {
alertText = alert.get("text");
}
}
return createThrowable(UnhandledAlertException.class,
new Class<?>[] {String.class, String.class},
new Object[] {rawErrorData.get("message"), alertText});
}
return null;
}
private String duration(long duration) {
String prefix = "\nCommand duration or timeout: ";
if (duration < 1000) {
return prefix + duration + " milliseconds";
}
return prefix + (new BigDecimal(duration).divide(new BigDecimal(1000)).setScale(2, RoundingMode.HALF_UP)) + " seconds";
}
private <T extends Throwable> T createThrowable(
Class<T> clazz, Class<?>[] parameterTypes, Object[] parameters) {
try {
Constructor<T> constructor = clazz.getConstructor(parameterTypes);
return constructor.newInstance(parameters);
} catch (NoSuchMethodException e) {
// Do nothing - fall through.
} catch (InvocationTargetException e) {
// Do nothing - fall through.
} catch (InstantiationException e) {
// Do nothing - fall through.
} catch (IllegalAccessException e) {
// Do nothing - fall through.
} catch (OutOfMemoryError error) {
// It can happen...
}
return null;
}
private Throwable rebuildServerError(Map<String, Object> rawErrorData) {
if (!rawErrorData.containsKey(CLASS) && !rawErrorData.containsKey(STACK_TRACE)) {
// Not enough information for us to try to rebuild an error.
return null;
}
Throwable toReturn = null;
String message = (String) rawErrorData.get(MESSAGE);
if (rawErrorData.containsKey(CLASS)) {
String className = (String) rawErrorData.get(CLASS);
try {
Class clazz = Class.forName(className);
if (clazz.equals(UnhandledAlertException.class)) {
toReturn = createUnhandledAlertException(rawErrorData);
} else if (Throwable.class.isAssignableFrom(clazz)) {
@SuppressWarnings({"unchecked"})
Class<? extends Throwable> throwableType = (Class<? extends Throwable>) clazz;
toReturn = createThrowable(throwableType, new Class<?>[] {String.class},
new Object[] {message});
}
} catch (ClassNotFoundException ignored) {
// Ok, fall-through
}
}
if (toReturn == null) {
toReturn = new UnknownServerException(message);
}
// Note: if we have a class name above, we should always have a stack trace.
// The inverse is not always true.
StackTraceElement[] stackTrace = new StackTraceElement[0];
if (rawErrorData.containsKey(STACK_TRACE)) {
@SuppressWarnings({"unchecked"})
List<Map<String, Object>> stackTraceInfo =
(List<Map<String, Object>>) rawErrorData.get(STACK_TRACE);
Iterable<StackTraceElement> stackFrames =
Iterables.transform(stackTraceInfo, new FrameInfoToStackFrame());
stackFrames = Iterables.filter(stackFrames, Predicates.notNull());
stackTrace = Iterables.toArray(stackFrames, StackTraceElement.class);
}
toReturn.setStackTrace(stackTrace);
return toReturn;
}
/**
* Exception used as a place holder if the server returns an error without a stack trace.
*/
public static class UnknownServerException extends WebDriverException {
private UnknownServerException(String s) {
super(s);
}
}
/**
* Function that can rebuild a {@link StackTraceElement} from the frame info included with a
* WebDriver JSON response.
*/
private static class FrameInfoToStackFrame
implements Function<Map<String, Object>, StackTraceElement> {
public StackTraceElement apply(Map<String, Object> frameInfo) {
if (frameInfo == null) {
return null;
}
Number lineNumber = (Number) frameInfo.get(LINE_NUMBER);
if (lineNumber == null) {
return null;
}
// Gracefully handle remote servers that don't (or can't) send back
// complete stack trace info. At least some of this information should
// be included...
String className = frameInfo.containsKey(CLASS_NAME)
? toStringOrNull(frameInfo.get(CLASS_NAME)) : UNKNOWN_CLASS;
String methodName = frameInfo.containsKey(METHOD_NAME)
? toStringOrNull(frameInfo.get(METHOD_NAME)) : UNKNOWN_METHOD;
String fileName = frameInfo.containsKey(FILE_NAME)
? toStringOrNull(frameInfo.get(FILE_NAME)) : UNKNOWN_FILE;
return new StackTraceElement(className, methodName, fileName,
lineNumber.intValue());
}
private static String toStringOrNull(Object o) {
return o == null ? null : o.toString();
}
}
}
| |
/*
* Copyright 2018 Philipp Niedermayer (github.com/eltos)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eltos.simpledialogfragment.form;
import android.content.Context;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
import java.util.ArrayList;
import java.util.Collections;
import eltos.simpledialogfragment.CustomViewDialog;
import eltos.simpledialogfragment.R;
import eltos.simpledialogfragment.SimpleDialog;
/**
* A form dialog to display a number of input fields to the user, such as
* - Input fields ({@link Input})
* - Check-boxes ({@link Check})
* - Dropdown-menus ({@link Spinner})
*
* Created by eltos on 20.02.17.
*/
@SuppressWarnings({"WeakerAccess", "unused"})
public class SimpleFormDialog extends CustomViewDialog<SimpleFormDialog> implements SimpleDialog.OnDialogResultListener {

    public static final String TAG = "SimpleFormDialog.";

    /** Creates a new, empty form dialog; populate it via {@link #fields(FormElement...)}. */
    public static SimpleFormDialog build(){
        return new SimpleFormDialog();
    }

    /**
     * Convenient method to build a form dialog with a single email input
     *
     * @param emailFieldKey the key that can be used to receive the entered text from the bundle in
     *                      {@link eltos.simpledialogfragment.SimpleDialog.OnDialogResultListener#onResult}
     * @return this instance
     */
    public static SimpleFormDialog buildEmailInput(String emailFieldKey){
        return SimpleFormDialog.build()
                .fields(
                        Input.email(emailFieldKey).required()
                );
    }

    /**
     * Convenient method to build a form dialog with a single password input
     *
     * @param passwordFieldKey the key that can be used to receive the entered text from the bundle
     *                         in {@link eltos.simpledialogfragment.SimpleDialog.OnDialogResultListener#onResult}
     * @return this instance
     */
    public static SimpleFormDialog buildPasswordInput(String passwordFieldKey){
        return SimpleFormDialog.build()
                .fields(
                        Input.password(passwordFieldKey).required()
                );
    }

    /**
     * Convenient method to build a form dialog with a single pin code input
     *
     * @param pinFieldKey the key that can be used to receive the entered text from the bundle
     *                    in {@link eltos.simpledialogfragment.SimpleDialog.OnDialogResultListener#onResult}
     * @return this instance
     */
    public static SimpleFormDialog buildPinCodeInput(String pinFieldKey){
        return SimpleFormDialog.build()
                .fields(
                        Input.pin(pinFieldKey).required()
                );
    }

    /**
     * Convenient method to build a form dialog with a single pin code input
     * of a fixed length
     *
     * @param pinFieldKey the key that can be used to receive the entered text from the bundle
     *                    in {@link eltos.simpledialogfragment.SimpleDialog.OnDialogResultListener#onResult}
     * @param digits the length of the pin code
     * @return this instance
     */
    public static SimpleFormDialog buildPinCodeInput(String pinFieldKey, int digits){
        return SimpleFormDialog.build()
                .fields(
                        // min == max == digits forces an exact pin length
                        Input.pin(pinFieldKey).required().min(digits).max(digits)
                );
    }

    /**
     * Convenient method to build a form dialog with a single number input
     *
     * @param numberFieldKey the key that can be used to receive the entered text from the bundle
     *                       in {@link eltos.simpledialogfragment.SimpleDialog.OnDialogResultListener#onResult}
     * @return this instance
     */
    public static SimpleFormDialog buildNumberInput(String numberFieldKey){
        return SimpleFormDialog.build()
                .fields(
                        // NOTE(review): this uses Input.phone(...) rather than a plain numeric
                        // input — presumably for the numeric soft keyboard; confirm intended.
                        Input.phone(numberFieldKey).required()
                );
    }

    /**
     * Convenient method to build a form dialog with an email input alongside
     * a password input for login with email address and password
     *
     * @param emailFieldKey the key that can be used to receive the entered email from the bundle
     *                      in {@link eltos.simpledialogfragment.SimpleDialog.OnDialogResultListener#onResult}
     * @param passwordFieldKey the key that can be used to receive the entered password from the
     *                         bundle in {@link eltos.simpledialogfragment.SimpleDialog.OnDialogResultListener#onResult}
     * @return this instance
     */
    public static SimpleFormDialog buildLoginEmail(String emailFieldKey, String passwordFieldKey){
        return SimpleFormDialog.build()
                .title(R.string.login)
                .pos(R.string.login)
                .fields(
                        Input.email(emailFieldKey).required(),
                        Input.password(passwordFieldKey).required()
                );
    }

    /**
     * Convenient method to build a form dialog with a plain input alongside
     * a password input for login with username and password
     *
     * @param userFieldKey the key that can be used to receive the entered username from the bundle
     *                     in {@link eltos.simpledialogfragment.SimpleDialog.OnDialogResultListener#onResult}
     * @param passwordFieldKey the key that can be used to receive the entered password from the
     *                         bundle in {@link eltos.simpledialogfragment.SimpleDialog.OnDialogResultListener#onResult}
     * @return this instance
     */
    public static SimpleFormDialog buildLogin(String userFieldKey, String passwordFieldKey){
        return SimpleFormDialog.build()
                .title(R.string.login)
                .pos(R.string.login)
                .fields(
                        Input.plain(userFieldKey).hint(R.string.user).required(),
                        Input.password(passwordFieldKey).required()
                );
    }

    /**
     * Convenient method to populate the form with form elements
     *
     * @param elements the {@link FormElement}s that form should contain
     * @return this instance
     */
    public SimpleFormDialog fields(FormElement... elements){
        ArrayList<FormElement> list = new ArrayList<>(elements.length);
        Collections.addAll(list, elements);
        // Elements are Parcelable and ride along in the fragment's argument bundle
        // so they survive configuration changes.
        getArgs().putParcelableArrayList(INPUT_FIELDS, list);
        return this;
    }

    /**
     * En- or disables the automatic focussing of the first field in the form when the dialog opens.
     * This is enabled by default.
     *
     * @param enabled whether or not to autofocus the first field
     * @return this instance
     */
    public SimpleFormDialog autofocus(boolean enabled){
        return setArg(AUTO_FOCUS, enabled);
    }

    public interface InputValidator {
        /**
         * Let the hosting fragment or activity implement this interface to make
         * custom validations for {@link Input} fields.
         * You may also use {@link Input#validatePattern} with a custom or predefined
         * pattern.
         * The method is called every time the user hits the positive button or next key.
         *
         * @param dialogTag the tag of this fragment
         * @param fieldKey the key of the field as supplied when the corresponding
         *                 {@link Input} was created (see {@link Input#plain(String)} etc)
         * @param input the text entered by the user
         * @param extras the extras passed with {@link SimpleFormDialog#extra(Bundle)}
         *
         * @return the error message to display or null if the input is valid
         */
        String validate(String dialogTag, String fieldKey, @Nullable String input, @NonNull Bundle extras);
    }

    ///////////////////////////////////////////////////////////////////////////////////////////

    // Argument-bundle keys (namespaced with TAG) and the per-view saved-state prefix.
    protected static final String INPUT_FIELDS = TAG + "inputFields";
    protected static final String AUTO_FOCUS = TAG + "autofocus";
    protected static final String SAVE_TAG = "form.";

    // Callback object handed to view holders for keyboard/focus manipulation.
    private FocusActions mFocusActions = new FocusActions();
    // One view holder per form element, in display order.
    ArrayList<FormElementViewHolder<?>> mViews = new ArrayList<>(0);
    // The container the element views are inflated into.
    ViewGroup mFormContainer;

    /**
     * Delegates custom validation of an {@link Input} field to the target fragment or, failing
     * that, the hosting activity, when either implements {@link InputValidator}.
     *
     * @param fieldKey the result key of the field being validated
     * @param input the text currently entered
     * @return the error message to display, or null if the input is valid or no validator exists
     */
    protected String onValidateInput(String fieldKey, @Nullable String input){
        Bundle extras = getExtras();
        if (getTargetFragment() instanceof InputValidator) {
            return ((InputValidator) getTargetFragment())
                    .validate(getTag(), fieldKey, input, extras);
        }
        if (getActivity() instanceof InputValidator) {
            return ((InputValidator) getActivity())
                    .validate(getTag(), fieldKey, input, extras);
        }
        return null;
    }

    @Override
    protected void onDialogShown() {
        // resize dialog when keyboard is shown to prevent fields from hiding behind the keyboard
        if (getDialog() != null && getDialog().getWindow() != null) {
            getDialog().getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE);
        }

        setPositiveButtonEnabled(posButtonEnabled());

        // Focus the first field unless the caller disabled autofocus.
        if (getArgs().getBoolean(AUTO_FOCUS, true)){
            requestFocus(0);
        }
    }

    /**
     * Runs built-in and custom validation over every element; the dialog only closes when all
     * elements pass. The first invalid element receives focus.
     */
    @Override
    protected boolean acceptsPositiveButtonPress() {
        boolean okay = true;
        for (FormElementViewHolder holder : mViews){
            if (!holder.validate(getContext())){
                if (okay) holder.focus(mFocusActions); // focus first element that is not valid
                okay = false;
            } else if (holder instanceof InputViewHolder){
                // custom validation
                String error = onValidateInput(holder.field.resultKey, ((InputViewHolder) holder).getText());
                if (error != null){
                    ((InputViewHolder) holder).setError(true, error);
                    if (okay) holder.focus(mFocusActions); // focus first element that is not valid
                    okay = false;
                }
            }
        }
        return okay;
    }

    /**
     * Whether the positive button should currently be enabled. Only when the form has exactly one
     * focusable element does that element decide; otherwise the button stays enabled.
     */
    protected boolean posButtonEnabled() {
        int first = getFirstFocusableIndex();
        if (0 <= first && isLastFocusableIndex(first) && first < mViews.size()){
            // first==last --> only one
            return mViews.get(first).posButtonEnabled(getContext());
        }
        return true;
    }

    /** Moves focus to the element at {@code viewIndex}, ignoring out-of-range indices. */
    protected void requestFocus(int viewIndex){
        if (0 <= viewIndex && viewIndex < mViews.size()) {
            mViews.get(viewIndex).focus(mFocusActions);
        }
    }

    /**
     * A Callback Class with useful methods used by {@link FormElementViewHolder#focus}
     */
    public class FocusActions {
        /**
         * Helper to hide the soft keyboard
         */
        public void hideKeyboard(){
            View view = getDialog().getCurrentFocus();
            if (view != null) {
                InputMethodManager imm = (InputMethodManager) getContext()
                        .getSystemService(Context.INPUT_METHOD_SERVICE);
                if (imm != null) {
                    imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
                }
            }
        }

        /**
         * Helper for opening the soft keyboard on a specified view
         */
        public void showKeyboard(final View view){
            SimpleFormDialog.this.showKeyboard(view);
        }

        /**
         * Helper to clear the current focus
         */
        public void clearCurrentFocus(){
            // The container is focusable, so giving it focus removes focus from any field.
            mFormContainer.requestFocus();
        }
    }

    /**
     * A Callback Class with useful methods used by {@link FormElementViewHolder#setUpView}
     */
    public class DialogActions extends FocusActions {
        // Position of the element this callback belongs to.
        private int index;
        // Index of the last element in the form.
        // NOTE(review): lastIndex is assigned but never read in this class — candidate for removal.
        private int lastIndex;

        private DialogActions(int index, int lastIndex){
            this.index = index;
            this.lastIndex = lastIndex;
        }

        /**
         * Helper to request an update of the positive button state
         */
        public void updatePosButtonState(){
            setPositiveButtonEnabled(posButtonEnabled());
        }

        /**
         * Check if this is the only (focusable) element
         *
         * @return true if this is the only (focusable) element
         */
        public boolean isOnlyFocusableElement(){
            return isOnlyFocusableIndex(index);
        }

        /**
         * Check if this is the last (focusable) element
         *
         * @return true if this is the last (focusable) element
         */
        public boolean isLastFocusableElement(){
            return isLastFocusableIndex(index);
        }

        /**
         * Helper to move the focus to the next element or to simulate a positive button
         * press if this is the last element
         *
         * @param mayPressPositiveButtonIfLast whether the positive button can be pressed
         *                                     if this was the last element
         */
        public void continueWithNextElement(boolean mayPressPositiveButtonIfLast){
            if (mayPressPositiveButtonIfLast && isLastFocusableElement()){
                pressPositiveButton();
            } else {
                requestFocus(getNextFocusableIndex(index));
            }
        }

        /** Shows a nested dialog whose results are routed back through this form's onResult. */
        public void showDialog(SimpleDialog dialog, String tag){
            dialog.show(SimpleFormDialog.this, tag);
        }
    }

    // An index is focusable when it refers to an existing element that is not a plain Hint.
    private boolean isFocusableIndex(int i){
        ArrayList<FormElement> fields = getArgs().getParcelableArrayList(INPUT_FIELDS);
        return 0 <= i && fields != null && i < fields.size() && !(fields.get(i) instanceof Hint);
    }

    // Returns the next focusable index after i, or Integer.MAX_VALUE when none remains.
    private int getNextFocusableIndex(int i){
        ArrayList<FormElement> fields = getArgs().getParcelableArrayList(INPUT_FIELDS);
        do {
            i++;
            if (fields == null || i >= fields.size()) return Integer.MAX_VALUE;
        } while (!isFocusableIndex(i));
        return i;
    }

    // First focusable index in the form (Integer.MAX_VALUE when the form has none).
    private int getFirstFocusableIndex(){
        return getNextFocusableIndex(-1);
    }

    private boolean isOnlyFocusableIndex(int i){
        return i == getFirstFocusableIndex() && isLastFocusableIndex(i);
    }

    private boolean isLastFocusableIndex(int i){
        return isFocusableIndex(i) && getNextFocusableIndex(i) == Integer.MAX_VALUE;
    }

    /**
     * Method for view creation. Inflates the layout and calls
     * {@link SimpleFormDialog#populateContainer(ViewGroup, Bundle)}
     * to populate the container with the fields specified
     *
     * @param savedInstanceState The last saved instance state of the Fragment,
     *                           or null if this fragment is created for the first time.
     *
     * @return inflated view
     */
    @Override
    public View onCreateContentView(Bundle savedInstanceState) {
        // inflate custom view
        View view = inflate(R.layout.simpledialogfragment_form);
        ViewGroup container = (ViewGroup) view.findViewById(R.id.container);

        populateContainer(container, savedInstanceState);

        setPositiveButtonEnabled(posButtonEnabled());

        return view;
    }

    /**
     * Creates FormElements and adds them to the container
     *
     * @param container the container to hold the FormElements
     * @param savedInstanceState saved state
     */
    protected void populateContainer(@NonNull ViewGroup container,
                                     @Nullable Bundle savedInstanceState) {
        mFormContainer = container;
        ArrayList<FormElement> fields = getArgs().getParcelableArrayList(INPUT_FIELDS);
        if (fields != null) {
            mViews = new ArrayList<>(fields.size());
            int lastI = fields.size() - 1;
            for (int i = 0; i < fields.size(); i++) {
                FormElementViewHolder<?> viewHolder = fields.get(i).buildViewHolder();
                View child = inflate(viewHolder.getContentViewLayout(), mFormContainer, false);
                // Each view's state was saved under SAVE_TAG + index (see onSaveInstanceState).
                Bundle savedState = savedInstanceState == null ? null :
                        savedInstanceState.getBundle(SAVE_TAG + i);
                viewHolder.setUpView(child, getContext(), savedState, new DialogActions(i, lastI));
                mFormContainer.addView(child);
                mViews.add(viewHolder);
            }
        }
    }

    /**
     * Collects every element's result into a single bundle, keyed by each element's result key.
     */
    @Override
    public Bundle onResult(int which) {
        Bundle result = new Bundle();
        for (FormElementViewHolder holder : mViews) {
            holder.putResults(result, holder.field.resultKey);
        }
        return result;
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        // Save each element's view state under SAVE_TAG + index so populateContainer can restore it.
        for (int i = 0; i < mViews.size(); i++) {
            Bundle viewState = new Bundle();
            mViews.get(i).saveState(viewState);
            outState.putBundle(SAVE_TAG + i, viewState);
        }
        super.onSaveInstanceState(outState);
    }

    /**
     * Forwards results of nested dialogs (opened via {@link DialogActions#showDialog}) to the
     * first element view holder that handles them.
     */
    @Override
    public boolean onResult(@NonNull String dialogTag, int which, @NonNull Bundle extras) {
        ArrayList<FormElement> fields = getArgs().getParcelableArrayList(INPUT_FIELDS);
        if (fields != null) {
            for (FormElementViewHolder<?> view : mViews) {
                if (view instanceof OnDialogResultListener){
                    if (((OnDialogResultListener) view).onResult(dialogTag, which, extras)){
                        return true;
                    }
                }
            }
        }
        return false;
    }
}
| |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.entity.player;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.ItemStackHelper;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.util.NonNullList;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.util.text.TextComponentString;
import net.minecraft.util.text.TextComponentTranslation;
import java.util.Arrays;
import java.util.List;
public class SpongeUserInventory implements IInventory {

    // sourced from InventoryPlayer

    /** An array of 36 item stacks indicating the main player inventory (including the visible bar). */
    final NonNullList<ItemStack> mainInventory = NonNullList.<ItemStack>withSize(36, ItemStack.EMPTY);
    /** An array of 4 item stacks containing the currently worn armor pieces. */
    final NonNullList<ItemStack> armorInventory = NonNullList.<ItemStack>withSize(4, ItemStack.EMPTY);
    /** Single-slot list holding the off-hand stack. */
    final NonNullList<ItemStack> offHandInventory = NonNullList.<ItemStack>withSize(1, ItemStack.EMPTY);
    /**
     * The sub-inventories in the order global slot indices resolve through them:
     * main (0-35), armor (36-39), off hand (40).
     */
    private final List<NonNullList<ItemStack>> allInventories;
    /** The index of the currently held item (0-8). */
    public int currentItem;
    /** The player whose inventory this is. */
    public SpongeUser player;
    /** Set by {@link #markDirty()}; cleared after a successful {@link #writeToNBT}. */
    private boolean dirty = false;

    public SpongeUserInventory(SpongeUser playerIn) {
        this.allInventories = Arrays.<NonNullList<ItemStack>>asList(this.mainInventory, this.armorInventory, this.offHandInventory);
        this.player = playerIn;
    }

    /** Returns the held stack, or EMPTY when {@link #currentItem} is not a hotbar slot. */
    public ItemStack getCurrentItem() {
        return InventoryPlayer.isHotbar(this.currentItem) ? this.mainInventory.get(this.currentItem) : ItemStack.EMPTY;
    }

    /**
     * Removes up to a specified number of items from an inventory slot and returns them in a new stack.
     */
    @Override
    public ItemStack decrStackSize(int index, int count) {
        this.markDirty();
        // Walk the sub-inventories, shifting the global index into a local one.
        List<ItemStack> list = null;
        for (NonNullList<ItemStack> nonnulllist : this.allInventories) {
            if (index < nonnulllist.size()) {
                list = nonnulllist;
                break;
            }
            index -= nonnulllist.size();
        }
        return list != null && !list.get(index).isEmpty() ? ItemStackHelper.getAndSplit(list, index, count) : ItemStack.EMPTY;
    }

    /**
     * Removes a stack from the given slot and returns it.
     */
    @Override
    public ItemStack removeStackFromSlot(int index) {
        this.markDirty();
        NonNullList<ItemStack> nonnulllist = null;
        for (NonNullList<ItemStack> nonnulllist1 : this.allInventories) {
            if (index < nonnulllist1.size()) {
                nonnulllist = nonnulllist1;
                break;
            }
            index -= nonnulllist1.size();
        }
        if (nonnulllist != null && !nonnulllist.get(index).isEmpty()) {
            ItemStack itemstack = nonnulllist.get(index);
            nonnulllist.set(index, ItemStack.EMPTY);
            return itemstack;
        } else {
            return ItemStack.EMPTY;
        }
    }

    /**
     * Sets the given item stack to the specified slot in the inventory (can be crafting or armor sections).
     */
    @Override
    public void setInventorySlotContents(int index, ItemStack stack) {
        this.markDirty();
        NonNullList<ItemStack> nonnulllist = null;
        for (NonNullList<ItemStack> nonnulllist1 : this.allInventories) {
            if (index < nonnulllist1.size()) {
                nonnulllist = nonnulllist1;
                break;
            }
            index -= nonnulllist1.size();
        }
        // Out-of-range indices are silently ignored, matching vanilla behaviour.
        if (nonnulllist != null) {
            nonnulllist.set(index, stack);
        }
    }

    /**
     * Writes the inventory out as a list of compound tags. This is where the slot indices are used (+100 for armor,
     * +150 for off hand). Empty stacks are skipped. Clears the dirty flag.
     */
    public NBTTagList writeToNBT(NBTTagList nbtTagListIn) {
        for (int i = 0; i < this.mainInventory.size(); ++i) {
            if (!this.mainInventory.get(i).isEmpty()) {
                NBTTagCompound nbttagcompound = new NBTTagCompound();
                nbttagcompound.setByte("Slot", (byte) i);
                this.mainInventory.get(i).writeToNBT(nbttagcompound);
                nbtTagListIn.appendTag(nbttagcompound);
            }
        }

        for (int j = 0; j < this.armorInventory.size(); ++j) {
            if (!this.armorInventory.get(j).isEmpty()) {
                NBTTagCompound nbttagcompound1 = new NBTTagCompound();
                nbttagcompound1.setByte("Slot", (byte) (j + 100));
                this.armorInventory.get(j).writeToNBT(nbttagcompound1);
                nbtTagListIn.appendTag(nbttagcompound1);
            }
        }

        for (int k = 0; k < this.offHandInventory.size(); ++k) {
            if (!this.offHandInventory.get(k).isEmpty()) {
                NBTTagCompound nbttagcompound2 = new NBTTagCompound();
                nbttagcompound2.setByte("Slot", (byte) (k + 150));
                this.offHandInventory.get(k).writeToNBT(nbttagcompound2);
                nbtTagListIn.appendTag(nbttagcompound2);
            }
        }

        this.dirty = false;

        return nbtTagListIn;
    }

    /**
     * Reads from the given tag list and fills the slots in the inventory with the correct items.
     * Slots 0-35 map to the main inventory, 100+ to armor, 150+ to the off hand.
     */
    public void readFromNBT(NBTTagList nbtTagListIn) {
        this.mainInventory.clear();
        this.armorInventory.clear();
        this.offHandInventory.clear();

        for (int i = 0; i < nbtTagListIn.tagCount(); ++i) {
            NBTTagCompound nbttagcompound = nbtTagListIn.getCompoundTagAt(i);
            // The slot byte is stored unsigned (armor/off-hand offsets exceed 127).
            int j = nbttagcompound.getByte("Slot") & 255;
            ItemStack itemstack = new ItemStack(nbttagcompound);

            if (!itemstack.isEmpty()) {
                if (j >= 0 && j < this.mainInventory.size()) {
                    this.mainInventory.set(j, itemstack);
                } else if (j >= 100 && j < this.armorInventory.size() + 100) {
                    this.armorInventory.set(j - 100, itemstack);
                } else if (j >= 150 && j < this.offHandInventory.size() + 150) {
                    this.offHandInventory.set(j - 150, itemstack);
                }
            }
        }
    }

    /**
     * Returns the number of slots in the inventory.
     */
    @Override
    public int getSizeInventory() {
        return this.mainInventory.size() + this.armorInventory.size() + this.offHandInventory.size();
    }

    @Override
    public boolean isEmpty() {
        for (ItemStack itemstack : this.mainInventory) {
            if (!itemstack.isEmpty()) {
                return false;
            }
        }

        for (ItemStack itemstack1 : this.armorInventory) {
            if (!itemstack1.isEmpty()) {
                return false;
            }
        }

        for (ItemStack itemstack2 : this.offHandInventory) {
            if (!itemstack2.isEmpty()) {
                return false;
            }
        }

        return true;
    }

    /**
     * Returns the stack in the given slot.
     */
    @Override
    public ItemStack getStackInSlot(int index) {
        List<ItemStack> list = null;
        for (NonNullList<ItemStack> nonnulllist : this.allInventories) {
            if (index < nonnulllist.size()) {
                list = nonnulllist;
                break;
            }
            index -= nonnulllist.size();
        }
        return list == null ? ItemStack.EMPTY : list.get(index);
    }

    /**
     * Get the name of this object. For players this returns their username
     */
    @Override
    public String getName() {
        return "container.inventory";
    }

    /**
     * Returns true if this thing is named
     */
    @Override
    public boolean hasCustomName() {
        return false;
    }

    /**
     * Get the formatted ChatComponent that will be used for the sender's username in chat
     */
    @Override
    public ITextComponent getDisplayName() {
        return this.hasCustomName() ? new TextComponentString(this.getName()) : new TextComponentTranslation(this.getName(), new Object[0]);
    }

    /**
     * Returns the maximum stack size for a inventory slot. Seems to always be 64, possibly will be extended.
     */
    @Override
    public int getInventoryStackLimit() {
        return 64;
    }

    /**
     * For tile entities, ensures the chunk containing the tile entity is saved to disk later - the game won't think it
     * hasn't changed and skip it.
     */
    @Override
    public void markDirty() {
        this.dirty = true;
        // Propagate to the owning user so its data is persisted.
        this.player.markDirty();
    }

    /**
     * Don't rename this method to canInteractWith due to conflicts with Container
     */
    @Override
    public boolean isUsableByPlayer(EntityPlayer player) {
        return true;
    }

    @Override
    public void openInventory(EntityPlayer player) {
    }

    @Override
    public void closeInventory(EntityPlayer player) {
    }

    /**
     * Returns true if automation is allowed to insert the given stack (ignoring stack size) into the given slot. For
     * guis use Slot.isItemValid
     */
    @Override
    public boolean isItemValidForSlot(int index, ItemStack stack) {
        return true;
    }

    @Override
    public int getField(int id) {
        return 0;
    }

    @Override
    public void setField(int id, int value) {
    }

    @Override
    public int getFieldCount() {
        return 0;
    }

    @Override
    public void clear() {
        for (List<ItemStack> list : this.allInventories) {
            list.clear();
        }
    }
}
| |
package io.github.sislivros.persistencia;
import io.github.sislivros.entidades.Usuario;
import io.github.sislivros.enums.TipoUsuario;
import io.github.sislivros.utils.ImagemUtils;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import javax.imageio.ImageIO;
import javax.servlet.http.Part;
import org.apache.commons.codec.digest.DigestUtils;
/**
*
* @author victor
*/
/**
 * JDBC DAO for {@link Usuario} entities, keyed by e-mail address.
 *
 * <p>All public methods lazily (re)open the shared connection via
 * {@code conectar()} and report failures by printing the stack trace and
 * returning {@code false}/{@code null}.
 *
 * @author victor
 */
public class UsuarioBdDao extends GenericBancoDadosDao<Usuario, String> {

    /** Opens the shared JDBC connection if it is absent or already closed. */
    private void ensureConnection()
            throws SQLException, URISyntaxException, IOException, ClassNotFoundException {
        if (getConnection() == null || getConnection().isClosed()) {
            conectar();
        }
    }

    /**
     * Reads the current row of {@code rs} into a new {@link Usuario}.
     * {@code rs} must already be positioned on a valid row.
     */
    private Usuario mapUsuario(ResultSet rs) throws SQLException {
        Usuario usuario = new Usuario();
        usuario.setApelido(rs.getString("apelido"));
        usuario.setDataNascimento(rs.getDate("data_nascimento"));
        usuario.setCidade(rs.getString("cidade"));
        usuario.setEstado(rs.getString("estado"));
        usuario.setEmail(rs.getString("email"));
        usuario.setFoto(rs.getString("foto"));
        usuario.setNome(rs.getString("nome"));
        usuario.setSenha(rs.getString("senha"));
        int tipoId = rs.getInt("id_tipo_usuario");
        for (TipoUsuario t : TipoUsuario.values()) {
            if (t.id == tipoId) {
                usuario.setTipoUsuario(t);
                break;
            }
        }
        return usuario;
    }

    /**
     * Saves the user's profile picture under {@code /img/usuario/perfil} as
     * {@code <email>.jpg} and stores the web-relative path on the entity.
     *
     * @param usuario owner of the picture; its {@code foto} field is updated and persisted
     * @param foto    image data (any format readable by {@link ImageIO})
     * @return {@code true} when the file was written and the user row updated
     */
    public boolean salvarFoto(Usuario usuario, InputStream foto) {
        try {
            BufferedImage bf = ImageIO.read(foto);
            // NOTE(review): writing into the classpath resource directory only works
            // for exploded deployments — confirm this matches the server setup.
            String path = getClass().getResource("/img/usuario/perfil").getPath();
            path += "/" + usuario.getEmail() + ".jpg";
            // getResource() percent-encodes spaces; decode them for the file system.
            path = path.replaceAll("%20", " ");
            File f = new File(path);
            // Create the *parent* directories; the original f.mkdirs() created a
            // directory at the file path itself, which broke the write below.
            File parent = f.getParentFile();
            if (parent != null && !parent.exists()) {
                parent.mkdirs();
            }
            try (FileOutputStream fos = new FileOutputStream(f)) {
                fos.write(ImagemUtils.getByteImage(bf));
            }
            // Persist only the path relative to the web root ("img/...").
            usuario.setFoto(path.substring(path.indexOf("img/")));
            return alterar(usuario);
        } catch (IOException ex) {
            ex.printStackTrace();
            return false; // previously reported success even when the write failed
        }
    }

    /**
     * Inserts a new user row. The password is stored as its SHA-1 hex digest,
     * never in plain text.
     *
     * @return {@code true} on success, {@code false} on any persistence error
     */
    @Override
    public boolean salvar(Usuario usuario) {
        try {
            ensureConnection();
            String sql = "INSERT INTO usuario VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)";
            try (PreparedStatement ps = getConnection().prepareStatement(sql)) {
                Date dataNascimento = null;
                if (usuario.getDataNascimento() != null) {
                    dataNascimento = new Date(usuario.getDataNascimento().getTime());
                }
                ps.setString(1, usuario.getEmail());
                // NOTE(review): bare SHA-1 is weak for password storage; a salted
                // KDF (bcrypt/scrypt/argon2) would be preferable.
                ps.setString(2, DigestUtils.sha1Hex(usuario.getSenha()));
                ps.setString(3, usuario.getApelido());
                ps.setDate(4, dataNascimento);
                ps.setString(5, usuario.getCidade());
                ps.setString(6, usuario.getEstado());
                ps.setString(7, usuario.getNome());
                ps.setInt(8, usuario.getTipoUsuario().id);
                ps.setString(9, usuario.getFoto());
                ps.executeUpdate();
            }
            return true;
        } catch (SQLException | URISyntaxException | IOException | ClassNotFoundException ex) {
            ex.printStackTrace();
            return false;
        }
    }

    /**
     * Deletes the user row identified by the entity's e-mail.
     *
     * @return {@code true} on success (also when no row matched), {@code false} on error
     */
    @Override
    public boolean apagar(Usuario usuario) {
        try {
            ensureConnection();
            String sql = "DELETE FROM usuario WHERE email = ?";
            try (PreparedStatement ps = getConnection().prepareStatement(sql)) {
                ps.setString(1, usuario.getEmail());
                ps.executeUpdate();
            }
            return true;
        } catch (SQLException | URISyntaxException | IOException | ClassNotFoundException ex) {
            ex.printStackTrace();
            return false;
        }
    }

    /**
     * Updates every mutable column of the user row keyed by e-mail.
     * The password column is intentionally not touched here.
     *
     * @return {@code true} on success, {@code false} on any persistence error
     */
    @Override
    public boolean alterar(Usuario usuario) {
        try {
            ensureConnection();
            String sql = "UPDATE usuario SET apelido = ?, data_nascimento = ?, cidade = ?" +
                    ", estado = ?, nome = ?, foto = ?, id_tipo_usuario = ? WHERE email = ?";
            try (PreparedStatement ps = getConnection().prepareStatement(sql)) {
                Date dataNascimento = null;
                // Explicit null check instead of the former catch(NullPointerException)
                // control flow.
                if (usuario.getDataNascimento() != null) {
                    dataNascimento = new Date(usuario.getDataNascimento().getTime());
                }
                ps.setString(1, usuario.getApelido());
                ps.setDate(2, dataNascimento);
                ps.setString(3, usuario.getCidade());
                ps.setString(4, usuario.getEstado());
                ps.setString(5, usuario.getNome());
                ps.setString(6, usuario.getFoto());
                ps.setInt(7, usuario.getTipoUsuario().id);
                ps.setString(8, usuario.getEmail());
                ps.executeUpdate();
            }
            return true;
        } catch (SQLException | URISyntaxException | IOException | ClassNotFoundException ex) {
            ex.printStackTrace();
            return false;
        }
    }

    /**
     * Looks a user up by e-mail (the primary key).
     *
     * @return the user, or {@code null} when absent or on error
     */
    @Override
    public Usuario buscarId(String id) {
        try {
            ensureConnection();
            String sql = "SELECT * FROM usuario WHERE email = ?";
            try (PreparedStatement ps = getConnection().prepareStatement(sql)) {
                ps.setString(1, id);
                try (ResultSet rs = ps.executeQuery()) {
                    // Return null cleanly when no row matches instead of letting the
                    // getters throw on an unpositioned cursor.
                    return rs.next() ? mapUsuario(rs) : null;
                }
            }
        } catch (SQLException | URISyntaxException | IOException | ClassNotFoundException ex) {
            ex.printStackTrace();
            return null;
        }
    }

    /**
     * Authenticates by exact e-mail/password match.
     *
     * <p>NOTE(review): {@link #salvar} stores {@code sha1Hex(senha)}, so callers
     * presumably pass the already-hashed password here — confirm against callers.
     *
     * @return the matching user, or {@code null} when credentials do not match or on error
     */
    public Usuario login(String email, String senha) {
        try {
            ensureConnection();
            String sql = "SELECT * FROM usuario WHERE email = ? AND senha = ?";
            try (PreparedStatement ps = getConnection().prepareStatement(sql)) {
                ps.setString(1, email);
                ps.setString(2, senha);
                try (ResultSet rs = ps.executeQuery()) {
                    return rs.next() ? mapUsuario(rs) : null;
                }
            }
        } catch (SQLException | URISyntaxException | IOException | ClassNotFoundException ex) {
            ex.printStackTrace();
            return null;
        }
    }

    /**
     * Finds users whose columns equal the given attribute values (AND-combined).
     * Values are bound as statement parameters; column names cannot be bound and
     * must come from trusted code, never from user input.
     *
     * @param map column name → required value
     * @return matching users (possibly empty), or {@code null} on error
     */
    @Override
    public List<Usuario> buscarAtributos(Map<String, String> map) {
        try {
            ensureConnection();
            StringBuilder sql = new StringBuilder("SELECT * FROM usuario WHERE ");
            List<String> values = new ArrayList<>();
            Iterator<String> it = map.keySet().iterator();
            while (it.hasNext()) {
                String key = it.next();
                sql.append(key).append(" = ?");
                values.add(map.get(key));
                if (it.hasNext()) {
                    sql.append(" AND ");
                }
            }
            // Values are bound via placeholders (the old code concatenated them
            // straight into the SQL string — an injection vector).
            try (PreparedStatement ps = getConnection().prepareStatement(sql.toString())) {
                for (int i = 0; i < values.size(); i++) {
                    ps.setString(i + 1, values.get(i));
                }
                try (ResultSet rs = ps.executeQuery()) {
                    List<Usuario> usuarios = new ArrayList<>();
                    while (rs.next()) {
                        usuarios.add(mapUsuario(rs));
                    }
                    return usuarios;
                }
            }
        } catch (SQLException | URISyntaxException | IOException | ClassNotFoundException ex) {
            ex.printStackTrace();
            return null;
        }
    }

    /**
     * Loads every user row.
     *
     * @return all users (possibly empty), or {@code null} on error
     */
    @Override
    public List<Usuario> listarTodos() {
        try {
            ensureConnection();
            String sql = "SELECT * FROM usuario";
            try (PreparedStatement ps = getConnection().prepareStatement(sql)) {
                try (ResultSet rs = ps.executeQuery()) {
                    List<Usuario> usuarios = new ArrayList<>();
                    while (rs.next()) {
                        usuarios.add(mapUsuario(rs));
                    }
                    return usuarios;
                }
            }
        } catch (SQLException | URISyntaxException | IOException | ClassNotFoundException ex) {
            ex.printStackTrace();
            return null;
        }
    }

    /**
     * Fetches only the stored photo path of the user with the given e-mail.
     *
     * @return the photo path, or {@code null} when the user is absent or on error
     */
    public String getFotoFromEmail(String email) {
        try {
            ensureConnection();
            String sql = "SELECT foto FROM usuario WHERE email = ?";
            try (PreparedStatement ps = getConnection().prepareStatement(sql)) {
                ps.setString(1, email);
                try (ResultSet rs = ps.executeQuery()) {
                    return rs.next() ? rs.getString("foto") : null;
                }
            }
        } catch (ClassNotFoundException | IOException | SQLException | URISyntaxException ex) {
            ex.printStackTrace();
            return null;
        }
    }
}
| |
package nxt.peer;
import nxt.Account;
import nxt.BlockchainProcessor;
import nxt.Constants;
import nxt.Nxt;
import nxt.NxtException;
import nxt.util.Convert;
import nxt.util.CountingInputStream;
import nxt.util.CountingOutputStream;
import nxt.util.Logger;
import org.json.simple.JSONObject;
import org.json.simple.JSONStreamAware;
import org.json.simple.JSONValue;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.net.HttpURLConnection;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.UnknownHostException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.zip.GZIPInputStream;
/**
 * Concrete {@link Peer} implementation: tracks connection state, traffic
 * volumes, version compatibility, hallmark-based weight and blacklisting for
 * a single remote node.
 *
 * <p>Most fields are {@code volatile} because they are read and written from
 * multiple networking threads; the traffic counters additionally use
 * {@code synchronized} blocks so their read-modify-write is atomic.
 */
final class PeerImpl implements Peer {
    // Address we actually connect to; fixed for the lifetime of this object.
    private final String peerAddress;
    // Address the peer announces publicly; may differ from peerAddress and may change.
    private volatile String announcedAddress;
    // Port parsed from the announced address; -1 when none was given.
    private volatile int port;
    private volatile boolean shareAddress;
    private volatile Hallmark hallmark;
    private volatile String platform;
    private volatile String application;
    private volatile String version;
    // True when the reported version is below Constants.MIN_VERSION (see setVersion).
    private volatile boolean isOldVersion;
    // Weight share assigned in analyzeHallmark, proportional to hallmark weight.
    private volatile long adjustedWeight;
    // Wall-clock millis of the last blacklist() call; 0 when not blacklisted.
    private volatile long blacklistingTime;
    private volatile State state;
    private volatile long downloadedVolume;
    private volatile long uploadedVolume;
    private volatile int lastUpdated;
    // Cached hallmark account balance; -1 means "not yet fetched" (see getWeight).
    private volatile long hallmarkBalance = -1;
    private volatile int hallmarkBalanceHeight;
    PeerImpl(String peerAddress, String announcedAddress) {
        this.peerAddress = peerAddress;
        this.announcedAddress = announcedAddress;
        try {
            // URL.getPort() returns -1 when the announced address carries no port.
            this.port = new URL("http://" + announcedAddress).getPort();
        } catch (MalformedURLException ignore) {}
        this.state = State.NON_CONNECTED;
        this.shareAddress = true;
    }
    @Override
    public String getPeerAddress() {
        return peerAddress;
    }
    @Override
    public State getState() {
        return state;
    }
    /**
     * Transitions the peer state and fires the matching listener event:
     * ADDED_ACTIVE_PEER when leaving NON_CONNECTED, CHANGED_ACTIVE_PEER for
     * transitions between active states. Moving an active peer back to
     * NON_CONNECTED deliberately fires no event here.
     */
    void setState(State state) {
        if (this.state == state) {
            return;
        }
        if (this.state == State.NON_CONNECTED) {
            this.state = state;
            Peers.notifyListeners(this, Peers.Event.ADDED_ACTIVE_PEER);
        } else if (state != State.NON_CONNECTED) {
            this.state = state;
            Peers.notifyListeners(this, Peers.Event.CHANGED_ACTIVE_PEER);
        }
    }
    @Override
    public long getDownloadedVolume() {
        return downloadedVolume;
    }
    /** Atomically adds to the download counter, then notifies listeners. */
    void updateDownloadedVolume(long volume) {
        synchronized (this) {
            downloadedVolume += volume;
        }
        Peers.notifyListeners(this, Peers.Event.DOWNLOADED_VOLUME);
    }
    @Override
    public long getUploadedVolume() {
        return uploadedVolume;
    }
    /** Atomically adds to the upload counter, then notifies listeners. */
    void updateUploadedVolume(long volume) {
        synchronized (this) {
            uploadedVolume += volume;
        }
        Peers.notifyListeners(this, Peers.Event.UPLOADED_VOLUME);
    }
    @Override
    public String getVersion() {
        return version;
    }
    /**
     * Records the peer's version and recomputes {@code isOldVersion} by
     * comparing dotted version segments against Constants.MIN_VERSION
     * (segment-by-segment; fewer segments or an unparsable segment counts as old).
     * The check only applies when the peer runs the same application as us.
     */
    void setVersion(String version) {
        this.version = version;
        isOldVersion = false;
        if (Nxt.APPLICATION.equals(application) && version != null) {
            String[] versions = version.split("\\.");
            if (versions.length < Constants.MIN_VERSION.length) {
                isOldVersion = true;
            } else {
                for (int i = 0; i < Constants.MIN_VERSION.length; i++) {
                    try {
                        int v = Integer.parseInt(versions[i]);
                        if (v > Constants.MIN_VERSION[i]) {
                            isOldVersion = false;
                            break;
                        } else if (v < Constants.MIN_VERSION[i]) {
                            isOldVersion = true;
                            break;
                        }
                    } catch (NumberFormatException e) {
                        isOldVersion = true;
                        break;
                    }
                }
            }
            if (isOldVersion) {
                // Logger.logDebugMessage("Blacklisting %s version %s", peerAddress, version);
            }
        }
    }
    @Override
    public String getApplication() {
        return application;
    }
    void setApplication(String application) {
        this.application = application;
    }
    @Override
    public String getPlatform() {
        return platform;
    }
    void setPlatform(String platform) {
        this.platform = platform;
    }
    /** Human-readable "app (version) @ platform" string, each part truncated to 10 chars. */
    @Override
    public String getSoftware() {
        return Convert.truncate(application, "?", 10, false)
                + " (" + Convert.truncate(version, "?", 10, false) + ")"
                + " @ " + Convert.truncate(platform, "?", 10, false);
    }
    @Override
    public boolean shareAddress() {
        return shareAddress;
    }
    void setShareAddress(boolean shareAddress) {
        this.shareAddress = shareAddress;
    }
    @Override
    public String getAnnouncedAddress() {
        return announcedAddress;
    }
    /**
     * Updates the announced address (after normalization) and re-derives the
     * port from it. Invalid/unnormalizable addresses are ignored.
     */
    void setAnnouncedAddress(String announcedAddress) {
        String announcedPeerAddress = Peers.normalizeHostAndPort(announcedAddress);
        if (announcedPeerAddress != null) {
            this.announcedAddress = announcedPeerAddress;
            try {
                this.port = new URL("http://" + announcedPeerAddress).getPort();
            } catch (MalformedURLException ignore) {}
        }
    }
    int getPort() {
        return port;
    }
    @Override
    public boolean isWellKnown() {
        return announcedAddress != null && Peers.wellKnownPeers.contains(announcedAddress);
    }
    @Override
    public boolean isRebroadcastTarget() {
        return announcedAddress != null && Peers.rebroadcastPeers.contains(announcedAddress);
    }
    @Override
    public Hallmark getHallmark() {
        return hallmark;
    }
    /**
     * Weight derived from the hallmark account's balance, scaled by
     * adjustedWeight. The balance lookup is cached and refreshed at most once
     * every 60 blocks. Returns 0 for peers without a hallmark.
     */
    @Override
    public int getWeight() {
        if (hallmark == null) {
            return 0;
        }
        if (hallmarkBalance == -1 || hallmarkBalanceHeight < Nxt.getBlockchain().getHeight() - 60) {
            long accountId = hallmark.getAccountId();
            Account account = Account.getAccount(accountId);
            hallmarkBalance = account == null ? 0 : account.getBalanceNQT();
            hallmarkBalanceHeight = Nxt.getBlockchain().getHeight();
        }
        // Integer (long) division throughout; fractions are intentionally truncated.
        return (int)(adjustedWeight * (hallmarkBalance / Constants.ONE_NXT) / Constants.MAX_BALANCE_NXT);
    }
    @Override
    public boolean isBlacklisted() {
        return blacklistingTime > 0 || isOldVersion || Peers.knownBlacklistedPeers.contains(peerAddress);
    }
    /**
     * Blacklists this peer because of {@code cause}, except for causes that do
     * not indicate misbehavior (not-yet-valid data, out-of-order blocks,
     * database errors) — those return without blacklisting.
     */
    @Override
    public void blacklist(Exception cause) {
        if (cause instanceof NxtException.NotCurrentlyValidException || cause instanceof BlockchainProcessor.BlockOutOfOrderException
                || cause instanceof SQLException || cause.getCause() instanceof SQLException) {
            // don't blacklist peers just because a feature is not yet enabled, or because of database timeouts
            // prevents erroneous blacklisting during loading of blockchain from scratch
            return;
        }
        if (! isBlacklisted() && ! (cause instanceof IOException)) {
            Logger.logDebugMessage("Blacklisting " + peerAddress + " because of: " + cause.toString(), cause);
        }
        blacklist();
    }
    /** Marks the peer blacklisted now, disconnects it, and notifies listeners. */
    @Override
    public void blacklist() {
        blacklistingTime = System.currentTimeMillis();
        setState(State.NON_CONNECTED);
        Peers.notifyListeners(this, Peers.Event.BLACKLIST);
    }
    /** Clears the blacklist flag and notifies listeners. */
    @Override
    public void unBlacklist() {
        setState(State.NON_CONNECTED);
        blacklistingTime = 0;
        Peers.notifyListeners(this, Peers.Event.UNBLACKLIST);
    }
    /** Lifts the blacklist once the configured blacklisting period has elapsed. */
    void updateBlacklistedStatus(long curTime) {
        if (blacklistingTime > 0 && blacklistingTime + Peers.blacklistingPeriod <= curTime) {
            unBlacklist();
        }
    }
    @Override
    public void deactivate() {
        setState(State.NON_CONNECTED);
        Peers.notifyListeners(this, Peers.Event.DEACTIVATE);
    }
    @Override
    public void remove() {
        Peers.removePeer(this);
        Peers.notifyListeners(this, Peers.Event.REMOVE);
    }
    @Override
    public int getLastUpdated() {
        return lastUpdated;
    }
    void setLastUpdated(int lastUpdated) {
        this.lastUpdated = lastUpdated;
    }
    /**
     * POSTs {@code request} as JSON to the peer's {@code /burst} endpoint and
     * parses the JSON response. Accepts gzip responses, updates the traffic
     * counters, and optionally logs request/response per
     * Peers.communicationLoggingMask. Any failure (non-200, network or parse
     * error) demotes the state (CONNECTED → DISCONNECTED, otherwise
     * NON_CONNECTED) and yields {@code null}.
     *
     * @return the parsed JSON response, or {@code null} on failure
     */
    @Override
    public JSONObject send(final JSONStreamAware request) {
        JSONObject response;
        String log = null;
        boolean showLog = false;
        HttpURLConnection connection = null;
        try {
            String address = announcedAddress != null ? announcedAddress : peerAddress;
            StringBuilder buf = new StringBuilder("http://");
            buf.append(address);
            // Append the default port only when the address did not carry one.
            if (port <= 0) {
                buf.append(':');
                buf.append(Constants.isTestnet ? Peers.TESTNET_PEER_PORT : Peers.DEFAULT_PEER_PORT);
            }
            buf.append("/burst");
            URL url = new URL(buf.toString());
            if (Peers.communicationLoggingMask != 0) {
                StringWriter stringWriter = new StringWriter();
                request.writeJSONString(stringWriter);
                log = "\"" + url.toString() + "\": " + stringWriter.toString();
            }
            connection = (HttpURLConnection)url.openConnection();
            connection.setRequestMethod("POST");
            connection.setDoOutput(true);
            connection.setConnectTimeout(Peers.connectTimeout);
            connection.setReadTimeout(Peers.readTimeout);
            connection.setRequestProperty("Accept-Encoding", "gzip");
            connection.setRequestProperty("Content-Type", "text/plain; charset=UTF-8");
            // Counting streams feed the per-peer traffic statistics.
            CountingOutputStream cos = new CountingOutputStream(connection.getOutputStream());
            try (Writer writer = new BufferedWriter(new OutputStreamWriter(cos, "UTF-8"))) {
                request.writeJSONString(writer);
            }
            updateUploadedVolume(cos.getCount());
            if (connection.getResponseCode() == HttpURLConnection.HTTP_OK) {
                CountingInputStream cis = new CountingInputStream(connection.getInputStream());
                InputStream responseStream = cis;
                if ("gzip".equals(connection.getHeaderField("Content-Encoding"))) {
                    responseStream = new GZIPInputStream(cis);
                }
                if ((Peers.communicationLoggingMask & Peers.LOGGING_MASK_200_RESPONSES) != 0) {
                    // Logging path: buffer the whole body so it can be both logged and parsed.
                    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                    byte[] buffer = new byte[1024];
                    int numberOfBytes;
                    try (InputStream inputStream = responseStream) {
                        while ((numberOfBytes = inputStream.read(buffer, 0, buffer.length)) > 0) {
                            byteArrayOutputStream.write(buffer, 0, numberOfBytes);
                        }
                    }
                    String responseValue = byteArrayOutputStream.toString("UTF-8");
                    if (responseValue.length() > 0 && responseStream instanceof GZIPInputStream) {
                        log += String.format("[length: %d, compression ratio: %.2f]", cis.getCount(), (double)cis.getCount() / (double)responseValue.length());
                    }
                    log += " >>> " + responseValue;
                    showLog = true;
                    response = (JSONObject) JSONValue.parse(responseValue);
                } else {
                    // Fast path: stream straight into the JSON parser.
                    try (Reader reader = new BufferedReader(new InputStreamReader(responseStream, "UTF-8"))) {
                        response = (JSONObject)JSONValue.parse(reader);
                    }
                }
                updateDownloadedVolume(cis.getCount());
            } else {
                if ((Peers.communicationLoggingMask & Peers.LOGGING_MASK_NON200_RESPONSES) != 0) {
                    log += " >>> Peer responded with HTTP " + connection.getResponseCode() + " code!";
                    showLog = true;
                }
                if (state == State.CONNECTED) {
                    setState(State.DISCONNECTED);
                } else {
                    setState(State.NON_CONNECTED);
                }
                response = null;
            }
        } catch (RuntimeException|IOException e) {
            // Common transient network failures are not worth a log entry.
            if (! (e instanceof UnknownHostException || e instanceof SocketTimeoutException || e instanceof SocketException)) {
                Logger.logDebugMessage("Error sending JSON request", e);
            }
            if ((Peers.communicationLoggingMask & Peers.LOGGING_MASK_EXCEPTIONS) != 0) {
                log += " >>> " + e.toString();
                showLog = true;
            }
            if (state == State.CONNECTED) {
                setState(State.DISCONNECTED);
            }
            response = null;
        }
        if (showLog) {
            Logger.logMessage(log + "\n");
        }
        if (connection != null) {
            connection.disconnect();
        }
        return response;
    }
    /** Orders peers by descending weight. NOTE(review): inconsistent with equals. */
    @Override
    public int compareTo(Peer o) {
        if (getWeight() > o.getWeight()) {
            return -1;
        } else if (getWeight() < o.getWeight()) {
            return 1;
        }
        return 0;
    }
    /**
     * Performs the handshake: requests the peer's info, records its
     * application/version/platform, verifies (or resets) the announced
     * address, validates the hallmark, and moves to CONNECTED on success.
     * A hallmark failure blacklists the peer; no response leaves it
     * NON_CONNECTED.
     */
    void connect() {
        JSONObject response = send(Peers.myPeerInfoRequest);
        if (response != null) {
            application = (String)response.get("application");
            setVersion((String) response.get("version"));
            platform = (String)response.get("platform");
            shareAddress = Boolean.TRUE.equals(response.get("shareAddress"));
            String newAnnouncedAddress = Convert.emptyToNull((String)response.get("announcedAddress"));
            if (newAnnouncedAddress != null && ! newAnnouncedAddress.equals(announcedAddress)) {
                // force verification of changed announced address
                setState(Peer.State.NON_CONNECTED);
                setAnnouncedAddress(newAnnouncedAddress);
                return;
            }
            if (announcedAddress == null) {
                setAnnouncedAddress(peerAddress);
                //Logger.logDebugMessage("Connected to peer without announced address, setting to " + peerAddress);
            }
            if (analyzeHallmark(announcedAddress, (String)response.get("hallmark"))) {
                setState(State.CONNECTED);
                Peers.updateAddress(this);
            } else {
                blacklist();
            }
            lastUpdated = Nxt.getEpochTime();
        } else {
            setState(State.NON_CONNECTED);
        }
    }
    /**
     * Validates {@code hallmarkString} against {@code address} and, when valid,
     * stores it and redistributes adjustedWeight across all peers hallmarked by
     * the same account (only the most recently dated hallmarks accumulate
     * weight).
     *
     * @return {@code true} when the hallmark is absent, unchanged, or valid;
     *         {@code false} when it fails validation or host resolution
     */
    boolean analyzeHallmark(String address, final String hallmarkString) {
        if (hallmarkString == null && this.hallmark == null) {
            return true;
        }
        if (this.hallmark != null && this.hallmark.getHallmarkString().equals(hallmarkString)) {
            return true;
        }
        if (hallmarkString == null) {
            this.hallmark = null;
            return true;
        }
        try {
            URI uri = new URI("http://" + address.trim());
            String host = uri.getHost();
            Hallmark hallmark = Hallmark.parseHallmark(hallmarkString);
            // The hallmark host must match the peer's address, literally or after DNS resolution.
            if (!hallmark.isValid()
                    || !(hallmark.getHost().equals(host) || InetAddress.getByName(host).equals(InetAddress.getByName(hallmark.getHost())))) {
                //Logger.logDebugMessage("Invalid hallmark for " + host + ", hallmark host is " + hallmark.getHost());
                return false;
            }
            this.hallmark = hallmark;
            long accountId = Account.getId(hallmark.getPublicKey());
            List<PeerImpl> groupedPeers = new ArrayList<>();
            int mostRecentDate = 0;
            long totalWeight = 0;
            // Sum the weights of all peers hallmarked by this account; a newer
            // hallmark date resets the running total.
            for (PeerImpl peer : Peers.allPeers) {
                if (peer.hallmark == null) {
                    continue;
                }
                if (accountId == peer.hallmark.getAccountId()) {
                    groupedPeers.add(peer);
                    if (peer.hallmark.getDate() > mostRecentDate) {
                        mostRecentDate = peer.hallmark.getDate();
                        totalWeight = peer.getHallmarkWeight(mostRecentDate);
                    } else {
                        totalWeight += peer.getHallmarkWeight(mostRecentDate);
                    }
                }
            }
            // Split MAX_BALANCE_NXT among the grouped peers, proportionally to weight.
            for (PeerImpl peer : groupedPeers) {
                peer.adjustedWeight = Constants.MAX_BALANCE_NXT * peer.getHallmarkWeight(mostRecentDate) / totalWeight;
                Peers.notifyListeners(peer, Peers.Event.WEIGHT);
            }
            return true;
        } catch (UnknownHostException ignore) {
        } catch (URISyntaxException | RuntimeException e) {
            Logger.logDebugMessage("Failed to analyze hallmark for peer " + address + ", " + e.toString(), e);
        }
        return false;
    }
    /** Hallmark weight, counted only for valid hallmarks dated exactly {@code date}. */
    private int getHallmarkWeight(int date) {
        if (hallmark == null || ! hallmark.isValid() || hallmark.getDate() != date) {
            return 0;
        }
        return hallmark.getWeight();
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
package benchpbnano;
@SuppressWarnings("hiding")
public interface Bench {
// enum Enum
public static final int Apples = 0;
public static final int Pears = 1;
public static final int Bananas = 2;
public static final class Foo extends
com.google.protobuf.nano.MessageNano {
private static volatile Foo[] _emptyArray;
public static Foo[] emptyArray() {
// Lazily initializes the empty array
if (_emptyArray == null) {
synchronized (
com.google.protobuf.nano.InternalNano.LAZY_INIT_LOCK) {
if (_emptyArray == null) {
_emptyArray = new Foo[0];
}
}
}
return _emptyArray;
}
// required uint64 id = 1;
public long id;
// required int32 count = 2;
public int count;
// required int32 prefix = 3;
public int prefix;
// required uint32 length = 4;
public int length;
public Foo() {
clear();
}
public Foo clear() {
id = 0L;
count = 0;
prefix = 0;
length = 0;
cachedSize = -1;
return this;
}
@Override
public void writeTo(com.google.protobuf.nano.CodedOutputByteBufferNano output)
throws java.io.IOException {
output.writeUInt64(1, this.id);
output.writeInt32(2, this.count);
output.writeInt32(3, this.prefix);
output.writeUInt32(4, this.length);
super.writeTo(output);
}
@Override
protected int computeSerializedSize() {
int size = super.computeSerializedSize();
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeUInt64Size(1, this.id);
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeInt32Size(2, this.count);
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeInt32Size(3, this.prefix);
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeUInt32Size(4, this.length);
return size;
}
@Override
public Foo mergeFrom(
com.google.protobuf.nano.CodedInputByteBufferNano input)
throws java.io.IOException {
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
return this;
default: {
if (!com.google.protobuf.nano.WireFormatNano.parseUnknownField(input, tag)) {
return this;
}
break;
}
case 8: {
this.id = input.readUInt64();
break;
}
case 16: {
this.count = input.readInt32();
break;
}
case 24: {
this.prefix = input.readInt32();
break;
}
case 32: {
this.length = input.readUInt32();
break;
}
}
}
}
public static Foo parseFrom(byte[] data)
throws com.google.protobuf.nano.InvalidProtocolBufferNanoException {
return com.google.protobuf.nano.MessageNano.mergeFrom(new Foo(), data);
}
public static Foo parseFrom(
com.google.protobuf.nano.CodedInputByteBufferNano input)
throws java.io.IOException {
return new Foo().mergeFrom(input);
}
}
public static final class Bar extends
com.google.protobuf.nano.MessageNano {
private static volatile Bar[] _emptyArray;
public static Bar[] emptyArray() {
// Lazily initializes the empty array
if (_emptyArray == null) {
synchronized (
com.google.protobuf.nano.InternalNano.LAZY_INIT_LOCK) {
if (_emptyArray == null) {
_emptyArray = new Bar[0];
}
}
}
return _emptyArray;
}
// required .benchpbnano.Foo parent = 1;
public benchpbnano.Bench.Foo parent;
// required int32 time = 2;
public int time;
// required float ratio = 3;
public float ratio;
// required uint32 size = 4;
public int size;
public Bar() {
clear();
}
public Bar clear() {
parent = null;
time = 0;
ratio = 0F;
size = 0;
cachedSize = -1;
return this;
}
@Override
public void writeTo(com.google.protobuf.nano.CodedOutputByteBufferNano output)
throws java.io.IOException {
if (this.parent != null) {
output.writeMessage(1, this.parent);
}
output.writeInt32(2, this.time);
output.writeFloat(3, this.ratio);
output.writeUInt32(4, this.size);
super.writeTo(output);
}
@Override
protected int computeSerializedSize() {
int size = super.computeSerializedSize();
if (this.parent != null) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeMessageSize(1, this.parent);
}
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeInt32Size(2, this.time);
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeFloatSize(3, this.ratio);
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeUInt32Size(4, this.size);
return size;
}
@Override
public Bar mergeFrom(
com.google.protobuf.nano.CodedInputByteBufferNano input)
throws java.io.IOException {
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
return this;
default: {
if (!com.google.protobuf.nano.WireFormatNano.parseUnknownField(input, tag)) {
return this;
}
break;
}
case 10: {
if (this.parent == null) {
this.parent = new benchpbnano.Bench.Foo();
}
input.readMessage(this.parent);
break;
}
case 16: {
this.time = input.readInt32();
break;
}
case 29: {
this.ratio = input.readFloat();
break;
}
case 32: {
this.size = input.readUInt32();
break;
}
}
}
}
public static Bar parseFrom(byte[] data)
throws com.google.protobuf.nano.InvalidProtocolBufferNanoException {
return com.google.protobuf.nano.MessageNano.mergeFrom(new Bar(), data);
}
public static Bar parseFrom(
com.google.protobuf.nano.CodedInputByteBufferNano input)
throws java.io.IOException {
return new Bar().mergeFrom(input);
}
}
public static final class FooBar extends
com.google.protobuf.nano.MessageNano {
private static volatile FooBar[] _emptyArray;
public static FooBar[] emptyArray() {
// Lazily initializes the empty array
if (_emptyArray == null) {
synchronized (
com.google.protobuf.nano.InternalNano.LAZY_INIT_LOCK) {
if (_emptyArray == null) {
_emptyArray = new FooBar[0];
}
}
}
return _emptyArray;
}
// optional .benchpbnano.Bar sibling = 1;
public benchpbnano.Bench.Bar sibling;
// optional string name = 2;
public java.lang.String name;
// optional double rating = 3;
public double rating;
// optional uint32 postfix = 4;
public int postfix;
public FooBar() {
clear();
}
public FooBar clear() {
sibling = null;
name = "";
rating = 0D;
postfix = 0;
cachedSize = -1;
return this;
}
@Override
public void writeTo(com.google.protobuf.nano.CodedOutputByteBufferNano output)
throws java.io.IOException {
if (this.sibling != null) {
output.writeMessage(1, this.sibling);
}
if (!this.name.equals("")) {
output.writeString(2, this.name);
}
if (java.lang.Double.doubleToLongBits(this.rating)
!= java.lang.Double.doubleToLongBits(0D)) {
output.writeDouble(3, this.rating);
}
if (this.postfix != 0) {
output.writeUInt32(4, this.postfix);
}
super.writeTo(output);
}
@Override
protected int computeSerializedSize() {
int size = super.computeSerializedSize();
if (this.sibling != null) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeMessageSize(1, this.sibling);
}
if (!this.name.equals("")) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeStringSize(2, this.name);
}
if (java.lang.Double.doubleToLongBits(this.rating)
!= java.lang.Double.doubleToLongBits(0D)) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeDoubleSize(3, this.rating);
}
if (this.postfix != 0) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeUInt32Size(4, this.postfix);
}
return size;
}
@Override
public FooBar mergeFrom(
com.google.protobuf.nano.CodedInputByteBufferNano input)
throws java.io.IOException {
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
return this;
default: {
if (!com.google.protobuf.nano.WireFormatNano.parseUnknownField(input, tag)) {
return this;
}
break;
}
case 10: {
if (this.sibling == null) {
this.sibling = new benchpbnano.Bench.Bar();
}
input.readMessage(this.sibling);
break;
}
case 18: {
this.name = input.readString();
break;
}
case 25: {
this.rating = input.readDouble();
break;
}
case 32: {
this.postfix = input.readUInt32();
break;
}
}
}
}
public static FooBar parseFrom(byte[] data)
throws com.google.protobuf.nano.InvalidProtocolBufferNanoException {
return com.google.protobuf.nano.MessageNano.mergeFrom(new FooBar(), data);
}
public static FooBar parseFrom(
com.google.protobuf.nano.CodedInputByteBufferNano input)
throws java.io.IOException {
return new FooBar().mergeFrom(input);
}
}
public static final class FooBarContainer extends
com.google.protobuf.nano.MessageNano {
private static volatile FooBarContainer[] _emptyArray;
public static FooBarContainer[] emptyArray() {
// Lazily initializes the empty array
if (_emptyArray == null) {
synchronized (
com.google.protobuf.nano.InternalNano.LAZY_INIT_LOCK) {
if (_emptyArray == null) {
_emptyArray = new FooBarContainer[0];
}
}
}
return _emptyArray;
}
// repeated .benchpbnano.FooBar list = 1;
public benchpbnano.Bench.FooBar[] list;
// optional bool initialized = 2;
public boolean initialized;
// optional .benchpbnano.Enum fruit = 3;
public int fruit;
// optional string location = 4;
public java.lang.String location;
public FooBarContainer() {
clear();
}
public FooBarContainer clear() {
list = benchpbnano.Bench.FooBar.emptyArray();
initialized = false;
fruit = benchpbnano.Bench.Apples;
location = "";
cachedSize = -1;
return this;
}
@Override
public void writeTo(com.google.protobuf.nano.CodedOutputByteBufferNano output)
throws java.io.IOException {
if (this.list != null && this.list.length > 0) {
for (int i = 0; i < this.list.length; i++) {
benchpbnano.Bench.FooBar element = this.list[i];
if (element != null) {
output.writeMessage(1, element);
}
}
}
if (this.initialized != false) {
output.writeBool(2, this.initialized);
}
if (this.fruit != benchpbnano.Bench.Apples) {
output.writeInt32(3, this.fruit);
}
if (!this.location.equals("")) {
output.writeString(4, this.location);
}
super.writeTo(output);
}
@Override
protected int computeSerializedSize() {
int size = super.computeSerializedSize();
if (this.list != null && this.list.length > 0) {
for (int i = 0; i < this.list.length; i++) {
benchpbnano.Bench.FooBar element = this.list[i];
if (element != null) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeMessageSize(1, element);
}
}
}
if (this.initialized != false) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeBoolSize(2, this.initialized);
}
if (this.fruit != benchpbnano.Bench.Apples) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeInt32Size(3, this.fruit);
}
if (!this.location.equals("")) {
size += com.google.protobuf.nano.CodedOutputByteBufferNano
.computeStringSize(4, this.location);
}
return size;
}
@Override
public FooBarContainer mergeFrom(
com.google.protobuf.nano.CodedInputByteBufferNano input)
throws java.io.IOException {
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
return this;
default: {
if (!com.google.protobuf.nano.WireFormatNano.parseUnknownField(input, tag)) {
return this;
}
break;
}
case 10: {
int arrayLength = com.google.protobuf.nano.WireFormatNano
.getRepeatedFieldArrayLength(input, 10);
int i = this.list == null ? 0 : this.list.length;
benchpbnano.Bench.FooBar[] newArray =
new benchpbnano.Bench.FooBar[i + arrayLength];
if (i != 0) {
java.lang.System.arraycopy(this.list, 0, newArray, 0, i);
}
for (; i < newArray.length - 1; i++) {
newArray[i] = new benchpbnano.Bench.FooBar();
input.readMessage(newArray[i]);
input.readTag();
}
// Last one without readTag.
newArray[i] = new benchpbnano.Bench.FooBar();
input.readMessage(newArray[i]);
this.list = newArray;
break;
}
case 16: {
this.initialized = input.readBool();
break;
}
case 24: {
int value = input.readInt32();
switch (value) {
case benchpbnano.Bench.Apples:
case benchpbnano.Bench.Pears:
case benchpbnano.Bench.Bananas:
this.fruit = value;
break;
}
break;
}
case 34: {
this.location = input.readString();
break;
}
}
}
}
/**
 * Decodes a {@code FooBarContainer} from its serialized byte form.
 *
 * @param data the serialized message bytes
 * @return a new message populated from {@code data}
 * @throws com.google.protobuf.nano.InvalidProtocolBufferNanoException if the
 *         bytes do not form a valid message
 */
public static FooBarContainer parseFrom(byte[] data)
    throws com.google.protobuf.nano.InvalidProtocolBufferNanoException {
  FooBarContainer message = new FooBarContainer();
  return com.google.protobuf.nano.MessageNano.mergeFrom(message, data);
}
/**
 * Decodes a {@code FooBarContainer} directly from a coded input stream.
 *
 * @param input the stream positioned at the start of a serialized message
 * @return a new message populated from {@code input}
 * @throws java.io.IOException if reading from the stream fails
 */
public static FooBarContainer parseFrom(
    com.google.protobuf.nano.CodedInputByteBufferNano input)
    throws java.io.IOException {
  FooBarContainer message = new FooBarContainer();
  return message.mergeFrom(input);
}
}
}
| |
package org.robolectric.android.controller;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.robolectric.Shadows.shadowOf;
import static org.robolectric.util.TestUtil.assertStringsInclude;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.Robolectric;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.TestRunners;
import org.robolectric.annotation.Config;
import org.robolectric.android.runtime.RuntimeAdapter;
import org.robolectric.android.runtime.RuntimeAdapterFactory;
import org.robolectric.shadows.CoreShadowsAdapter;
import org.robolectric.shadows.ShadowLooper;
import android.app.Activity;
import android.content.ComponentName;
import android.content.Intent;
import android.content.res.Configuration;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.view.Window;
import android.widget.LinearLayout;
import org.robolectric.util.ReflectionHelpers;
import org.robolectric.util.Scheduler;
import org.robolectric.util.TestRunnable;
import java.util.ArrayList;
import java.util.List;
/**
 * Tests for {@code ActivityController}: lifecycle driving, main-looper pausing
 * behavior, scheduler interaction, and configuration-change handling.
 *
 * <p>{@link MyActivity} records lifecycle callbacks into the static
 * {@code transcript} list. Each callback adds its {@code "onX"} entry via a
 * runnable posted to the UI thread ({@code transcribeWhilePaused}) and its
 * {@code "finishedOnX"} entry directly; since the controller runs lifecycle
 * methods with the main looper paused, the posted {@code "onX"} entry only
 * appears after the looper is (un)paused/flushed — so the presence/order of
 * the two entries reveals whether the looper was paused at the time.
 */
@RunWith(TestRunners.SelfTest.class)
public class ActivityControllerTest {
// Shared transcript of lifecycle events; static, cleared before each test.
private static final List<String> transcript = new ArrayList<>();
private final ComponentName componentName = new ComponentName("org.robolectric", MyActivity.class.getName());
private final ActivityController<MyActivity> controller = Robolectric.buildActivity(MyActivity.class);
@Before
public void setUp() throws Exception {
transcript.clear();
}
// Activities need not be declared in the manifest to be built by Robolectric.
@Test
@Config(manifest = Config.NONE)
public void canCreateActivityNotListedInManifest() {
ActivityController<Activity> activityController = Robolectric.buildActivity(Activity.class);
assertThat(activityController.setup()).isNotNull();
}
// Posts one immediate and one 60s-delayed runnable from onCreate, used by
// the scheduler-interaction tests below.
public static class TestDelayedPostActivity extends Activity {
TestRunnable r1 = new TestRunnable();
TestRunnable r2 = new TestRunnable();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Handler h = new Handler();
h.post(r1);
h.postDelayed(r2, 60000);
}
}
// Immediate posts run during setup without advancing the scheduler clock.
@Test
public void pendingTasks_areRunEagerly_whenActivityIsStarted_andSchedulerUnPaused() {
final Scheduler s = Robolectric.getForegroundThreadScheduler();
final long startTime = s.getCurrentTime();
TestDelayedPostActivity activity = Robolectric.setupActivity(TestDelayedPostActivity.class);
assertThat(activity.r1.wasRun).as("immediate task").isTrue();
assertThat(s.getCurrentTime()).as("currentTime").isEqualTo(startTime);
}
@Test
public void delayedTasks_areNotRunEagerly_whenActivityIsStarted_andSchedulerUnPaused() {
// Regression test for issue #1509
final Scheduler s = Robolectric.getForegroundThreadScheduler();
final long startTime = s.getCurrentTime();
TestDelayedPostActivity activity = Robolectric.setupActivity(TestDelayedPostActivity.class);
assertThat(activity.r2.wasRun).as("before flush").isFalse();
assertThat(s.getCurrentTime()).as("currentTime before flush").isEqualTo(startTime);
// Advancing the scheduler runs the delayed task and moves the clock.
s.advanceToLastPostedRunnable();
assertThat(activity.r2.wasRun).as("after flush").isTrue();
assertThat(s.getCurrentTime()).as("currentTime after flush").isEqualTo(startTime + 60000);
}
// The controller synthesizes an Intent targeting the activity's component.
@Test
public void shouldSetIntent() throws Exception {
MyActivity myActivity = controller.create().get();
assertThat(myActivity.getIntent()).isNotNull();
assertThat(myActivity.getIntent().getComponent()).isEqualTo(componentName);
}
// A caller-supplied Intent without a component gets the component filled in.
@Test
public void shouldSetIntentComponentWithCustomIntentWithoutComponentSet() throws Exception {
MyActivity myActivity = Robolectric.buildActivity(MyActivity.class, new Intent(Intent.ACTION_VIEW)).create().get();
assertThat(myActivity.getIntent().getAction()).isEqualTo(Intent.ACTION_VIEW);
assertThat(myActivity.getIntent().getComponent()).isEqualTo(componentName);
}
@Test
public void shouldSetIntentForGivenActivityInstance() throws Exception {
ActivityController<MyActivity> activityController = ActivityController.of(new CoreShadowsAdapter(), new MyActivity()).create();
assertThat(activityController.get().getIntent()).isNotNull();
}
// NOTE(review): the method name says "NotPaused" yet the body asserts the
// looper ends up un-paused after create() — presumably "create pauses the
// looper internally then restores the caller's state"; confirm intent.
@Test
public void whenLooperIsNotPaused_shouldCreateWithMainLooperPaused() throws Exception {
ShadowLooper.unPauseMainLooper();
controller.create();
assertThat(shadowOf(Looper.getMainLooper()).isPaused()).isFalse();
assertStringsInclude(transcript, "finishedOnCreate", "onCreate");
}
@Test
public void whenLooperIsAlreadyPaused_shouldCreateWithMainLooperPaused() throws Exception {
ShadowLooper.pauseMainLooper();
controller.create();
assertThat(shadowOf(Looper.getMainLooper()).isPaused()).isTrue();
// While paused, only the direct "finishedOnCreate" entry is visible ...
assertStringsInclude(transcript, "finishedOnCreate");
ShadowLooper.unPauseMainLooper();
// ... un-pausing flushes the posted runnable, adding "onCreate".
assertStringsInclude(transcript, "onCreate");
}
@Test
public void visible_addsTheDecorViewToTheWindowManager() {
controller.create().visible();
assertEquals(controller.get().getWindow().getDecorView().getParent().getClass().getName(), "android.view.ViewRootImpl");
}
@Test
public void start_callsPerformStartWhilePaused() {
controller.create().start();
assertStringsInclude(transcript, "finishedOnStart", "onStart");
}
@Test
public void stop_callsPerformStopWhilePaused() {
controller.create().start().stop();
assertStringsInclude(transcript, "finishedOnStop", "onStop");
}
@Test
public void restart_callsPerformRestartWhilePaused() {
controller.create().start().stop().restart();
assertStringsInclude(transcript, "finishedOnRestart", "onRestart");
}
@Test
public void pause_callsPerformPauseWhilePaused() {
controller.create().pause();
assertStringsInclude(transcript, "finishedOnPause", "onPause");
}
@Test
public void resume_callsPerformResumeWhilePaused() {
controller.create().start().resume();
assertStringsInclude(transcript, "finishedOnResume", "onResume");
}
@Test
public void destroy_callsPerformDestroyWhilePaused() {
controller.create().destroy();
assertStringsInclude(transcript, "finishedOnDestroy", "onDestroy");
}
@Test
public void postCreate_callsOnPostCreateWhilePaused() {
controller.create().postCreate(new Bundle());
assertStringsInclude(transcript, "finishedOnPostCreate", "onPostCreate");
}
@Test
public void postResume_callsOnPostResumeWhilePaused() {
controller.create().postResume();
assertStringsInclude(transcript, "finishedOnPostResume", "onPostResume");
}
@Test
public void restoreInstanceState_callsPerformRestoreInstanceStateWhilePaused() {
controller.create().restoreInstanceState(new Bundle());
assertStringsInclude(transcript, "finishedOnRestoreInstanceState", "onRestoreInstanceState");
}
@Test
public void newIntent_callsOnNewIntentWhilePaused() {
controller.create().newIntent(new Intent(Intent.ACTION_VIEW));
assertStringsInclude(transcript, "finishedOnNewIntent", "onNewIntent");
}
@Test
public void userLeaving_callsPerformUserLeavingWhilePaused() {
controller.create().userLeaving();
assertStringsInclude(transcript, "finishedOnUserLeaveHint", "onUserLeaveHint");
}
// setup() drives the full create→visible lifecycle in order.
@Test
public void setup_callsLifecycleMethodsAndMakesVisible() {
controller.setup();
assertStringsInclude(transcript, "onCreate", "onStart", "onPostCreate", "onResume", "onPostResume");
assertEquals(controller.get().getWindow().getDecorView().getParent().getClass().getName(), "android.view.ViewRootImpl");
}
// setup(Bundle) additionally restores instance state between start and postCreate.
@Test
public void setupWithBundle_callsLifecycleMethodsAndMakesVisible() {
controller.setup(new Bundle());
assertStringsInclude(transcript, "onCreate", "onStart", "onRestoreInstanceState", "onPostCreate", "onResume", "onPostResume");
assertEquals(controller.get().getWindow().getDecorView().getParent().getClass().getName(), "android.view.ViewRootImpl");
}
@Test
@Config(sdk = Build.VERSION_CODES.KITKAT)
public void attach_shouldWorkWithAPI19() {
MyActivity activity = Robolectric.buildActivity(MyActivity.class).create().get();
assertThat(activity).isNotNull();
}
// NOTE(review): SDK_INT is overwritten via reflection and never restored,
// which may leak into later tests in the same VM — TODO confirm the test
// runner resets Build.VERSION between tests. Also, SDK_INT is forced to 15
// yet the Api19 adapter is expected; presumably the adapter covers a range
// or keys off @Config — verify.
@Test
@Config(sdk = Build.VERSION_CODES.KITKAT)
public void shouldUseCorrectRuntimeAdapter() {
ReflectionHelpers.setStaticField(Build.VERSION.class, "SDK_INT", 15);
MyActivity activity = Robolectric.buildActivity(MyActivity.class).setup().get();
assertThat(activity).isNotNull();
RuntimeAdapter adapter = RuntimeAdapterFactory.getInstance();
assertThat(adapter.getClass().getName()).isEqualTo("org.robolectric.android.runtime.Api19RuntimeAdapter");
}
// A non-handled config change (fontScale) recreates the activity through the
// full destroy/recreate sequence and applies the new configuration.
@Test
public void configurationChange_callsLifecycleMethodsAndAppliesConfig() {
Configuration config = new Configuration(RuntimeEnvironment.application.getResources().getConfiguration());
final float newFontScale = config.fontScale *= 2;
controller.configurationChange(config);
assertStringsInclude(
transcript, "onPause",
"onStop",
"onDestroy",
"onCreate",
"onStart",
"onRestoreInstanceState",
"onPostCreate",
"onResume");
assertThat(controller.get().getResources().getConfiguration().fontScale).isEqualTo(newFontScale);
}
// When the activity declares it handles all the changed config bits, only
// onConfigurationChanged runs — no recreate.
@Test
public void configurationChange_callsOnConfigurationChangedAndAppliesConfigWhenAllManaged() {
Configuration config = new Configuration(RuntimeEnvironment.application.getResources().getConfiguration());
final float newFontScale = config.fontScale *= 2;
ActivityController<ConfigAwareActivity> configController = Robolectric.buildActivity(ConfigAwareActivity.class);
configController.configurationChange(config);
assertStringsInclude(transcript, "onConfigurationChanged");
assertThat(configController.get().getResources().getConfiguration().fontScale).isEqualTo(newFontScale);
}
// If any changed bit (orientation here) is unmanaged, the activity is
// recreated even though other bits are managed.
@Test
public void configurationChange_callsLifecycleMethodsAndAppliesConfigWhenAnyNonManaged() {
Configuration config = new Configuration(RuntimeEnvironment.application.getResources().getConfiguration());
final float newFontScale = config.fontScale *= 2;
final int newOrientation = config.orientation = (config.orientation + 1) % 3;
ActivityController<ConfigAwareActivity> configController = Robolectric.buildActivity(ConfigAwareActivity.class);
configController.configurationChange(config);
assertStringsInclude(transcript, "onPause", "onStop", "onDestroy", "onCreate", "onStart", "onResume");
assertThat(configController.get().getResources().getConfiguration().fontScale).isEqualTo(newFontScale);
assertThat(configController.get().getResources().getConfiguration().orientation).isEqualTo(newOrientation);
}
/**
 * Test activity that transcribes every lifecycle callback twice: once via a
 * posted runnable (recorded only when the main looper runs) and once
 * directly ("finishedOnX", recorded immediately).
 */
public static class MyActivity extends Activity {
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
transcribeWhilePaused("onRestoreInstanceState");
transcript.add("finishedOnRestoreInstanceState");
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().requestFeature(Window.FEATURE_ACTION_BAR);
setContentView(new LinearLayout(RuntimeEnvironment.application));
transcribeWhilePaused("onCreate");
transcript.add("finishedOnCreate");
}
@Override
protected void onPostCreate(Bundle savedInstanceState) {
super.onPostCreate(savedInstanceState);
transcribeWhilePaused("onPostCreate");
transcript.add("finishedOnPostCreate");
}
@Override
protected void onPostResume() {
super.onPostResume();
transcribeWhilePaused("onPostResume");
transcript.add("finishedOnPostResume");
}
@Override
protected void onDestroy() {
super.onDestroy();
transcribeWhilePaused("onDestroy");
transcript.add("finishedOnDestroy");
}
@Override
protected void onStart() {
super.onStart();
transcribeWhilePaused("onStart");
transcript.add("finishedOnStart");
}
@Override
protected void onStop() {
super.onStop();
transcribeWhilePaused("onStop");
transcript.add("finishedOnStop");
}
@Override
protected void onResume() {
super.onResume();
transcribeWhilePaused("onResume");
transcript.add("finishedOnResume");
}
@Override
protected void onRestart() {
super.onRestart();
transcribeWhilePaused("onRestart");
transcript.add("finishedOnRestart");
}
@Override
protected void onPause() {
super.onPause();
transcribeWhilePaused("onPause");
transcript.add("finishedOnPause");
}
@Override
protected void onNewIntent(Intent intent) {
super.onNewIntent(intent);
transcribeWhilePaused("onNewIntent");
transcript.add("finishedOnNewIntent");
}
@Override
protected void onUserLeaveHint() {
super.onUserLeaveHint();
transcribeWhilePaused("onUserLeaveHint");
transcript.add("finishedOnUserLeaveHint");
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
transcribeWhilePaused("onConfigurationChanged");
transcript.add("finishedOnConfigurationChanged");
}
// Records the event via a posted runnable: entry appears only once the
// main looper actually runs, exposing whether it was paused.
private void transcribeWhilePaused(final String event) {
runOnUiThread(new Runnable() {
@Override public void run() {
transcript.add(event);
}
});
}
}
// Same transcription behavior; config-change handling (if any) is declared
// in the test manifest for this class — TODO confirm manifest entry.
public static class ConfigAwareActivity extends MyActivity {
}
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.console.authorization;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.UUID;
import java.util.stream.Collectors;
import org.junit.Before;
import org.junit.Test;
import org.keycloak.admin.client.resource.AuthorizationResource;
import org.keycloak.admin.client.resource.PoliciesResource;
import org.keycloak.admin.client.resource.ResourcePermissionsResource;
import org.keycloak.admin.client.resource.ResourcesResource;
import org.keycloak.admin.client.resource.RolePoliciesResource;
import org.keycloak.admin.client.resource.RolesResource;
import org.keycloak.representations.idm.RoleRepresentation;
import org.keycloak.representations.idm.authorization.DecisionStrategy;
import org.keycloak.representations.idm.authorization.ResourcePermissionRepresentation;
import org.keycloak.representations.idm.authorization.ResourceRepresentation;
import org.keycloak.representations.idm.authorization.RolePolicyRepresentation;
import org.keycloak.representations.idm.authorization.UserPolicyRepresentation;
import org.keycloak.testsuite.console.page.clients.authorization.permission.ResourcePermission;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
public class ResourcePermissionManagementTest extends AbstractAuthorizationSettingsTest {
@Before
public void configureTest() {
super.configureTest();
RolesResource realmRoles = testRealmResource().roles();
realmRoles.create(new RoleRepresentation("Role A", "", false));
realmRoles.create(new RoleRepresentation("Role B", "", false));
RolePolicyRepresentation policyA = new RolePolicyRepresentation();
policyA.setName("Policy A");
policyA.addRole("Role A");
AuthorizationResource authorization = testRealmResource().clients().get(newClient.getId()).authorization();
PoliciesResource policies = authorization.policies();
RolePoliciesResource roles = policies.role();
roles.create(policyA);
RolePolicyRepresentation policyB = new RolePolicyRepresentation();
policyB.setName("Policy B");
policyB.addRole("Role B");
roles.create(policyB);
UserPolicyRepresentation policyC = new UserPolicyRepresentation();
policyC.setName("Policy C");
policyC.addUser("test");
policies.user().create(policyC);
ResourcesResource resources = authorization.resources();
resources.create(new ResourceRepresentation("Resource A"));
resources.create(new ResourceRepresentation("Resource B"));
}
@Test
public void testCreateWithoutPolicies() throws InterruptedException {
authorizationPage.navigateTo();
ResourcePermissionRepresentation expected = new ResourcePermissionRepresentation();
expected.setName("testCreateWithoutPolicies Permission");
expected.setDescription("description");
expected.addResource("Resource A");
expected = createPermission(expected);
authorizationPage.navigateTo();
ResourcePermission actual = authorizationPage.authorizationTabs().permissions().name(expected.getName());
assertPolicy(expected, actual);
}
@Test
public void testUpdateResource() throws InterruptedException {
authorizationPage.navigateTo();
ResourcePermissionRepresentation expected = new ResourcePermissionRepresentation();
expected.setName("testUpdateResource Permission");
expected.setDescription("description");
expected.addResource("Resource A");
expected.addPolicy("Policy A");
expected.addPolicy("Policy B");
expected.addPolicy("Policy C");
expected = createPermission(expected);
String previousName = expected.getName();
expected.setName(expected.getName() + " Changed");
expected.setDescription("Changed description");
expected.setDecisionStrategy(DecisionStrategy.CONSENSUS);
expected.getResources().clear();
expected.addResource("Resource B");
expected.getPolicies().clear();
expected.addPolicy("Policy A", "Policy C");
authorizationPage.navigateTo();
authorizationPage.authorizationTabs().permissions().update(previousName, expected);
assertAlertSuccess();
authorizationPage.navigateTo();
ResourcePermission actual = authorizationPage.authorizationTabs().permissions().name(expected.getName());
assertPolicy(expected, actual);
expected.getPolicies().clear();
authorizationPage.navigateTo();
authorizationPage.authorizationTabs().permissions().update(expected.getName(), expected);
assertAlertSuccess();
authorizationPage.navigateTo();
actual = authorizationPage.authorizationTabs().permissions().name(expected.getName());
assertPolicy(expected, actual);
}
@Test
public void testUpdateResourceType() throws InterruptedException {
authorizationPage.navigateTo();
ResourcePermissionRepresentation expected = new ResourcePermissionRepresentation();
expected.setName("testUpdateResourceType Permission");
expected.setDescription("description");
expected.setResourceType("test-resource-type");
expected.addPolicy("Policy A");
expected.addPolicy("Policy B");
expected.addPolicy("Policy C");
expected = createPermission(expected);
String previousName = expected.getName();
expected.setName(expected.getName() + " Changed");
expected.setDescription("Changed description");
expected.setDecisionStrategy(DecisionStrategy.CONSENSUS);
expected.setResourceType("changed-resource-type");
expected.setPolicies(expected.getPolicies().stream().filter(policy -> !policy.equals("Policy B")).collect(Collectors.toSet()));
authorizationPage.navigateTo();
authorizationPage.authorizationTabs().permissions().update(previousName, expected);
assertAlertSuccess();
authorizationPage.navigateTo();
ResourcePermission actual = authorizationPage.authorizationTabs().permissions().name(expected.getName());
assertPolicy(expected, actual);
expected.setResourceType(null);
expected.addResource("Resource A");
authorizationPage.navigateTo();
authorizationPage.authorizationTabs().permissions().update(expected.getName(), expected);
assertAlertSuccess();
ResourcePermissionsResource resourcePermission = testRealmResource().clients().get(newClient.getId()).authorization()
.permissions().resource();
ResourcePermissionRepresentation permission = resourcePermission.findByName(expected.getName());
assertFalse(resourcePermission.findById(permission.getId()).resources().isEmpty());
expected.setResourceType("test");
authorizationPage.navigateTo();
authorizationPage.authorizationTabs().permissions().update(expected.getName(), expected);
assertAlertSuccess();
assertTrue(resourcePermission.findById(permission.getId()).resources().isEmpty());
}
@Test
public void testDelete() throws InterruptedException {
authorizationPage.navigateTo();
ResourcePermissionRepresentation expected = new ResourcePermissionRepresentation();
expected.setName("testDelete Permission");
expected.setDescription("description");
expected.addResource("Resource B");
expected.addPolicy("Policy C");
expected = createPermission(expected);
authorizationPage.navigateTo();
authorizationPage.authorizationTabs().permissions().delete(expected.getName());
assertAlertSuccess();
authorizationPage.navigateTo();
assertNull(authorizationPage.authorizationTabs().permissions().permissions().findByName(expected.getName()));
}
@Test
public void testDeleteFromList() throws InterruptedException {
authorizationPage.navigateTo();
ResourcePermissionRepresentation expected = new ResourcePermissionRepresentation();
expected.setName("testDeleteFromList Permission");
expected.setDescription("description");
expected.addResource("Resource B");
expected.addPolicy("Policy C");
expected = createPermission(expected);
authorizationPage.navigateTo();
authorizationPage.authorizationTabs().permissions().deleteFromList(expected.getName());
authorizationPage.navigateTo();
assertNull(authorizationPage.authorizationTabs().permissions().permissions().findByName(expected.getName()));
}
@Test
public void testCreateWithChild() {
ResourcePermissionRepresentation expected = new ResourcePermissionRepresentation();
expected.setName(UUID.randomUUID().toString());
expected.setDescription("description");
expected.addResource("Resource B");
expected.addPolicy("Policy C");
ResourcePermission policy = authorizationPage.authorizationTabs().permissions().create(expected, false);
RolePolicyRepresentation childPolicy = new RolePolicyRepresentation();
childPolicy.setName(UUID.randomUUID().toString());
childPolicy.addRole("Role A");
policy.createPolicy(childPolicy);
policy.form().save();
assertAlertSuccess();
expected.addPolicy(childPolicy.getName());
authorizationPage.navigateTo();
ResourcePermission actual = authorizationPage.authorizationTabs().permissions().name(expected.getName());
assertPolicy(expected, actual);
}
private ResourcePermissionRepresentation createPermission(ResourcePermissionRepresentation expected) {
ResourcePermission policy = authorizationPage.authorizationTabs().permissions().create(expected, true);
assertAlertSuccess();
return assertPolicy(expected, policy);
}
private ResourcePermissionRepresentation assertPolicy(ResourcePermissionRepresentation expected, ResourcePermission policy) {
ResourcePermissionRepresentation actual = policy.toRepresentation();
assertEquals(expected.getName(), actual.getName());
assertEquals(expected.getDescription(), actual.getDescription());
assertEquals(expected.getDecisionStrategy(), actual.getDecisionStrategy());
assertEquals(expected.getResourceType(), actual.getResourceType());
if (expected.getPolicies() == null) {
assertTrue(actual.getPolicies() == null || actual.getPolicies().isEmpty());
} else {
assertEquals(expected.getPolicies().size(), actual.getPolicies().size());
}
assertEquals(0, actual.getPolicies().stream().filter(actualPolicy -> !expected.getPolicies().stream()
.filter(expectedPolicy -> actualPolicy.equals(expectedPolicy))
.findFirst().isPresent())
.count());
assertEquals(0, actual.getResources().stream().filter(actualResource -> !expected.getResources().stream()
.filter(expectedResource -> actualResource.equals(expectedResource))
.findFirst().isPresent())
.count());
return actual;
}
}
| |
package com.microsoft.azure.management.compute.implementation;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.PagedList;
import com.microsoft.azure.SubResource;
import com.microsoft.azure.management.apigeneration.LangDefinition;
import com.microsoft.azure.management.compute.ApiEntityReference;
import com.microsoft.azure.management.compute.CachingTypes;
import com.microsoft.azure.management.compute.DiskCreateOptionTypes;
import com.microsoft.azure.management.compute.ImageReference;
import com.microsoft.azure.management.compute.KnownLinuxVirtualMachineImage;
import com.microsoft.azure.management.compute.KnownWindowsVirtualMachineImage;
import com.microsoft.azure.management.compute.LinuxConfiguration;
import com.microsoft.azure.management.compute.OperatingSystemTypes;
import com.microsoft.azure.management.compute.SshConfiguration;
import com.microsoft.azure.management.compute.SshPublicKey;
import com.microsoft.azure.management.compute.UpgradeMode;
import com.microsoft.azure.management.compute.UpgradePolicy;
import com.microsoft.azure.management.compute.VirtualHardDisk;
import com.microsoft.azure.management.compute.VirtualMachineScaleSet;
import com.microsoft.azure.management.compute.VirtualMachineScaleSetExtension;
import com.microsoft.azure.management.compute.VirtualMachineScaleSetExtensionProfile;
import com.microsoft.azure.management.compute.VirtualMachineScaleSetNetworkProfile;
import com.microsoft.azure.management.compute.VirtualMachineScaleSetOSProfile;
import com.microsoft.azure.management.compute.VirtualMachineScaleSetSku;
import com.microsoft.azure.management.compute.VirtualMachineScaleSetSkuTypes;
import com.microsoft.azure.management.compute.VirtualMachineScaleSetStorageProfile;
import com.microsoft.azure.management.compute.WinRMConfiguration;
import com.microsoft.azure.management.compute.WinRMListener;
import com.microsoft.azure.management.compute.WindowsConfiguration;
import com.microsoft.azure.management.network.Backend;
import com.microsoft.azure.management.network.Frontend;
import com.microsoft.azure.management.network.InboundNatPool;
import com.microsoft.azure.management.network.LoadBalancer;
import com.microsoft.azure.management.network.Network;
import com.microsoft.azure.management.network.implementation.NetworkManager;
import com.microsoft.azure.management.resources.fluentcore.arm.ResourceUtils;
import com.microsoft.azure.management.resources.fluentcore.arm.models.implementation.GroupableParentResourceImpl;
import com.microsoft.azure.management.resources.fluentcore.model.Creatable;
import com.microsoft.azure.management.resources.fluentcore.utils.PagedListConverter;
import com.microsoft.azure.management.resources.fluentcore.utils.ResourceNamer;
import com.microsoft.azure.management.storage.StorageAccount;
import com.microsoft.azure.management.storage.implementation.StorageManager;
import rx.Observable;
import rx.functions.Func1;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Implementation of {@link VirtualMachineScaleSet}.
*/
@LangDefinition
public class VirtualMachineScaleSetImpl
extends GroupableParentResourceImpl<
VirtualMachineScaleSet,
VirtualMachineScaleSetInner,
VirtualMachineScaleSetImpl,
ComputeManager>
implements
VirtualMachineScaleSet,
VirtualMachineScaleSet.Definition,
VirtualMachineScaleSet.Update {
// Clients
private final VirtualMachineScaleSetsInner client;
private final StorageManager storageManager;
private final NetworkManager networkManager;
// used to generate unique name for any dependency resources
private final ResourceNamer namer;
private boolean isMarketplaceLinuxImage = false;
// name of an existing subnet in the primary network to use
private String existingPrimaryNetworkSubnetNameToAssociate;
// unique key of a creatable storage accounts to be used for virtual machines child resources that
// requires storage [OS disk]
private List<String> creatableStorageAccountKeys = new ArrayList<>();
// reference to an existing storage account to be used for virtual machines child resources that
// requires storage [OS disk]
private List<StorageAccount> existingStorageAccountsToAssociate = new ArrayList<>();
// Name of the container in the storage account to use to store the disks
private String vhdContainerName;
// the child resource extensions
private Map<String, VirtualMachineScaleSetExtension> extensions;
// reference to the primary and internal internet facing load balancer
private LoadBalancer primaryInternetFacingLoadBalancer;
private LoadBalancer primaryInternalLoadBalancer;
// Load balancer specific variables used during update
private boolean removePrimaryInternetFacingLoadBalancerOnUpdate;
private boolean removePrimaryInternalLoadBalancerOnUpdate;
private LoadBalancer primaryInternetFacingLoadBalancerToAttachOnUpdate;
private LoadBalancer primaryInternalLoadBalancerToAttachOnUpdate;
private List<String> primaryInternetFacingLBBackendsToRemoveOnUpdate = new ArrayList<>();
private List<String> primaryInternetFacingLBInboundNatPoolsToRemoveOnUpdate = new ArrayList<>();
private List<String> primaryInternalLBBackendsToRemoveOnUpdate = new ArrayList<>();
private List<String> primaryInternalLBInboundNatPoolsToRemoveOnUpdate = new ArrayList<>();
private List<String> primaryInternetFacingLBBackendsToAddOnUpdate = new ArrayList<>();
private List<String> primaryInternetFacingLBInboundNatPoolsToAddOnUpdate = new ArrayList<>();
private List<String> primaryInternalLBBackendsToAddOnUpdate = new ArrayList<>();
private List<String> primaryInternalLBInboundNatPoolsToAddOnUpdate = new ArrayList<>();
// The paged converter for virtual machine scale set sku
private PagedListConverter<VirtualMachineScaleSetSkuInner, VirtualMachineScaleSetSku> skuConverter;
VirtualMachineScaleSetImpl(String name,
VirtualMachineScaleSetInner innerModel,
VirtualMachineScaleSetsInner client,
final ComputeManager computeManager,
final StorageManager storageManager,
final NetworkManager networkManager) {
super(name, innerModel, computeManager);
this.client = client;
this.storageManager = storageManager;
this.networkManager = networkManager;
this.namer = new ResourceNamer(this.name());
this.skuConverter = new PagedListConverter<VirtualMachineScaleSetSkuInner, VirtualMachineScaleSetSku>() {
@Override
public VirtualMachineScaleSetSku typeConvert(VirtualMachineScaleSetSkuInner inner) {
return new VirtualMachineScaleSetSkuImpl(inner);
}
};
}
@Override
protected void initializeChildrenFromInner() {
this.extensions = new HashMap<>();
if (this.inner().virtualMachineProfile().extensionProfile() != null) {
if (this.inner().virtualMachineProfile().extensionProfile().extensions() != null) {
for (VirtualMachineScaleSetExtensionInner inner : this.inner().virtualMachineProfile().extensionProfile().extensions()) {
this.extensions.put(inner.name(), new VirtualMachineScaleSetExtensionImpl(inner, this));
}
}
}
}
@Override
public PagedList<VirtualMachineScaleSetSku> listAvailableSkus() throws CloudException, IOException {
return this.skuConverter.convert(this.client.listSkus(this.resourceGroupName(), this.name()));
}
@Override
public void deallocate() throws CloudException, IOException, InterruptedException {
this.client.deallocate(this.resourceGroupName(), this.name());
}
@Override
public void powerOff() throws CloudException, IOException, InterruptedException {
this.client.powerOff(this.resourceGroupName(), this.name());
}
@Override
public void restart() throws CloudException, IOException, InterruptedException {
this.client.restart(this.resourceGroupName(), this.name());
}
@Override
public void start() throws CloudException, IOException, InterruptedException {
this.client.start(this.resourceGroupName(), this.name());
}
@Override
public void reimage() throws CloudException, IOException, InterruptedException {
this.client.reimage(this.resourceGroupName(), this.name());
}
@Override
public String computerNamePrefix() {
return this.inner().virtualMachineProfile().osProfile().computerNamePrefix();
}
@Override
public OperatingSystemTypes osType() {
return this.inner().virtualMachineProfile().storageProfile().osDisk().osType();
}
@Override
public CachingTypes osDiskCachingType() {
return this.inner().virtualMachineProfile().storageProfile().osDisk().caching();
}
@Override
public String osDiskName() {
return this.inner().virtualMachineProfile().storageProfile().osDisk().name();
}
@Override
public UpgradeMode upgradeModel() {
// upgradePolicy is a required property so no null check
return this.inner().upgradePolicy().mode();
}
    @Override
    public boolean overProvisionEnabled() {
        // NOTE(review): inner().overProvision() returns a boxed Boolean; the auto-unboxing
        // here would NPE if the service ever omits the property — presumably it is always
        // populated on a fetched scale set. TODO confirm.
        return this.inner().overProvision();
    }
    @Override
    public VirtualMachineScaleSetSkuTypes sku() {
        // Wraps the inner SKU in its fluent representation (fresh wrapper per call).
        return new VirtualMachineScaleSetSkuTypes(this.inner().sku());
    }
@Override
public int capacity() {
return this.inner().sku().capacity().intValue();
}
@Override
public Network getPrimaryNetwork() throws IOException {
String subnetId = primaryNicDefaultIPConfiguration().subnet().id();
String virtualNetworkId = ResourceUtils.parentResourcePathFromResourceId(subnetId);
return this.networkManager
.networks()
.getById(virtualNetworkId);
}
@Override
public LoadBalancer getPrimaryInternetFacingLoadBalancer() throws IOException {
if (this.primaryInternetFacingLoadBalancer == null) {
loadCurrentPrimaryLoadBalancersIfAvailable();
}
return this.primaryInternetFacingLoadBalancer;
}
@Override
public Map<String, Backend> listPrimaryInternetFacingLoadBalancerBackends() throws IOException {
if (this.getPrimaryInternetFacingLoadBalancer() != null) {
return getBackendsAssociatedWithIpConfiguration(this.primaryInternetFacingLoadBalancer,
primaryNicDefaultIPConfiguration());
}
return new HashMap<>();
}
@Override
public Map<String, InboundNatPool> listPrimaryInternetFacingLoadBalancerInboundNatPools() throws IOException {
if (this.getPrimaryInternetFacingLoadBalancer() != null) {
return getInboundNatPoolsAssociatedWithIpConfiguration(this.primaryInternetFacingLoadBalancer,
primaryNicDefaultIPConfiguration());
}
return new HashMap<>();
}
@Override
public LoadBalancer getPrimaryInternalLoadBalancer() throws IOException {
if (this.primaryInternalLoadBalancer == null) {
loadCurrentPrimaryLoadBalancersIfAvailable();
}
return this.primaryInternalLoadBalancer;
}
@Override
public Map<String, Backend> listPrimaryInternalLoadBalancerBackends() throws IOException {
if (this.getPrimaryInternalLoadBalancer() != null) {
return getBackendsAssociatedWithIpConfiguration(this.primaryInternalLoadBalancer,
primaryNicDefaultIPConfiguration());
}
return new HashMap<>();
}
@Override
public Map<String, InboundNatPool> listPrimaryInternalLoadBalancerInboundNatPools() throws IOException {
if (this.getPrimaryInternalLoadBalancer() != null) {
return getInboundNatPoolsAssociatedWithIpConfiguration(this.primaryInternalLoadBalancer,
primaryNicDefaultIPConfiguration());
}
return new HashMap<>();
}
@Override
public List<String> primaryPublicIpAddressIds() throws IOException {
LoadBalancer loadBalancer = this.getPrimaryInternetFacingLoadBalancer();
if (loadBalancer != null) {
return loadBalancer.publicIpAddressIds();
}
return new ArrayList<>();
}
@Override
public List<String> vhdContainers() {
if (this.storageProfile() != null
&& this.storageProfile().osDisk() != null
&& this.storageProfile().osDisk().vhdContainers() != null) {
return this.storageProfile().osDisk().vhdContainers();
}
return new ArrayList<>();
}
    @Override
    public VirtualMachineScaleSetStorageProfile storageProfile() {
        // Direct view over the inner model's storage profile (not a copy).
        return this.inner().virtualMachineProfile().storageProfile();
    }
    @Override
    public VirtualMachineScaleSetNetworkProfile networkProfile() {
        // Direct view over the inner model's network profile (not a copy).
        return this.inner().virtualMachineProfile().networkProfile();
    }
    @Override
    public Map<String, VirtualMachineScaleSetExtension> extensions() {
        // Read-only live view over the cached extensions map.
        return Collections.unmodifiableMap(this.extensions);
    }
// Fluent setters
    @Override
    public VirtualMachineScaleSetImpl withSku(VirtualMachineScaleSetSkuTypes skuType) {
        // Set the VM size (SKU) for the scale set instances.
        this.inner()
                .withSku(skuType.sku());
        return this;
    }
    @Override
    public VirtualMachineScaleSetImpl withSku(VirtualMachineScaleSetSku sku) {
        // Convenience overload delegating to withSku(VirtualMachineScaleSetSkuTypes).
        return this.withSku(sku.skuType());
    }
    @Override
    public VirtualMachineScaleSetImpl withExistingPrimaryNetworkSubnet(Network network, String subnetName) {
        // Remember the fully-qualified subnet id; it is applied to the primary NIC's
        // IP configuration later, in setPrimaryIpConfigurationSubnet().
        this.existingPrimaryNetworkSubnetNameToAssociate = mergePath(network.id(), "subnets", subnetName);
        return this;
    }
@Override
public VirtualMachineScaleSetImpl withPrimaryInternetFacingLoadBalancer(LoadBalancer loadBalancer) {
if (loadBalancer.publicIpAddressIds().isEmpty()) {
throw new IllegalArgumentException("Parameter loadBalancer must be an internet facing load balancer");
}
if (isInCreateMode()) {
this.primaryInternetFacingLoadBalancer = loadBalancer;
associateLoadBalancerToIpConfiguration(this.primaryInternetFacingLoadBalancer,
this.primaryNicDefaultIPConfiguration());
} else {
this.primaryInternetFacingLoadBalancerToAttachOnUpdate = loadBalancer;
}
return this;
}
@Override
public VirtualMachineScaleSetImpl withPrimaryInternetFacingLoadBalancerBackends(String... backendNames) {
if (this.isInCreateMode()) {
VirtualMachineScaleSetIPConfigurationInner defaultPrimaryIpConfig = this.primaryNicDefaultIPConfiguration();
removeAllBackendAssociationFromIpConfiguration(this.primaryInternetFacingLoadBalancer, defaultPrimaryIpConfig);
associateBackEndsToIpConfiguration(this.primaryInternetFacingLoadBalancer.id(),
defaultPrimaryIpConfig,
backendNames);
} else {
addToList(this.primaryInternetFacingLBBackendsToAddOnUpdate, backendNames);
}
return this;
}
@Override
public VirtualMachineScaleSetImpl withPrimaryInternetFacingLoadBalancerInboundNatPools(String... natPoolNames) {
if (this.isInCreateMode()) {
VirtualMachineScaleSetIPConfigurationInner defaultPrimaryIpConfig = this.primaryNicDefaultIPConfiguration();
removeAllInboundNatPoolAssociationFromIpConfiguration(this.primaryInternetFacingLoadBalancer,
defaultPrimaryIpConfig);
associateInboundNATPoolsToIpConfiguration(this.primaryInternetFacingLoadBalancer.id(),
defaultPrimaryIpConfig,
natPoolNames);
} else {
addToList(this.primaryInternetFacingLBInboundNatPoolsToAddOnUpdate, natPoolNames);
}
return this;
}
@Override
public VirtualMachineScaleSetImpl withPrimaryInternalLoadBalancer(LoadBalancer loadBalancer) {
if (!loadBalancer.publicIpAddressIds().isEmpty()) {
throw new IllegalArgumentException("Parameter loadBalancer must be an internal load balancer");
}
String lbNetworkId = null;
for (Frontend frontEnd : loadBalancer.frontends().values()) {
if (frontEnd.inner().subnet().id() != null) {
lbNetworkId = ResourceUtils.parentResourcePathFromResourceId(frontEnd.inner().subnet().id());
}
}
if (isInCreateMode()) {
String vmNICNetworkId = ResourceUtils.parentResourcePathFromResourceId(this.existingPrimaryNetworkSubnetNameToAssociate);
// Azure has a really wired BUG that - it throws exception when vnet of VMSS and LB are not same
// (code: NetworkInterfaceAndInternalLoadBalancerMustUseSameVnet) but at the same time Azure update
// the VMSS's network section to refer this invalid internal LB. This makes VMSS un-usable and portal
// will show a error above VMSS profile page.
//
if (!vmNICNetworkId.equalsIgnoreCase(lbNetworkId)) {
throw new IllegalArgumentException("Virtual network associated with scale set virtual machines"
+ " and internal load balancer must be same. "
+ "'" + vmNICNetworkId + "'"
+ "'" + lbNetworkId);
}
this.primaryInternalLoadBalancer = loadBalancer;
associateLoadBalancerToIpConfiguration(this.primaryInternalLoadBalancer,
this.primaryNicDefaultIPConfiguration());
} else {
String vmNicVnetId = ResourceUtils.parentResourcePathFromResourceId(primaryNicDefaultIPConfiguration()
.subnet()
.id());
if (!vmNicVnetId.equalsIgnoreCase(lbNetworkId)) {
throw new IllegalArgumentException("Virtual network associated with scale set virtual machines"
+ " and internal load balancer must be same. "
+ "'" + vmNicVnetId + "'"
+ "'" + lbNetworkId);
}
this.primaryInternalLoadBalancerToAttachOnUpdate = loadBalancer;
}
return this;
}
@Override
public VirtualMachineScaleSetImpl withPrimaryInternalLoadBalancerBackends(String... backendNames) {
if (this.isInCreateMode()) {
VirtualMachineScaleSetIPConfigurationInner defaultPrimaryIpConfig = primaryNicDefaultIPConfiguration();
removeAllBackendAssociationFromIpConfiguration(this.primaryInternalLoadBalancer,
defaultPrimaryIpConfig);
associateBackEndsToIpConfiguration(this.primaryInternalLoadBalancer.id(),
defaultPrimaryIpConfig,
backendNames);
} else {
addToList(this.primaryInternalLBBackendsToAddOnUpdate, backendNames);
}
return this;
}
@Override
public VirtualMachineScaleSetImpl withPrimaryInternalLoadBalancerInboundNatPools(String... natPoolNames) {
if (this.isInCreateMode()) {
VirtualMachineScaleSetIPConfigurationInner defaultPrimaryIpConfig = this.primaryNicDefaultIPConfiguration();
removeAllInboundNatPoolAssociationFromIpConfiguration(this.primaryInternalLoadBalancer,
defaultPrimaryIpConfig);
associateInboundNATPoolsToIpConfiguration(this.primaryInternalLoadBalancer.id(),
defaultPrimaryIpConfig,
natPoolNames);
} else {
addToList(this.primaryInternalLBInboundNatPoolsToAddOnUpdate, natPoolNames);
}
return this;
}
    @Override
    public VirtualMachineScaleSetImpl withoutPrimaryInternalLoadBalancer() {
        // Only meaningful for an existing scale set; the actual detachment happens
        // when the update payload is prepared. Silently a no-op in create mode.
        if (this.isInUpdateMode()) {
            this.removePrimaryInternalLoadBalancerOnUpdate = true;
        }
        return this;
    }
    @Override
    public VirtualMachineScaleSetImpl withoutPrimaryInternetFacingLoadBalancer() {
        // Only meaningful for an existing scale set; the actual detachment happens
        // when the update payload is prepared. Silently a no-op in create mode.
        if (this.isInUpdateMode()) {
            this.removePrimaryInternetFacingLoadBalancerOnUpdate = true;
        }
        return this;
    }
    @Override
    public VirtualMachineScaleSetImpl withoutPrimaryInternetFacingLoadBalancerBackends(String ...backendNames) {
        // Queue internet-facing backend names for removal; applied on update.
        addToList(this.primaryInternetFacingLBBackendsToRemoveOnUpdate, backendNames);
        return this;
    }
    @Override
    public VirtualMachineScaleSetImpl withoutPrimaryInternalLoadBalancerBackends(String ...backendNames) {
        // Queue internal-LB backend names for removal; applied on update.
        addToList(this.primaryInternalLBBackendsToRemoveOnUpdate, backendNames);
        return this;
    }
@Override
public VirtualMachineScaleSetImpl withoutPrimaryInternetFacingLoadBalancerNatPools(String ...natPoolNames) {
addToList(this.primaryInternalLBInboundNatPoolsToRemoveOnUpdate, natPoolNames);
return this;
}
@Override
public VirtualMachineScaleSetImpl withoutPrimaryInternalLoadBalancerNatPools(String ...natPoolNames) {
addToList(this.primaryInternetFacingLBInboundNatPoolsToRemoveOnUpdate, natPoolNames);
return this;
}
    @Override
    public VirtualMachineScaleSetImpl withPopularWindowsImage(KnownWindowsVirtualMachineImage knownImage) {
        // Delegates to the specific-version path using the known image's reference.
        return withSpecificWindowsImageVersion(knownImage.imageReference());
    }
@Override
public VirtualMachineScaleSetImpl withLatestWindowsImage(String publisher, String offer, String sku) {
ImageReference imageReference = new ImageReference()
.withPublisher(publisher)
.withOffer(offer)
.withSku(sku)
.withVersion("latest");
return withSpecificWindowsImageVersion(imageReference);
}
@Override
public VirtualMachineScaleSetImpl withSpecificWindowsImageVersion(ImageReference imageReference) {
this.inner()
.virtualMachineProfile()
.storageProfile().osDisk().withCreateOption(DiskCreateOptionTypes.FROM_IMAGE);
this.inner()
.virtualMachineProfile()
.storageProfile().withImageReference(imageReference);
this.inner()
.virtualMachineProfile()
.osProfile().withWindowsConfiguration(new WindowsConfiguration());
// sets defaults for "Stored(Custom)Image" or "VM(Platform)Image"
this.inner()
.virtualMachineProfile()
.osProfile().windowsConfiguration().withProvisionVMAgent(true);
this.inner()
.virtualMachineProfile()
.osProfile().windowsConfiguration().withEnableAutomaticUpdates(true);
return this;
}
@Override
public VirtualMachineScaleSetImpl withStoredWindowsImage(String imageUrl) {
VirtualHardDisk userImageVhd = new VirtualHardDisk();
userImageVhd.withUri(imageUrl);
this.inner()
.virtualMachineProfile()
.storageProfile().osDisk().withCreateOption(DiskCreateOptionTypes.FROM_IMAGE);
this.inner()
.virtualMachineProfile()
.storageProfile().osDisk().withImage(userImageVhd);
// For platform image osType will be null, azure will pick it from the image metadata.
this.inner()
.virtualMachineProfile()
.storageProfile().osDisk().withOsType(OperatingSystemTypes.WINDOWS);
this.inner()
.virtualMachineProfile()
.osProfile().withWindowsConfiguration(new WindowsConfiguration());
// sets defaults for "Stored(Custom)Image" or "VM(Platform)Image"
this.inner()
.virtualMachineProfile()
.osProfile().windowsConfiguration().withProvisionVMAgent(true);
this.inner()
.virtualMachineProfile()
.osProfile().windowsConfiguration().withEnableAutomaticUpdates(true);
return this;
}
    @Override
    public VirtualMachineScaleSetImpl withPopularLinuxImage(KnownLinuxVirtualMachineImage knownImage) {
        // Delegates to the specific-version path using the known image's reference.
        return withSpecificLinuxImageVersion(knownImage.imageReference());
    }
@Override
public VirtualMachineScaleSetImpl withLatestLinuxImage(String publisher, String offer, String sku) {
ImageReference imageReference = new ImageReference()
.withPublisher(publisher)
.withOffer(offer)
.withSku(sku)
.withVersion("latest");
return withSpecificLinuxImageVersion(imageReference);
}
@Override
public VirtualMachineScaleSetImpl withSpecificLinuxImageVersion(ImageReference imageReference) {
this.inner()
.virtualMachineProfile()
.storageProfile().osDisk().withCreateOption(DiskCreateOptionTypes.FROM_IMAGE);
this.inner()
.virtualMachineProfile()
.storageProfile().withImageReference(imageReference);
this.inner()
.virtualMachineProfile()
.osProfile().withLinuxConfiguration(new LinuxConfiguration());
this.isMarketplaceLinuxImage = true;
return this;
}
@Override
public VirtualMachineScaleSetImpl withStoredLinuxImage(String imageUrl) {
VirtualHardDisk userImageVhd = new VirtualHardDisk();
userImageVhd.withUri(imageUrl);
this.inner()
.virtualMachineProfile()
.storageProfile().osDisk().withCreateOption(DiskCreateOptionTypes.FROM_IMAGE);
this.inner()
.virtualMachineProfile()
.storageProfile().osDisk().withImage(userImageVhd);
// For platform image osType will be null, azure will pick it from the image metadata.
this.inner()
.virtualMachineProfile()
.storageProfile().osDisk().withOsType(OperatingSystemTypes.LINUX);
this.inner()
.virtualMachineProfile()
.osProfile().withLinuxConfiguration(new LinuxConfiguration());
return this;
}
@Override
public VirtualMachineScaleSetImpl withAdminUserName(String adminUserName) {
this.inner()
.virtualMachineProfile()
.osProfile()
.withAdminUsername(adminUserName);
return this;
}
    @Override
    public VirtualMachineScaleSetImpl withRootUserName(String rootUserName) {
        // Linux alias for withAdminUserName.
        return this.withAdminUserName(rootUserName);
    }
@Override
public VirtualMachineScaleSetImpl withPassword(String password) {
this.inner()
.virtualMachineProfile()
.osProfile()
.withAdminPassword(password);
return this;
}
    @Override
    public VirtualMachineScaleSetImpl withSsh(String publicKeyData) {
        // Adds a public SSH key for the admin user. Assumes a Linux OS profile is already
        // configured (linuxConfiguration() must be non-null) — TODO confirm the fluent
        // definition flow guarantees this before withSsh is reachable.
        VirtualMachineScaleSetOSProfile osProfile = this.inner()
                .virtualMachineProfile()
                .osProfile();
        if (osProfile.linuxConfiguration().ssh() == null) {
            // Lazily create the SSH configuration with an empty key list.
            SshConfiguration sshConfiguration = new SshConfiguration();
            sshConfiguration.withPublicKeys(new ArrayList<SshPublicKey>());
            osProfile.linuxConfiguration().withSsh(sshConfiguration);
        }
        SshPublicKey sshPublicKey = new SshPublicKey();
        sshPublicKey.withKeyData(publicKeyData);
        // Standard authorized_keys location for the admin user's home directory.
        sshPublicKey.withPath("/home/" + osProfile.adminUsername() + "/.ssh/authorized_keys");
        osProfile.linuxConfiguration().ssh().publicKeys().add(sshPublicKey);
        return this;
    }
@Override
public VirtualMachineScaleSetImpl withVmAgent() {
this.inner()
.virtualMachineProfile()
.osProfile().windowsConfiguration().withProvisionVMAgent(true);
return this;
}
@Override
public VirtualMachineScaleSetImpl withoutVmAgent() {
this.inner()
.virtualMachineProfile()
.osProfile().windowsConfiguration().withProvisionVMAgent(false);
return this;
}
@Override
public VirtualMachineScaleSetImpl withAutoUpdate() {
this.inner()
.virtualMachineProfile()
.osProfile().windowsConfiguration().withEnableAutomaticUpdates(true);
return this;
}
@Override
public VirtualMachineScaleSetImpl withoutAutoUpdate() {
this.inner()
.virtualMachineProfile()
.osProfile().windowsConfiguration().withEnableAutomaticUpdates(false);
return this;
}
@Override
public VirtualMachineScaleSetImpl withTimeZone(String timeZone) {
this.inner()
.virtualMachineProfile()
.osProfile().windowsConfiguration().withTimeZone(timeZone);
return this;
}
@Override
public VirtualMachineScaleSetImpl withWinRm(WinRMListener listener) {
if (this.inner().virtualMachineProfile().osProfile().windowsConfiguration().winRM() == null) {
WinRMConfiguration winRMConfiguration = new WinRMConfiguration();
this.inner()
.virtualMachineProfile()
.osProfile().windowsConfiguration().withWinRM(winRMConfiguration);
}
this.inner()
.virtualMachineProfile()
.osProfile()
.windowsConfiguration()
.winRM()
.listeners()
.add(listener);
return this;
}
@Override
public VirtualMachineScaleSetImpl withOsDiskCaching(CachingTypes cachingType) {
this.inner()
.virtualMachineProfile()
.storageProfile().osDisk().withCaching(cachingType);
return this;
}
@Override
public VirtualMachineScaleSetImpl withOsDiskName(String name) {
this.inner()
.virtualMachineProfile()
.storageProfile().osDisk().withName(name);
return this;
}
@Override
public VirtualMachineScaleSetImpl withComputerNamePrefix(String namePrefix) {
this.inner()
.virtualMachineProfile()
.osProfile()
.withComputerNamePrefix(namePrefix);
return this;
}
@Override
public VirtualMachineScaleSetImpl withUpgradeMode(UpgradeMode upgradeMode) {
this.inner()
.upgradePolicy()
.withMode(upgradeMode);
return this;
}
    @Override
    public VirtualMachineScaleSetImpl withOverProvision(boolean enabled) {
        // Enable/disable over-provisioning (extra VMs spun up and trimmed after deployment).
        this.inner()
                .withOverProvision(enabled);
        return this;
    }
    @Override
    public VirtualMachineScaleSetImpl withOverProvisioning() {
        // Convenience alias for withOverProvision(true).
        return this.withOverProvision(true);
    }
    @Override
    public VirtualMachineScaleSetImpl withoutOverProvisioning() {
        // Convenience alias for withOverProvision(false).
        return this.withOverProvision(false);
    }
@Override
public VirtualMachineScaleSetImpl withCapacity(int capacity) {
this.inner()
.sku().withCapacity(new Long(capacity));
return this;
}
@Override
public VirtualMachineScaleSetImpl withNewStorageAccount(String name) {
StorageAccount.DefinitionStages.WithGroup definitionWithGroup = this.storageManager
.storageAccounts()
.define(name)
.withRegion(this.regionName());
Creatable<StorageAccount> definitionAfterGroup;
if (this.creatableGroup != null) {
definitionAfterGroup = definitionWithGroup.withNewResourceGroup(this.creatableGroup);
} else {
definitionAfterGroup = definitionWithGroup.withExistingResourceGroup(this.resourceGroupName());
}
return withNewStorageAccount(definitionAfterGroup);
}
    @Override
    public VirtualMachineScaleSetImpl withNewStorageAccount(Creatable<StorageAccount> creatable) {
        // Record the pending account by key and register it as a create-time dependency;
        // it is resolved later in handleOSDiskContainersAsync().
        this.creatableStorageAccountKeys.add(creatable.key());
        this.addCreatableDependency(creatable);
        return this;
    }
    @Override
    public VirtualMachineScaleSetImpl withExistingStorageAccount(StorageAccount storageAccount) {
        // Queue an existing account whose blob endpoint will host OS-disk VHD containers.
        this.existingStorageAccountsToAssociate.add(storageAccount);
        return this;
    }
    @Override
    public VirtualMachineScaleSetExtensionImpl defineNewExtension(String name) {
        // Begin the nested definition of a new extension; attach() adds it via withExtension().
        return new VirtualMachineScaleSetExtensionImpl(new VirtualMachineScaleSetExtensionInner().withName(name), this);
    }
    // Adds (or replaces, keyed by name) an extension in the local cache; flushed into the
    // inner model in beforeCreating().
    protected VirtualMachineScaleSetImpl withExtension(VirtualMachineScaleSetExtensionImpl extension) {
        this.extensions.put(extension.name(), extension);
        return this;
    }
    @Override
    public VirtualMachineScaleSetExtensionImpl updateExtension(String name) {
        // Returns null if no extension with the given name exists in the cache.
        return (VirtualMachineScaleSetExtensionImpl) this.extensions.get(name);
    }
@Override
public VirtualMachineScaleSetImpl withoutExtension(String name) {
if (this.extensions.containsKey(name)) {
this.extensions.remove(name);
}
return this;
}
// Create Update specific methods
//
@Override
protected void beforeCreating() {
if (this.extensions.size() > 0) {
this.inner()
.virtualMachineProfile()
.withExtensionProfile(new VirtualMachineScaleSetExtensionProfile())
.extensionProfile()
.withExtensions(innersFromWrappers(this.extensions.values()));
}
}
    @Override
    protected Observable<VirtualMachineScaleSetInner> createInner() {
        // Finalize defaults and network wiring before sending the payload; order matters:
        // OS defaults first, then subnet, then LB backend/NAT-pool associations.
        this.setOSDiskAndOSProfileDefaults();
        this.setPrimaryIpConfigurationSubnet();
        this.setPrimaryIpConfigurationBackendsAndInboundNatPools();
        // Resolve the OS-disk VHD containers (may create a storage account asynchronously)
        // and only then issue the create/update call.
        return this.handleOSDiskContainersAsync()
            .flatMap(new Func1<Void, Observable<VirtualMachineScaleSetInner>>() {
                @Override
                public Observable<VirtualMachineScaleSetInner> call(Void aVoid) {
                    return client.createOrUpdateAsync(resourceGroupName(), name(), inner());
                }
            });
    }
    @Override
    protected void afterCreating() {
        // Drop cached LB references and rebuild the extension cache from the fresh inner model.
        this.clearCachedProperties();
        this.initializeChildrenFromInner();
    }
@Override
public VirtualMachineScaleSetImpl refresh() {
VirtualMachineScaleSetInner inner = this.client.get(this.resourceGroupName(), this.name());
this.setInner(inner);
this.clearCachedProperties();
this.initializeChildrenFromInner();
return this;
}
// Helpers
//
    // True when this wrapper represents an existing (already created) scale set.
    private boolean isInUpdateMode() {
        return !this.isInCreateMode();
    }
    // Create-mode only: fill in sensible defaults for any capacity, upgrade policy,
    // OS disk, and computer-name-prefix settings the user did not set explicitly.
    private void setOSDiskAndOSProfileDefaults() {
        if (isInUpdateMode()) {
            return;
        }
        // Default capacity: 2 VM instances.
        if (this.inner().sku().capacity() == null) {
            this.withCapacity(2);
        }
        // Default upgrade policy: automatic.
        if (this.inner().upgradePolicy() == null
                || this.inner().upgradePolicy().mode() == null) {
            this.inner()
                    .withUpgradePolicy(new UpgradePolicy()
                            .withMode(UpgradeMode.AUTOMATIC));
        }
        VirtualMachineScaleSetOSProfile osProfile = this.inner()
                .virtualMachineProfile()
                .osProfile();
        // linux image: Custom or marketplace linux image
        if (this.osType() == OperatingSystemTypes.LINUX || this.isMarketplaceLinuxImage) {
            if (osProfile.linuxConfiguration() == null) {
                osProfile.withLinuxConfiguration(new LinuxConfiguration());
            }
            // Password authentication is disabled exactly when no admin password was given.
            osProfile
                    .linuxConfiguration()
                    .withDisablePasswordAuthentication(osProfile.adminPassword() == null);
        }
        // Default OS disk caching: read-write.
        if (this.osDiskCachingType() == null) {
            withOsDiskCaching(CachingTypes.READ_WRITE);
        }
        // Default OS disk name derived from the scale set name.
        if (this.osDiskName() == null) {
            withOsDiskName(this.name() + "-os-disk");
        }
        if (this.computerNamePrefix() == null) {
            // VM name cannot contain only numeric values and cannot exceed 15 chars
            if (this.name().matches("[0-9]+")) {
                withComputerNamePrefix(ResourceNamer.randomResourceName("vmss-vm", 12));
            } else if (this.name().length() <= 12) {
                withComputerNamePrefix(this.name() + "-vm");
            } else {
                withComputerNamePrefix(ResourceNamer.randomResourceName("vmss-vm", 12));
            }
        }
    }
private boolean isCustomImage(VirtualMachineScaleSetStorageProfile storageProfile) {
return storageProfile.osDisk().image() != null
&& storageProfile.osDisk().image().uri() != null;
}
    // Resolves the blob containers that will hold the OS-disk VHDs:
    // - custom image: containers must be empty (single-storage-account restriction);
    // - create mode with no accounts chosen: implicitly create one storage account;
    // - otherwise: append container URLs for every pending creatable and existing account.
    // Always clears the account/container bookkeeping fields when done.
    private Observable<Void> handleOSDiskContainersAsync() {
        final VirtualMachineScaleSetStorageProfile storageProfile = inner()
                .virtualMachineProfile()
                .storageProfile();
        if (isCustomImage(storageProfile)) {
            // There is a restriction currently that virtual machine's disk cannot be stored in multiple storage accounts
            // if scale set is based on custom image. Remove this check once azure start supporting it.
            storageProfile.osDisk()
                    .vhdContainers()
                    .clear();
            return Observable.just(null);
        }
        if (this.isInCreateMode()
                && this.creatableStorageAccountKeys.isEmpty()
                && this.existingStorageAccountsToAssociate.isEmpty()) {
            // No storage account specified: implicitly create one in the same region/group.
            return this.storageManager.storageAccounts()
                    .define(this.namer.randomName("stg", 24))
                    .withRegion(this.regionName())
                    .withExistingResourceGroup(this.resourceGroupName())
                    .createAsync()
                    .map(new Func1<StorageAccount, Void>() {
                        @Override
                        public Void call(StorageAccount storageAccount) {
                            // "vhds" is the fallback container name when none was requested.
                            String containerName = vhdContainerName;
                            if (containerName == null) {
                                containerName = "vhds";
                            }
                            storageProfile.osDisk()
                                    .vhdContainers()
                                    .add(mergePath(storageAccount.endPoints().primary().blob(), containerName));
                            vhdContainerName = null;
                            creatableStorageAccountKeys.clear();
                            existingStorageAccountsToAssociate.clear();
                            return null;
                        }
                    });
        } else {
            // Reuse an already-present container name if the caller did not specify one.
            String containerName = this.vhdContainerName;
            if (containerName == null) {
                for (String containerUrl : storageProfile.osDisk().vhdContainers()) {
                    containerName = containerUrl.substring(containerUrl.lastIndexOf("/") + 1);
                    break;
                }
            }
            if (containerName == null) {
                containerName = "vhds";
            }
            // Accounts created as dependencies of this scale set.
            for (String storageAccountKey : this.creatableStorageAccountKeys) {
                StorageAccount storageAccount = (StorageAccount) createdResource(storageAccountKey);
                storageProfile.osDisk()
                        .vhdContainers()
                        .add(mergePath(storageAccount.endPoints().primary().blob(), containerName));
            }
            // Pre-existing accounts the user associated explicitly.
            for (StorageAccount storageAccount : this.existingStorageAccountsToAssociate) {
                storageProfile.osDisk()
                        .vhdContainers()
                        .add(mergePath(storageAccount.endPoints().primary().blob(), containerName));
            }
            this.vhdContainerName = null;
            this.creatableStorageAccountKeys.clear();
            this.existingStorageAccountsToAssociate.clear();
            return Observable.just(null);
        }
    }
private void setPrimaryIpConfigurationSubnet() {
if (isInUpdateMode()) {
return;
}
VirtualMachineScaleSetIPConfigurationInner ipConfig = this.primaryNicDefaultIPConfiguration();
ipConfig.withSubnet(new ApiEntityReference().withId(this.existingPrimaryNetworkSubnetNameToAssociate));
this.existingPrimaryNetworkSubnetNameToAssociate = null;
}
    // Update-mode only: apply all queued load-balancer changes (backend/NAT-pool additions
    // and removals, LB detachments and replacements) to the primary NIC's default IP
    // configuration, then reset all the pending-change bookkeeping fields.
    private void setPrimaryIpConfigurationBackendsAndInboundNatPools() {
        if (isInCreateMode()) {
            return;
        }
        // Resolve the currently attached LBs; needed to compute removals below.
        try {
            this.loadCurrentPrimaryLoadBalancersIfAvailable();
        } catch (IOException ioException) {
            throw new RuntimeException(ioException);
        }
        VirtualMachineScaleSetIPConfigurationInner primaryIpConfig = primaryNicDefaultIPConfiguration();
        // Apply queued backend / NAT-pool removals and additions for the internet-facing LB.
        if (this.primaryInternetFacingLoadBalancer != null) {
            removeBackendsFromIpConfiguration(this.primaryInternetFacingLoadBalancer.id(),
                    primaryIpConfig,
                    this.primaryInternetFacingLBBackendsToRemoveOnUpdate.toArray(new String[0]));
            associateBackEndsToIpConfiguration(primaryInternetFacingLoadBalancer.id(),
                    primaryIpConfig,
                    this.primaryInternetFacingLBBackendsToAddOnUpdate.toArray(new String[0]));
            removeInboundNatPoolsFromIpConfiguration(this.primaryInternetFacingLoadBalancer.id(),
                    primaryIpConfig,
                    this.primaryInternetFacingLBInboundNatPoolsToRemoveOnUpdate.toArray(new String[0]));
            associateInboundNATPoolsToIpConfiguration(primaryInternetFacingLoadBalancer.id(),
                    primaryIpConfig,
                    this.primaryInternetFacingLBInboundNatPoolsToAddOnUpdate.toArray(new String[0]));
        }
        // Same for the internal LB.
        if (this.primaryInternalLoadBalancer != null) {
            removeBackendsFromIpConfiguration(this.primaryInternalLoadBalancer.id(),
                    primaryIpConfig,
                    this.primaryInternalLBBackendsToRemoveOnUpdate.toArray(new String[0]));
            associateBackEndsToIpConfiguration(primaryInternalLoadBalancer.id(),
                    primaryIpConfig,
                    this.primaryInternalLBBackendsToAddOnUpdate.toArray(new String[0]));
            removeInboundNatPoolsFromIpConfiguration(this.primaryInternalLoadBalancer.id(),
                    primaryIpConfig,
                    this.primaryInternalLBInboundNatPoolsToRemoveOnUpdate.toArray(new String[0]));
            associateInboundNATPoolsToIpConfiguration(primaryInternalLoadBalancer.id(),
                    primaryIpConfig,
                    this.primaryInternalLBInboundNatPoolsToAddOnUpdate.toArray(new String[0]));
        }
        // Full detachments requested via withoutPrimary*LoadBalancer().
        if (this.removePrimaryInternetFacingLoadBalancerOnUpdate) {
            if (this.primaryInternetFacingLoadBalancer != null) {
                removeLoadBalancerAssociationFromIpConfiguration(this.primaryInternetFacingLoadBalancer, primaryIpConfig);
            }
        }
        if (this.removePrimaryInternalLoadBalancerOnUpdate) {
            if (this.primaryInternalLoadBalancer != null) {
                removeLoadBalancerAssociationFromIpConfiguration(this.primaryInternalLoadBalancer, primaryIpConfig);
            }
        }
        // Replacement: detach the old internet-facing LB (if any), attach the new one,
        // then apply any queued backend/NAT-pool names against the new LB.
        if (this.primaryInternetFacingLoadBalancerToAttachOnUpdate != null) {
            if (this.primaryInternetFacingLoadBalancer != null) {
                removeLoadBalancerAssociationFromIpConfiguration(this.primaryInternetFacingLoadBalancer, primaryIpConfig);
            }
            associateLoadBalancerToIpConfiguration(this.primaryInternetFacingLoadBalancerToAttachOnUpdate, primaryIpConfig);
            if (!this.primaryInternetFacingLBBackendsToAddOnUpdate.isEmpty()) {
                removeAllBackendAssociationFromIpConfiguration(this.primaryInternetFacingLoadBalancerToAttachOnUpdate, primaryIpConfig);
                associateBackEndsToIpConfiguration(this.primaryInternetFacingLoadBalancerToAttachOnUpdate.id(),
                        primaryIpConfig,
                        this.primaryInternetFacingLBBackendsToAddOnUpdate.toArray(new String[0]));
            }
            if (!this.primaryInternetFacingLBInboundNatPoolsToAddOnUpdate.isEmpty()) {
                removeAllInboundNatPoolAssociationFromIpConfiguration(this.primaryInternetFacingLoadBalancerToAttachOnUpdate, primaryIpConfig);
                associateInboundNATPoolsToIpConfiguration(this.primaryInternetFacingLoadBalancerToAttachOnUpdate.id(),
                        primaryIpConfig,
                        this.primaryInternetFacingLBInboundNatPoolsToAddOnUpdate.toArray(new String[0]));
            }
        }
        // Replacement for the internal LB, mirroring the internet-facing case above.
        if (this.primaryInternalLoadBalancerToAttachOnUpdate != null) {
            if (this.primaryInternalLoadBalancer != null) {
                removeLoadBalancerAssociationFromIpConfiguration(this.primaryInternalLoadBalancer, primaryIpConfig);
            }
            associateLoadBalancerToIpConfiguration(this.primaryInternalLoadBalancerToAttachOnUpdate, primaryIpConfig);
            if (!this.primaryInternalLBBackendsToAddOnUpdate.isEmpty()) {
                removeAllBackendAssociationFromIpConfiguration(this.primaryInternalLoadBalancerToAttachOnUpdate, primaryIpConfig);
                associateBackEndsToIpConfiguration(this.primaryInternalLoadBalancerToAttachOnUpdate.id(),
                        primaryIpConfig,
                        this.primaryInternalLBBackendsToAddOnUpdate.toArray(new String[0]));
            }
            if (!this.primaryInternalLBInboundNatPoolsToAddOnUpdate.isEmpty()) {
                removeAllInboundNatPoolAssociationFromIpConfiguration(this.primaryInternalLoadBalancerToAttachOnUpdate, primaryIpConfig);
                associateInboundNATPoolsToIpConfiguration(this.primaryInternalLoadBalancerToAttachOnUpdate.id(),
                        primaryIpConfig,
                        this.primaryInternalLBInboundNatPoolsToAddOnUpdate.toArray(new String[0]));
            }
        }
        // All queued changes are applied; reset the pending-change state.
        this.removePrimaryInternetFacingLoadBalancerOnUpdate = false;
        this.removePrimaryInternalLoadBalancerOnUpdate = false;
        this.primaryInternetFacingLoadBalancerToAttachOnUpdate = null;
        this.primaryInternalLoadBalancerToAttachOnUpdate = null;
        this.primaryInternetFacingLBBackendsToRemoveOnUpdate.clear();
        this.primaryInternetFacingLBInboundNatPoolsToRemoveOnUpdate.clear();
        this.primaryInternalLBBackendsToRemoveOnUpdate.clear();
        this.primaryInternalLBInboundNatPoolsToRemoveOnUpdate.clear();
        this.primaryInternetFacingLBBackendsToAddOnUpdate.clear();
        this.primaryInternetFacingLBInboundNatPoolsToAddOnUpdate.clear();
        this.primaryInternalLBBackendsToAddOnUpdate.clear();
        this.primaryInternalLBInboundNatPoolsToAddOnUpdate.clear();
    }
    // Invalidate the lazily-resolved load balancer references so the next accessor
    // call re-resolves them from the service.
    private void clearCachedProperties() {
        this.primaryInternetFacingLoadBalancer = null;
        this.primaryInternalLoadBalancer = null;
    }
    // Lazily resolves (at most) two load balancers referenced by the primary IP
    // configuration's backend pools / inbound NAT pools, classifying each as
    // internet-facing (has public IPs) or internal and caching it in the
    // corresponding field. No-op if both are already cached.
    private void loadCurrentPrimaryLoadBalancersIfAvailable() throws IOException {
        if (this.primaryInternetFacingLoadBalancer != null && this.primaryInternalLoadBalancer != null) {
            return;
        }
        // Find the first LB id from the backend pools, falling back to NAT pools.
        String firstLoadBalancerId = null;
        VirtualMachineScaleSetIPConfigurationInner ipConfig = primaryNicDefaultIPConfiguration();
        if (!ipConfig.loadBalancerBackendAddressPools().isEmpty()) {
            firstLoadBalancerId = ResourceUtils
                    .parentResourcePathFromResourceId(ipConfig.loadBalancerBackendAddressPools().get(0).id());
        }
        if (firstLoadBalancerId == null && !ipConfig.loadBalancerInboundNatPools().isEmpty()) {
            firstLoadBalancerId = ResourceUtils
                    .parentResourcePathFromResourceId(ipConfig.loadBalancerInboundNatPools().get(0).id());
        }
        if (firstLoadBalancerId == null) {
            return;
        }
        LoadBalancer loadBalancer1 = this.networkManager
                .loadBalancers()
                .getById(firstLoadBalancerId);
        // Public IPs present => internet-facing; otherwise internal.
        if (loadBalancer1.publicIpAddressIds() != null && loadBalancer1.publicIpAddressIds().size() > 0) {
            this.primaryInternetFacingLoadBalancer = loadBalancer1;
        } else {
            this.primaryInternalLoadBalancer = loadBalancer1;
        }
        // Look for a second, distinct LB id (compared case-insensitively by prefix).
        String secondLoadBalancerId = null;
        for (SubResource subResource: ipConfig.loadBalancerBackendAddressPools()) {
            if (!subResource.id().toLowerCase().startsWith(firstLoadBalancerId.toLowerCase())) {
                secondLoadBalancerId = ResourceUtils
                        .parentResourcePathFromResourceId(subResource.id());
                break;
            }
        }
        if (secondLoadBalancerId == null) {
            for (SubResource subResource: ipConfig.loadBalancerInboundNatPools()) {
                if (!subResource.id().toLowerCase().startsWith(firstLoadBalancerId.toLowerCase())) {
                    secondLoadBalancerId = ResourceUtils
                            .parentResourcePathFromResourceId(subResource.id());
                    break;
                }
            }
        }
        if (secondLoadBalancerId == null) {
            return;
        }
        LoadBalancer loadBalancer2 = this.networkManager
                .loadBalancers()
                .getById(secondLoadBalancerId);
        if (loadBalancer2.publicIpAddressIds() != null && loadBalancer2.publicIpAddressIds().size() > 0) {
            this.primaryInternetFacingLoadBalancer = loadBalancer2;
        } else {
            this.primaryInternalLoadBalancer = loadBalancer2;
        }
    }
/**
 * Returns the default (first) IP configuration of the scale set's primary network
 * interface configuration, initializing its backend address pool and inbound NAT pool
 * lists to empty lists when they are {@code null} so callers can mutate them safely.
 *
 * @return the primary NIC's first IP configuration, never {@code null}
 * @throws RuntimeException when no primary NIC configuration with an IP configuration exists
 */
private VirtualMachineScaleSetIPConfigurationInner primaryNicDefaultIPConfiguration() {
    List<VirtualMachineScaleSetNetworkConfigurationInner> nicConfigurations = this.inner()
        .virtualMachineProfile()
        .networkProfile()
        .networkInterfaceConfigurations();
    for (VirtualMachineScaleSetNetworkConfigurationInner nicConfiguration : nicConfigurations) {
        if (!nicConfiguration.primary()) {
            continue;
        }
        if (nicConfiguration.ipConfigurations().isEmpty()) {
            continue;
        }
        VirtualMachineScaleSetIPConfigurationInner defaultIpConfig = nicConfiguration.ipConfigurations().get(0);
        // Normalize null pool lists to empty mutable lists for downstream association helpers.
        if (defaultIpConfig.loadBalancerBackendAddressPools() == null) {
            defaultIpConfig.withLoadBalancerBackendAddressPools(new ArrayList<SubResource>());
        }
        if (defaultIpConfig.loadBalancerInboundNatPools() == null) {
            defaultIpConfig.withLoadBalancerInboundNatPools(new ArrayList<SubResource>());
        }
        return defaultIpConfig;
    }
    throw new RuntimeException("Could not find the primary nic configuration or an IP configuration in it");
}
/**
 * Associates the named backend address pools of the given load balancer with the IP
 * configuration, skipping any pool that is already referenced (case-insensitive id match).
 *
 * @param loadBalancerId resource id of the load balancer owning the backend pools
 * @param ipConfig       IP configuration to add the references to
 * @param backendNames   names of the backend pools to associate
 */
private static void associateBackEndsToIpConfiguration(String loadBalancerId,
                                                       VirtualMachineScaleSetIPConfigurationInner ipConfig,
                                                       String... backendNames) {
    List<SubResource> newAssociations = new ArrayList<>();
    for (String backendName : backendNames) {
        final String backendPoolId = mergePath(loadBalancerId, "backendAddressPools", backendName);
        boolean alreadyAssociated = ipConfig.loadBalancerBackendAddressPools()
            .stream()
            .anyMatch(subResource -> subResource.id().equalsIgnoreCase(backendPoolId));
        if (!alreadyAssociated) {
            newAssociations.add(new SubResource().withId(backendPoolId));
        }
    }
    ipConfig.loadBalancerBackendAddressPools().addAll(newAssociations);
}
/**
 * Associates the named inbound NAT pools of the given load balancer with the IP
 * configuration, skipping any pool that is already referenced (case-insensitive id match).
 *
 * @param loadBalancerId  resource id of the load balancer owning the NAT pools
 * @param ipConfig        IP configuration to add the references to
 * @param inboundNatPools names of the inbound NAT pools to associate
 */
private static void associateInboundNATPoolsToIpConfiguration(String loadBalancerId,
                                                              VirtualMachineScaleSetIPConfigurationInner ipConfig,
                                                              String... inboundNatPools) {
    List<SubResource> newAssociations = new ArrayList<>();
    for (String inboundNatPool : inboundNatPools) {
        final String inboundNatPoolId = mergePath(loadBalancerId, "inboundNatPools", inboundNatPool);
        boolean alreadyAssociated = ipConfig.loadBalancerInboundNatPools()
            .stream()
            .anyMatch(subResource -> subResource.id().equalsIgnoreCase(inboundNatPoolId));
        if (!alreadyAssociated) {
            newAssociations.add(new SubResource().withId(inboundNatPoolId));
        }
    }
    ipConfig.loadBalancerInboundNatPools().addAll(newAssociations);
}
/**
 * Returns the load balancer's backend pools that are referenced by the given IP
 * configuration, keyed by backend name (case-insensitive id comparison).
 *
 * @param loadBalancer load balancer whose backends are inspected
 * @param ipConfig     IP configuration holding the backend pool references
 * @return map from backend name to backend for all associated backends
 */
private static Map<String, Backend> getBackendsAssociatedWithIpConfiguration(LoadBalancer loadBalancer,
                                                                             VirtualMachineScaleSetIPConfigurationInner ipConfig) {
    String loadBalancerId = loadBalancer.id();
    Map<String, Backend> attachedBackends = new HashMap<>();
    for (Backend lbBackend : loadBalancer.backends().values()) {
        final String backendId = mergePath(loadBalancerId, "backendAddressPools", lbBackend.name());
        boolean attached = ipConfig.loadBalancerBackendAddressPools()
            .stream()
            .anyMatch(subResource -> subResource.id().equalsIgnoreCase(backendId));
        if (attached) {
            attachedBackends.put(lbBackend.name(), lbBackend);
        }
    }
    return attachedBackends;
}
/**
 * Returns the load balancer's inbound NAT pools that are referenced by the given IP
 * configuration, keyed by NAT pool name (case-insensitive id comparison).
 *
 * @param loadBalancer load balancer whose inbound NAT pools are inspected
 * @param ipConfig     IP configuration holding the NAT pool references
 * @return map from NAT pool name to NAT pool for all associated pools
 */
private static Map<String, InboundNatPool> getInboundNatPoolsAssociatedWithIpConfiguration(LoadBalancer loadBalancer,
                                                                                           VirtualMachineScaleSetIPConfigurationInner ipConfig) {
    String loadBalancerId = loadBalancer.id();
    Map<String, InboundNatPool> attachedInboundNatPools = new HashMap<>();
    for (InboundNatPool lbInboundNatPool : loadBalancer.inboundNatPools().values()) {
        final String inboundNatPoolId = mergePath(loadBalancerId, "inboundNatPools", lbInboundNatPool.name());
        boolean attached = ipConfig.loadBalancerInboundNatPools()
            .stream()
            .anyMatch(subResource -> subResource.id().equalsIgnoreCase(inboundNatPoolId));
        if (attached) {
            attachedInboundNatPools.put(lbInboundNatPool.name(), lbInboundNatPool);
        }
    }
    return attachedInboundNatPools;
}
/**
 * Associates every backend pool and every inbound NAT pool of the given load balancer
 * with the IP configuration.
 *
 * @param loadBalancer load balancer whose pools are associated
 * @param ipConfig     IP configuration to receive the references
 */
private static void associateLoadBalancerToIpConfiguration(LoadBalancer loadBalancer,
                                                           VirtualMachineScaleSetIPConfigurationInner ipConfig) {
    String[] backendNames = loadBalancer.backends()
        .values()
        .stream()
        .map(Backend::name)
        .toArray(String[]::new);
    associateBackEndsToIpConfiguration(loadBalancer.id(),
        ipConfig,
        backendNames);
    String[] natPoolNames = loadBalancer.inboundNatPools()
        .values()
        .stream()
        .map(InboundNatPool::name)
        .toArray(String[]::new);
    associateInboundNATPoolsToIpConfiguration(loadBalancer.id(),
        ipConfig,
        natPoolNames);
}
/**
 * Removes every backend pool and inbound NAT pool reference belonging to the given load
 * balancer from the IP configuration (both association kinds are cleared).
 *
 * @param loadBalancer load balancer whose references should be removed
 * @param ipConfig     IP configuration to remove the references from
 */
private static void removeLoadBalancerAssociationFromIpConfiguration(LoadBalancer loadBalancer,
                                                                     VirtualMachineScaleSetIPConfigurationInner ipConfig) {
    removeAllBackendAssociationFromIpConfiguration(loadBalancer, ipConfig);
    removeAllInboundNatPoolAssociationFromIpConfiguration(loadBalancer, ipConfig);
}
/**
 * Removes from the IP configuration all backend pool references that belong to the given
 * load balancer (identified by a case-insensitive "loadBalancerId/" prefix match).
 *
 * @param loadBalancer load balancer whose backend references are removed
 * @param ipConfig     IP configuration to clean up
 */
private static void removeAllBackendAssociationFromIpConfiguration(LoadBalancer loadBalancer,
                                                                   VirtualMachineScaleSetIPConfigurationInner ipConfig) {
    final String ownerPrefix = loadBalancer.id().toLowerCase() + "/";
    ipConfig.loadBalancerBackendAddressPools()
        .removeIf(subResource -> subResource.id().toLowerCase().startsWith(ownerPrefix));
}
/**
 * Removes from the IP configuration all inbound NAT pool references that belong to the
 * given load balancer (identified by a case-insensitive "loadBalancerId/" prefix match).
 *
 * @param loadBalancer load balancer whose NAT pool references are removed
 * @param ipConfig     IP configuration to clean up
 */
private static void removeAllInboundNatPoolAssociationFromIpConfiguration(LoadBalancer loadBalancer,
                                                                          VirtualMachineScaleSetIPConfigurationInner ipConfig) {
    final String ownerPrefix = loadBalancer.id().toLowerCase() + "/";
    ipConfig.loadBalancerInboundNatPools()
        .removeIf(subResource -> subResource.id().toLowerCase().startsWith(ownerPrefix));
}
/**
 * Removes the named backend pool references of the given load balancer from the IP
 * configuration; names with no matching reference are silently ignored.
 *
 * @param loadBalancerId resource id of the load balancer owning the backend pools
 * @param ipConfig       IP configuration to remove the references from
 * @param backendNames   names of the backend pools to dissociate
 */
private static void removeBackendsFromIpConfiguration(String loadBalancerId,
                                                      VirtualMachineScaleSetIPConfigurationInner ipConfig,
                                                      String... backendNames) {
    List<SubResource> toRemove = new ArrayList<>();
    for (String backendName : backendNames) {
        final String backendPoolId = mergePath(loadBalancerId, "backendAddressPools", backendName);
        // at most one reference per pool id is collected, mirroring the association logic
        ipConfig.loadBalancerBackendAddressPools()
            .stream()
            .filter(subResource -> subResource.id().equalsIgnoreCase(backendPoolId))
            .findFirst()
            .ifPresent(toRemove::add);
    }
    for (SubResource subResource : toRemove) {
        ipConfig.loadBalancerBackendAddressPools().remove(subResource);
    }
}
/**
 * Removes the named inbound NAT pool references of the given load balancer from the IP
 * configuration; names with no matching reference are silently ignored.
 *
 * @param loadBalancerId      resource id of the load balancer owning the NAT pools
 * @param ipConfig            IP configuration to remove the references from
 * @param inboundNatPoolNames names of the NAT pools to dissociate
 */
private static void removeInboundNatPoolsFromIpConfiguration(String loadBalancerId,
                                                             VirtualMachineScaleSetIPConfigurationInner ipConfig,
                                                             String... inboundNatPoolNames) {
    List<SubResource> toRemove = new ArrayList<>();
    for (String natPoolName : inboundNatPoolNames) {
        final String inboundNatPoolId = mergePath(loadBalancerId, "inboundNatPools", natPoolName);
        // at most one reference per pool id is collected, mirroring the association logic
        ipConfig.loadBalancerInboundNatPools()
            .stream()
            .filter(subResource -> subResource.id().equalsIgnoreCase(inboundNatPoolId))
            .findFirst()
            .ifPresent(toRemove::add);
    }
    for (SubResource subResource : toRemove) {
        ipConfig.loadBalancerInboundNatPools().remove(subResource);
    }
}
/**
 * Appends the given items, in order, to the end of the list.
 *
 * @param list  list to append to (must be mutable)
 * @param items items to append
 * @param <T>   element type
 */
@SafeVarargs // items array is only read, never stored or exposed — no heap pollution
private static <T> void addToList(List<T> list, T...items) {
    for (T item : items) {
        list.add(item);
    }
}
/**
 * Joins the given path segments with single "/" separators. Trailing slashes are trimmed
 * from each segment (a segment reduced below two characters keeps its last character),
 * empty segments are skipped, and the result carries no trailing slash.
 *
 * @param segments path segments to join
 * @return the merged path
 */
private static String mergePath(String... segments) {
    StringBuilder merged = new StringBuilder();
    for (String segment : segments) {
        // drop trailing '/' characters, but never shrink a segment below one character
        int end = segment.length();
        while (end > 1 && segment.charAt(end - 1) == '/') {
            end--;
        }
        String trimmed = segment.substring(0, end);
        if (!trimmed.isEmpty()) {
            merged.append(trimmed).append('/');
        }
    }
    // remove the separator appended after the last segment, if any
    int length = merged.length();
    if (length > 0 && merged.charAt(length - 1) == '/') {
        merged.setLength(length - 1);
    }
    return merged.toString();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.util.nio;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Deque;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.lang.IgniteInClosure;
import org.jetbrains.annotations.Nullable;
/**
* Recovery information for single node.
*/
/**
 * Recovery information for single node.
 * <p>
 * Tracks, for one remote node: the futures of sent-but-not-yet-acknowledged messages (so
 * they can be resent after a reconnect or failed when the node leaves), the counters of
 * the acknowledgment protocol, and the reservation/handshake state used to serialize
 * connection establishment between a local connect attempt and an incoming handshake.
 * <p>
 * Reservation, connection, handshake and node-left state is guarded by
 * {@code synchronized (this)} together with {@code wait}/{@code notifyAll}. The message
 * counters ({@code acked}, {@code rcvCnt}, {@code resendCnt}) are read and written without
 * locking in several methods — presumably they are confined to the NIO worker thread that
 * owns this descriptor (NOTE(review): confirm against callers).
 */
public class GridNioRecoveryDescriptor {
    /** Number of acknowledged messages. */
    private long acked;

    /** Unacknowledged message futures, oldest first. */
    private final ArrayDeque<GridNioFuture<?>> msgFuts;

    /** Number of queued futures still to be skipped by {@link #add} during a resend after reconnect. */
    private int resendCnt;

    /** Number of received messages. */
    private long rcvCnt;

    /** Reserved flag: set while one party (connect attempt or handshake) owns the descriptor. */
    private boolean reserved;

    /** Last acknowledged message. */
    private long lastAck;

    /** Node left flag. */
    private boolean nodeLeft;

    /** Target node. */
    private final ClusterNode node;

    /** Logger. */
    private final IgniteLogger log;

    /** Pending incoming connection request from remote node: (handshake id, completion closure). */
    private IgniteBiTuple<Long, IgniteInClosure<Boolean>> handshakeReq;

    /** Connected flag. */
    private boolean connected;

    /** Number of outgoing connect attempts. */
    private long connectCnt;

    /** Maximum size of unacknowledged messages queue. */
    private final int queueLimit;

    /**
     * @param queueLimit Maximum size of unacknowledged messages queue.
     * @param node Node.
     * @param log Logger.
     */
    public GridNioRecoveryDescriptor(int queueLimit, ClusterNode node, IgniteLogger log) {
        assert !node.isLocal() : node;
        assert queueLimit > 0;

        msgFuts = new ArrayDeque<>(queueLimit);

        this.queueLimit = queueLimit;
        this.node = node;
        this.log = log;
    }

    /**
     * Increments the outgoing connect attempts counter.
     *
     * @return Connect count <em>before</em> the increment (post-increment semantics).
     */
    public long incrementConnectCount() {
        return connectCnt++;
    }

    /**
     * @return Node.
     */
    public ClusterNode node() {
        return node;
    }

    /**
     * Increments received messages counter.
     *
     * @return Number of received messages.
     */
    public long onReceived() {
        rcvCnt++;

        return rcvCnt;
    }

    /**
     * @return Number of received messages.
     */
    public long received() {
        return rcvCnt;
    }

    /**
     * @param lastAck Last acknowledged message.
     */
    public void lastAcknowledged(long lastAck) {
        this.lastAck = lastAck;
    }

    /**
     * @return Last acknowledged message.
     */
    public long lastAcknowledged() {
        return lastAck;
    }

    /**
     * @return Received messages count.
     */
    public long receivedCount() {
        return rcvCnt;
    }

    /**
     * @return Maximum size of unacknowledged messages queue.
     */
    public int queueLimit() {
        return queueLimit;
    }

    /**
     * Registers the future of a sent message so it can be resent or failed later.
     * Futures flagged {@code skipRecovery} are never queued; while a post-reconnect
     * resend is in progress ({@code resendCnt > 0}) re-sent messages are not re-queued,
     * only counted down.
     *
     * @param fut NIO future.
     * @return {@code False} if queue limit is exceeded.
     */
    public boolean add(GridNioFuture<?> fut) {
        assert fut != null;

        if (!fut.skipRecovery()) {
            if (resendCnt == 0) {
                msgFuts.addLast(fut);

                return msgFuts.size() < queueLimit;
            }
            else
                resendCnt--;
        }

        return true;
    }

    /**
     * Completes and drops the futures of every message the remote node has confirmed,
     * advancing {@code acked} up to {@code rcvCnt} and notifying each future's ack closure.
     *
     * @param rcvCnt Number of messages received by remote node.
     */
    public void ackReceived(long rcvCnt) {
        if (log.isDebugEnabled())
            log.debug("Handle acknowledgment [acked=" + acked + ", rcvCnt=" + rcvCnt +
                ", msgFuts=" + msgFuts.size() + ']');

        while (acked < rcvCnt) {
            GridNioFuture<?> fut = msgFuts.pollFirst();

            assert fut != null : "Missed message future [rcvCnt=" + rcvCnt +
                ", acked=" + acked +
                ", desc=" + this + ']';

            assert fut.isDone() : fut;

            if (fut.ackClosure() != null)
                fut.ackClosure().apply(null);

            acked++;
        }
    }

    /**
     * Node left callback. Fails all pending message futures unless the descriptor is
     * currently reserved — in that case the reservation holder fails them on {@link #release}.
     */
    public void onNodeLeft() {
        GridNioFuture<?>[] futs = null;

        synchronized (this) {
            nodeLeft = true;

            if (!reserved && !msgFuts.isEmpty()) {
                futs = msgFuts.toArray(new GridNioFuture<?>[msgFuts.size()]);

                msgFuts.clear();
            }
        }

        // Complete outside the lock to avoid calling alien code while synchronized.
        if (futs != null)
            completeOnNodeLeft(futs);
    }

    /**
     * @return Message futures for unacknowledged messages.
     */
    public Deque<GridNioFuture<?>> messagesFutures() {
        return msgFuts;
    }

    /**
     * @param node Node.
     * @return {@code True} if node is not null and has the same order as initial remote node.
     */
    public boolean nodeAlive(@Nullable ClusterNode node) {
        return node != null && node.order() == this.node.order();
    }

    /**
     * Blocks until either a connection is established or the current reservation is
     * released, then reserves the descriptor if still not connected.
     *
     * @throws InterruptedException If interrupted.
     * @return {@code True} if reserved.
     */
    public boolean reserve() throws InterruptedException {
        synchronized (this) {
            while (!connected && reserved)
                wait();

            if (!connected)
                reserved = true;

            return !connected;
        }
    }

    /**
     * Handshake callback: acknowledges what the remote node already received (unless it
     * left) and marks the remaining queued futures for resend so {@link #add} will not
     * re-queue them.
     *
     * @param rcvCnt Number of messages received by remote node.
     */
    public void onHandshake(long rcvCnt) {
        synchronized (this) {
            if (!nodeLeft)
                ackReceived(rcvCnt);

            resendCnt = msgFuts.size();
        }
    }

    /**
     * Marks the descriptor as connected (must hold a reservation); any pending incoming
     * handshake request is rejected ({@code apply(false)}) and waiters are woken up.
     */
    public void connected() {
        synchronized (this) {
            assert reserved : this;
            assert !connected : this;

            connected = true;

            if (handshakeReq != null) {
                IgniteInClosure<Boolean> c = handshakeReq.get2();

                assert c != null;

                c.apply(false);

                handshakeReq = null;
            }

            notifyAll();
        }
    }

    /**
     * Releases the reservation. If an incoming handshake request is pending, ownership is
     * handed to it ({@code apply(true)}, reservation kept); otherwise the reservation is
     * cleared and waiters are notified. If the node left meanwhile, pending message
     * futures are failed.
     */
    public void release() {
        GridNioFuture<?>[] futs = null;

        synchronized (this) {
            connected = false;

            if (handshakeReq != null) {
                IgniteInClosure<Boolean> c = handshakeReq.get2();

                assert c != null;

                handshakeReq = null;

                c.apply(true);
            }
            else {
                reserved = false;

                notifyAll();
            }

            if (nodeLeft && !msgFuts.isEmpty()) {
                futs = msgFuts.toArray(new GridNioFuture<?>[msgFuts.size()]);

                msgFuts.clear();
            }
        }

        // Complete outside the lock to avoid calling alien code while synchronized.
        if (futs != null)
            completeOnNodeLeft(futs);
    }

    /**
     * Non-blocking reservation attempt for an incoming handshake. When the descriptor is
     * already reserved, the request is queued (or replaces an older queued request — the
     * one with the higher handshake id wins, the loser is rejected with {@code apply(false)}).
     *
     * @param id Handshake ID.
     * @param c Closure to run on reserve.
     * @return {@code True} if reserved.
     */
    public boolean tryReserve(long id, IgniteInClosure<Boolean> c) {
        synchronized (this) {
            if (connected) {
                c.apply(false);

                return false;
            }

            if (reserved) {
                if (handshakeReq != null) {
                    assert handshakeReq.get1() != null;

                    long id0 = handshakeReq.get1();

                    assert id0 != id : id0;

                    if (id > id0) {
                        IgniteInClosure<Boolean> c0 = handshakeReq.get2();

                        assert c0 != null;

                        c0.apply(false);

                        handshakeReq = new IgniteBiTuple<>(id, c);
                    }
                    else
                        c.apply(false);
                }
                else
                    handshakeReq = new IgniteBiTuple<>(id, c);

                return false;
            }
            else {
                reserved = true;

                return true;
            }
        }
    }

    /**
     * Fails each future with an {@link IOException} ("node has left") and notifies its
     * ack closure with an {@link IgniteException} wrapping that error.
     *
     * @param futs Futures to complete.
     */
    private void completeOnNodeLeft(GridNioFuture<?>[] futs) {
        for (GridNioFuture<?> msg : futs) {
            IOException e = new IOException("Failed to send message, node has left: " + node.id());

            ((GridNioFutureImpl)msg).onDone(e);

            if (msg.ackClosure() != null)
                msg.ackClosure().apply(new IgniteException(e));
        }
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridNioRecoveryDescriptor.class, this);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.Rewriteable;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.node.ResponseCollectorService;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.internal.AliasFilter;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.RemoteClusterAware;
import org.elasticsearch.transport.RemoteClusterService;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportService;
import java.util.*;
import java.util.concurrent.Executor;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.LongSupplier;
import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH;
/**
 * Transport-layer entry point for the search action: resolves local and remote
 * (cross-cluster) indices, builds alias filters, index boosts and shard iterators, and
 * kicks off the asynchronous search phases.
 */
public class TransportSearchAction extends HandledTransportAction<SearchRequest, SearchResponse> {

    /** The maximum number of shards for a single search request. */
    public static final Setting<Long> SHARD_COUNT_LIMIT_SETTING = Setting.longSetting(
            "action.search.shard_count.limit", Long.MAX_VALUE, 1L, Property.Dynamic, Property.NodeScope);

    /** Provides cluster state and operation routing for shard resolution. */
    private final ClusterService clusterService;

    /** Sends per-shard search requests; also owns the remote cluster service. */
    private final SearchTransportService searchTransportService;

    /** Groups indices by remote cluster and collects remote search shards. */
    private final RemoteClusterService remoteClusterService;

    /** Coordinates/merges results of the individual search phases. */
    private final SearchPhaseController searchPhaseController;

    /** Node-local search service (alias filters, rewrite context, partial-result defaults). */
    private final SearchService searchService;
    /**
     * Dependency-injected constructor: registers the search request handler on the
     * transport service and wires the collaborating services.
     */
    @Inject
    public TransportSearchAction(Settings settings, ThreadPool threadPool, TransportService transportService, SearchService searchService,
                                 SearchTransportService searchTransportService, SearchPhaseController searchPhaseController,
                                 ClusterService clusterService, ActionFilters actionFilters,
                                 IndexNameExpressionResolver indexNameExpressionResolver) {
        super(settings, SearchAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, SearchRequest::new);
        this.searchPhaseController = searchPhaseController;
        this.searchTransportService = searchTransportService;
        this.remoteClusterService = searchTransportService.getRemoteClusterService();
        SearchTransportService.registerRequestHandler(transportService, searchService);
        this.clusterService = clusterService;
        this.searchService = searchService;
    }
private Map<String, AliasFilter> buildPerIndexAliasFilter(SearchRequest request, ClusterState clusterState,
Index[] concreteIndices, Map<String, AliasFilter> remoteAliasMap) {
final Map<String, AliasFilter> aliasFilterMap = new HashMap<>();
final Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices());
for (Index index : concreteIndices) {
clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index.getName());
AliasFilter aliasFilter = searchService.buildAliasFilter(clusterState, index.getName(), indicesAndAliases);
assert aliasFilter != null;
aliasFilterMap.put(index.getUUID(), aliasFilter);
}
aliasFilterMap.putAll(remoteAliasMap);
return aliasFilterMap;
}
private Map<String, Float> resolveIndexBoosts(SearchRequest searchRequest, ClusterState clusterState) {
if (searchRequest.source() == null) {
return Collections.emptyMap();
}
SearchSourceBuilder source = searchRequest.source();
if (source.indexBoosts() == null) {
return Collections.emptyMap();
}
Map<String, Float> concreteIndexBoosts = new HashMap<>();
for (SearchSourceBuilder.IndexBoost ib : source.indexBoosts()) {
Index[] concreteIndices =
indexNameExpressionResolver.concreteIndices(clusterState, searchRequest.indicesOptions(), ib.getIndex());
for (Index concreteIndex : concreteIndices) {
concreteIndexBoosts.putIfAbsent(concreteIndex.getUUID(), ib.getBoost());
}
}
return Collections.unmodifiableMap(concreteIndexBoosts);
}
/**
* Search operations need two clocks. One clock is to fulfill real clock needs (e.g., resolving
* "now" to an index name). Another clock is needed for measuring how long a search operation
* took. These two uses are at odds with each other. There are many issues with using a real
* clock for measuring how long an operation took (they often lack precision, they are subject
* to moving backwards due to NTP and other such complexities, etc.). There are also issues with
* using a relative clock for reporting real time. Thus, we simply separate these two uses.
*/
static class SearchTimeProvider {
private final long absoluteStartMillis;
private final long relativeStartNanos;
private final LongSupplier relativeCurrentNanosProvider;
/**
* Instantiates a new search time provider. The absolute start time is the real clock time
* used for resolving index expressions that include dates. The relative start time is the
* start of the search operation according to a relative clock. The total time the search
* operation took can be measured against the provided relative clock and the relative start
* time.
*
* @param absoluteStartMillis the absolute start time in milliseconds since the epoch
* @param relativeStartNanos the relative start time in nanoseconds
* @param relativeCurrentNanosProvider provides the current relative time
*/
SearchTimeProvider(
final long absoluteStartMillis,
final long relativeStartNanos,
final LongSupplier relativeCurrentNanosProvider) {
this.absoluteStartMillis = absoluteStartMillis;
this.relativeStartNanos = relativeStartNanos;
this.relativeCurrentNanosProvider = relativeCurrentNanosProvider;
}
long getAbsoluteStartMillis() {
return absoluteStartMillis;
}
long getRelativeStartNanos() {
return relativeStartNanos;
}
long getRelativeCurrentNanos() {
return relativeCurrentNanosProvider.getAsLong();
}
}
    /**
     * Executes a search: first rewrites the request source against the rewrite context,
     * then splits the requested indices into local and per-remote-cluster groups. Purely
     * local searches go straight to {@code executeSearch}; cross-cluster searches first
     * collect the remote shards asynchronously and then proceed with the combined shard
     * set. All failures are propagated to the listener.
     */
    @Override
    protected void doExecute(Task task, SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
        // relative clock start, used to measure the total took-time of this search
        final long relativeStartNanos = System.nanoTime();
        final SearchTimeProvider timeProvider =
            new SearchTimeProvider(searchRequest.getOrCreateAbsoluteStartMillis(), relativeStartNanos, System::nanoTime);
        ActionListener<SearchSourceBuilder> rewriteListener = ActionListener.wrap(source -> {
            if (source != searchRequest.source()) {
                // Only set the source when rewriting changed it: the setter rejects null, but the
                // source may legitimately already be null, and we want to catch a rewrite that
                // turns a non-null source into null by mistake.
                searchRequest.source(source);
            }
            final ClusterState clusterState = clusterService.state();
            // group requested indices by remote cluster alias; the local group is removed below
            final Map<String, OriginalIndices> remoteClusterIndices = remoteClusterService.groupIndices(searchRequest.indicesOptions(),
                searchRequest.indices(), idx -> indexNameExpressionResolver.hasIndexOrAlias(idx, clusterState));
            OriginalIndices localIndices = remoteClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
            if (remoteClusterIndices.isEmpty()) {
                // purely local search: no remote shard iterators, no remote node lookup
                executeSearch((SearchTask)task, timeProvider, searchRequest, localIndices, Collections.emptyList(),
                    (clusterName, nodeId) -> null, clusterState, Collections.emptyMap(), listener,
                    clusterState.getNodes().getDataNodes().size(), SearchResponse.Clusters.EMPTY);
            } else {
                // cross-cluster search: ask every remote cluster for its matching shards first
                remoteClusterService.collectSearchShards(searchRequest.indicesOptions(), searchRequest.preference(),
                    searchRequest.routing(), remoteClusterIndices, ActionListener.wrap((searchShardsResponses) -> {
                        List<SearchShardIterator> remoteShardIterators = new ArrayList<>();
                        Map<String, AliasFilter> remoteAliasFilters = new HashMap<>();
                        BiFunction<String, String, DiscoveryNode> clusterNodeLookup = processRemoteShards(searchShardsResponses,
                            remoteClusterIndices, remoteShardIterators, remoteAliasFilters);
                        // node count = remote nodes involved + all local data nodes
                        int numNodesInvolved = searchShardsResponses.values().stream().mapToInt(r -> r.getNodes().length).sum()
                            + clusterState.getNodes().getDataNodes().size();
                        SearchResponse.Clusters clusters = buildClusters(localIndices, remoteClusterIndices, searchShardsResponses);
                        executeSearch((SearchTask) task, timeProvider, searchRequest, localIndices,
                            remoteShardIterators, clusterNodeLookup, clusterState, remoteAliasFilters, listener, numNodesInvolved,
                            clusters);
                    }, listener::onFailure));
            }
        }, listener::onFailure);
        if (searchRequest.source() == null) {
            // nothing to rewrite — continue immediately with the null source
            rewriteListener.onResponse(searchRequest.source());
        } else {
            Rewriteable.rewriteAndFetch(searchRequest.source(), searchService.getRewriteContext(timeProvider::getAbsoluteStartMillis),
                rewriteListener);
        }
    }
static SearchResponse.Clusters buildClusters(OriginalIndices localIndices, Map<String, OriginalIndices> remoteIndices,
Map<String, ClusterSearchShardsResponse> searchShardsResponses) {
int localClusters = localIndices == null ? 0 : 1;
int totalClusters = remoteIndices.size() + localClusters;
int successfulClusters = localClusters;
for (ClusterSearchShardsResponse searchShardsResponse : searchShardsResponses.values()) {
if (searchShardsResponse != ClusterSearchShardsResponse.EMPTY) {
successfulClusters++;
}
}
int skippedClusters = totalClusters - successfulClusters;
return new SearchResponse.Clusters(totalClusters, successfulClusters, skippedClusters);
}
    /**
     * Converts the per-remote-cluster shard responses into {@code SearchShardIterator}s
     * (added to {@code remoteShardIterators}) and alias filters keyed by index UUID
     * (added to {@code aliasFilterMap}), and returns a lookup from (cluster alias, node
     * id) to the remote {@code DiscoveryNode}.
     *
     * @param searchShardsResponses  per-cluster shard responses
     * @param remoteIndicesByCluster originally requested indices per remote cluster alias
     * @param remoteShardIterators   output: one iterator per remote shard group
     * @param aliasFilterMap         output: alias filters keyed by index UUID
     * @return lookup resolving a remote node by cluster alias and node id; throws
     *         {@code IllegalArgumentException} for an unknown cluster alias
     */
    static BiFunction<String, String, DiscoveryNode> processRemoteShards(Map<String, ClusterSearchShardsResponse> searchShardsResponses,
                                                                         Map<String, OriginalIndices> remoteIndicesByCluster,
                                                                         List<SearchShardIterator> remoteShardIterators,
                                                                         Map<String, AliasFilter> aliasFilterMap) {
        Map<String, Map<String, DiscoveryNode>> clusterToNode = new HashMap<>();
        for (Map.Entry<String, ClusterSearchShardsResponse> entry : searchShardsResponses.entrySet()) {
            String clusterAlias = entry.getKey();
            ClusterSearchShardsResponse searchShardsResponse = entry.getValue();
            // index the cluster's nodes by id for the returned lookup function
            HashMap<String, DiscoveryNode> idToDiscoveryNode = new HashMap<>();
            clusterToNode.put(clusterAlias, idToDiscoveryNode);
            for (DiscoveryNode remoteNode : searchShardsResponse.getNodes()) {
                idToDiscoveryNode.put(remoteNode.getId(), remoteNode);
            }
            final Map<String, AliasFilter> indicesAndFilters = searchShardsResponse.getIndicesAndFilters();
            for (ClusterSearchShardsGroup clusterSearchShardsGroup : searchShardsResponse.getGroups()) {
                //add the cluster name to the remote index names for indices disambiguation
                //this ends up in the hits returned with the search response
                ShardId shardId = clusterSearchShardsGroup.getShardId();
                final AliasFilter aliasFilter;
                if (indicesAndFilters == null) {
                    aliasFilter = AliasFilter.EMPTY;
                } else {
                    aliasFilter = indicesAndFilters.get(shardId.getIndexName());
                    assert aliasFilter != null : "alias filter must not be null for index: " + shardId.getIndex();
                }
                // when aliases matched, report them as the searched indices instead of the concrete name
                String[] aliases = aliasFilter.getAliases();
                String[] finalIndices = aliases.length == 0 ? new String[] {shardId.getIndexName()} : aliases;
                // here we have to map the filters to the UUID since from now on we use the uuid for the lookup
                aliasFilterMap.put(shardId.getIndex().getUUID(), aliasFilter);
                final OriginalIndices originalIndices = remoteIndicesByCluster.get(clusterAlias);
                assert originalIndices != null : "original indices are null for clusterAlias: " + clusterAlias;
                SearchShardIterator shardIterator = new SearchShardIterator(clusterAlias, shardId,
                    Arrays.asList(clusterSearchShardsGroup.getShards()), new OriginalIndices(finalIndices,
                    originalIndices.indicesOptions()));
                remoteShardIterators.add(shardIterator);
            }
        }
        return (clusterAlias, nodeId) -> {
            Map<String, DiscoveryNode> clusterNodes = clusterToNode.get(clusterAlias);
            if (clusterNodes == null) {
                throw new IllegalArgumentException("unknown remote cluster: " + clusterAlias);
            }
            return clusterNodes.get(nodeId);
        };
    }
private Index[] resolveLocalIndices(OriginalIndices localIndices,
IndicesOptions indicesOptions,
ClusterState clusterState,
SearchTimeProvider timeProvider) {
if (localIndices == null) {
return Index.EMPTY_ARRAY; //don't search on any local index (happens when only remote indices were specified)
}
return indexNameExpressionResolver.concreteIndices(clusterState, indicesOptions,
timeProvider.getAbsoluteStartMillis(), localIndices.indices());
}
    /**
     * Core search orchestration: resolves concrete local indices, alias filters, routing
     * and index boosts, merges local and remote shard iterators, applies the shard-count
     * limit and per-request defaults, then starts the asynchronous search action.
     * Failures raised here (cluster blocks, shard limit) propagate to the caller.
     */
    private void executeSearch(SearchTask task, SearchTimeProvider timeProvider, SearchRequest searchRequest,
                               OriginalIndices localIndices, List<SearchShardIterator> remoteShardIterators,
                               BiFunction<String, String, DiscoveryNode> remoteConnections, ClusterState clusterState,
                               Map<String, AliasFilter> remoteAliasMap, ActionListener<SearchResponse> listener, int nodeCount,
                               SearchResponse.Clusters clusters) {
        // reject the search outright when reads are blocked cluster-wide
        clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ);
        // TODO: I think startTime() should become part of ActionRequest and that should be used both for index name
        // date math expressions and $now in scripts. This way all apis will deal with now in the same way instead
        // of just for the _search api
        final Index[] indices = resolveLocalIndices(localIndices, searchRequest.indicesOptions(), clusterState, timeProvider);
        Map<String, AliasFilter> aliasFilter = buildPerIndexAliasFilter(searchRequest, clusterState, indices, remoteAliasMap);
        Map<String, Set<String>> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(),
            searchRequest.indices());
        routingMap = routingMap == null ? Collections.emptyMap() : Collections.unmodifiableMap(routingMap);
        String[] concreteIndices = new String[indices.length];
        for (int i = 0; i < indices.length; i++) {
            concreteIndices[i] = indices[i].getName();
        }
        // per-node pending-search counts feed shard ordering (adaptive replica selection)
        Map<String, Long> nodeSearchCounts = searchTransportService.getPendingSearchRequests();
        // NOTE(review): tokenRanges()/remoteAddress() suggest Cassandra token-aware shard
        // routing (Elassandra fork) — confirm against the operationRouting implementation.
        GroupShardsIterator<ShardIterator> localShardsIterator = clusterService.operationRouting().searchShards(
                clusterState,
                concreteIndices,
                searchRequest.types(),
                routingMap,
                searchRequest.preference(),
                searchService.getResponseCollectorService(),
                searchRequest.tokenRanges(),
                searchRequest.remoteAddress(),
                nodeSearchCounts);
        GroupShardsIterator<SearchShardIterator> shardIterators = mergeShardsIterators(localShardsIterator, localIndices,
            searchRequest.getLocalClusterAlias(), remoteShardIterators);
        failIfOverShardCountLimit(clusterService, shardIterators.size());
        Map<String, Float> concreteIndexBoosts = resolveIndexBoosts(searchRequest, clusterState);
        // optimize search type for cases where there is only one shard group to search on
        if (shardIterators.size() == 1) {
            // if we only have one group, then we always want Q_T_F, no need for DFS, and no need to do THEN since we hit one shard
            searchRequest.searchType(QUERY_THEN_FETCH);
        }
        if (searchRequest.allowPartialSearchResults() == null) {
           // No user preference defined in search request - apply cluster service default
            searchRequest.allowPartialSearchResults(searchService.defaultAllowPartialSearchResults());
        }
        if (searchRequest.isSuggestOnly()) {
            // disable request cache if we have only suggest
            searchRequest.requestCache(false);
            switch (searchRequest.searchType()) {
                case DFS_QUERY_THEN_FETCH:
                    // convert to Q_T_F if we have only suggest
                    searchRequest.searchType(QUERY_THEN_FETCH);
                    break;
            }
        }
        final DiscoveryNodes nodes = clusterState.nodes();
        BiFunction<String, String, Transport.Connection> connectionLookup = buildConnectionLookup(searchRequest.getLocalClusterAlias(),
            nodes::get, remoteConnections, searchTransportService::getConnection);
        assert nodeCount > 0 || shardIterators.size() == 0 : "non empty search iterators but node count is 0";
        setMaxConcurrentShardRequests(searchRequest, nodeCount);
        boolean preFilterSearchShards = shouldPreFilterSearchShards(searchRequest, shardIterators);
        searchAsyncAction(task, searchRequest, shardIterators, timeProvider, connectionLookup, clusterState.version(),
            Collections.unmodifiableMap(aliasFilter), concreteIndexBoosts, routingMap, listener, preFilterSearchShards, clusters).start();
    }
static void setMaxConcurrentShardRequests(SearchRequest searchRequest, int nodeCount) {
if (searchRequest.isMaxConcurrentShardRequestsSet() == false) {
// we try to set a default of max concurrent shard requests based on the node count but upper-bound it by 256 by default to
// keep it sane. A single search request that fans out to lots of shards should hit a cluster too hard while 256 is already
// a lot. we multiply it by the default number of shards such that a single request in a cluster of 1 would hit all shards of
// a default index. We take into account that we may be in a cluster with no data nodes searching against no shards.
searchRequest.setMaxConcurrentShardRequests(Math.min(256, Math.max(nodeCount, 1)
* IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getDefault(Settings.EMPTY)));
}
}
static BiFunction<String, String, Transport.Connection> buildConnectionLookup(String requestClusterAlias,
Function<String, DiscoveryNode> localNodes,
BiFunction<String, String, DiscoveryNode> remoteNodes,
BiFunction<String, DiscoveryNode, Transport.Connection> nodeToConnection) {
return (clusterAlias, nodeId) -> {
final DiscoveryNode discoveryNode;
final boolean remoteCluster;
if (clusterAlias == null || requestClusterAlias != null) {
assert requestClusterAlias == null || requestClusterAlias.equals(clusterAlias);
discoveryNode = localNodes.apply(nodeId);
remoteCluster = false;
} else {
discoveryNode = remoteNodes.apply(clusterAlias, nodeId);
remoteCluster = true;
}
if (discoveryNode == null) {
throw new IllegalStateException("no node found for id: " + nodeId);
}
return nodeToConnection.apply(remoteCluster ? clusterAlias : null, discoveryNode);
};
}
private static boolean shouldPreFilterSearchShards(SearchRequest searchRequest,
GroupShardsIterator<SearchShardIterator> shardIterators) {
SearchSourceBuilder source = searchRequest.source();
return searchRequest.searchType() == QUERY_THEN_FETCH && // we can't do this for DFS it needs to fan out to all shards all the time
SearchService.canRewriteToMatchNone(source) &&
searchRequest.getPreFilterShardSize() < shardIterators.size();
}
static GroupShardsIterator<SearchShardIterator> mergeShardsIterators(GroupShardsIterator<ShardIterator> localShardsIterator,
OriginalIndices localIndices,
@Nullable String localClusterAlias,
List<SearchShardIterator> remoteShardIterators) {
List<SearchShardIterator> shards = new ArrayList<>(remoteShardIterators);
for (ShardIterator shardIterator : localShardsIterator) {
shards.add(new SearchShardIterator(localClusterAlias, shardIterator.shardId(), shardIterator.getShardRoutings(), localIndices));
}
return new GroupShardsIterator<>(shards);
}
    @Override
    protected final void doExecute(SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
        // Search execution must always go through the task-aware doExecute overload; this
        // task-less entry point is deliberately unsupported.
        throw new UnsupportedOperationException("the task parameter is required");
    }
    /**
     * Creates the async action that executes the search across the given shard iterators.
     * When {@code preFilter} is true, a can-match pre-filter phase is created first; it
     * recursively calls back into this method (with {@code preFilter == false}) using the
     * reduced iterator set to build the actual query phase. Otherwise the action is chosen
     * by the request's search type.
     */
    private AbstractSearchAsyncAction searchAsyncAction(SearchTask task, SearchRequest searchRequest,
                                                        GroupShardsIterator<SearchShardIterator> shardIterators,
                                                        SearchTimeProvider timeProvider,
                                                        BiFunction<String, String, Transport.Connection> connectionLookup,
                                                        long clusterStateVersion,
                                                        Map<String, AliasFilter> aliasFilter,
                                                        Map<String, Float> concreteIndexBoosts,
                                                        Map<String, Set<String>> indexRoutings,
                                                        ActionListener<SearchResponse> listener,
                                                        boolean preFilter,
                                                        SearchResponse.Clusters clusters) {
        Executor executor = threadPool.executor(ThreadPool.Names.SEARCH);
        if (preFilter) {
            // The pre-filter phase hands the reduced shard iterators to this factory lambda,
            // which builds the real (non-pre-filtered) action as the next phase.
            return new CanMatchPreFilterSearchPhase(logger, searchTransportService, connectionLookup,
                aliasFilter, concreteIndexBoosts, indexRoutings, executor, searchRequest, listener, shardIterators,
                timeProvider, clusterStateVersion, task, (iter) -> {
                AbstractSearchAsyncAction action = searchAsyncAction(task, searchRequest, iter, timeProvider, connectionLookup,
                    clusterStateVersion, aliasFilter, concreteIndexBoosts, indexRoutings, listener, false, clusters);
                // Wrap the action so it runs as a named phase after pre-filtering completes.
                return new SearchPhase(action.getName()) {
                    @Override
                    public void run() {
                        action.start();
                    }
                };
            }, clusters);
        } else {
            AbstractSearchAsyncAction searchAsyncAction;
            switch (searchRequest.searchType()) {
                case DFS_QUERY_THEN_FETCH:
                    searchAsyncAction = new SearchDfsQueryThenFetchAsyncAction(logger, searchTransportService, connectionLookup,
                        aliasFilter, concreteIndexBoosts, indexRoutings, searchPhaseController, executor, searchRequest, listener,
                        shardIterators, timeProvider, clusterStateVersion, task, clusters);
                    break;
                // QUERY_AND_FETCH is handled by the query-then-fetch action as well.
                case QUERY_AND_FETCH:
                case QUERY_THEN_FETCH:
                    searchAsyncAction = new SearchQueryThenFetchAsyncAction(logger, searchTransportService, connectionLookup,
                        aliasFilter, concreteIndexBoosts, indexRoutings, searchPhaseController, executor, searchRequest, listener,
                        shardIterators, timeProvider, clusterStateVersion, task, clusters);
                    break;
                default:
                    throw new IllegalStateException("Unknown search type: [" + searchRequest.searchType() + "]");
            }
            return searchAsyncAction;
        }
    }
private static void failIfOverShardCountLimit(ClusterService clusterService, int shardCount) {
final long shardCountLimit = clusterService.getClusterSettings().get(SHARD_COUNT_LIMIT_SETTING);
if (shardCount > shardCountLimit) {
throw new IllegalArgumentException("Trying to query " + shardCount + " shards, which is over the limit of "
+ shardCountLimit + ". This limit exists because querying many shards at the same time can make the "
+ "job of the coordinating node very CPU and/or memory intensive. It is usually a better idea to "
+ "have a smaller number of larger shards. Update [" + SHARD_COUNT_LIMIT_SETTING.getKey()
+ "] to a greater value if you really want to query that many shards at the same time.");
}
}
}
| |
/*
* Copyright 2012-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.cli.compiler;
import org.codehaus.groovy.ast.AnnotationNode;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.ast.ModuleNode;
import org.codehaus.groovy.ast.expr.ConstantExpression;
import org.springframework.boot.cli.compiler.dependencies.ArtifactCoordinatesResolver;
import org.springframework.boot.cli.compiler.grape.DependencyResolutionContext;
import groovy.lang.Grab;
import groovy.lang.GroovyClassLoader;
/**
* Customizer that allows dependencies to be added during compilation. Adding a dependency
* results in a {@link Grab @Grab} annotation being added to the primary {@link ClassNode
 * class} in the {@link ModuleNode module} that's being customized.
* <p>
* This class provides a fluent API for conditionally adding dependencies. For example:
* {@code dependencies.ifMissing("com.corp.SomeClass").add(module)}.
*
* @author Phillip Webb
* @author Andy Wilkinson
*/
public class DependencyCustomizer {

	private final GroovyClassLoader loader;

	private final ClassNode classNode;

	private final DependencyResolutionContext dependencyResolutionContext;

	/**
	 * Create a new {@link DependencyCustomizer} instance.
	 * @param loader the current classloader
	 * @param moduleNode the current module
	 * @param dependencyResolutionContext the context for dependency resolution
	 */
	public DependencyCustomizer(GroovyClassLoader loader, ModuleNode moduleNode,
			DependencyResolutionContext dependencyResolutionContext) {
		this.loader = loader;
		// @Grab annotations are attached to the first (primary) class of the module.
		this.classNode = moduleNode.getClasses().get(0);
		this.dependencyResolutionContext = dependencyResolutionContext;
	}

	/**
	 * Create a new nested {@link DependencyCustomizer}. Nested customizers share the
	 * parent's state; subclasses override {@link #canAdd()} to add conditions.
	 * @param parent the parent customizer
	 */
	protected DependencyCustomizer(DependencyCustomizer parent) {
		this.loader = parent.loader;
		this.classNode = parent.classNode;
		this.dependencyResolutionContext = parent.dependencyResolutionContext;
	}

	/**
	 * Return the managed version of the given artifact.
	 * @param artifactId the artifact ID
	 * @return the version, or an empty string if the artifact is unknown
	 */
	public String getVersion(String artifactId) {
		return getVersion(artifactId, "");
	}

	/**
	 * Return the managed version of the given artifact.
	 * @param artifactId the artifact ID
	 * @param defaultVersion the version to return when the artifact is unknown
	 * @return the resolved version, or {@code defaultVersion}
	 */
	public String getVersion(String artifactId, String defaultVersion) {
		String version = this.dependencyResolutionContext.getArtifactCoordinatesResolver()
				.getVersion(artifactId);
		if (version == null) {
			version = defaultVersion;
		}
		return version;
	}

	/**
	 * Create a nested {@link DependencyCustomizer} that only applies if any of the
	 * specified class names are not on the class path.
	 * @param classNames the class names to test
	 * @return a nested {@link DependencyCustomizer}
	 */
	public DependencyCustomizer ifAnyMissingClasses(final String... classNames) {
		return new DependencyCustomizer(this) {
			@Override
			protected boolean canAdd() {
				for (String classname : classNames) {
					try {
						DependencyCustomizer.this.loader.loadClass(classname);
					}
					catch (Exception ex) {
						// class not loadable -> at least one class is missing
						return true;
					}
				}
				return false;
			}
		};
	}

	/**
	 * Create a nested {@link DependencyCustomizer} that only applies if all of the
	 * specified class names are not on the class path.
	 * @param classNames the class names to test
	 * @return a nested {@link DependencyCustomizer}
	 */
	public DependencyCustomizer ifAllMissingClasses(final String... classNames) {
		return new DependencyCustomizer(this) {
			@Override
			protected boolean canAdd() {
				for (String classname : classNames) {
					try {
						DependencyCustomizer.this.loader.loadClass(classname);
						// class is present -> not all classes are missing
						return false;
					}
					catch (Exception ex) {
						// swallow exception and continue
					}
				}
				return DependencyCustomizer.this.canAdd();
			}
		};
	}

	/**
	 * Create a nested {@link DependencyCustomizer} that only applies if all of the
	 * specified paths are on the class path.
	 * @param paths the paths to test
	 * @return a nested {@link DependencyCustomizer}
	 */
	public DependencyCustomizer ifAllResourcesPresent(final String... paths) {
		return new DependencyCustomizer(this) {
			@Override
			protected boolean canAdd() {
				// Every path must resolve; bail out as soon as one is missing. (The
				// previous implementation returned after inspecting only the first path.)
				for (String path : paths) {
					try {
						if (DependencyCustomizer.this.loader.getResource(path) == null) {
							return false;
						}
					}
					catch (Exception ex) {
						// swallow exception and continue
					}
				}
				return DependencyCustomizer.this.canAdd();
			}
		};
	}

	/**
	 * Create a nested {@link DependencyCustomizer} that only applies if at least one of
	 * the specified paths is on the class path.
	 * @param paths the paths to test
	 * @return a nested {@link DependencyCustomizer}
	 */
	public DependencyCustomizer ifAnyResourcesPresent(final String... paths) {
		return new DependencyCustomizer(this) {
			@Override
			protected boolean canAdd() {
				// One resolvable path is enough. (The previous implementation returned
				// after inspecting only the first path.)
				for (String path : paths) {
					try {
						if (DependencyCustomizer.this.loader.getResource(path) != null) {
							return true;
						}
					}
					catch (Exception ex) {
						// swallow exception and continue
					}
				}
				return false;
			}
		};
	}

	/**
	 * Add dependencies and all of their dependencies. The group ID and version of the
	 * dependencies are resolved from the modules using the customizer's
	 * {@link ArtifactCoordinatesResolver}.
	 * @param modules the module IDs
	 * @return this {@link DependencyCustomizer} for continued use
	 */
	public DependencyCustomizer add(String... modules) {
		for (String module : modules) {
			add(module, null, null, true);
		}
		return this;
	}

	/**
	 * Add a single dependency and, optionally, all of its dependencies. The group ID and
	 * version of the dependency are resolved from the module using the customizer's
	 * {@link ArtifactCoordinatesResolver}.
	 * @param module the module ID
	 * @param transitive {@code true} if the transitive dependencies should also be added,
	 * otherwise {@code false}.
	 * @return this {@link DependencyCustomizer} for continued use
	 */
	public DependencyCustomizer add(String module, boolean transitive) {
		return add(module, null, null, transitive);
	}

	/**
	 * Add a single dependency with the specified classifier and type and, optionally, all
	 * of its dependencies. The group ID and version of the dependency are resolved from
	 * the module by using the customizer's {@link ArtifactCoordinatesResolver}.
	 * @param module the module ID
	 * @param classifier the classifier, may be {@code null}
	 * @param type the type, may be {@code null}
	 * @param transitive {@code true} if the transitive dependencies should also be added,
	 * otherwise {@code false}.
	 * @return this {@link DependencyCustomizer} for continued use
	 */
	public DependencyCustomizer add(String module, String classifier, String type,
			boolean transitive) {
		if (canAdd()) {
			ArtifactCoordinatesResolver artifactCoordinatesResolver = this.dependencyResolutionContext
					.getArtifactCoordinatesResolver();
			this.classNode.addAnnotation(
					createGrabAnnotation(artifactCoordinatesResolver.getGroupId(module),
							artifactCoordinatesResolver.getArtifactId(module),
							artifactCoordinatesResolver.getVersion(module), classifier,
							type, transitive));
		}
		return this;
	}

	/**
	 * Build a {@code @Grab} annotation node describing a single dependency.
	 */
	private AnnotationNode createGrabAnnotation(String group, String module,
			String version, String classifier, String type, boolean transitive) {
		AnnotationNode annotationNode = new AnnotationNode(new ClassNode(Grab.class));
		annotationNode.addMember("group", new ConstantExpression(group));
		annotationNode.addMember("module", new ConstantExpression(module));
		annotationNode.addMember("version", new ConstantExpression(version));
		if (classifier != null) {
			annotationNode.addMember("classifier", new ConstantExpression(classifier));
		}
		if (type != null) {
			annotationNode.addMember("type", new ConstantExpression(type));
		}
		annotationNode.addMember("transitive", new ConstantExpression(transitive));
		// Grape must not run static initializers of grabbed classes during resolution.
		annotationNode.addMember("initClass", new ConstantExpression(false));
		return annotationNode;
	}

	/**
	 * Strategy called to test if dependencies can be added. Subclasses override as
	 * required. Returns {@code true} by default.
	 *
	 * @return {@code true} if dependencies can be added, otherwise {@code false}
	 */
	protected boolean canAdd() {
		return true;
	}

	/**
	 * Returns the {@link DependencyResolutionContext}.
	 * @return the dependency resolution context
	 */
	public DependencyResolutionContext getDependencyResolutionContext() {
		return this.dependencyResolutionContext;
	}

}
| |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.keyboard_accessory.sheet_tabs;
import static org.chromium.ui.base.LocalizationUtils.isLayoutRtl;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.text.method.PasswordTransformationMethod;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;
import androidx.annotation.Nullable;
import androidx.core.view.ViewCompat;
import androidx.recyclerview.widget.RecyclerView;
import org.chromium.base.Callback;
import org.chromium.chrome.browser.keyboard_accessory.R;
import org.chromium.chrome.browser.keyboard_accessory.data.KeyboardAccessoryData;
import org.chromium.chrome.browser.keyboard_accessory.data.KeyboardAccessoryData.FooterCommand;
import org.chromium.chrome.browser.keyboard_accessory.data.UserInfoField;
import org.chromium.chrome.browser.keyboard_accessory.sheet_tabs.AccessorySheetTabModel.AccessorySheetDataPiece;
import org.chromium.chrome.browser.keyboard_accessory.sheet_tabs.AccessorySheetTabViewBinder.ElementViewHolder;
import org.chromium.chrome.browser.profiles.Profile;
import org.chromium.chrome.browser.ui.favicon.FaviconUtils;
import org.chromium.chrome.browser.ui.favicon.LargeIconBridge;
import org.chromium.components.browser_ui.widget.RoundedIconGenerator;
import org.chromium.ui.HorizontalListDividerDrawable;
import org.chromium.ui.modelutil.ListModel;
/**
* This stateless class provides methods to bind the items in a {@link ListModel <Item>}
* to the {@link RecyclerView} used as view of the Password accessory sheet component.
*/
class PasswordAccessorySheetViewBinder {
    /**
     * Creates a ViewHolder for the given type of data piece.
     * @param parent The parent {@link ViewGroup} the holder's view will be attached to.
     * @param viewType The {@link AccessorySheetDataPiece.Type} to create a holder for.
     * @return A new {@link ElementViewHolder}; asserts (and returns null) for unknown types.
     */
    static ElementViewHolder create(ViewGroup parent, @AccessorySheetDataPiece.Type int viewType) {
        switch (viewType) {
            case AccessorySheetDataPiece.Type.TITLE:
                return new PasswordsTitleViewHolder(parent);
            case AccessorySheetDataPiece.Type.PASSWORD_INFO:
                return new PasswordsInfoViewHolder(parent);
            case AccessorySheetDataPiece.Type.FOOTER_COMMAND:
                return new FooterCommandViewHolder(parent);
        }
        assert false : "Unhandled type of data piece: " + viewType;
        return null;
    }
    /**
     * Holds the TextView with the title of the sheet and a divider for the accessory bar.
     */
    static class PasswordsTitleViewHolder extends ElementViewHolder<String, LinearLayout> {
        PasswordsTitleViewHolder(ViewGroup parent) {
            super(parent, R.layout.password_accessory_sheet_label);
        }
        @Override
        protected void bind(String displayText, LinearLayout view) {
            TextView titleView = view.findViewById(R.id.tab_title);
            titleView.setText(displayText);
            titleView.setContentDescription(displayText);
        }
    }
    /**
     * Holds a TextView that represents a bottom command and is separated to the top by a divider.
     */
    static class FooterCommandViewHolder extends ElementViewHolder<FooterCommand, LinearLayout> {
        /**
         * Decoration that reserves space above the first footer command and draws a divider
         * between the last non-footer item and the first footer command.
         */
        public static class DynamicTopDivider extends RecyclerView.ItemDecoration {
            @Override
            public void getItemOffsets(
                    Rect outRect, View view, RecyclerView parent, RecyclerView.State state) {
                super.getItemOffsets(outRect, view, parent, state);
                // Only the first footer command (the one preceded by a non-footer item)
                // gets extra top space for the divider.
                if (view.getId() != R.id.footer_command) return;
                int previous = parent.indexOfChild(view) - 1;
                if (previous < 0) return;
                if (parent.getChildAt(previous).getId() == R.id.footer_command) return;
                outRect.top = view.getContext().getResources().getDimensionPixelSize(
                                      R.dimen.keyboard_accessory_suggestion_padding)
                        + view.getContext().getResources().getDimensionPixelSize(
                                R.dimen.divider_height);
            }
            @Override
            public void onDraw(Canvas canvas, RecyclerView parent, RecyclerView.State state) {
                // Draw the divider into the gap reserved by getItemOffsets, i.e. directly
                // above the first attached footer command.
                int attachedChildCount = parent.getChildCount();
                for (int i = 0; i < attachedChildCount - 1; ++i) {
                    View currentView = parent.getChildAt(i);
                    if (currentView.getId() == R.id.footer_command) break;
                    View nextView = parent.getChildAt(i + 1);
                    if (nextView.getId() != R.id.footer_command) continue;
                    Drawable dividerDrawable =
                            HorizontalListDividerDrawable.create(nextView.getContext());
                    int top = currentView.getBottom()
                            + currentView.getContext().getResources().getDimensionPixelOffset(
                                      R.dimen.keyboard_accessory_suggestion_padding)
                                    / 2;
                    int bottom = top + dividerDrawable.getIntrinsicHeight();
                    dividerDrawable.setBounds(parent.getLeft() + parent.getPaddingLeft(), top,
                            parent.getRight() - parent.getPaddingRight(), bottom);
                    dividerDrawable.draw(canvas);
                }
            }
        }
        FooterCommandViewHolder(ViewGroup parent) {
            super(parent, R.layout.password_accessory_sheet_legacy_option);
        }
        @Override
        protected void bind(FooterCommand footerCommand, LinearLayout layout) {
            TextView view = layout.findViewById(R.id.footer_text);
            view.setText(footerCommand.getDisplayText());
            view.setContentDescription(footerCommand.getDisplayText());
            view.setOnClickListener(v -> footerCommand.execute());
            view.setClickable(true);
        }
    }
    /**
     * Holds a layout for a username and a password with a small icon.
     */
    static class PasswordsInfoViewHolder
            extends ElementViewHolder<KeyboardAccessoryData.UserInfo, LinearLayout> {
        private final int mPadding;
        private final int mIconSize;
        PasswordsInfoViewHolder(ViewGroup parent) {
            super(parent, R.layout.keyboard_accessory_sheet_tab_legacy_password_info);
            mPadding = itemView.getContext().getResources().getDimensionPixelSize(
                    R.dimen.keyboard_accessory_suggestion_padding);
            mIconSize = itemView.getContext().getResources().getDimensionPixelSize(
                    R.dimen.keyboard_accessory_suggestion_icon_size);
        }
        @Override
        protected void bind(KeyboardAccessoryData.UserInfo info, LinearLayout layout) {
            TextView username = layout.findViewById(R.id.suggestion_text);
            TextView password = layout.findViewById(R.id.password_text);
            // NOTE(review): assumes field 0 is the username and field 1 the password — this
            // mirrors the order the sheet model is populated in; confirm against the provider.
            bindTextView(username, info.getFields().get(0));
            bindTextView(password, info.getFields().get(1));
            // Set the default icon for username, then try to get a better one asynchronously.
            FaviconHelper faviconHelper = new FaviconHelper(username.getContext());
            setIconForBitmap(username, faviconHelper.getDefaultIcon(info.getOrigin()));
            faviconHelper.fetchFavicon(info.getOrigin(), icon -> setIconForBitmap(username, icon));
            ViewCompat.setPaddingRelative(username, mPadding, 0, mPadding, 0);
            // Passwords have no icon, so increase the start offset to align with the username text.
            ViewCompat.setPaddingRelative(password, 2 * mPadding + mIconSize, 0, mPadding, 0);
        }
        private void bindTextView(TextView text, UserInfoField field) {
            text.setTransformationMethod(
                    field.isObfuscated() ? new PasswordTransformationMethod() : null);
            // With transformation, the character set forces a LTR gravity. Therefore, invert it:
            text.setGravity(Gravity.CENTER_VERTICAL
                    | (isLayoutRtl() && field.isObfuscated() ? Gravity.END : Gravity.START));
            text.setText(field.getDisplayText());
            text.setContentDescription(field.getA11yDescription());
            text.setOnClickListener(!field.isSelectable() ? null : src -> field.triggerSelection());
            text.setClickable(true); // Ensures that "disabled" is announced.
            text.setEnabled(field.isSelectable());
            text.setBackground(getBackgroundDrawable(field.isSelectable()));
        }
        /** Returns the platform ripple background for selectable fields, null otherwise. */
        private @Nullable Drawable getBackgroundDrawable(boolean selectable) {
            if (!selectable) return null;
            TypedArray a = itemView.getContext().obtainStyledAttributes(
                    new int[] {R.attr.selectableItemBackground});
            Drawable suggestionBackground = a.getDrawable(0);
            a.recycle();
            return suggestionBackground;
        }
        /** Sets (or clears, when icon is null) the start compound drawable of the TextView. */
        private void setIconForBitmap(TextView text, @Nullable Drawable icon) {
            if (icon != null) {
                icon.setBounds(0, 0, mIconSize, mIconSize);
            }
            text.setCompoundDrawablePadding(mPadding);
            text.setCompoundDrawablesRelative(icon, null, null, null);
        }
    }
    /**
     * Provides default favicons and helps to fetch and set favicons. It automatically discards
     * out-of-date responses which are common for recycled ViewHolder.
     */
    static class FaviconHelper {
        private final Resources mResources;
        private final RoundedIconGenerator mIconGenerator;
        private final int mDesiredSize;
        /**
         * Creates a new helper.
         * @param context The {@link Context} used to fetch resources and create Drawables.
         */
        FaviconHelper(Context context) {
            mResources = context.getResources();
            mDesiredSize = mResources.getDimensionPixelSize(
                    R.dimen.keyboard_accessory_suggestion_icon_size);
            mIconGenerator = FaviconUtils.createCircularIconGenerator(mResources);
        }
        /** Returns a generated placeholder icon for the given origin. */
        Drawable getDefaultIcon(String origin) {
            return FaviconUtils.getIconDrawableWithoutFilter(null, origin,
                    R.color.default_favicon_background_color, mIconGenerator, mResources,
                    mDesiredSize);
        }
        /**
         * Resets favicon in case the container is recycled. Then queries a favicon for the origin.
         * @param origin The origin URL of the favicon.
         * @param setIconCallback Callback called with fetched icons. May be called with null.
         */
        void fetchFavicon(String origin, Callback<Drawable> setIconCallback) {
            final LargeIconBridge mIconBridge =
                    new LargeIconBridge(Profile.getLastUsedRegularProfile());
            mIconBridge.getLargeIconForStringUrl(origin, mDesiredSize,
                    (icon, fallbackColor, isFallbackColorDefault, iconType) -> {
                        Drawable drawable = FaviconUtils.getIconDrawableWithoutFilter(icon, origin,
                                fallbackColor, mIconGenerator, mResources, mDesiredSize);
                        setIconCallback.onResult(drawable);
                    });
        }
    }
    /** Hooks the adapter and the footer divider decoration up to the given RecyclerView. */
    static void initializeView(RecyclerView view, AccessorySheetTabModel model) {
        view.setAdapter(PasswordAccessorySheetCoordinator.createAdapter(model));
        view.addItemDecoration(new FooterCommandViewHolder.DynamicTopDivider());
    }
}
| |
package com.studio4plus.homerplayer.ui;
import android.annotation.SuppressLint;
import android.content.ActivityNotFoundException;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.PersistableBundle;
import android.speech.tts.TextToSpeech;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.widget.Toast;
import com.studio4plus.homerplayer.GlobalSettings;
import com.studio4plus.homerplayer.HomerPlayerApplication;
import com.studio4plus.homerplayer.KioskModeSwitcher;
import com.studio4plus.homerplayer.R;
import com.studio4plus.homerplayer.battery.BatteryStatusProvider;
import com.studio4plus.homerplayer.concurrency.SimpleDeferred;
import com.studio4plus.homerplayer.ui.classic.ClassicMainUiModule;
import com.studio4plus.homerplayer.ui.classic.DaggerClassicMainUiComponent;
import com.studio4plus.homerplayer.concurrency.SimpleFuture;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import de.greenrobot.event.EventBus;
public class MainActivity extends AppCompatActivity implements SpeakerProvider {

    private static final int TTS_CHECK_CODE = 1;
    private static final String KIOSK_MODE_ENABLE_ACTION = "KioskModeEnable";
    private static final String ENABLE_EXTRA = "Enable";

    @SuppressWarnings("FieldCanBeLocal")
    private MainUiComponent mainUiComponent;
    private BatteryStatusIndicator batteryStatusIndicator;
    // Pending request for a Speaker; set while a TTS data check is in flight.
    private @Nullable SimpleDeferred<Speaker> ttsDeferred;
    private OrientationActivityDelegate orientationDelegate;

    @Inject public UiControllerMain controller;
    @Inject public BatteryStatusProvider batteryStatusProvider;
    @Inject public EventBus eventBus;
    @Inject public GlobalSettings globalSettings;
    @Inject public KioskModeHandler kioskModeHandler;
    @Inject public KioskModeSwitcher kioskModeSwitcher;

    // Minimum interval between "back is disabled" toasts while in kiosk mode.
    private final static long SUPPRESSED_BACK_MESSAGE_DELAY_NANO = TimeUnit.SECONDS.toNanos(2);
    private long lastSuppressedBackTimeNano = 0;
    private boolean isInForeground = false;

    // Theme the activity was created with; compared against settings on resume to
    // trigger a recreate when the user changed the theme.
    @Nullable
    private ColorTheme currentTheme;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        mainUiComponent = DaggerClassicMainUiComponent.builder()
                .applicationComponent(HomerPlayerApplication.getComponent(this))
                .activityModule(new ActivityModule(this))
                .classicMainUiModule(new ClassicMainUiModule(this))
                .build();
        mainUiComponent.inject(this);

        // Theme must be applied before setContentView.
        setTheme(globalSettings.colorTheme());
        setContentView(R.layout.main_activity);

        controller.onActivityCreated();

        batteryStatusIndicator = new BatteryStatusIndicator(
                findViewById(R.id.batteryStatusIndicator), eventBus);

        orientationDelegate = new OrientationActivityDelegate(this, globalSettings);

        View touchEventEater = findViewById(R.id.touchEventEater);
        touchEventEater.setOnTouchListener(new View.OnTouchListener() {
            @SuppressLint("ClickableViewAccessibility")
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                // Tell the other views that the event has been handled.
                return true;
            }
        });
    }

    @Override
    protected void onStart() {
        super.onStart();
        // onStart must be called before the UI controller can manipulate fragments.
        controller.onActivityStart();
        orientationDelegate.onStart();
        batteryStatusProvider.start();
        kioskModeHandler.onActivityStart(this);
        handleIntent(getIntent());
    }

    @Override
    protected void onResume() {
        super.onResume();
        isInForeground = true;
        // Recreate the activity if the color theme changed while we were paused.
        // (ColorTheme instances are compared by reference; this presumably is an
        // enum-like singleton per theme — confirm if ColorTheme ever becomes a class.)
        ColorTheme theme = globalSettings.colorTheme();
        if (currentTheme != theme) {
            setTheme(theme);
            recreate();
        }
    }

    @Override
    protected void onResumeFragments() {
        super.onResumeFragments();
        controller.onActivityResumeFragments();
    }

    @SuppressLint("MissingSuperCall")
    @Override
    protected void onSaveInstanceState(Bundle outState) {
        // Do nothing, this activity takes state from the PlayerService and the AudioBookManager.
    }

    @Override
    public void onSaveInstanceState(Bundle outState, PersistableBundle outPersistentState) {
        // Do nothing, this activity takes state from the PlayerService and the AudioBookManager.
    }

    @Override
    protected void onPause() {
        // Call super.onPause() first. It may, among other things, call onResumeFragments(), so
        // calling super.onPause() before controller.onActivityPause() is necessary to ensure that
        // controller.onActivityResumeFragments() is called in the right order.
        super.onPause();
        isInForeground = false;
        controller.onActivityPause();
    }

    @Override
    protected void onStop() {
        controller.onActivityStop();
        orientationDelegate.onStop();
        kioskModeHandler.onActivityStop();
        super.onStop();
        batteryStatusProvider.stop();
    }

    @Override
    public void onBackPressed() {
        if (globalSettings.isAnyKioskModeEnabled()) {
            // Back is suppressed in kiosk mode; show a toast on a rapid second press.
            long now = System.nanoTime();
            if (now - lastSuppressedBackTimeNano < SUPPRESSED_BACK_MESSAGE_DELAY_NANO) {
                Toast.makeText(this, R.string.back_suppressed_by_kiosk, Toast.LENGTH_SHORT)
                        .show();
            }
            lastSuppressedBackTimeNano = now;
        } else {
            super.onBackPressed();
        }
    }

    @Override
    public void onWindowFocusChanged(boolean hasFocus) {
        super.onWindowFocusChanged(hasFocus);
        if (hasFocus && isInForeground) {
            // Start animations.
            batteryStatusIndicator.startAnimations();
            kioskModeHandler.onFocusGained(this);
        }
    }

    @Override
    protected void onDestroy() {
        batteryStatusIndicator.shutdown();
        controller.onActivityDestroy();
        super.onDestroy();
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        // A call with no grantResults means the dialog has been closed without any user decision.
        if (grantResults.length > 0)
            controller.onRequestPermissionResult(requestCode, permissions, grantResults);
    }

    // Fix: this method overrides Activity.onActivityResult but was previously declared
    // without @Override, which would hide the override silently if the signature drifted.
    @Override
    protected void onActivityResult(
            int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == TTS_CHECK_CODE) {
            if (resultCode == TextToSpeech.Engine.CHECK_VOICE_DATA_PASS) {
                // success, create the TTS instance
                if (ttsDeferred != null) {
                    ttsDeferred.setResult(new Speaker(this));
                    ttsDeferred = null;
                }
            } else {
                // missing data, install it
                Intent installIntent = new Intent();
                installIntent.setAction(
                        TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA);
                try {
                    startActivity(installIntent);
                } catch (ActivityNotFoundException e) {
                    Log.w("MainActivity", "No activity to handle Text-to-Speech data installation.");
                    if (ttsDeferred != null) {
                        ttsDeferred.setException(e);
                        ttsDeferred = null;
                    }
                }
            }
        }
    }

    /**
     * Returns a future that resolves to a ready Speaker once the TTS engine's data check
     * (started via ACTION_CHECK_TTS_DATA) completes in onActivityResult.
     */
    @Override
    @NonNull
    public SimpleFuture<Speaker> obtainTts() {
        SimpleDeferred<Speaker> result = ttsDeferred;
        if (ttsDeferred == null) {
            result = new SimpleDeferred<>();
            Intent checkIntent = new Intent();
            checkIntent.setAction(TextToSpeech.Engine.ACTION_CHECK_TTS_DATA);
            try {
                startActivityForResult(checkIntent, TTS_CHECK_CODE);
                ttsDeferred = result;
            } catch (ActivityNotFoundException e) {
                Log.w("MainActivity", "Text-to-Speech not available");
                result.setException(e);
                // ttsDeferred stays unset because the result is delivered.
            }
        }
        return result;
    }

    /** Handles the explicit kiosk-mode enable/disable intent. */
    private void handleIntent(Intent intent) {
        if (intent != null && KIOSK_MODE_ENABLE_ACTION.equals(intent.getAction())) {
            if (kioskModeSwitcher.isLockTaskPermitted()) {
                boolean enable = intent.getBooleanExtra(ENABLE_EXTRA, false);
                if (globalSettings.isFullKioskModeEnabled() != enable) {
                    globalSettings.setFullKioskModeEnabledNow(enable);
                    kioskModeSwitcher.onFullKioskModeEnabled(this, enable);
                    // For some reason clearing the preferred Home activity only takes effect if the
                    // application exits (finishing the activity doesn't help).
                    // This issue doesn't happen when disabling the kiosk mode from the settings
                    // screen and I'm out of ideas.
                    if (!enable) {
                        new Handler(getMainLooper()).postDelayed(new Runnable() {
                            @Override
                            public void run() {
                                System.exit(0);
                            }
                        }, 500);
                    }
                }
            }
        }
    }

    /** Applies the theme and remembers it for the changed-theme check in onResume. */
    private void setTheme(@NonNull ColorTheme theme) {
        currentTheme = theme;
        setTheme(theme.styleId);
    }
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.river;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;

import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.WriteConsistencyLevel;
import org.elasticsearch.action.admin.indices.mapping.delete.DeleteMappingResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.Injectors;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.indices.IndexMissingException;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.river.cluster.RiverClusterChangedEvent;
import org.elasticsearch.river.cluster.RiverClusterService;
import org.elasticsearch.river.cluster.RiverClusterState;
import org.elasticsearch.river.cluster.RiverClusterStateListener;
import org.elasticsearch.river.routing.RiverRouting;
import org.elasticsearch.threadpool.ThreadPool;

import java.io.IOException;
import java.util.Map;
import java.util.concurrent.CountDownLatch;

import static org.elasticsearch.action.support.TransportActions.isShardNotAvailableException;
/**
*
*/
public class RiversService extends AbstractLifecycleComponent<RiversService> {
private final String riverIndexName;
private Client client;
private final ThreadPool threadPool;
private final ClusterService clusterService;
private final RiversTypesRegistry typesRegistry;
private final Injector injector;
private final Map<RiverName, Injector> riversInjectors = Maps.newHashMap();
private volatile ImmutableMap<RiverName, River> rivers = ImmutableMap.of();
@Inject
public RiversService(Settings settings, Client client, ThreadPool threadPool, ClusterService clusterService, RiversTypesRegistry typesRegistry, RiverClusterService riverClusterService, Injector injector) {
super(settings);
this.riverIndexName = RiverIndexName.Conf.indexName(settings);
this.client = client;
this.threadPool = threadPool;
this.clusterService = clusterService;
this.typesRegistry = typesRegistry;
this.injector = injector;
riverClusterService.add(new ApplyRivers());
}
@Override
protected void doStart() throws ElasticSearchException {
}
@Override
protected void doStop() throws ElasticSearchException {
ImmutableSet<RiverName> indices = ImmutableSet.copyOf(this.rivers.keySet());
final CountDownLatch latch = new CountDownLatch(indices.size());
for (final RiverName riverName : indices) {
threadPool.generic().execute(new Runnable() {
@Override
public void run() {
try {
closeRiver(riverName);
} catch (Exception e) {
logger.warn("failed to delete river on stop [{}]/[{}]", e, riverName.type(), riverName.name());
} finally {
latch.countDown();
}
}
});
}
try {
latch.await();
} catch (InterruptedException e) {
// ignore
}
}
@Override
protected void doClose() throws ElasticSearchException {
}
public synchronized void createRiver(RiverName riverName, Map<String, Object> settings) throws ElasticSearchException {
if (riversInjectors.containsKey(riverName)) {
logger.warn("ignoring river [{}][{}] creation, already exists", riverName.type(), riverName.name());
return;
}
logger.debug("creating river [{}][{}]", riverName.type(), riverName.name());
try {
ModulesBuilder modules = new ModulesBuilder();
modules.add(new RiverNameModule(riverName));
modules.add(new RiverModule(riverName, settings, this.settings, typesRegistry));
modules.add(new RiversPluginsModule(this.settings, injector.getInstance(PluginsService.class)));
Injector indexInjector = modules.createChildInjector(injector);
riversInjectors.put(riverName, indexInjector);
River river = indexInjector.getInstance(River.class);
rivers = MapBuilder.newMapBuilder(rivers).put(riverName, river).immutableMap();
// we need this start so there can be operations done (like creating an index) which can't be
// done on create since Guice can't create two concurrent child injectors
river.start();
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
builder.field("ok", true);
builder.startObject("node");
builder.field("id", clusterService.localNode().id());
builder.field("name", clusterService.localNode().name());
builder.field("transport_address", clusterService.localNode().address().toString());
builder.endObject();
builder.endObject();
client.prepareIndex(riverIndexName, riverName.name(), "_status")
.setConsistencyLevel(WriteConsistencyLevel.ONE)
.setSource(builder).execute().actionGet();
} catch (Exception e) {
logger.warn("failed to create river [{}][{}]", e, riverName.type(), riverName.name());
try {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
builder.field("error", ExceptionsHelper.detailedMessage(e));
builder.startObject("node");
builder.field("id", clusterService.localNode().id());
builder.field("name", clusterService.localNode().name());
builder.field("transport_address", clusterService.localNode().address().toString());
builder.endObject();
builder.endObject();
client.prepareIndex(riverIndexName, riverName.name(), "_status")
.setConsistencyLevel(WriteConsistencyLevel.ONE)
.setSource(builder).execute().actionGet();
} catch (Exception e1) {
logger.warn("failed to write failed status for river creation", e);
}
}
}
public synchronized void closeRiver(RiverName riverName) throws ElasticSearchException {
Injector riverInjector;
River river;
synchronized (this) {
riverInjector = riversInjectors.remove(riverName);
if (riverInjector == null) {
throw new RiverException(riverName, "missing");
}
logger.debug("closing river [{}][{}]", riverName.type(), riverName.name());
Map<RiverName, River> tmpMap = Maps.newHashMap(rivers);
river = tmpMap.remove(riverName);
rivers = ImmutableMap.copyOf(tmpMap);
}
river.close();
Injectors.close(injector);
}
private class ApplyRivers implements RiverClusterStateListener {
@Override
public void riverClusterChanged(RiverClusterChangedEvent event) {
DiscoveryNode localNode = clusterService.localNode();
RiverClusterState state = event.state();
// first, go over and delete ones that either don't exists or are not allocated
for (final RiverName riverName : rivers.keySet()) {
RiverRouting routing = state.routing().routing(riverName);
if (routing == null || !localNode.equals(routing.node())) {
// not routed at all, and not allocated here, clean it (we delete the relevant ones before)
closeRiver(riverName);
// also, double check and delete the river content if it was deleted (_meta does not exists)
try {
client.prepareGet(riverIndexName, riverName.name(), "_meta").setListenerThreaded(true).execute(new ActionListener<GetResponse>() {
@Override
public void onResponse(GetResponse getResponse) {
if (!getResponse.isExists()) {
// verify the river is deleted
client.admin().indices().prepareDeleteMapping(riverIndexName).setType(riverName.name()).execute(new ActionListener<DeleteMappingResponse>() {
@Override
public void onResponse(DeleteMappingResponse deleteMappingResponse) {
// all is well...
}
@Override
public void onFailure(Throwable e) {
logger.debug("failed to (double) delete river [{}] content", e, riverName.name());
}
});
}
}
@Override
public void onFailure(Throwable e) {
logger.debug("failed to (double) delete river [{}] content", e, riverName.name());
}
});
} catch (IndexMissingException e) {
// all is well, the _river index was deleted
} catch (Exception e) {
logger.warn("unexpected failure when trying to verify river [{}] deleted", e, riverName.name());
}
}
}
for (final RiverRouting routing : state.routing()) {
// not allocated
if (routing.node() == null) {
continue;
}
// only apply changes to the local node
if (!routing.node().equals(localNode)) {
continue;
}
// if its already created, ignore it
if (rivers.containsKey(routing.riverName())) {
continue;
}
client.prepareGet(riverIndexName, routing.riverName().name(), "_meta").setListenerThreaded(true).execute(new ActionListener<GetResponse>() {
@Override
public void onResponse(GetResponse getResponse) {
if (!rivers.containsKey(routing.riverName())) {
if (getResponse.isExists()) {
// only create the river if it exists, otherwise, the indexing meta data has not been visible yet...
createRiver(routing.riverName(), getResponse.getSourceAsMap());
}
}
}
@Override
public void onFailure(Throwable e) {
// if its this is a failure that need to be retried, then do it
// this might happen if the state of the river index has not been propagated yet to this node, which
// should happen pretty fast since we managed to get the _meta in the RiversRouter
Throwable failure = ExceptionsHelper.unwrapCause(e);
if (isShardNotAvailableException(failure)) {
logger.debug("failed to get _meta from [{}]/[{}], retrying...", e, routing.riverName().type(), routing.riverName().name());
final ActionListener<GetResponse> listener = this;
threadPool.schedule(TimeValue.timeValueSeconds(5), ThreadPool.Names.SAME, new Runnable() {
@Override
public void run() {
client.prepareGet(riverIndexName, routing.riverName().name(), "_meta").setListenerThreaded(true).execute(listener);
}
});
} else {
logger.warn("failed to get _meta from [{}]/[{}]", e, routing.riverName().type(), routing.riverName().name());
}
}
});
}
}
}
}
| |
package org.bouncycastle.jce.provider;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.math.BigInteger;
import java.security.interfaces.ECPrivateKey;
import java.security.spec.ECParameterSpec;
import java.security.spec.ECPoint;
import java.security.spec.ECPrivateKeySpec;
import java.security.spec.EllipticCurve;
import java.util.Enumeration;
import org.bouncycastle.asn1.ASN1Encodable;
import org.bouncycastle.asn1.ASN1Encoding;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.bouncycastle.asn1.ASN1Primitive;
import org.bouncycastle.asn1.ASN1Sequence;
import org.bouncycastle.asn1.DERBitString;
import org.bouncycastle.asn1.DERInteger;
import org.bouncycastle.asn1.DERNull;
import org.bouncycastle.asn1.DERObjectIdentifier;
// BEGIN android-removed
// import org.bouncycastle.asn1.cryptopro.CryptoProObjectIdentifiers;
// import org.bouncycastle.asn1.cryptopro.ECGOST3410NamedCurves;
// END android-removed
import org.bouncycastle.asn1.pkcs.PrivateKeyInfo;
import org.bouncycastle.asn1.sec.ECPrivateKeyStructure;
import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo;
import org.bouncycastle.asn1.x9.X962Parameters;
import org.bouncycastle.asn1.x9.X9ECParameters;
import org.bouncycastle.asn1.x9.X9ObjectIdentifiers;
import org.bouncycastle.crypto.params.ECDomainParameters;
import org.bouncycastle.crypto.params.ECPrivateKeyParameters;
import org.bouncycastle.jcajce.provider.asymmetric.util.EC5Util;
import org.bouncycastle.jcajce.provider.asymmetric.util.ECUtil;
import org.bouncycastle.jcajce.provider.asymmetric.util.PKCS12BagAttributeCarrierImpl;
import org.bouncycastle.jce.interfaces.ECPointEncoder;
import org.bouncycastle.jce.interfaces.PKCS12BagAttributeCarrier;
import org.bouncycastle.jce.spec.ECNamedCurveSpec;
import org.bouncycastle.math.ec.ECCurve;
/**
 * JCE EC private key implementation for the BouncyCastle provider. Holds the
 * private value {@code d}, optional curve parameters ({@code null} means
 * "implicitly CA" - parameters supplied by provider configuration), the
 * matching encoded public point when known, and PKCS#12 bag attributes.
 * Java serialization round-trips through the PKCS#8 encoding (see
 * {@code writeObject}/{@code readObject}).
 */
public class JCEECPrivateKey
    implements ECPrivateKey, org.bouncycastle.jce.interfaces.ECPrivateKey, PKCS12BagAttributeCarrier, ECPointEncoder
{
    private String algorithm = "EC";
    // the EC private value d
    private BigInteger d;
    // JCA curve parameters; null means implicitly-CA
    private ECParameterSpec ecSpec;
    // whether public points are encoded compressed (see setPointFormat)
    private boolean withCompression;
    // encoded public point carried alongside the private value, if available
    private DERBitString publicKey;
    private PKCS12BagAttributeCarrierImpl attrCarrier = new PKCS12BagAttributeCarrierImpl();

    /**
     * No-arg constructor used prior to deserialization via readObject.
     */
    protected JCEECPrivateKey()
    {
    }

    /**
     * Copies the private value, algorithm name and parameters from another
     * JCA EC private key.
     */
    public JCEECPrivateKey(
        ECPrivateKey key)
    {
        this.d = key.getS();
        this.algorithm = key.getAlgorithm();
        this.ecSpec = key.getParams();
    }

    /**
     * Builds a key from a BouncyCastle key spec, converting the BC curve
     * parameters to their JCA equivalents.
     */
    public JCEECPrivateKey(
        String algorithm,
        org.bouncycastle.jce.spec.ECPrivateKeySpec spec)
    {
        this.algorithm = algorithm;
        this.d = spec.getD();
        if (spec.getParams() != null) // can be null if implicitlyCA
        {
            ECCurve curve = spec.getParams().getCurve();
            EllipticCurve ellipticCurve;
            ellipticCurve = EC5Util.convertCurve(curve, spec.getParams().getSeed());
            this.ecSpec = EC5Util.convertSpec(ellipticCurve, spec.getParams());
        }
        else
        {
            this.ecSpec = null;
        }
    }

    /**
     * Builds a key from a standard JCA key spec.
     */
    public JCEECPrivateKey(
        String algorithm,
        ECPrivateKeySpec spec)
    {
        this.algorithm = algorithm;
        this.d = spec.getS();
        this.ecSpec = spec.getParams();
    }

    /**
     * Copy constructor with a new algorithm name; shares the attribute carrier
     * with the source key.
     */
    public JCEECPrivateKey(
        String algorithm,
        JCEECPrivateKey key)
    {
        this.algorithm = algorithm;
        this.d = key.d;
        this.ecSpec = key.ecSpec;
        this.withCompression = key.withCompression;
        this.attrCarrier = key.attrCarrier;
        this.publicKey = key.publicKey;
    }

    /**
     * Builds a key from BC crypto parameters plus an explicit JCA parameter
     * spec; when {@code spec} is null the key's domain parameters are
     * converted instead. The public point is captured from {@code pubKey}.
     */
    public JCEECPrivateKey(
        String algorithm,
        ECPrivateKeyParameters params,
        JCEECPublicKey pubKey,
        ECParameterSpec spec)
    {
        ECDomainParameters dp = params.getParameters();
        this.algorithm = algorithm;
        this.d = params.getD();
        if (spec == null)
        {
            EllipticCurve ellipticCurve = EC5Util.convertCurve(dp.getCurve(), dp.getSeed());
            this.ecSpec = new ECParameterSpec(
                            ellipticCurve,
                            new ECPoint(
                                    dp.getG().getX().toBigInteger(),
                                    dp.getG().getY().toBigInteger()),
                            dp.getN(),
                            dp.getH().intValue());
        }
        else
        {
            this.ecSpec = spec;
        }
        publicKey = getPublicKeyDetails(pubKey);
    }

    /**
     * Same as above but accepts a BC parameter spec, which is converted to the
     * JCA representation.
     */
    public JCEECPrivateKey(
        String algorithm,
        ECPrivateKeyParameters params,
        JCEECPublicKey pubKey,
        org.bouncycastle.jce.spec.ECParameterSpec spec)
    {
        ECDomainParameters dp = params.getParameters();
        this.algorithm = algorithm;
        this.d = params.getD();
        if (spec == null)
        {
            EllipticCurve ellipticCurve = EC5Util.convertCurve(dp.getCurve(), dp.getSeed());
            this.ecSpec = new ECParameterSpec(
                            ellipticCurve,
                            new ECPoint(
                                    dp.getG().getX().toBigInteger(),
                                    dp.getG().getY().toBigInteger()),
                            dp.getN(),
                            dp.getH().intValue());
        }
        else
        {
            EllipticCurve ellipticCurve = EC5Util.convertCurve(spec.getCurve(), spec.getSeed());
            this.ecSpec = new ECParameterSpec(
                                ellipticCurve,
                                new ECPoint(
                                        spec.getG().getX().toBigInteger(),
                                        spec.getG().getY().toBigInteger()),
                                spec.getN(),
                                spec.getH().intValue());
        }
        publicKey = getPublicKeyDetails(pubKey);
    }

    /**
     * Builds a key from BC crypto parameters only; the parameter spec is left
     * null (implicitly CA) and no public point is recorded.
     */
    public JCEECPrivateKey(
        String algorithm,
        ECPrivateKeyParameters params)
    {
        this.algorithm = algorithm;
        this.d = params.getD();
        this.ecSpec = null;
    }

    /**
     * Builds a key from a PKCS#8 PrivateKeyInfo structure.
     */
    JCEECPrivateKey(
        PrivateKeyInfo info)
        throws IOException
    {
        populateFromPrivKeyInfo(info);
    }

    /**
     * Populates this key from PKCS#8 PrivateKeyInfo: decodes the X9.62
     * parameters (named curve, implicitly-CA, or explicit parameters) and then
     * the private value, which may be a bare integer or a full
     * ECPrivateKeyStructure (possibly carrying the public point).
     */
    private void populateFromPrivKeyInfo(PrivateKeyInfo info)
        throws IOException
    {
        X962Parameters params = new X962Parameters((ASN1Primitive)info.getPrivateKeyAlgorithm().getParameters());
        if (params.isNamedCurve())
        {
            ASN1ObjectIdentifier oid = ASN1ObjectIdentifier.getInstance(params.getParameters());
            X9ECParameters ecP = ECUtil.getNamedCurveByOid(oid);
            // BEGIN android-removed
            // if (ecP == null) // GOST Curve
            // {
            //     ECDomainParameters gParam = ECGOST3410NamedCurves.getByOID(oid);
            //     EllipticCurve ellipticCurve = EC5Util.convertCurve(gParam.getCurve(), gParam.getSeed());
            //
            //     ecSpec = new ECNamedCurveSpec(
            //             ECGOST3410NamedCurves.getName(oid),
            //             ellipticCurve,
            //             new ECPoint(
            //                     gParam.getG().getX().toBigInteger(),
            //                     gParam.getG().getY().toBigInteger()),
            //             gParam.getN(),
            //             gParam.getH());
            // }
            // else
            // END android-removed
            {
                // named curve: resolve the curve by OID and keep the name in the spec
                EllipticCurve ellipticCurve = EC5Util.convertCurve(ecP.getCurve(), ecP.getSeed());
                ecSpec = new ECNamedCurveSpec(
                        ECUtil.getCurveName(oid),
                        ellipticCurve,
                        new ECPoint(
                                ecP.getG().getX().toBigInteger(),
                                ecP.getG().getY().toBigInteger()),
                        ecP.getN(),
                        ecP.getH());
            }
        }
        else if (params.isImplicitlyCA())
        {
            // parameters come from provider-wide configuration
            ecSpec = null;
        }
        else
        {
            // explicit parameters encoded inline
            X9ECParameters ecP = X9ECParameters.getInstance(params.getParameters());
            EllipticCurve ellipticCurve = EC5Util.convertCurve(ecP.getCurve(), ecP.getSeed());
            this.ecSpec = new ECParameterSpec(
                ellipticCurve,
                new ECPoint(
                        ecP.getG().getX().toBigInteger(),
                        ecP.getG().getY().toBigInteger()),
                ecP.getN(),
                ecP.getH().intValue());
        }

        ASN1Encodable privKey = info.parsePrivateKey();
        if (privKey instanceof DERInteger)
        {
            // legacy encoding: the private key is a bare integer
            DERInteger derD = DERInteger.getInstance(privKey);
            this.d = derD.getValue();
        }
        else
        {
            // SEC 1 ECPrivateKey structure, which may also carry the public point
            ECPrivateKeyStructure ec = new ECPrivateKeyStructure((ASN1Sequence)privKey);
            this.d = ec.getKey();
            this.publicKey = ec.getPublicKey();
        }
    }

    public String getAlgorithm()
    {
        return algorithm;
    }

    /**
     * return the encoding format we produce in getEncoded().
     *
     * @return the string "PKCS#8"
     */
    public String getFormat()
    {
        return "PKCS#8";
    }

    /**
     * Return a PKCS8 representation of the key. The sequence returned
     * represents a full PrivateKeyInfo object.
     * <p>
     * Note: returns {@code null} if DER encoding fails (preserved legacy
     * behaviour of this class).
     *
     * @return a PKCS8 representation of the key.
     */
    public byte[] getEncoded()
    {
        X962Parameters params;

        if (ecSpec instanceof ECNamedCurveSpec)
        {
            // encode as a named-curve reference where possible
            DERObjectIdentifier curveOid = ECUtil.getNamedCurveOid(((ECNamedCurveSpec)ecSpec).getName());
            if (curveOid == null) // guess it's the OID
            {
                curveOid = new DERObjectIdentifier(((ECNamedCurveSpec)ecSpec).getName());
            }
            params = new X962Parameters(curveOid);
        }
        else if (ecSpec == null)
        {
            // implicitly-CA: parameters are NULL in the encoding
            params = new X962Parameters(DERNull.INSTANCE);
        }
        else
        {
            // explicit parameters
            ECCurve curve = EC5Util.convertCurve(ecSpec.getCurve());

            X9ECParameters ecP = new X9ECParameters(
                curve,
                EC5Util.convertPoint(curve, ecSpec.getGenerator(), withCompression),
                ecSpec.getOrder(),
                BigInteger.valueOf(ecSpec.getCofactor()),
                ecSpec.getCurve().getSeed());

            params = new X962Parameters(ecP);
        }

        PrivateKeyInfo        info;
        ECPrivateKeyStructure keyStructure;

        // include the public point in the structure when we know it
        if (publicKey != null)
        {
            keyStructure = new ECPrivateKeyStructure(this.getS(), publicKey, params);
        }
        else
        {
            keyStructure = new ECPrivateKeyStructure(this.getS(), params);
        }

        try
        {
            // BEGIN android-removed
            // if (algorithm.equals("ECGOST3410"))
            // {
            //     info = new PrivateKeyInfo(new AlgorithmIdentifier(CryptoProObjectIdentifiers.gostR3410_2001, params.toASN1Primitive()), keyStructure.toASN1Primitive());
            // }
            // else
            // END android-removed
            {
                info = new PrivateKeyInfo(new AlgorithmIdentifier(X9ObjectIdentifiers.id_ecPublicKey, params.toASN1Primitive()), keyStructure.toASN1Primitive());
            }

            return info.getEncoded(ASN1Encoding.DER);
        }
        catch (IOException e)
        {
            return null;
        }
    }

    public ECParameterSpec getParams()
    {
        return ecSpec;
    }

    /**
     * Returns the BC-style parameter spec, or {@code null} when the key is
     * implicitly-CA.
     */
    public org.bouncycastle.jce.spec.ECParameterSpec getParameters()
    {
        if (ecSpec == null)
        {
            return null;
        }

        return EC5Util.convertSpec(ecSpec, withCompression);
    }

    /**
     * Returns the BC-style parameter spec, falling back to the provider's
     * implicitly-CA configuration when no explicit parameters are set.
     * NOTE(review): the fallback may itself be null if the provider was never
     * configured - callers (equals/hashCode below) would then NPE; confirm
     * provider configuration assumptions before relying on this.
     */
    org.bouncycastle.jce.spec.ECParameterSpec engineGetSpec()
    {
        if (ecSpec != null)
        {
            return EC5Util.convertSpec(ecSpec, withCompression);
        }

        return BouncyCastleProvider.CONFIGURATION.getEcImplicitlyCa();
    }

    public BigInteger getS()
    {
        return d;
    }

    public BigInteger getD()
    {
        return d;
    }

    public void setBagAttribute(
        ASN1ObjectIdentifier oid,
        ASN1Encodable        attribute)
    {
        attrCarrier.setBagAttribute(oid, attribute);
    }

    public ASN1Encodable getBagAttribute(
        ASN1ObjectIdentifier oid)
    {
        return attrCarrier.getBagAttribute(oid);
    }

    public Enumeration getBagAttributeKeys()
    {
        return attrCarrier.getBagAttributeKeys();
    }

    /**
     * Selects the point encoding used by getEncoded(): anything other than
     * "UNCOMPRESSED" (case-insensitive) enables compressed points.
     */
    public void setPointFormat(String style)
    {
       withCompression = !("UNCOMPRESSED".equalsIgnoreCase(style));
    }

    // Two keys are equal when both the private value d and the (resolved)
    // parameter specs match.
    public boolean equals(Object o)
    {
        if (!(o instanceof JCEECPrivateKey))
        {
            return false;
        }

        JCEECPrivateKey other = (JCEECPrivateKey)o;

        return getD().equals(other.getD()) && (engineGetSpec().equals(other.engineGetSpec()));
    }

    public int hashCode()
    {
        return getD().hashCode() ^ engineGetSpec().hashCode();
    }

    public String toString()
    {
        StringBuffer    buf = new StringBuffer();
        String          nl = System.getProperty("line.separator");

        buf.append("EC Private Key").append(nl);
        buf.append("             S: ").append(this.d.toString(16)).append(nl);

        return buf.toString();
    }

    /**
     * Extracts the encoded public point (as a bit string) from the matching
     * public key; returns null if the encoding cannot be parsed.
     */
    private DERBitString getPublicKeyDetails(JCEECPublicKey   pub)
    {
        try
        {
            SubjectPublicKeyInfo info = SubjectPublicKeyInfo.getInstance(ASN1Primitive.fromByteArray(pub.getEncoded()));

            return info.getPublicKeyData();
        }
        catch (IOException e)
        {   // should never happen
            return null;
        }
    }

    /**
     * Custom deserialization: reads the PKCS#8 encoding written by
     * writeObject, then the algorithm name, compression flag and bag
     * attributes, in that order.
     */
    private void readObject(
        ObjectInputStream   in)
        throws IOException, ClassNotFoundException
    {
        byte[] enc = (byte[])in.readObject();

        populateFromPrivKeyInfo(PrivateKeyInfo.getInstance(ASN1Primitive.fromByteArray(enc)));

        this.algorithm = (String)in.readObject();
        this.withCompression = in.readBoolean();
        this.attrCarrier = new PKCS12BagAttributeCarrierImpl();

        attrCarrier.readObject(in);
    }

    /**
     * Custom serialization: writes the PKCS#8 encoding followed by the
     * algorithm name, compression flag and bag attributes (mirrors
     * readObject).
     */
    private void writeObject(
        ObjectOutputStream  out)
        throws IOException
    {
        out.writeObject(this.getEncoded());
        out.writeObject(algorithm);
        out.writeBoolean(withCompression);

        attrCarrier.writeObject(out);
    }
}
| |
/**
* Copyright (c) 2013-2021 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.SortedSet;
import org.redisson.client.RedisConnection;
import org.redisson.connection.ConnectionManager;
/**
*
* @author Nikita Koksharov
*
* @param <V>
*/
// TODO compare tail or head value with current in case of absence this value
/**
 * View over a sub-range of a {@link RedissonSortedSet}, bounded by
 * {@code headValue} and {@code tailValue} (a {@code null} bound appears to
 * mean "unbounded" on that side - see {@link #headSet} / {@link #tailSet}).
 * <p>
 * Most range-dependent operations are not implemented yet and throw
 * {@link UnsupportedOperationException}; the collection-level bulk methods
 * delegate to those primitives and therefore throw as well.
 *
 * @param <V> element type
 */
class RedissonSubSortedSet<V> implements SortedSet<V> {

    private final ConnectionManager connectionManager;
    private final RedissonSortedSet<V> redissonSortedSet;

    // range bounds of this view
    private final V headValue;
    private final V tailValue;

    RedissonSubSortedSet(RedissonSortedSet<V> redissonSortedSet, ConnectionManager connectionManager, V headValue, V tailValue) {
        this.headValue = headValue;
        this.tailValue = tailValue;
        this.connectionManager = connectionManager;
        this.redissonSortedSet = redissonSortedSet;
    }

    @Override
    public int size() {
        // not implemented yet
        throw new UnsupportedOperationException();
    }

    // not implemented yet; intended to resolve the score of tailValue
    private int getTailScore(RedisConnection connection) {
        throw new UnsupportedOperationException();
    }

    // not implemented yet; intended to resolve the score of headValue
    private int getHeadScore(RedisConnection connection) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean isEmpty() {
        // delegates to size(), which is unimplemented and throws
        return size() == 0;
    }

    @Override
    public boolean contains(Object o) {
        // not implemented yet
        throw new UnsupportedOperationException();
    }

    @Override
    public Iterator<V> iterator() {
        // not implemented yet
        throw new UnsupportedOperationException();
    }

    @Override
    public Object[] toArray() {
        // not implemented yet
        throw new UnsupportedOperationException();
    }

    @Override
    public <T> T[] toArray(T[] a) {
        // not implemented yet
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean add(V e) {
        // not implemented yet
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean remove(Object o) {
        // not implemented yet
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        for (Object candidate : c) {
            if (!contains(candidate)) {
                return false;
            }
        }
        return true;
    }

    @Override
    public boolean addAll(Collection<? extends V> c) {
        boolean modified = false;
        for (V value : c) {
            modified |= add(value);
        }
        return modified;
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        boolean modified = false;
        for (Object element : this) {
            if (!c.contains(element)) {
                remove(element);
                modified = true;
            }
        }
        return modified;
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        boolean modified = false;
        for (Object element : c) {
            modified |= remove(element);
        }
        return modified;
    }

    @Override
    public void clear() {
        // not implemented yet
        throw new UnsupportedOperationException();
    }

    @Override
    public Comparator<? super V> comparator() {
        return redissonSortedSet.comparator();
    }

    @Override
    public SortedSet<V> subSet(V fromElement, V toElement) {
        // not implemented yet
        throw new UnsupportedOperationException();
    }

    @Override
    public SortedSet<V> headSet(V toElement) {
        return subSet(null, toElement);
    }

    @Override
    public SortedSet<V> tailSet(V fromElement) {
        return subSet(fromElement, null);
    }

    @Override
    public V first() {
        // not implemented yet
        throw new UnsupportedOperationException();
    }

    @Override
    public V last() {
        // not implemented yet
        throw new UnsupportedOperationException();
    }

    @Override
    public String toString() {
        Iterator<V> elements = iterator();
        if (!elements.hasNext()) {
            return "[]";
        }
        StringBuilder out = new StringBuilder();
        out.append('[');
        while (true) {
            V element = elements.next();
            if (element == this) {
                out.append("(this Collection)");
            } else {
                out.append(element);
            }
            if (!elements.hasNext()) {
                return out.append(']').toString();
            }
            out.append(',').append(' ');
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.core.query.lucene;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.text.NumberFormat;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.collections.map.LRUMap;
import org.apache.jackrabbit.core.id.NodeId;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.FilterIndexReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.ReaderUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Implements an <code>IndexReader</code> that maintains caches to resolve
* {@link #getParent(int, BitSet)} calls efficiently.
* <p>
*/
class CachingIndexReader extends FilterIndexReader {

    /**
     * The logger instance for this class.
     */
    private static final Logger log = LoggerFactory.getLogger(CachingIndexReader.class);

    /**
     * The number of nodes that are processed in a batch when the hierarchy
     * cache is initialized. The value is 400'000, which will limit the
     * temporary memory usage to initialize the hierarchy cache of a segment
     * to 64MB (-> 170B * 400k)
     */
    private static final int MAX_CACHE_INIT_BATCH_SIZE = 400 * 1000;

    /**
     * The current value of the global creation tick counter.
     */
    private static long currentTick;

    /**
     * BitSet where bits that correspond to document numbers are set for
     * shareable nodes.
     */
    private final BitSet shareableNodes;

    /**
     * Cache of nodes parent relation. If an entry in the array is >= 0,
     * then that means the node with the document number = array-index has the
     * node with the value at that position as parent.
     */
    private final int[] inSegmentParents;

    /**
     * Cache of nodes parent relation that point to a foreign index segment.
     */
    private final Map<Integer, DocId> foreignParentDocIds = new ConcurrentHashMap<Integer, DocId>();

    /**
     * Initializes the {@link #inSegmentParents} and {@link #foreignParentDocIds}
     * caches.
     */
    private final CacheInitializer cacheInitializer;

    /**
     * Tick when this index reader was created.
     */
    private final long creationTick = getNextCreationTick();

    /**
     * Document number cache if available. May be <code>null</code>.
     */
    private final DocNumberCache cache;

    /**
     * Maps document number to node id.
     */
    private final Map<Integer, NodeId> docNumber2id;

    /**
     * A cache of TermDocs that are regularly read from the index.
     */
    private final TermDocsCache termDocsCache;

    /**
     * Creates a new <code>CachingIndexReader</code> based on
     * <code>delegatee</code>
     *
     * @param delegatee the base <code>IndexReader</code>.
     * @param cache a document number cache, or <code>null</code> if not
     *          available to this reader.
     * @param initCache if the parent caches should be initialized
     *          when this index reader is constructed.
     * @throws IOException if an error occurs while reading from the index.
     */
    @SuppressWarnings("unchecked")
    CachingIndexReader(IndexReader delegatee,
                       DocNumberCache cache,
                       boolean initCache)
            throws IOException {
        super(delegatee);
        this.cache = cache;
        // -1 marks "parent unknown / not in this segment"
        this.inSegmentParents = new int[delegatee.maxDoc()];
        Arrays.fill(this.inSegmentParents, -1);
        this.shareableNodes = initShareableNodes(delegatee);
        this.cacheInitializer = new CacheInitializer(delegatee);
        if (initCache) {
            // runs synchronously on the constructing thread
            cacheInitializer.run();
        }
        // limit cache to 1% of maxDoc(), but at least 10.
        this.docNumber2id = Collections.synchronizedMap(
                new LRUMap(Math.max(10, delegatee.maxDoc() / 100)));
        this.termDocsCache = new TermDocsCache(delegatee, FieldNames.PROPERTIES);
    }

    /**
     * Builds the {@link #shareableNodes} bit set by enumerating all documents
     * that carry the {@link FieldNames#SHAREABLE_NODE} marker field.
     *
     * @param delegatee the underlying index reader.
     * @return bit set with one bit per shareable document number.
     * @throws IOException if an error occurs while reading from the index.
     */
    private BitSet initShareableNodes(IndexReader delegatee) throws IOException {
        BitSet shareableNodes = new BitSet();
        TermDocs tDocs = delegatee.termDocs(new Term(FieldNames.SHAREABLE_NODE,
                ""));
        try {
            while (tDocs.next()) {
                shareableNodes.set(tDocs.doc());
            }
        } finally {
            tDocs.close();
        }
        return shareableNodes;
    }

    /**
     * Returns the <code>DocId</code> of the parent of <code>n</code> or
     * {@link DocId#NULL} if <code>n</code> does not have a parent
     * (<code>n</code> is the root node).
     *
     * @param n the document number.
     * @param deleted the documents that should be regarded as deleted.
     * @return the <code>DocId</code> of <code>n</code>'s parent.
     * @throws IOException if an error occurs while reading from the index.
     */
    DocId getParent(int n, BitSet deleted) throws IOException {
        DocId parent;
        boolean existing = false;
        // first consult the in-segment cache, then the foreign-segment cache
        int parentDocNum = inSegmentParents[n];
        if (parentDocNum != -1) {
            parent = DocId.create(parentDocNum);
        } else {
            parent = foreignParentDocIds.get(n);
        }
        if (parent != null) {
            existing = true;
            // check if valid and reset if necessary
            if (!parent.isValid(deleted)) {
                if (log.isDebugEnabled()) {
                    log.debug(parent + " not valid anymore.");
                }
                parent = null;
            }
        }
        if (parent == null) {
            // cache miss or stale entry: resolve via the stored PARENT field
            int plainDocId = -1;
            Document doc = document(n, FieldSelectors.UUID_AND_PARENT);
            String[] parentUUIDs = doc.getValues(FieldNames.PARENT);
            if (parentUUIDs.length == 0 || parentUUIDs[0].length() == 0) {
                // root node
                parent = DocId.NULL;
            } else {
                if (shareableNodes.get(n)) {
                    // a shareable node may have multiple parents
                    parent = DocId.create(parentUUIDs);
                } else {
                    if (!existing) {
                        // try to resolve the parent within this segment
                        Term id = TermFactory.createUUIDTerm(parentUUIDs[0]);
                        TermDocs docs = termDocs(id);
                        try {
                            while (docs.next()) {
                                if (!deleted.get(docs.doc())) {
                                    plainDocId = docs.doc();
                                    parent = DocId.create(plainDocId);
                                    break;
                                }
                            }
                        } finally {
                            docs.close();
                        }
                    }
                    // if still null, then parent is not in this index, or existing
                    // DocId was invalid. thus, only allowed to create DocId from uuid
                    if (parent == null) {
                        parent = DocId.create(parentUUIDs[0]);
                    }
                }
            }
            // finally put to cache
            if (plainDocId != -1) {
                // PlainDocId
                inSegmentParents[n] = plainDocId;
            } else {
                // UUIDDocId
                foreignParentDocIds.put(n, parent);
                if (existing) {
                    // there was an existing parent reference in
                    // inSegmentParents, which was invalid and is replaced
                    // with a UUIDDocId (points to a foreign segment).
                    // mark as unknown
                    inSegmentParents[n] = -1;
                }
            }
        }
        return parent;
    }

    /**
     * Returns the tick value when this reader was created.
     *
     * @return the creation tick for this reader.
     */
    public long getCreationTick() {
        return creationTick;
    }

    //--------------------< FilterIndexReader overwrites >----------------------

    @Override
    public IndexReader[] getSequentialSubReaders() {
        // expose this reader as atomic; do not delegate to the wrapped reader
        return null;
    }

    @Override
    public FieldInfos getFieldInfos() {
        return ReaderUtil.getMergedFieldInfos(in);
    }

    /**
     * Uses the {@link #docNumber2id} cache for document lookups that are only
     * interested in the {@link FieldSelectors#UUID}.
     *
     * @param n the document number.
     * @param fieldSelector the field selector.
     * @return the document.
     * @throws CorruptIndexException if the index is corrupt.
     * @throws IOException if an error occurs while reading from the index.
     */
    public Document document(int n, FieldSelector fieldSelector)
            throws CorruptIndexException, IOException {
        if (fieldSelector == FieldSelectors.UUID) {
            Document doc;
            NodeId id = docNumber2id.get(n);
            if (id == null) {
                doc = super.document(n, fieldSelector);
                id = new NodeId(doc.get(FieldNames.UUID));
                docNumber2id.put(n, id);
            } else {
                // serve a synthetic single-field document from the cache
                doc = new Document();
                doc.add(new IDField(id));
            }
            return doc;
        } else {
            return super.document(n, fieldSelector);
        }
    }

    /**
     * If the field of <code>term</code> is {@link FieldNames#UUID} this
     * <code>CachingIndexReader</code> returns a <code>TermDocs</code> instance
     * with a cached document id. If <code>term</code> has any other field
     * the call is delegated to the base <code>IndexReader</code>.<br/>
     * If <code>term</code> is for a {@link FieldNames#UUID} field and this
     * <code>CachingIndexReader</code> does not have such a document,
     * {@link EmptyTermDocs#INSTANCE} is returned.
     *
     * @param term the term to start the <code>TermDocs</code> enumeration.
     * @return a TermDocs instance.
     * @throws IOException if an error occurs while reading from the index.
     */
    public TermDocs termDocs(Term term) throws IOException {
        // NOTE: identity comparison is intentional — Lucene interns field names
        if (term != null && term.field() == FieldNames.UUID) {
            // check cache if we have one
            if (cache != null) {
                DocNumberCache.Entry e = cache.get(term.text());
                if (e != null) {
                    // check if valid
                    // the cache may contain entries from a different reader
                    // with the same uuid. that happens when a node is updated
                    // and is reindexed. the node 'travels' from an older index
                    // to a newer one. the cache will still contain a cache
                    // entry from the old until it is overwritten by the
                    // newer index.
                    if (e.creationTick == creationTick && !isDeleted(e.doc)) {
                        return new SingleTermDocs(e.doc);
                    }
                }
                // not in cache or invalid
                TermDocs docs = in.termDocs(term);
                try {
                    if (docs.next()) {
                        // put to cache
                        cache.put(term.text(), this, docs.doc());
                        // and return
                        return new SingleTermDocs(docs.doc());
                    } else {
                        return EmptyTermDocs.INSTANCE;
                    }
                } finally {
                    docs.close();
                }
            }
        }
        return termDocsCache.termDocs(term);
    }

    /**
     * {@inheritDoc}
     */
    protected void doClose() throws IOException {
        try {
            cacheInitializer.waitUntilStopped();
        } catch (InterruptedException e) {
            // restore the interrupt status so callers further up the stack
            // can still observe the interruption; closing proceeds anyway
            Thread.currentThread().interrupt();
        }
        super.doClose();
    }

    //----------------------< internal >----------------------------------------

    /**
     * Returns the next creation tick value.
     *
     * @return the next creation tick value.
     */
    private static long getNextCreationTick() {
        synchronized (CachingIndexReader.class) {
            return currentTick++;
        }
    }

    /**
     * Initializes the {@link CachingIndexReader#inSegmentParents} and
     * {@link CachingIndexReader#foreignParentDocIds} caches.
     */
    private class CacheInitializer implements Runnable {

        /**
         * The {@link #inSegmentParents} is persisted using this filename.
         */
        private static final String FILE_CACHE_NAME_ARRAY = "cache.inSegmentParents";

        /**
         * From where to read.
         */
        private final IndexReader reader;

        /**
         * Set to <code>true</code> while this initializer does its work.
         */
        private boolean running = false;

        /**
         * Set to <code>true</code> when this index reader is about to be closed.
         */
        private volatile boolean stopRequested = false;

        /**
         * Creates a new initializer with the given <code>reader</code>.
         *
         * @param reader an index reader.
         */
        public CacheInitializer(IndexReader reader) {
            this.reader = reader;
        }

        /**
         * Initializes the cache.
         */
        public void run() {
            synchronized (this) {
                running = true;
            }
            try {
                if (stopRequested) {
                    // immediately return when stop is requested
                    return;
                }
                boolean initCacheFromFile = loadCacheFromFile();
                if (!initCacheFromFile) {
                    // file-based cache is not available, load from the
                    // repository
                    log.debug("persisted cache is not available, will load directly from the repository.");
                    initializeParents(reader);
                }
            } catch (Exception e) {
                // only log warn message during regular operation
                if (!stopRequested) {
                    log.warn("Error initializing parents cache.", e);
                }
            } finally {
                synchronized (this) {
                    running = false;
                    // wake up waitUntilStopped()
                    notifyAll();
                }
            }
        }

        /**
         * Waits until this cache initializer is stopped.
         *
         * @throws InterruptedException if the current thread is interrupted.
         */
        public void waitUntilStopped() throws InterruptedException {
            stopRequested = true;
            synchronized (this) {
                while (running) {
                    wait();
                }
            }
        }

        /**
         * Initializes the {@link CachingIndexReader#inSegmentParents} and
         * {@link CachingIndexReader#foreignParentDocIds} caches.
         *
         * @param reader the underlying index reader.
         * @throws IOException if an error occurs while reading from the index.
         */
        private void initializeParents(IndexReader reader) throws IOException {
            double foreignParents = 0;
            long time = System.currentTimeMillis();
            // initialize in multiple passes with
            // a fixed number of nodes at a time
            final Term[] startUUID = new Term[]{TermFactory.createUUIDTerm("")};
            for (;;) {
                // docs is keyed by Integer doc number first, then re-keyed by
                // NodeId once the parent is known (see PARENT pass below)
                final Map<Object, NodeInfo> docs = new HashMap<Object, NodeInfo>();
                final Map<NodeId, Integer> parents = new HashMap<NodeId, Integer>();
                if (startUUID[0].text().length() != 0) {
                    // force reading the next uuid after startUUID
                    startUUID[0] = TermFactory.createUUIDTerm(startUUID[0].text() + "_");
                }
                // read UUIDs
                collectTermDocs(reader, startUUID[0], new TermDocsCollector() {
                    public boolean collect(Term term, TermDocs tDocs) throws IOException {
                        // remember start term for next batch
                        startUUID[0] = term;
                        if (docs.size() >= MAX_CACHE_INIT_BATCH_SIZE) {
                            return false;
                        }
                        NodeId id = new NodeId(term.text());
                        while (tDocs.next()) {
                            int doc = tDocs.doc();
                            // skip shareable nodes
                            if (!shareableNodes.get(doc)) {
                                NodeInfo info = new NodeInfo(doc, id);
                                docs.put(doc, info);
                            }
                        }
                        return true;
                    }
                });
                if (docs.isEmpty()) {
                    // no more nodes to initialize, persist cache to file
                    saveCacheToFile();
                    break;
                }
                // read PARENTs (full scan)
                collectTermDocs(reader, new Term(FieldNames.PARENT, "0"), new TermDocsCollector() {
                    public boolean collect(Term term, TermDocs tDocs) throws IOException {
                        NodeId id = new NodeId(term.text());
                        while (tDocs.next()) {
                            Integer docId = tDocs.doc();
                            NodeInfo info = docs.get(docId);
                            if (info == null) {
                                // shareable node, see above
                                // or cache init is batched
                            } else {
                                info.parent = id;
                                docs.remove(docId);
                                docs.put(info.id, info);
                                parents.put(id, null);
                            }
                        }
                        return true;
                    }
                });
                // scan UUIDs again to get document numbers for parents
                collectTermDocs(reader, TermFactory.createUUIDTerm(""), new TermDocsCollector() {
                    public boolean collect(Term term, TermDocs tDocs) throws IOException {
                        NodeId id = new NodeId(term.text());
                        while (tDocs.next()) {
                            int doc = tDocs.doc();
                            if (parents.containsKey(id)) {
                                parents.put(id, doc);
                            }
                        }
                        return true;
                    }
                });
                if (stopRequested) {
                    return;
                }
                for (NodeInfo info : docs.values()) {
                    int parentDocId = -1;
                    NodeInfo parent = docs.get(info.parent);
                    if (parent != null) {
                        parentDocId = parent.docId;
                    } else {
                        Integer docId = parents.get(info.parent);
                        if (docId != null) {
                            parentDocId = docId;
                        }
                    }
                    if (parentDocId != -1) {
                        // parent lives in this segment
                        inSegmentParents[info.docId] = parentDocId;
                    } else if (info.parent != null) {
                        // parent lives in a foreign segment
                        foreignParents++;
                        foreignParentDocIds.put(info.docId, DocId.create(info.parent));
                    } else if (shareableNodes.get(info.docId)) {
                        Document doc = reader.document(info.docId, FieldSelectors.UUID_AND_PARENT);
                        foreignParentDocIds.put(info.docId, DocId.create(doc.getValues(FieldNames.PARENT)));
                    } else {
                        // no parent -> root node
                        foreignParentDocIds.put(info.docId, DocId.NULL);
                    }
                }
            }
            if (log.isDebugEnabled()) {
                NumberFormat nf = NumberFormat.getPercentInstance();
                nf.setMaximumFractionDigits(1);
                time = System.currentTimeMillis() - time;
                if (inSegmentParents.length > 0) {
                    foreignParents /= inSegmentParents.length;
                }
                log.debug("initialized {} DocIds in {} ms, {} foreign parents",
                        new Object[]{
                                inSegmentParents.length,
                                time,
                                nf.format(foreignParents)
                        });
            }
        }

        /**
         * Collects term docs for a given start term. All terms with the same
         * field as <code>start</code> are enumerated.
         *
         * @param reader the index reader.
         * @param start the term where to start the term enumeration.
         * @param collector collects the term docs for each term.
         * @throws IOException if an error occurs while reading from the index.
         */
        private void collectTermDocs(IndexReader reader,
                                     Term start,
                                     TermDocsCollector collector)
                throws IOException {
            TermDocs tDocs = reader.termDocs();
            try {
                TermEnum terms = reader.terms(start);
                try {
                    int count = 0;
                    do {
                        Term t = terms.term();
                        // identity comparison: Lucene interns field names
                        if (t != null && t.field() == start.field()) {
                            tDocs.seek(terms);
                            if (!collector.collect(t, tDocs)) {
                                // collector indicated break
                                break;
                            }
                        } else {
                            break;
                        }
                        // once in a while check if we should quit
                        if (++count % 10000 == 0) {
                            if (stopRequested) {
                                break;
                            }
                        }
                    } while (terms.next());
                } finally {
                    terms.close();
                }
            } finally {
                tDocs.close();
            }
        }

        /**
         * Persists the cache info {@link #inSegmentParents} to a file:
         * {@link #FILE_CACHE_NAME_ARRAY}, for faster init times on startup.
         *
         * see https://issues.apache.org/jira/browse/JCR-3107
         */
        public void saveCacheToFile() throws IOException {
            IndexOutput io = null;
            try {
                io = reader.directory().createOutput(FILE_CACHE_NAME_ARRAY);
                for (int parent : inSegmentParents) {
                    io.writeInt(parent);
                }
            } catch (Exception e) {
                // best effort: failing to persist only costs init time later
                log.error(
                        "Error saving " + FILE_CACHE_NAME_ARRAY + ": "
                                + e.getMessage(), e);
            } finally {
                if (io != null) {
                    io.close();
                }
            }
        }

        /**
         * Loads the cache info {@link #inSegmentParents} from the file
         * {@link #FILE_CACHE_NAME_ARRAY}.
         *
         * see https://issues.apache.org/jira/browse/JCR-3107
         *
         * @return true if the cache has been initialized of false if the cache
         *         file does not exist yet, or an error happened
         */
        private boolean loadCacheFromFile() throws IOException {
            IndexInput ii = null;
            try {
                long time = System.currentTimeMillis();
                ii = reader.directory().openInput(FILE_CACHE_NAME_ARRAY);
                for (int i = 0; i < inSegmentParents.length; i++) {
                    inSegmentParents[i] = ii.readInt();
                }
                log.debug(
                        "persisted cache initialized {} DocIds in {} ms",
                        new Object[] { inSegmentParents.length,
                                System.currentTimeMillis() - time });
                return true;
            } catch (FileNotFoundException ignore) {
                // expected in the case where the file-based cache has not been
                // initialized yet
            } catch (IOException ignore) {
                log.warn(
                        "Saved state of CachingIndexReader is corrupt, will try to remove offending file "
                                + FILE_CACHE_NAME_ARRAY, ignore);
                // In the case where is a read error, the cache file is removed
                // so it can be recreated after
                // the cache loads the data from the repository directly
                reader.directory().deleteFile(FILE_CACHE_NAME_ARRAY);
            } finally {
                if (ii != null) {
                    ii.close();
                }
            }
            return false;
        }
    }

    /**
     * Simple interface to collect a term and its term docs.
     */
    private interface TermDocsCollector {
        /**
         * Called for each term encountered.
         *
         * @param term the term.
         * @param tDocs the term docs of <code>term</code>.
         * @return false if the collector does not wish to collect more TermDocs.
         * @throws IOException if an error occurs while reading from the index.
         */
        boolean collect(Term term, TermDocs tDocs) throws IOException;
    }

    /**
     * Lightweight holder pairing a document number with its node id and
     * (once discovered) its parent node id.
     */
    private final static class NodeInfo {

        final int docId;

        final NodeId id;

        NodeId parent;

        public NodeInfo(int docId, NodeId id) {
            this.docId = docId;
            this.id = id;
        }
    }
}
| |
package org.csap.agent.services ;
import java.util.Arrays ;
import java.util.List ;
import java.util.regex.Matcher ;
import java.util.stream.Collectors ;
import org.csap.agent.CsapConstants ;
import org.springframework.boot.context.properties.ConfigurationProperties ;
@ConfigurationProperties ( CsapConstants.CONFIGURATION_PREFIX + ".os-commands" )
public class OsCommands {
private static List<String> NOT_INITIALIZED = Arrays.asList( "Not", "Initialized" ) ;
private List<String> processStatus = NOT_INITIALIZED ;
private List<String> systemProcessMetrics = NOT_INITIALIZED ;
private List<String> systemNetworkDevices = NOT_INITIALIZED ;
private List<String> systemNetworkPorts = NOT_INITIALIZED ;
private List<String> systemNetworkListenPorts = NOT_INITIALIZED ;
private List<String> systemDiskWithRateOnly = NOT_INITIALIZED ;
private List<String> systemDiskWithUtilization = NOT_INITIALIZED ;
private List<String> criPs = NOT_INITIALIZED ;
private List<String> criPidReport = NOT_INITIALIZED ;
private List<String> criInspect = NOT_INITIALIZED ;
private List<String> diskUsageSystem = NOT_INITIALIZED ;
private List<String> diskUsageAbout = NOT_INITIALIZED ;
private List<String> diskUsageCsap = NOT_INITIALIZED ;
private List<String> systemNetworkStats = NOT_INITIALIZED ;
private List<String> systemPackages = NOT_INITIALIZED ;
private List<String> systemPackageDetails = NOT_INITIALIZED ;
private List<String> nfsMountLocation = NOT_INITIALIZED ;
private List<String> systemServices = NOT_INITIALIZED ;
private List<String> systemServiceListing = NOT_INITIALIZED ;
private List<String> systemServiceDetails = NOT_INITIALIZED ;
private List<String> serviceDiskIo = NOT_INITIALIZED ;
private List<String> serviceDiskUsage = NOT_INITIALIZED ;
private List<String> serviceDiskUsageDf = NOT_INITIALIZED ;
private List<String> serviceSockets = NOT_INITIALIZED ;
private List<String> serviceSocketsDocker = NOT_INITIALIZED ;
private List<String> fileReadPermissions = NOT_INITIALIZED ;
private List<String> serviceJobsDiskClean = NOT_INITIALIZED ;
private List<String> infraTestDisk = NOT_INITIALIZED ;
private List<String> infraTestCpu = NOT_INITIALIZED ;
private List<String> dockerImageExport = NOT_INITIALIZED ;
private List<String> dockerImageLoad = NOT_INITIALIZED ;
private List<String> dockerSocketStats = NOT_INITIALIZED ;
private List<String> dockerContainerPids = NOT_INITIALIZED ;
private List<String> govcDatastoreList = NOT_INITIALIZED ;
private List<String> govcDatastoreInfo = NOT_INITIALIZED ;
private List<String> govcDatastoreLs = NOT_INITIALIZED ;
private List<String> govcDatastoreRecurse = NOT_INITIALIZED ;
private List<String> govcVmList = NOT_INITIALIZED ;
private List<String> govcVmFind = NOT_INITIALIZED ;
private List<String> govcVmInfo = NOT_INITIALIZED ;
public final static String LINE_SEPARATOR = "\n" ;
List<String> toList ( String item ) {
return Arrays.asList( item.split( LINE_SEPARATOR ) ) ;
}
public static String asScript ( List<String> lines ) {
return lines.stream( ).collect( Collectors.joining( "\n" ) ) ;
}
public List<String> getProcessStatus ( ) {
return processStatus ;
}
public void setProcessStatus ( String processStatus ) {
this.processStatus = toList( processStatus ) ;
}
@Override
public String toString ( ) {
return "OsCommands [processStatus=" + processStatus + ", systemProcessMetrics=" + systemProcessMetrics
+ ", systemNetworkDevices="
+ systemNetworkDevices + ", systemNetworkPorts=" + systemNetworkPorts + ", systemDiskWithRateOnly="
+ systemDiskWithRateOnly
+ ", systemDiskWithUtilization=" + systemDiskWithUtilization + ", diskUsageSystem=" + diskUsageSystem
+ ", systemNetworkStats=" + systemNetworkStats + ", systemPackages=" + systemPackages
+ ", systemPackageDetails="
+ systemPackageDetails + ", systemServices=" + systemServices + ", systemServiceListing="
+ systemServiceListing
+ ", systemServiceDetails=" + systemServiceDetails + ", serviceDiskIo=" + serviceDiskIo
+ ", serviceDiskUsage="
+ serviceDiskUsage + ", serviceDiskUsageDf=" + serviceDiskUsageDf + ", serviceSockets=" + serviceSockets
+ ", serviceSocketsDocker=" + serviceSocketsDocker + ", fileReadPermissions=" + fileReadPermissions
+ ", infraTestDisk="
+ infraTestDisk + ", infraTestCpu=" + infraTestCpu + ", dockerImageExport=" + dockerImageExport
+ ", dockerImageLoad="
+ dockerImageLoad + ", dockerSocketStats=" + dockerSocketStats + ", dockerContainerPids="
+ dockerContainerPids
+ ", govcDatastoreList=" + govcDatastoreList + "]" ;
}
public List<String> getDiskUsageSystem ( ) {
return diskUsageSystem ;
}
public void setDiskUsageSystem ( String diskUsageSystem ) {
this.diskUsageSystem = toList( diskUsageSystem ) ;
}
public List<String> getDiskUsageAbout ( ) {
return diskUsageAbout ;
}
public void setDiskUsageAbout ( String diskUsageAbout ) {
this.diskUsageAbout = toList( diskUsageAbout ) ;
}
public List<String> getDiskUsageCsap ( ) {
return diskUsageCsap ;
}
public void setDiskUsageCsap ( String diskUsageCsap ) {
this.diskUsageCsap = toList( diskUsageCsap ) ;
}
public List<String> getGovcDatastoreList ( ) {
return govcDatastoreList ;
}
public void setGovcDatastoreList ( String govcDatastoreList ) {
this.govcDatastoreList = toList( govcDatastoreList ) ;
}
public List<String> getGovcVmList ( String path ) {
return govcVmList
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$path" ), path ) )
.collect( Collectors.toList( ) ) ;
}
public void setGovcVmList ( String govcVmList ) {
this.govcVmList = toList( govcVmList ) ;
}
public List<String> getGovcVmFind ( String vmFilter ) {
return govcVmFind
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$vmFilter" ), vmFilter ) )
.collect( Collectors.toList( ) ) ;
}
public void setGovcVmFind ( String govcVmFind ) {
this.govcVmFind = toList( govcVmFind ) ;
}
public List<String> getGovcVmInfo ( String vmPath ) {
return govcVmInfo
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$path" ), vmPath ) )
.collect( Collectors.toList( ) ) ;
}
public void setGovcVmInfo ( String govcVmInfo ) {
this.govcVmInfo = toList( govcVmInfo ) ;
}
public List<String> getGovcDatastoreInfo ( String datastore ) {
return govcDatastoreInfo
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$datastore" ), datastore ) )
.collect( Collectors.toList( ) ) ;
}
public void setGovcDatastoreInfo ( String govcDatastoreInfo ) {
this.govcDatastoreInfo = toList( govcDatastoreInfo ) ;
}
public List<String> getGovcDatastoreLs ( String datastore , String path ) {
return govcDatastoreLs
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$datastore" ), datastore ) )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$path" ), path ) )
.collect( Collectors.toList( ) ) ;
}
public void setGovcDatastoreLs ( String govcDatastoreLs ) {
this.govcDatastoreLs = toList( govcDatastoreLs ) ;
}
public List<String> getGovcDatastoreRecurse ( String datastore ) {
return govcDatastoreRecurse
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$datastore" ), datastore ) )
.collect( Collectors.toList( ) ) ;
}
public void setGovcDatastoreRecurse ( String govcDatastoreRecurse ) {
this.govcDatastoreRecurse = toList( govcDatastoreRecurse ) ;
}
public List<String> getFileReadPermissions ( String user , String file ) {
return fileReadPermissions
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$user" ), user ) )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$file" ), file ) )
.collect( Collectors.toList( ) ) ;
}
public void setFileReadPermissions ( String fileReadPermissions ) {
this.fileReadPermissions = toList( fileReadPermissions ) ;
}
public List<String> getServiceDiskIo ( ) {
return serviceDiskIo ;
}
public void setServiceDiskIo ( String serviceDiskIo ) {
this.serviceDiskIo = toList( serviceDiskIo ) ;
}
public List<String> getServiceSockets ( ) {
return serviceSockets ;
}
public void setServiceSockets ( String serviceSockets ) {
this.serviceSockets = toList( serviceSockets ) ;
}
public List<String> getServiceSocketsDocker ( String pid ) {
return serviceSocketsDocker
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$pid" ), pid ) )
.collect( Collectors.toList( ) ) ;
}
public void setServiceSocketsDocker ( String serviceSocketsDocker ) {
this.serviceSocketsDocker = toList( serviceSocketsDocker ) ;
}
public List<String> getDockerImageExport ( String destination , String imageName ) {
return dockerImageExport
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$destination" ), destination ) )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$imageName" ), imageName ) )
.collect( Collectors.toList( ) ) ;
}
public void setDockerImageExport ( String dockerImageExport ) {
this.dockerImageExport = toList( dockerImageExport ) ;
}
public List<String> getDockerImageLoad ( String sourceTar ) {
return dockerImageLoad
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$sourceTar" ), sourceTar ) )
.collect( Collectors.toList( ) ) ;
}
public void setDockerImageLoad ( String dockerImageLoad ) {
this.dockerImageLoad = toList( dockerImageLoad ) ;
}
public List<String> getDockerSocketStats ( String pid ) {
return dockerSocketStats
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$pid" ), pid ) )
.collect( Collectors.toList( ) ) ;
}
public void setDockerSocketStats ( String dockerSocketStats ) {
this.dockerSocketStats = toList( dockerSocketStats ) ;
}
public List<String> getDockerContainerPids ( ) {
return dockerContainerPids ;
}
public void setDockerContainerPids ( String script ) {
this.dockerContainerPids = toList( script ) ;
}
// public List<String> getSystemNetworkStats () { return systemNetworkStats ; }
public List<String> getSystemNetworkStats ( String interfacePattern ) {
return systemNetworkStats
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement(
"$interfacePattern" ),
interfacePattern.substring( 0, interfacePattern.length( ) - 1 ) ) )
.collect( Collectors.toList( ) ) ;
}
public void setSystemNetworkStats ( String systemNetworkStats ) {
this.systemNetworkStats = toList( systemNetworkStats ) ;
}
public List<String> getSystemPackages ( ) {
return systemPackages ;
}
public void setSystemPackages ( String systemPackages ) {
this.systemPackages = toList( systemPackages ) ;
}
public List<String> getNfsMountLocation ( String mountSource ) {
return nfsMountLocation
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$mountSource" ), mountSource ) )
.collect( Collectors.toList( ) ) ;
}
public void setDiskNfsMountLocation ( String nfsMountLocation ) {
this.nfsMountLocation = toList( nfsMountLocation ) ;
}
public List<String> getSystemPackageDetails ( String packageName ) {
return systemPackageDetails
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$package" ), packageName ) )
.collect( Collectors.toList( ) ) ;
}
public void setSystemPackageDetails ( String systemPackageDetails ) {
this.systemPackageDetails = toList( systemPackageDetails ) ;
}
public List<String> getSystemServices ( ) {
return systemServices ;
}
public void setSystemServices ( String systemServices ) {
this.systemServices = toList( systemServices ) ;
}
public List<String> getSystemServiceDetails ( String serviceName ) {
return systemServiceDetails
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$serviceName" ), serviceName ) )
.collect( Collectors.toList( ) ) ;
}
public void setSystemServiceDetails ( String systemServiceDetails ) {
this.systemServiceDetails = toList( systemServiceDetails ) ;
}
public List<String> getServiceDiskUsage ( String servicePaths ) {
return serviceDiskUsage
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$servicePaths" ), servicePaths ) )
.collect( Collectors.toList( ) ) ;
}
public void setServiceDiskUsage ( String serviceDiskUsage ) {
this.serviceDiskUsage = toList( serviceDiskUsage ) ;
}
public List<String> getServiceDiskUsageDf ( ) {
return serviceDiskUsageDf ;
}
public void setServiceDiskUsageDf ( String serviceDiskUsageDf ) {
this.serviceDiskUsageDf = toList( serviceDiskUsageDf ) ;
}
public List<String> getInfraTestDisk ( String blockSize , String numBlocks ) {
return infraTestDisk
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$blockSize" ), blockSize ) )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$numBlocks" ), numBlocks ) )
.collect( Collectors.toList( ) ) ;
}
public void setInfraTestDisk ( String infraTestDisk ) {
this.infraTestDisk = toList( infraTestDisk ) ;
}
public List<String> getInfraTestCpu ( String numLoops ) {
return infraTestCpu
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$numLoops" ), numLoops ) )
.collect( Collectors.toList( ) ) ;
}
public void setInfraTestCpu ( String infraTestCpu ) {
this.infraTestCpu = toList( infraTestCpu ) ;
}
public List<String> getSystemDiskWithUtilization ( ) {
return systemDiskWithUtilization ;
}
public void setSystemDiskWithUtilization ( String systemDiskWithUtilization ) {
this.systemDiskWithUtilization = toList( systemDiskWithUtilization ) ;
}
public List<String> getSystemDiskWithRateOnly ( ) {
return systemDiskWithRateOnly ;
}
public void setSystemDiskWithRateOnly ( String systemDiskWithRateOnly ) {
this.systemDiskWithRateOnly = toList( systemDiskWithRateOnly ) ;
}
public List<String> getSystemServiceListing ( String staging ) {
return systemServiceListing
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$$platform" ), staging ) )
.collect( Collectors.toList( ) ) ;
}
public void setSystemServiceListing ( String systemServiceListing ) {
this.systemServiceListing = toList( systemServiceListing ) ;
}
public List<String> getSystemProcessMetrics ( int seconds ) {
return systemProcessMetrics
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$seconds" ), Integer.toString( seconds ) ) )
.collect( Collectors.toList( ) ) ;
}
public void setSystemProcessMetrics ( String systemProcessMetrics ) {
this.systemProcessMetrics = toList( systemProcessMetrics ) ;
}
public List<String> getSystemNetworkDevices ( ) {
return systemNetworkDevices ;
}
public void setSystemNetworkDevices ( String systemNetworkDevices ) {
this.systemNetworkDevices = toList( systemNetworkDevices ) ;
}
public List<String> getSystemNetworkPorts ( ) {
return systemNetworkPorts ;
}
public void setSystemNetworkPorts ( String systemNetworkPorts ) {
this.systemNetworkPorts = toList( systemNetworkPorts ) ;
}
public List<String> getSystemNetworkListenPorts ( ) {
return systemNetworkListenPorts ;
}
public void setSystemNetworkListenPorts ( String systemNetworkListenPorts ) {
this.systemNetworkListenPorts = toList( systemNetworkListenPorts ) ;
}
public List<String> getServiceJobsDiskClean (
String jobPath ,
int maxDepth ,
int numDays ,
boolean pruneByFolder ,
boolean runPrune ) {
return serviceJobsDiskClean
.stream( )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$jobPath" ), jobPath ) )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$maxDepth" ), Integer.toString( maxDepth ) ) )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$numDays" ), Integer.toString( numDays ) ) )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$pruneByFolder" ), Boolean.toString(
pruneByFolder ) ) )
.map( line -> line.replaceAll( Matcher.quoteReplacement( "$runPrune" ), Boolean.toString( runPrune ) ) )
.collect( Collectors.toList( ) ) ;
}
/** Parses the raw disk-clean job template into the line list. */
public void setServiceJobsDiskClean ( String serviceJobsDiskClean ) {
    this.serviceJobsDiskClean = toList( serviceJobsDiskClean ) ;
}
/** Returns the configured CRI "ps" command lines verbatim. */
public List<String> getCriPs ( ) {
    return criPs ;
}
/** Parses the raw crictl-ps template into the line list. */
public void setCriPs ( String crictlPs ) {
    this.criPs = toList( crictlPs ) ;
}
/**
 * Returns the CRI inspect command lines with "$id" replaced by the container id.
 *
 * @param id container id substituted for "$id"
 * @return a new list; the stored template is not modified
 */
public List<String> getCriInspect ( String id ) {
    return criInspect
        .stream( )
        // FIX: quote the replacement as well — a raw id containing '\' or '$'
        // would be treated as a group reference/escape by String.replaceAll.
        .map( line -> line.replaceAll( Matcher.quoteReplacement( "$id" ), Matcher.quoteReplacement( id ) ) )
        .collect( Collectors.toList( ) ) ;
}
/** Parses the raw crictl-inspect template into the line list. */
public void setCriInspect ( String crictlInspect ) {
    this.criInspect = toList( crictlInspect ) ;
}
/** Returns the configured CRI pid-report command lines verbatim. */
public List<String> getCriPidReport ( ) {
    return criPidReport ;
}
/** Parses the raw pid-report template into the line list. */
public void setCriPidReport ( String criPidReport ) {
    this.criPidReport = toList( criPidReport ) ;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.client.impl.producer;
import java.io.IOException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.rocketmq.common.message.Message;
import org.apache.rocketmq.common.message.MessageClientIDSetter;
import org.apache.rocketmq.common.message.MessageExt;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.common.message.MessageConst;
import org.apache.rocketmq.common.message.MessageDecoder;
import org.apache.rocketmq.common.message.MessageBatch;
import org.apache.rocketmq.common.message.MessageAccessor;
import org.apache.rocketmq.common.message.MessageType;
import org.apache.rocketmq.common.message.MessageId;
import org.apache.rocketmq.client.QueryResult;
import org.apache.rocketmq.client.Validators;
import org.apache.rocketmq.client.common.ClientErrorCode;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.hook.CheckForbiddenContext;
import org.apache.rocketmq.client.hook.CheckForbiddenHook;
import org.apache.rocketmq.client.hook.SendMessageContext;
import org.apache.rocketmq.client.hook.SendMessageHook;
import org.apache.rocketmq.client.impl.CommunicationMode;
import org.apache.rocketmq.client.impl.MQClientManager;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.client.latency.MQFaultStrategy;
import org.apache.rocketmq.client.log.ClientLogger;
import org.apache.rocketmq.client.producer.DefaultMQProducer;
import org.apache.rocketmq.client.producer.LocalTransactionExecuter;
import org.apache.rocketmq.client.producer.LocalTransactionState;
import org.apache.rocketmq.client.producer.MessageQueueSelector;
import org.apache.rocketmq.client.producer.SendCallback;
import org.apache.rocketmq.client.producer.SendResult;
import org.apache.rocketmq.client.producer.SendStatus;
import org.apache.rocketmq.client.producer.TransactionCheckListener;
import org.apache.rocketmq.client.producer.TransactionMQProducer;
import org.apache.rocketmq.client.producer.TransactionSendResult;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.ServiceState;
import org.apache.rocketmq.common.UtilAll;
import org.apache.rocketmq.common.help.FAQUrl;
import org.apache.rocketmq.common.protocol.ResponseCode;
import org.apache.rocketmq.common.protocol.header.CheckTransactionStateRequestHeader;
import org.apache.rocketmq.common.protocol.header.EndTransactionRequestHeader;
import org.apache.rocketmq.common.protocol.header.SendMessageRequestHeader;
import org.apache.rocketmq.common.sysflag.MessageSysFlag;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.remoting.common.RemotingHelper;
import org.apache.rocketmq.remoting.exception.RemotingConnectException;
import org.apache.rocketmq.remoting.exception.RemotingException;
import org.apache.rocketmq.remoting.exception.RemotingTimeoutException;
import org.slf4j.Logger;
public class DefaultMQProducerImpl implements MQProducerInner {
    private final Logger log = ClientLogger.getLog();
    // Used only to generate a random invokeID per send attempt for log correlation.
    private final Random random = new Random();
    // Facade object carrying all user-visible configuration for this producer.
    private final DefaultMQProducer defaultMQProducer;
    // Route/queue cache per topic; refreshed from the name server on demand.
    private final ConcurrentMap<String/* topic */, TopicPublishInfo> topicPublishInfoTable =
        new ConcurrentHashMap<String, TopicPublishInfo>();
    private final ArrayList<SendMessageHook> sendMessageHookList = new ArrayList<SendMessageHook>();
    private final RPCHook rpcHook;
    // Transaction-check infrastructure; only initialized via initTransactionEnv()
    // for transactional producers.
    protected BlockingQueue<Runnable> checkRequestQueue;
    protected ExecutorService checkExecutor;
    private ServiceState serviceState = ServiceState.CREATE_JUST;
    // Shared client instance (one per clientId); set during start().
    private MQClientInstance mQClientFactory;
    private ArrayList<CheckForbiddenHook> checkForbiddenHookList = new ArrayList<CheckForbiddenHook>();
    // zlib level for message-body compression, overridable via system property.
    private int zipCompressLevel = Integer.parseInt(System.getProperty(MixAll.MESSAGE_COMPRESS_LEVEL, "5"));
    // Broker selection strategy with latency-based fault avoidance.
    private MQFaultStrategy mqFaultStrategy = new MQFaultStrategy();
    /** Creates an implementation bound to the given facade, with no RPC hook. */
    public DefaultMQProducerImpl(final DefaultMQProducer defaultMQProducer) {
        this(defaultMQProducer, null);
    }
    /**
     * Creates an implementation bound to the given facade.
     *
     * @param defaultMQProducer configuration facade (never read defensively; stored as-is)
     * @param rpcHook           optional hook applied to remoting calls; may be null
     */
    public DefaultMQProducerImpl(final DefaultMQProducer defaultMQProducer, RPCHook rpcHook) {
        this.defaultMQProducer = defaultMQProducer;
        this.rpcHook = rpcHook;
    }
    /**
     * Registers a hook invoked before each send to veto forbidden messages.
     * NOTE(review): the backing ArrayList is not thread-safe; registration is
     * presumably done before start() — confirm with callers.
     */
    public void registerCheckForbiddenHook(CheckForbiddenHook checkForbiddenHook) {
        this.checkForbiddenHookList.add(checkForbiddenHook);
        log.info("register a new checkForbiddenHook. hookName={}, allHookSize={}", checkForbiddenHook.hookName(),
            checkForbiddenHookList.size());
    }
    /**
     * Builds the bounded queue and thread pool used to run broker-initiated
     * transaction-state checks. Only meaningful for {@link TransactionMQProducer};
     * the unchecked cast will throw ClassCastException for other producers.
     */
    public void initTransactionEnv() {
        TransactionMQProducer producer = (TransactionMQProducer) this.defaultMQProducer;
        this.checkRequestQueue = new LinkedBlockingQueue<Runnable>(producer.getCheckRequestHoldMax());
        this.checkExecutor = new ThreadPoolExecutor(
            producer.getCheckThreadPoolMinSize(),
            producer.getCheckThreadPoolMaxSize(),
            1000 * 60, // idle threads above the core size die after 60s
            TimeUnit.MILLISECONDS,
            this.checkRequestQueue);
    }
public void destroyTransactionEnv() {
this.checkExecutor.shutdown();
this.checkRequestQueue.clear();
}
    /**
     * Registers a hook invoked before and after each send attempt.
     * NOTE(review): backing ArrayList is unsynchronized; assumed to be called
     * during setup only — confirm with callers.
     */
    public void registerSendMessageHook(final SendMessageHook hook) {
        this.sendMessageHookList.add(hook);
        log.info("register sendMessage Hook, {}", hook.hookName());
    }
    /** Starts this producer and the shared client factory. */
    public void start() throws MQClientException {
        this.start(true);
    }
    /**
     * Drives the CREATE_JUST -> RUNNING state transition: validates config,
     * registers this producer group with the shared client factory, seeds the
     * route table with the auto-create topic key, optionally starts the factory,
     * and finally sends a heartbeat to all brokers.
     *
     * @param startFactory whether to also start the shared MQClientInstance
     * @throws MQClientException on invalid config, duplicate group registration,
     *                           or when called in any state other than CREATE_JUST
     */
    public void start(final boolean startFactory) throws MQClientException {
        switch (this.serviceState) {
            case CREATE_JUST:
                // Pessimistically mark failed; set RUNNING only after all steps succeed.
                this.serviceState = ServiceState.START_FAILED;
                this.checkConfig();
                // Inner producers keep their instance name; user producers switch to
                // the PID so multiple JVM processes on one host get distinct clientIds.
                if (!this.defaultMQProducer.getProducerGroup().equals(MixAll.CLIENT_INNER_PRODUCER_GROUP)) {
                    this.defaultMQProducer.changeInstanceNameToPID();
                }
                this.mQClientFactory = MQClientManager.getInstance().getAndCreateMQClientInstance(this.defaultMQProducer, rpcHook);
                boolean registerOK = mQClientFactory.registerProducer(this.defaultMQProducer.getProducerGroup(), this);
                if (!registerOK) {
                    this.serviceState = ServiceState.CREATE_JUST;
                    throw new MQClientException("The producer group[" + this.defaultMQProducer.getProducerGroup()
                        + "] has been created before, specify another name please." + FAQUrl.suggestTodo(FAQUrl.GROUP_NAME_DUPLICATE_URL),
                        null);
                }
                // Seed the route cache so the default (auto-create) topic is publishable.
                this.topicPublishInfoTable.put(this.defaultMQProducer.getCreateTopicKey(), new TopicPublishInfo());
                if (startFactory) {
                    mQClientFactory.start();
                }
                log.info("the producer [{}] start OK. sendMessageWithVIPChannel={}", this.defaultMQProducer.getProducerGroup(),
                    this.defaultMQProducer.isSendMessageWithVIPChannel());
                this.serviceState = ServiceState.RUNNING;
                break;
            case RUNNING:
            case START_FAILED:
            case SHUTDOWN_ALREADY:
                // start() is one-shot; any other state is a caller error.
                throw new MQClientException("The producer service state not OK, maybe started once, "
                    + this.serviceState
                    + FAQUrl.suggestTodo(FAQUrl.CLIENT_SERVICE_NOT_OK),
                    null);
            default:
                break;
        }
        // Announce this producer to all known brokers immediately.
        this.mQClientFactory.sendHeartbeatToAllBrokerWithLock();
    }
private void checkConfig() throws MQClientException {
Validators.checkGroup(this.defaultMQProducer.getProducerGroup());
if (null == this.defaultMQProducer.getProducerGroup()) {
throw new MQClientException("producerGroup is null", null);
}
if (this.defaultMQProducer.getProducerGroup().equals(MixAll.DEFAULT_PRODUCER_GROUP)) {
throw new MQClientException("producerGroup can not equal " + MixAll.DEFAULT_PRODUCER_GROUP + ", please specify another one.",
null);
}
}
    /** Shuts down this producer and the shared client factory. */
    public void shutdown() {
        this.shutdown(true);
    }
    /**
     * Unregisters this producer group and transitions to SHUTDOWN_ALREADY.
     * Idempotent: calls in CREATE_JUST or SHUTDOWN_ALREADY are no-ops.
     *
     * @param shutdownFactory whether to also shut down the shared MQClientInstance
     */
    public void shutdown(final boolean shutdownFactory) {
        switch (this.serviceState) {
            case CREATE_JUST:
                break;
            case RUNNING:
                this.mQClientFactory.unregisterProducer(this.defaultMQProducer.getProducerGroup());
                if (shutdownFactory) {
                    this.mQClientFactory.shutdown();
                }
                log.info("the producer [{}] shutdown OK", this.defaultMQProducer.getProducerGroup());
                this.serviceState = ServiceState.SHUTDOWN_ALREADY;
                break;
            case SHUTDOWN_ALREADY:
                break;
            default:
                break;
        }
    }
@Override
public Set<String> getPublishTopicList() {
Set<String> topicList = new HashSet<String>();
for (String key : this.topicPublishInfoTable.keySet()) {
topicList.add(key);
}
return topicList;
}
@Override
public boolean isPublishTopicNeedUpdate(String topic) {
TopicPublishInfo prev = this.topicPublishInfoTable.get(topic);
return null == prev || !prev.ok();
}
@Override
public TransactionCheckListener checkListener() {
if (this.defaultMQProducer instanceof TransactionMQProducer) {
TransactionMQProducer producer = (TransactionMQProducer) defaultMQProducer;
return producer.getTransactionCheckListener();
}
return null;
}
    /**
     * Handles a broker-initiated "what happened to this half message?" request:
     * asynchronously consults the local {@link TransactionCheckListener} and
     * replies to the broker with COMMIT / ROLLBACK / UNKNOW via a oneway RPC.
     * Work is queued on {@code checkExecutor} (see initTransactionEnv()).
     */
    @Override
    public void checkTransactionState(final String addr, final MessageExt msg,
        final CheckTransactionStateRequestHeader header) {
        Runnable request = new Runnable() {
            // Capture request context so the task is self-contained on the executor.
            private final String brokerAddr = addr;
            private final MessageExt message = msg;
            private final CheckTransactionStateRequestHeader checkRequestHeader = header;
            private final String group = DefaultMQProducerImpl.this.defaultMQProducer.getProducerGroup();
            @Override
            public void run() {
                TransactionCheckListener transactionCheckListener = DefaultMQProducerImpl.this.checkListener();
                if (transactionCheckListener != null) {
                    // Default to UNKNOW so a throwing listener still produces a reply.
                    LocalTransactionState localTransactionState = LocalTransactionState.UNKNOW;
                    Throwable exception = null;
                    try {
                        localTransactionState = transactionCheckListener.checkLocalTransactionState(message);
                    } catch (Throwable e) {
                        log.error("Broker call checkTransactionState, but checkLocalTransactionState exception", e);
                        exception = e;
                    }
                    this.processTransactionState(
                        localTransactionState,
                        group,
                        exception);
                } else {
                    log.warn("checkTransactionState, pick transactionCheckListener by group[{}] failed", group);
                }
            }
            /** Translates the local decision into an EndTransaction oneway RPC back to the broker. */
            private void processTransactionState(
                final LocalTransactionState localTransactionState,
                final String producerGroup,
                final Throwable exception) {
                final EndTransactionRequestHeader thisHeader = new EndTransactionRequestHeader();
                thisHeader.setCommitLogOffset(checkRequestHeader.getCommitLogOffset());
                thisHeader.setProducerGroup(producerGroup);
                thisHeader.setTranStateTableOffset(checkRequestHeader.getTranStateTableOffset());
                thisHeader.setFromTransactionCheck(true);
                // Prefer the client-generated unique id; fall back to the store msgId.
                String uniqueKey = message.getProperties().get(MessageConst.PROPERTY_UNIQ_CLIENT_MESSAGE_ID_KEYIDX);
                if (uniqueKey == null) {
                    uniqueKey = message.getMsgId();
                }
                thisHeader.setMsgId(uniqueKey);
                thisHeader.setTransactionId(checkRequestHeader.getTransactionId());
                switch (localTransactionState) {
                    case COMMIT_MESSAGE:
                        thisHeader.setCommitOrRollback(MessageSysFlag.TRANSACTION_COMMIT_TYPE);
                        break;
                    case ROLLBACK_MESSAGE:
                        thisHeader.setCommitOrRollback(MessageSysFlag.TRANSACTION_ROLLBACK_TYPE);
                        log.warn("when broker check, client rollback this transaction, {}", thisHeader);
                        break;
                    case UNKNOW:
                        // NOT_TYPE tells the broker to keep asking later.
                        thisHeader.setCommitOrRollback(MessageSysFlag.TRANSACTION_NOT_TYPE);
                        log.warn("when broker check, client does not know this transaction state, {}", thisHeader);
                        break;
                    default:
                        break;
                }
                String remark = null;
                if (exception != null) {
                    remark = "checkLocalTransactionState Exception: " + RemotingHelper.exceptionSimpleDesc(exception);
                }
                try {
                    // Oneway: best effort, 3s timeout; failures are only logged.
                    DefaultMQProducerImpl.this.mQClientFactory.getMQClientAPIImpl().endTransactionOneway(brokerAddr, thisHeader, remark,
                        3000);
                } catch (Exception e) {
                    log.error("endTransactionOneway exception", e);
                }
            }
        };
        this.checkExecutor.submit(request);
    }
@Override
public void updateTopicPublishInfo(final String topic, final TopicPublishInfo info) {
if (info != null && topic != null) {
TopicPublishInfo prev = this.topicPublishInfoTable.put(topic, info);
if (prev != null) {
log.info("updateTopicPublishInfo prev is not null, " + prev.toString());
}
}
}
    /** Delegates the unit-mode flag to the configuration facade. */
    @Override
    public boolean isUnitMode() {
        return this.defaultMQProducer.isUnitMode();
    }
    /** Creates a topic with default (zero) system flags. */
    public void createTopic(String key, String newTopic, int queueNum) throws MQClientException {
        createTopic(key, newTopic, queueNum, 0);
    }
    /**
     * Creates a topic on the brokers owning {@code key}.
     *
     * @param key          existing topic/key used to locate target brokers
     * @param newTopic     topic to create (validated first)
     * @param queueNum     queue count for the new topic
     * @param topicSysFlag system flags for the new topic
     * @throws MQClientException if the producer is not RUNNING or the topic is invalid
     */
    public void createTopic(String key, String newTopic, int queueNum, int topicSysFlag) throws MQClientException {
        this.makeSureStateOK();
        Validators.checkTopic(newTopic);
        this.mQClientFactory.getMQAdminImpl().createTopic(key, newTopic, queueNum, topicSysFlag);
    }
    /**
     * Asserts the producer is in RUNNING state.
     *
     * @throws MQClientException when not RUNNING (not started, failed, or shut down)
     */
    private void makeSureStateOK() throws MQClientException {
        if (this.serviceState != ServiceState.RUNNING) {
            throw new MQClientException("The producer service state not OK, "
                + this.serviceState
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_SERVICE_NOT_OK),
                null);
        }
    }
    /** Fetches the publishable queues for a topic (state-checked admin delegate). */
    public List<MessageQueue> fetchPublishMessageQueues(String topic) throws MQClientException {
        this.makeSureStateOK();
        return this.mQClientFactory.getMQAdminImpl().fetchPublishMessageQueues(topic);
    }
    /** Finds the logical offset in {@code mq} closest to {@code timestamp} (ms). */
    public long searchOffset(MessageQueue mq, long timestamp) throws MQClientException {
        this.makeSureStateOK();
        return this.mQClientFactory.getMQAdminImpl().searchOffset(mq, timestamp);
    }
    /** Returns the max offset of the queue. */
    public long maxOffset(MessageQueue mq) throws MQClientException {
        this.makeSureStateOK();
        return this.mQClientFactory.getMQAdminImpl().maxOffset(mq);
    }
    /** Returns the min offset of the queue. */
    public long minOffset(MessageQueue mq) throws MQClientException {
        this.makeSureStateOK();
        return this.mQClientFactory.getMQAdminImpl().minOffset(mq);
    }
    /** Returns the store time of the earliest message in the queue. */
    public long earliestMsgStoreTime(MessageQueue mq) throws MQClientException {
        this.makeSureStateOK();
        return this.mQClientFactory.getMQAdminImpl().earliestMsgStoreTime(mq);
    }
    /** Looks up a single message by its (offset) message id. */
    public MessageExt viewMessage(
        String msgId) throws RemotingException, MQBrokerException, InterruptedException, MQClientException {
        this.makeSureStateOK();
        return this.mQClientFactory.getMQAdminImpl().viewMessage(msgId);
    }
    /** Queries messages by topic and key within a time range, up to maxNum results. */
    public QueryResult queryMessage(String topic, String key, int maxNum, long begin, long end)
        throws MQClientException, InterruptedException {
        this.makeSureStateOK();
        return this.mQClientFactory.getMQAdminImpl().queryMessage(topic, key, maxNum, begin, end);
    }
    /** Looks up a single message by its client-generated unique key. */
    public MessageExt queryMessageByUniqKey(String topic, String uniqKey)
        throws MQClientException, InterruptedException {
        this.makeSureStateOK();
        return this.mQClientFactory.getMQAdminImpl().queryMessageByUniqKey(topic, uniqKey);
    }
    /**
     * DEFAULT ASYNC -------------------------------------------------------
     */
    /** Asynchronous send with automatic queue selection and the configured default timeout. */
    public void send(Message msg,
        SendCallback sendCallback) throws MQClientException, RemotingException, InterruptedException {
        send(msg, sendCallback, this.defaultMQProducer.getSendMsgTimeout());
    }
public void send(Message msg, SendCallback sendCallback, long timeout)
throws MQClientException, RemotingException, InterruptedException {
try {
this.sendDefaultImpl(msg, CommunicationMode.ASYNC, sendCallback, timeout);
} catch (MQBrokerException e) {
throw new MQClientException("unknownn exception", e);
}
}
    /** Picks a queue via the latency-aware fault strategy, avoiding the last failed broker. */
    public MessageQueue selectOneMessageQueue(final TopicPublishInfo tpInfo, final String lastBrokerName) {
        return this.mqFaultStrategy.selectOneMessageQueue(tpInfo, lastBrokerName);
    }
    /** Records a send latency sample (or isolates the broker) for fault avoidance. */
    public void updateFaultItem(final String brokerName, final long currentLatency, boolean isolation) {
        this.mqFaultStrategy.updateFaultItem(brokerName, currentLatency, isolation);
    }
    /**
     * Core send path with queue selection and (for SYNC) retry across brokers.
     * ASYNC/ONEWAY make a single attempt and return {@code null} (results go to
     * the callback / nowhere); SYNC retries up to
     * 1 + retryTimesWhenSendFailed attempts, feeding each attempt's latency
     * into the fault strategy so subsequent picks avoid slow/broken brokers.
     *
     * @throws MQClientException when no route exists, all retries fail, or state is bad
     */
    private SendResult sendDefaultImpl(
        Message msg,
        final CommunicationMode communicationMode,
        final SendCallback sendCallback,
        final long timeout
    ) throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
        this.makeSureStateOK();
        Validators.checkMessage(msg, this.defaultMQProducer);
        // Correlates the retry attempts of one logical send in the logs.
        final long invokeID = random.nextLong();
        long beginTimestampFirst = System.currentTimeMillis();
        long beginTimestampPrev = beginTimestampFirst;
        long endTimestamp = beginTimestampFirst;
        TopicPublishInfo topicPublishInfo = this.tryToFindTopicPublishInfo(msg.getTopic());
        if (topicPublishInfo != null && topicPublishInfo.ok()) {
            MessageQueue mq = null;
            Exception exception = null;
            SendResult sendResult = null;
            // Only SYNC retries; ASYNC retry is handled inside the API layer.
            int timesTotal = communicationMode == CommunicationMode.SYNC ? 1 + this.defaultMQProducer.getRetryTimesWhenSendFailed() : 1;
            int times = 0;
            String[] brokersSent = new String[timesTotal];
            for (; times < timesTotal; times++) {
                // Pass the previously tried broker so the strategy picks a different one.
                String lastBrokerName = null == mq ? null : mq.getBrokerName();
                MessageQueue tmpmq = this.selectOneMessageQueue(topicPublishInfo, lastBrokerName);
                if (tmpmq != null) {
                    mq = tmpmq;
                    brokersSent[times] = mq.getBrokerName();
                    try {
                        beginTimestampPrev = System.currentTimeMillis();
                        sendResult = this.sendKernelImpl(msg, mq, communicationMode, sendCallback, topicPublishInfo, timeout);
                        endTimestamp = System.currentTimeMillis();
                        // Success: record the latency sample without isolating the broker.
                        this.updateFaultItem(mq.getBrokerName(), endTimestamp - beginTimestampPrev, false);
                        switch (communicationMode) {
                            case ASYNC:
                                return null;
                            case ONEWAY:
                                return null;
                            case SYNC:
                                if (sendResult.getSendStatus() != SendStatus.SEND_OK) {
                                    // Stored-but-not-OK result: optionally retry elsewhere.
                                    if (this.defaultMQProducer.isRetryAnotherBrokerWhenNotStoreOK()) {
                                        continue;
                                    }
                                }
                                return sendResult;
                            default:
                                break;
                        }
                    } catch (RemotingException e) {
                        // Network-level failure: isolate the broker and retry.
                        endTimestamp = System.currentTimeMillis();
                        this.updateFaultItem(mq.getBrokerName(), endTimestamp - beginTimestampPrev, true);
                        log.warn(String.format("sendKernelImpl exception, resend at once, InvokeID: %s, RT: %sms, Broker: %s", invokeID, endTimestamp - beginTimestampPrev, mq), e);
                        log.warn(msg.toString());
                        exception = e;
                        continue;
                    } catch (MQClientException e) {
                        endTimestamp = System.currentTimeMillis();
                        this.updateFaultItem(mq.getBrokerName(), endTimestamp - beginTimestampPrev, true);
                        log.warn(String.format("sendKernelImpl exception, resend at once, InvokeID: %s, RT: %sms, Broker: %s", invokeID, endTimestamp - beginTimestampPrev, mq), e);
                        log.warn(msg.toString());
                        exception = e;
                        continue;
                    } catch (MQBrokerException e) {
                        endTimestamp = System.currentTimeMillis();
                        this.updateFaultItem(mq.getBrokerName(), endTimestamp - beginTimestampPrev, true);
                        log.warn(String.format("sendKernelImpl exception, resend at once, InvokeID: %s, RT: %sms, Broker: %s", invokeID, endTimestamp - beginTimestampPrev, mq), e);
                        log.warn(msg.toString());
                        exception = e;
                        // Only a whitelist of broker response codes is considered retryable.
                        switch (e.getResponseCode()) {
                            case ResponseCode.TOPIC_NOT_EXIST:
                            case ResponseCode.SERVICE_NOT_AVAILABLE:
                            case ResponseCode.SYSTEM_ERROR:
                            case ResponseCode.NO_PERMISSION:
                            case ResponseCode.NO_BUYER_ID:
                            case ResponseCode.NOT_IN_CURRENT_UNIT:
                                continue;
                            default:
                                if (sendResult != null) {
                                    return sendResult;
                                }
                                throw e;
                        }
                    } catch (InterruptedException e) {
                        // Interrupts are never retried; propagate after logging.
                        endTimestamp = System.currentTimeMillis();
                        this.updateFaultItem(mq.getBrokerName(), endTimestamp - beginTimestampPrev, false);
                        log.warn(String.format("sendKernelImpl exception, throw exception, InvokeID: %s, RT: %sms, Broker: %s", invokeID, endTimestamp - beginTimestampPrev, mq), e);
                        log.warn(msg.toString());
                        log.warn("sendKernelImpl exception", e);
                        log.warn(msg.toString());
                        throw e;
                    }
                } else {
                    // No queue available at all: stop retrying.
                    break;
                }
            }
            if (sendResult != null) {
                return sendResult;
            }
            // All attempts failed: summarize and wrap the last exception.
            String info = String.format("Send [%d] times, still failed, cost [%d]ms, Topic: %s, BrokersSent: %s",
                times,
                System.currentTimeMillis() - beginTimestampFirst,
                msg.getTopic(),
                Arrays.toString(brokersSent));
            info += FAQUrl.suggestTodo(FAQUrl.SEND_MSG_FAILED);
            MQClientException mqClientException = new MQClientException(info, exception);
            if (exception instanceof MQBrokerException) {
                mqClientException.setResponseCode(((MQBrokerException) exception).getResponseCode());
            } else if (exception instanceof RemotingConnectException) {
                mqClientException.setResponseCode(ClientErrorCode.CONNECT_BROKER_EXCEPTION);
            } else if (exception instanceof RemotingTimeoutException) {
                mqClientException.setResponseCode(ClientErrorCode.ACCESS_BROKER_TIMEOUT);
            } else if (exception instanceof MQClientException) {
                mqClientException.setResponseCode(ClientErrorCode.BROKER_NOT_EXIST_EXCEPTION);
            }
            throw mqClientException;
        }
        // No usable route: distinguish "no name server configured" from "no topic route".
        List<String> nsList = this.getmQClientFactory().getMQClientAPIImpl().getNameServerAddressList();
        if (null == nsList || nsList.isEmpty()) {
            throw new MQClientException(
                "No name server address, please set it." + FAQUrl.suggestTodo(FAQUrl.NAME_SERVER_ADDR_NOT_EXIST_URL), null).setResponseCode(ClientErrorCode.NO_NAME_SERVER_EXCEPTION);
        }
        throw new MQClientException("No route info of this topic, " + msg.getTopic() + FAQUrl.suggestTodo(FAQUrl.NO_TOPIC_ROUTE_INFO),
            null).setResponseCode(ClientErrorCode.NOT_FOUND_TOPIC_EXCEPTION);
    }
    /**
     * Returns usable route info for {@code topic}, refreshing from the name
     * server when the cache is missing or stale. Falls back to a second refresh
     * that may auto-create the topic via the default topic's route
     * (the {@code isDefault=true} overload).
     */
    private TopicPublishInfo tryToFindTopicPublishInfo(final String topic) {
        TopicPublishInfo topicPublishInfo = this.topicPublishInfoTable.get(topic);
        if (null == topicPublishInfo || !topicPublishInfo.ok()) {
            // Placeholder first so updateTopicRouteInfoFromNameServer sees the topic.
            this.topicPublishInfoTable.putIfAbsent(topic, new TopicPublishInfo());
            this.mQClientFactory.updateTopicRouteInfoFromNameServer(topic);
            topicPublishInfo = this.topicPublishInfoTable.get(topic);
        }
        if (topicPublishInfo.isHaveTopicRouterInfo() || topicPublishInfo.ok()) {
            return topicPublishInfo;
        } else {
            // Second attempt: use the default-topic route (topic auto-creation path).
            this.mQClientFactory.updateTopicRouteInfoFromNameServer(topic, true, this.defaultMQProducer);
            topicPublishInfo = this.topicPublishInfoTable.get(topic);
            return topicPublishInfo;
        }
    }
    /**
     * Single send attempt to a concrete queue: resolves the broker address,
     * assigns a unique id, optionally compresses the body, runs forbidden/send
     * hooks, builds the request header, and dispatches via the client API.
     * The original (uncompressed) body is always restored before returning so
     * caller-side retries re-send the pristine message.
     *
     * @throws MQClientException when the broker address cannot be resolved
     */
    private SendResult sendKernelImpl(final Message msg,
        final MessageQueue mq,
        final CommunicationMode communicationMode,
        final SendCallback sendCallback,
        final TopicPublishInfo topicPublishInfo,
        final long timeout) throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
        String brokerAddr = this.mQClientFactory.findBrokerAddressInPublish(mq.getBrokerName());
        if (null == brokerAddr) {
            // Address cache miss: refresh the route and retry the lookup once.
            tryToFindTopicPublishInfo(mq.getTopic());
            brokerAddr = this.mQClientFactory.findBrokerAddressInPublish(mq.getBrokerName());
        }
        SendMessageContext context = null;
        if (brokerAddr != null) {
            brokerAddr = MixAll.brokerVIPChannel(this.defaultMQProducer.isSendMessageWithVIPChannel(), brokerAddr);
            // Snapshot the body so compression can be undone in the finally block.
            byte[] prevBody = msg.getBody();
            try {
                //for MessageBatch,ID has been set in the generating process
                if (!(msg instanceof MessageBatch)) {
                    MessageClientIDSetter.setUniqID(msg);
                }
                int sysFlag = 0;
                if (this.tryToCompressMessage(msg)) {
                    sysFlag |= MessageSysFlag.COMPRESSED_FLAG;
                }
                final String tranMsg = msg.getProperty(MessageConst.PROPERTY_TRANSACTION_PREPARED);
                if (tranMsg != null && Boolean.parseBoolean(tranMsg)) {
                    sysFlag |= MessageSysFlag.TRANSACTION_PREPARED_TYPE;
                }
                // Forbidden hooks may veto the send by throwing.
                if (hasCheckForbiddenHook()) {
                    CheckForbiddenContext checkForbiddenContext = new CheckForbiddenContext();
                    checkForbiddenContext.setNameSrvAddr(this.defaultMQProducer.getNamesrvAddr());
                    checkForbiddenContext.setGroup(this.defaultMQProducer.getProducerGroup());
                    checkForbiddenContext.setCommunicationMode(communicationMode);
                    checkForbiddenContext.setBrokerAddr(brokerAddr);
                    checkForbiddenContext.setMessage(msg);
                    checkForbiddenContext.setMq(mq);
                    checkForbiddenContext.setUnitMode(this.isUnitMode());
                    this.executeCheckForbiddenHook(checkForbiddenContext);
                }
                if (this.hasSendMessageHook()) {
                    context = new SendMessageContext();
                    context.setProducer(this);
                    context.setProducerGroup(this.defaultMQProducer.getProducerGroup());
                    context.setCommunicationMode(communicationMode);
                    context.setBornHost(this.defaultMQProducer.getClientIP());
                    context.setBrokerAddr(brokerAddr);
                    context.setMessage(msg);
                    context.setMq(mq);
                    String isTrans = msg.getProperty(MessageConst.PROPERTY_TRANSACTION_PREPARED);
                    if (isTrans != null && isTrans.equals("true")) {
                        context.setMsgType(MessageType.Trans_Msg_Half);
                    }
                    if (msg.getProperty("__STARTDELIVERTIME") != null || msg.getProperty(MessageConst.PROPERTY_DELAY_TIME_LEVEL) != null) {
                        context.setMsgType(MessageType.Delay_Msg);
                    }
                    this.executeSendMessageHookBefore(context);
                }
                SendMessageRequestHeader requestHeader = new SendMessageRequestHeader();
                requestHeader.setProducerGroup(this.defaultMQProducer.getProducerGroup());
                requestHeader.setTopic(msg.getTopic());
                requestHeader.setDefaultTopic(this.defaultMQProducer.getCreateTopicKey());
                requestHeader.setDefaultTopicQueueNums(this.defaultMQProducer.getDefaultTopicQueueNums());
                requestHeader.setQueueId(mq.getQueueId());
                requestHeader.setSysFlag(sysFlag);
                requestHeader.setBornTimestamp(System.currentTimeMillis());
                requestHeader.setFlag(msg.getFlag());
                requestHeader.setProperties(MessageDecoder.messageProperties2String(msg.getProperties()));
                requestHeader.setReconsumeTimes(0);
                requestHeader.setUnitMode(this.isUnitMode());
                requestHeader.setBatch(msg instanceof MessageBatch);
                // Retry-topic resends carry their counters as properties; move them
                // into the header and strip them from the message.
                if (requestHeader.getTopic().startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX)) {
                    String reconsumeTimes = MessageAccessor.getReconsumeTime(msg);
                    if (reconsumeTimes != null) {
                        requestHeader.setReconsumeTimes(Integer.valueOf(reconsumeTimes));
                        MessageAccessor.clearProperty(msg, MessageConst.PROPERTY_RECONSUME_TIME);
                    }
                    String maxReconsumeTimes = MessageAccessor.getMaxReconsumeTimes(msg);
                    if (maxReconsumeTimes != null) {
                        requestHeader.setMaxReconsumeTimes(Integer.valueOf(maxReconsumeTimes));
                        MessageAccessor.clearProperty(msg, MessageConst.PROPERTY_MAX_RECONSUME_TIMES);
                    }
                }
                SendResult sendResult = null;
                switch (communicationMode) {
                    case ASYNC:
                        // The ASYNC overload takes the callback plus retry/route state so
                        // the API layer can retry internally; returns null immediately.
                        sendResult = this.mQClientFactory.getMQClientAPIImpl().sendMessage(
                            brokerAddr,
                            mq.getBrokerName(),
                            msg,
                            requestHeader,
                            timeout,
                            communicationMode,
                            sendCallback,
                            topicPublishInfo,
                            this.mQClientFactory,
                            this.defaultMQProducer.getRetryTimesWhenSendAsyncFailed(),
                            context,
                            this);
                        break;
                    case ONEWAY:
                    case SYNC:
                        sendResult = this.mQClientFactory.getMQClientAPIImpl().sendMessage(
                            brokerAddr,
                            mq.getBrokerName(),
                            msg,
                            requestHeader,
                            timeout,
                            communicationMode,
                            context,
                            this);
                        break;
                    default:
                        assert false;
                        break;
                }
                if (this.hasSendMessageHook()) {
                    context.setSendResult(sendResult);
                    this.executeSendMessageHookAfter(context);
                }
                return sendResult;
            } catch (RemotingException e) {
                // Run the after-hooks with the failure recorded, then rethrow as-is.
                if (this.hasSendMessageHook()) {
                    context.setException(e);
                    this.executeSendMessageHookAfter(context);
                }
                throw e;
            } catch (MQBrokerException e) {
                if (this.hasSendMessageHook()) {
                    context.setException(e);
                    this.executeSendMessageHookAfter(context);
                }
                throw e;
            } catch (InterruptedException e) {
                if (this.hasSendMessageHook()) {
                    context.setException(e);
                    this.executeSendMessageHookAfter(context);
                }
                throw e;
            } finally {
                // Undo in-place compression regardless of outcome.
                msg.setBody(prevBody);
            }
        }
        throw new MQClientException("The broker[" + mq.getBrokerName() + "] not exist", null);
    }
    /** Returns the shared client instance (null until start() has run). */
    public MQClientInstance getmQClientFactory() {
        return mQClientFactory;
    }
private boolean tryToCompressMessage(final Message msg) {
if (msg instanceof MessageBatch) {
//batch dose not support compressing right now
return false;
}
byte[] body = msg.getBody();
if (body != null) {
if (body.length >= this.defaultMQProducer.getCompressMsgBodyOverHowmuch()) {
try {
byte[] data = UtilAll.compress(body, zipCompressLevel);
if (data != null) {
msg.setBody(data);
return true;
}
} catch (IOException e) {
log.error("tryToCompressMessage exception", e);
log.warn(msg.toString());
}
}
}
return false;
}
    /** True when at least one forbidden-check hook is registered. */
    public boolean hasCheckForbiddenHook() {
        return !checkForbiddenHookList.isEmpty();
    }
    /**
     * Runs all forbidden-check hooks in registration order. Hooks veto the
     * send by throwing MQClientException, which propagates to the caller.
     */
    public void executeCheckForbiddenHook(final CheckForbiddenContext context) throws MQClientException {
        if (hasCheckForbiddenHook()) {
            for (CheckForbiddenHook hook : checkForbiddenHookList) {
                hook.checkForbidden(context);
            }
        }
    }
    /** True when at least one send-message hook is registered. */
    public boolean hasSendMessageHook() {
        return !this.sendMessageHookList.isEmpty();
    }
public void executeSendMessageHookBefore(final SendMessageContext context) {
if (!this.sendMessageHookList.isEmpty()) {
for (SendMessageHook hook : this.sendMessageHookList) {
try {
hook.sendMessageBefore(context);
} catch (Throwable e) {
log.warn("failed to executeSendMessageHookBefore", e);
}
}
}
}
public void executeSendMessageHookAfter(final SendMessageContext context) {
if (!this.sendMessageHookList.isEmpty()) {
for (SendMessageHook hook : this.sendMessageHookList) {
try {
hook.sendMessageAfter(context);
} catch (Throwable e) {
log.warn("failed to executeSendMessageHookAfter", e);
}
}
}
}
    /**
     * DEFAULT ONEWAY -------------------------------------------------------
     */
    /** Fire-and-forget send with automatic queue selection; no result is returned. */
    public void sendOneway(Message msg) throws MQClientException, RemotingException, InterruptedException {
        try {
            this.sendDefaultImpl(msg, CommunicationMode.ONEWAY, null, this.defaultMQProducer.getSendMsgTimeout());
        } catch (MQBrokerException e) {
            // Oneway has no broker response; an MQBrokerException here is unexpected.
            throw new MQClientException("unknown exception", e);
        }
    }
    /**
     * KERNEL SYNC -------------------------------------------------------
     */
    /** Synchronous send to an explicit queue with the configured default timeout. */
    public SendResult send(Message msg, MessageQueue mq)
        throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
        return send(msg, mq, this.defaultMQProducer.getSendMsgTimeout());
    }
    /**
     * Synchronous send to an explicit queue; no automatic retry or queue selection.
     *
     * @throws MQClientException if the message topic does not match the queue's topic
     */
    public SendResult send(Message msg, MessageQueue mq, long timeout)
        throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
        this.makeSureStateOK();
        Validators.checkMessage(msg, this.defaultMQProducer);
        if (!msg.getTopic().equals(mq.getTopic())) {
            throw new MQClientException("message's topic not equal mq's topic", null);
        }
        return this.sendKernelImpl(msg, mq, CommunicationMode.SYNC, null, null, timeout);
    }
    /**
     * KERNEL ASYNC -------------------------------------------------------
     */
    /** Asynchronous send to an explicit queue with the configured default timeout. */
    public void send(Message msg, MessageQueue mq, SendCallback sendCallback)
        throws MQClientException, RemotingException, InterruptedException {
        send(msg, mq, sendCallback, this.defaultMQProducer.getSendMsgTimeout());
    }
    /**
     * Asynchronous send to an explicit queue; results go to {@code sendCallback}.
     *
     * @throws MQClientException if the message topic does not match the queue's topic
     */
    public void send(Message msg, MessageQueue mq, SendCallback sendCallback, long timeout)
        throws MQClientException, RemotingException, InterruptedException {
        this.makeSureStateOK();
        Validators.checkMessage(msg, this.defaultMQProducer);
        if (!msg.getTopic().equals(mq.getTopic())) {
            throw new MQClientException("message's topic not equal mq's topic", null);
        }
        try {
            this.sendKernelImpl(msg, mq, CommunicationMode.ASYNC, sendCallback, null, timeout);
        } catch (MQBrokerException e) {
            // ASYNC errors are delivered via the callback; this path is unexpected.
            throw new MQClientException("unknown exception", e);
        }
    }
    /**
     * KERNEL ONEWAY -------------------------------------------------------
     */
    /** Fire-and-forget send to an explicit queue; no result is returned. */
    public void sendOneway(Message msg,
        MessageQueue mq) throws MQClientException, RemotingException, InterruptedException {
        this.makeSureStateOK();
        Validators.checkMessage(msg, this.defaultMQProducer);
        try {
            this.sendKernelImpl(msg, mq, CommunicationMode.ONEWAY, null, null, this.defaultMQProducer.getSendMsgTimeout());
        } catch (MQBrokerException e) {
            // Oneway has no broker response; an MQBrokerException here is unexpected.
            throw new MQClientException("unknown exception", e);
        }
    }
    /**
     * SELECT SYNC -------------------------------------------------------
     */
    /** Synchronous send with a caller-supplied queue selector and default timeout. */
    public SendResult send(Message msg, MessageQueueSelector selector, Object arg)
        throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
        return send(msg, selector, arg, this.defaultMQProducer.getSendMsgTimeout());
    }
    /** Synchronous send with a caller-supplied queue selector. */
    public SendResult send(Message msg, MessageQueueSelector selector, Object arg, long timeout)
        throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
        return this.sendSelectImpl(msg, selector, arg, CommunicationMode.SYNC, null, timeout);
    }
/**
 * Core path for selector-based sends: resolves the topic route, lets the
 * caller-supplied {@link MessageQueueSelector} pick a queue from the route's
 * queue list, then hands the message to the kernel send path.
 *
 * @param msg               message to send (validated against the producer config)
 * @param selector          user callback that picks a queue from the route's list
 * @param arg               opaque argument forwarded unchanged to the selector
 * @param communicationMode SYNC / ASYNC / ONEWAY
 * @param sendCallback      completion callback, only meaningful for ASYNC
 * @param timeout           send timeout in milliseconds
 * @return the broker's send result (failure paths throw instead)
 * @throws MQClientException if the client is not running, no route exists for
 *                           the topic, or the selector throws / returns null
 */
private SendResult sendSelectImpl(
    Message msg,
    MessageQueueSelector selector,
    Object arg,
    final CommunicationMode communicationMode,
    final SendCallback sendCallback, final long timeout
) throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
    this.makeSureStateOK();
    Validators.checkMessage(msg, this.defaultMQProducer);
    TopicPublishInfo topicPublishInfo = this.tryToFindTopicPublishInfo(msg.getTopic());
    if (topicPublishInfo != null && topicPublishInfo.ok()) {
        MessageQueue mq = null;
        try {
            // The selector is user code: catch any Throwable it raises and
            // wrap it rather than letting it propagate raw.
            mq = selector.select(topicPublishInfo.getMessageQueueList(), msg, arg);
        } catch (Throwable e) {
            throw new MQClientException("select message queue throwed exception.", e);
        }
        if (mq != null) {
            return this.sendKernelImpl(msg, mq, communicationMode, sendCallback, null, timeout);
        } else {
            throw new MQClientException("select message queue return null.", null);
        }
    }
    throw new MQClientException("No route info for this topic, " + msg.getTopic(), null);
}
/**
* SELECT ASYNC -------------------------------------------------------
*/
/**
 * SELECT ASYNC send using the producer's default send timeout.
 */
public void send(Message msg, MessageQueueSelector selector, Object arg, SendCallback sendCallback)
    throws MQClientException, RemotingException, InterruptedException {
    final long timeout = this.defaultMQProducer.getSendMsgTimeout();
    this.send(msg, selector, arg, sendCallback, timeout);
}
/**
 * SELECT ASYNC send with a caller-supplied queue selector and explicit timeout.
 *
 * @param msg          message to send
 * @param selector     user callback that picks the target queue
 * @param arg          opaque argument forwarded to the selector
 * @param sendCallback completion callback invoked with the async result
 * @param timeout      send timeout in milliseconds
 * @throws MQClientException if the client is not running, routing/selection
 *                           fails, or a broker error is reported
 */
public void send(Message msg, MessageQueueSelector selector, Object arg, SendCallback sendCallback, long timeout)
    throws MQClientException, RemotingException, InterruptedException {
    try {
        this.sendSelectImpl(msg, selector, arg, CommunicationMode.ASYNC, sendCallback, timeout);
    } catch (MQBrokerException e) {
        // Fixed message typo ("unknownn") so it matches the sibling overloads
        // (sync/oneway variants in this class all say "unknown exception").
        throw new MQClientException("unknown exception", e);
    }
}
/**
* SELECT ONEWAY -------------------------------------------------------
*/
/**
 * SELECT ONEWAY send: fire-and-forget through the selector path with the
 * producer's default timeout.
 */
public void sendOneway(Message msg, MessageQueueSelector selector, Object arg)
    throws MQClientException, RemotingException, InterruptedException {
    final long timeout = this.defaultMQProducer.getSendMsgTimeout();
    try {
        this.sendSelectImpl(msg, selector, arg, CommunicationMode.ONEWAY, null, timeout);
    } catch (MQBrokerException e) {
        // Oneway callers declare no MQBrokerException; re-wrap it.
        throw new MQClientException("unknown exception", e);
    }
}
/**
 * Sends a half (prepared) message, runs the user's local transaction branch,
 * then reports COMMIT / ROLLBACK / UNKNOW back to the broker via
 * {@link #endTransaction}.
 *
 * @param msg          the message to send transactionally
 * @param tranExecuter user callback executing the local transaction branch
 * @param arg          opaque argument forwarded to the executer
 * @return the send result enriched with the local transaction state
 * @throws MQClientException if the executer is null or the half-message send fails
 */
public TransactionSendResult sendMessageInTransaction(final Message msg,
    final LocalTransactionExecuter tranExecuter, final Object arg)
    throws MQClientException {
    if (null == tranExecuter) {
        throw new MQClientException("tranExecutor is null", null);
    }
    Validators.checkMessage(msg, this.defaultMQProducer);
    SendResult sendResult = null;
    // Mark the message as a prepared (half) message and tag the producer group
    // so the broker can check back when the transaction outcome is unknown.
    MessageAccessor.putProperty(msg, MessageConst.PROPERTY_TRANSACTION_PREPARED, "true");
    MessageAccessor.putProperty(msg, MessageConst.PROPERTY_PRODUCER_GROUP, this.defaultMQProducer.getProducerGroup());
    try {
        sendResult = this.send(msg);
    } catch (Exception e) {
        throw new MQClientException("send message Exception", e);
    }
    LocalTransactionState localTransactionState = LocalTransactionState.UNKNOW;
    Throwable localException = null;
    switch (sendResult.getSendStatus()) {
        case SEND_OK: {
            try {
                if (sendResult.getTransactionId() != null) {
                    msg.putUserProperty("__transactionId__", sendResult.getTransactionId());
                }
                // Run the local transaction branch only after the half message
                // was accepted by the broker.
                localTransactionState = tranExecuter.executeLocalTransactionBranch(msg, arg);
                if (null == localTransactionState) {
                    localTransactionState = LocalTransactionState.UNKNOW;
                }
                if (localTransactionState != LocalTransactionState.COMMIT_MESSAGE) {
                    log.info("executeLocalTransactionBranch return {}", localTransactionState);
                    log.info(msg.toString());
                }
            } catch (Throwable e) {
                // User code failed: keep UNKNOW and remember the cause for the broker.
                log.info("executeLocalTransactionBranch exception", e);
                log.info(msg.toString());
                localException = e;
            }
        }
        break;
        case FLUSH_DISK_TIMEOUT:
        case FLUSH_SLAVE_TIMEOUT:
        case SLAVE_NOT_AVAILABLE:
            // Half message was not safely persisted/replicated: roll back the branch.
            localTransactionState = LocalTransactionState.ROLLBACK_MESSAGE;
            break;
        default:
            break;
    }
    try {
        this.endTransaction(sendResult, localTransactionState, localException);
    } catch (Exception e) {
        // Best effort: the broker can still learn the outcome via its check-back.
        log.warn("local transaction execute " + localTransactionState + ", but end broker transaction failed", e);
    }
    TransactionSendResult transactionSendResult = new TransactionSendResult();
    transactionSendResult.setSendStatus(sendResult.getSendStatus());
    transactionSendResult.setMessageQueue(sendResult.getMessageQueue());
    transactionSendResult.setMsgId(sendResult.getMsgId());
    transactionSendResult.setQueueOffset(sendResult.getQueueOffset());
    transactionSendResult.setTransactionId(sendResult.getTransactionId());
    transactionSendResult.setLocalTransactionState(localTransactionState);
    return transactionSendResult;
}
/**
* DEFAULT SYNC -------------------------------------------------------
*/
/**
 * DEFAULT SYNC send using the producer's configured default timeout.
 */
public SendResult send(
    Message msg) throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
    final long timeout = this.defaultMQProducer.getSendMsgTimeout();
    return this.send(msg, timeout);
}
/**
 * Reports the local transaction outcome for a previously sent half message to
 * the broker that stored it (oneway RPC; no broker response is awaited).
 *
 * @param sendResult            result of the half-message send (supplies ids and queue)
 * @param localTransactionState COMMIT / ROLLBACK / UNKNOW outcome of the local branch
 * @param localException        cause captured from the local branch, or null;
 *                              sent to the broker as a remark
 */
public void endTransaction(
    final SendResult sendResult,
    final LocalTransactionState localTransactionState,
    final Throwable localException) throws RemotingException, MQBrokerException, InterruptedException, UnknownHostException {
    final MessageId id;
    // Prefer the broker-side offset msg id; fall back to the client msg id.
    if (sendResult.getOffsetMsgId() != null) {
        id = MessageDecoder.decodeMessageId(sendResult.getOffsetMsgId());
    } else {
        id = MessageDecoder.decodeMessageId(sendResult.getMsgId());
    }
    String transactionId = sendResult.getTransactionId();
    final String brokerAddr = this.mQClientFactory.findBrokerAddressInPublish(sendResult.getMessageQueue().getBrokerName());
    EndTransactionRequestHeader requestHeader = new EndTransactionRequestHeader();
    requestHeader.setTransactionId(transactionId);
    requestHeader.setCommitLogOffset(id.getOffset());
    // Map the local outcome onto the wire-level commit/rollback flag;
    // UNKNOW defers the decision to the broker's transaction check-back.
    switch (localTransactionState) {
        case COMMIT_MESSAGE:
            requestHeader.setCommitOrRollback(MessageSysFlag.TRANSACTION_COMMIT_TYPE);
            break;
        case ROLLBACK_MESSAGE:
            requestHeader.setCommitOrRollback(MessageSysFlag.TRANSACTION_ROLLBACK_TYPE);
            break;
        case UNKNOW:
            requestHeader.setCommitOrRollback(MessageSysFlag.TRANSACTION_NOT_TYPE);
            break;
        default:
            break;
    }
    requestHeader.setProducerGroup(this.defaultMQProducer.getProducerGroup());
    requestHeader.setTranStateTableOffset(sendResult.getQueueOffset());
    requestHeader.setMsgId(sendResult.getMsgId());
    String remark = localException != null ? ("executeLocalTransactionBranch exception: " + localException.toString()) : null;
    this.mQClientFactory.getMQClientAPIImpl().endTransactionOneway(brokerAddr, requestHeader, remark,
        this.defaultMQProducer.getSendMsgTimeout());
}
/**
 * DEFAULT SYNC send with an explicit timeout, routed through the default
 * (load-balanced) queue selection path.
 */
public SendResult send(Message msg,
    long timeout) throws MQClientException, RemotingException, MQBrokerException, InterruptedException {
    final SendResult result = this.sendDefaultImpl(msg, CommunicationMode.SYNC, null, timeout);
    return result;
}
/** Exposes the cached topic -> route table (live view, not a copy). */
public ConcurrentMap<String, TopicPublishInfo> getTopicPublishInfoTable() {
    return topicPublishInfoTable;
}

/** Compression level used when message bodies are zipped before sending. */
public int getZipCompressLevel() {
    return zipCompressLevel;
}

public void setZipCompressLevel(int zipCompressLevel) {
    this.zipCompressLevel = zipCompressLevel;
}

/** Lifecycle state of this producer instance. */
public ServiceState getServiceState() {
    return serviceState;
}

public void setServiceState(ServiceState serviceState) {
    this.serviceState = serviceState;
}

// The remaining accessors delegate to the shared MQFaultStrategy that drives
// latency-aware broker avoidance during queue selection.
public long[] getNotAvailableDuration() {
    return this.mqFaultStrategy.getNotAvailableDuration();
}

public void setNotAvailableDuration(final long[] notAvailableDuration) {
    this.mqFaultStrategy.setNotAvailableDuration(notAvailableDuration);
}

public long[] getLatencyMax() {
    return this.mqFaultStrategy.getLatencyMax();
}

public void setLatencyMax(final long[] latencyMax) {
    this.mqFaultStrategy.setLatencyMax(latencyMax);
}

public boolean isSendLatencyFaultEnable() {
    return this.mqFaultStrategy.isSendLatencyFaultEnable();
}

public void setSendLatencyFaultEnable(final boolean sendLatencyFaultEnable) {
    this.mqFaultStrategy.setSendLatencyFaultEnable(sendLatencyFaultEnable);
}
}
| |
/*
* Copyright 2002-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.http.codec.json;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.core.TreeNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.TokenBuffer;
import org.json.JSONException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.skyscreamer.jsonassert.JSONAssert;
import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;
import org.springframework.core.codec.DecodingException;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferLimitException;
import org.springframework.core.testfixture.io.buffer.AbstractLeakCheckingTests;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
/**
* @author Arjen Poutsma
* @author Rossen Stoyanchev
* @author Juergen Hoeller
*/
public class Jackson2TokenizerTests extends AbstractLeakCheckingTests {

    // Fresh factory/mapper per test; the mapper is bound to the same factory
    // the tokenizer uses.
    private JsonFactory jsonFactory;

    private ObjectMapper objectMapper;

    @BeforeEach
    public void createParser() {
        this.jsonFactory = new JsonFactory();
        this.objectMapper = new ObjectMapper(this.jsonFactory);
    }

    // tokenizeArrayElements == false: split input buffers are reassembled but
    // each top-level JSON value is emitted as a single token buffer.
    @Test
    public void doNotTokenizeArrayElements() {
        testTokenize(
            singletonList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}"),
            singletonList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}"), false);
        testTokenize(
            asList(
                "{\"foo\": \"foofoo\"",
                ", \"bar\": \"barbar\"}"
            ),
            singletonList("{\"foo\":\"foofoo\",\"bar\":\"barbar\"}"), false);
        testTokenize(
            singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
            singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
            false);
        testTokenize(
            singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"),
            singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"), false);
        testTokenize(
            asList(
                "[{\"foo\": \"foofoo\", \"bar\"",
                ": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"
            ),
            singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
            false);
        testTokenize(
            asList(
                "[",
                "{\"id\":1,\"name\":\"Robert\"}", ",",
                "{\"id\":2,\"name\":\"Raide\"}", ",",
                "{\"id\":3,\"name\":\"Ford\"}", "]"
            ),
            singletonList("[{\"id\":1,\"name\":\"Robert\"},{\"id\":2,\"name\":\"Raide\"},{\"id\":3,\"name\":\"Ford\"}]"),
            false);
        // SPR-16166: top-level JSON values
        testTokenize(asList("\"foo", "bar\""), singletonList("\"foobar\""), false);
        testTokenize(asList("12", "34"), singletonList("1234"), false);
        testTokenize(asList("12.", "34"), singletonList("12.34"), false);
        // note that we do not test for null, true, or false, which are also valid top-level values,
        // but are unsupported by JSONassert
    }

    // tokenizeArrayElements == true: a top-level array is split and each
    // element is emitted separately; nested arrays stay intact.
    @Test
    public void tokenizeArrayElements() {
        testTokenize(
            singletonList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}"),
            singletonList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}"), true);
        testTokenize(
            asList(
                "{\"foo\": \"foofoo\"",
                ", \"bar\": \"barbar\"}"
            ),
            singletonList("{\"foo\":\"foofoo\",\"bar\":\"barbar\"}"), true);
        testTokenize(
            singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
            asList(
                "{\"foo\": \"foofoo\", \"bar\": \"barbar\"}",
                "{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}"
            ),
            true);
        testTokenize(
            singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"),
            asList(
                "{\"foo\": \"bar\"}",
                "{\"foo\": \"baz\"}"
            ),
            true);
        // SPR-15803: nested array
        testTokenize(
            singletonList("[" +
                "{\"id\":\"0\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}," +
                "{\"id\":\"1\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}," +
                "{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}" +
                "]"),
            asList(
                "{\"id\":\"0\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
                "{\"id\":\"1\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
                "{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}"
            ),
            true);
        // SPR-15803: nested array, no top-level array
        testTokenize(
            singletonList("{\"speakerIds\":[\"tastapod\"],\"language\":\"ENGLISH\"}"),
            singletonList("{\"speakerIds\":[\"tastapod\"],\"language\":\"ENGLISH\"}"), true);
        testTokenize(
            asList(
                "[{\"foo\": \"foofoo\", \"bar\"",
                ": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"
            ),
            asList(
                "{\"foo\": \"foofoo\", \"bar\": \"barbar\"}",
                "{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}"), true);
        testTokenize(
            asList(
                "[",
                "{\"id\":1,\"name\":\"Robert\"}",
                ",",
                "{\"id\":2,\"name\":\"Raide\"}",
                ",",
                "{\"id\":3,\"name\":\"Ford\"}",
                "]"
            ),
            asList(
                "{\"id\":1,\"name\":\"Robert\"}",
                "{\"id\":2,\"name\":\"Raide\"}",
                "{\"id\":3,\"name\":\"Ford\"}"
            ),
            true);
        // SPR-16166: top-level JSON values
        testTokenize(asList("\"foo", "bar\""), singletonList("\"foobar\""), true);
        testTokenize(asList("12", "34"), singletonList("1234"), true);
        testTokenize(asList("12.", "34"), singletonList("12.34"), true);
        // SPR-16407
        testTokenize(asList("[1", ",2,", "3]"), asList("1", "2", "3"), true);
    }

    // Streaming formats: values separated by newlines rather than wrapped in
    // an array are emitted one by one.
    @Test
    void tokenizeStream() {

        // NDJSON (Newline Delimited JSON), JSON Lines
        testTokenize(
            asList(
                "{\"id\":1,\"name\":\"Robert\"}",
                "\n",
                "{\"id\":2,\"name\":\"Raide\"}",
                "\n",
                "{\"id\":3,\"name\":\"Ford\"}"
            ),
            asList(
                "{\"id\":1,\"name\":\"Robert\"}",
                "{\"id\":2,\"name\":\"Raide\"}",
                "{\"id\":3,\"name\":\"Ford\"}"
            ),
            true);

        // JSON Sequence with newline separator
        testTokenize(
            asList(
                "\n",
                "{\"id\":1,\"name\":\"Robert\"}",
                "\n",
                "{\"id\":2,\"name\":\"Raide\"}",
                "\n",
                "{\"id\":3,\"name\":\"Ford\"}"
            ),
            asList(
                "{\"id\":1,\"name\":\"Robert\"}",
                "{\"id\":2,\"name\":\"Raide\"}",
                "{\"id\":3,\"name\":\"Ford\"}"
            ),
            true);
    }

    // Decodes the input and asserts each emitted value is JSON-equivalent
    // (strict mode) to the corresponding expected value, in order.
    private void testTokenize(List<String> input, List<String> output, boolean tokenize) {
        StepVerifier.FirstStep<String> builder = StepVerifier.create(decode(input, tokenize, -1));
        output.forEach(expected -> builder.assertNext(actual -> {
            try {
                JSONAssert.assertEquals(expected, actual, true);
            }
            catch (JSONException ex) {
                throw new RuntimeException(ex);
            }
        }));
        builder.verifyComplete();
    }

    // maxInMemorySize: a buffer exactly the size of the whole document passes;
    // anything smaller fails with DataBufferLimitException.
    @Test
    public void testLimit() {
        List<String> source = asList(
            "[",
            "{", "\"id\":1,\"name\":\"Dan\"", "},",
            "{", "\"id\":2,\"name\":\"Ron\"", "},",
            "{", "\"id\":3,\"name\":\"Bartholomew\"", "}",
            "]"
        );
        String expected = String.join("", source);
        int maxInMemorySize = expected.length();
        StepVerifier.create(decode(source, false, maxInMemorySize))
            .expectNext(expected)
            .verifyComplete();
        StepVerifier.create(decode(source, false, maxInMemorySize - 2))
            .verifyError(DataBufferLimitException.class);
    }

    // When tokenizing, the limit applies per element, not to the whole array.
    @Test
    public void testLimitTokenized() {
        List<String> source = asList(
            "[",
            "{", "\"id\":1, \"name\":\"Dan\"", "},",
            "{", "\"id\":2, \"name\":\"Ron\"", "},",
            "{", "\"id\":3, \"name\":\"Bartholomew\"", "}",
            "]"
        );
        String expected = "{\"id\":3,\"name\":\"Bartholomew\"}";
        int maxInMemorySize = expected.length();
        StepVerifier.create(decode(source, true, maxInMemorySize))
            .expectNext("{\"id\":1,\"name\":\"Dan\"}")
            .expectNext("{\"id\":2,\"name\":\"Ron\"}")
            .expectNext(expected)
            .verifyComplete();
        StepVerifier.create(decode(source, true, maxInMemorySize - 1))
            .expectNext("{\"id\":1,\"name\":\"Dan\"}")
            .expectNext("{\"id\":2,\"name\":\"Ron\"}")
            .verifyError(DataBufferLimitException.class);
    }

    // An upstream error must propagate through the tokenizer unchanged.
    @Test
    public void errorInStream() {
        DataBuffer buffer = stringBuffer("{\"id\":1,\"name\":");
        Flux<DataBuffer> source = Flux.just(buffer).concatWith(Flux.error(new RuntimeException()));
        Flux<TokenBuffer> result = Jackson2Tokenizer.tokenize(source, this.jsonFactory, this.objectMapper, true,
            false, -1);
        StepVerifier.create(result)
            .expectError(RuntimeException.class)
            .verify();
    }

    @Test // SPR-16521
    public void jsonEOFExceptionIsWrappedAsDecodingError() {
        Flux<DataBuffer> source = Flux.just(stringBuffer("{\"status\": \"noClosingQuote}"));
        Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(source, this.jsonFactory, this.objectMapper, false,
            false, -1);
        StepVerifier.create(tokens)
            .expectError(DecodingException.class)
            .verify();
    }

    // Float handling: the forceUseOfBigDecimal flag decides whether floats
    // surface as BIG_DECIMAL or DOUBLE from the buffered parser.
    @ParameterizedTest
    @ValueSource(booleans = {false, true})
    public void useBigDecimalForFloats(boolean useBigDecimalForFloats) {
        Flux<DataBuffer> source = Flux.just(stringBuffer("1E+2"));
        Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(
            source, this.jsonFactory, this.objectMapper, false, useBigDecimalForFloats, -1);
        StepVerifier.create(tokens)
            .assertNext(tokenBuffer -> {
                try {
                    JsonParser parser = tokenBuffer.asParser();
                    JsonToken token = parser.nextToken();
                    assertThat(token).isEqualTo(JsonToken.VALUE_NUMBER_FLOAT);
                    JsonParser.NumberType numberType = parser.getNumberType();
                    if (useBigDecimalForFloats) {
                        assertThat(numberType).isEqualTo(JsonParser.NumberType.BIG_DECIMAL);
                    }
                    else {
                        assertThat(numberType).isEqualTo(JsonParser.NumberType.DOUBLE);
                    }
                }
                catch (IOException ex) {
                    fail(ex);
                }
            })
            .verifyComplete();
    }

    // Runs the tokenizer over the source strings and serializes each emitted
    // TokenBuffer back to a JSON string for comparison.
    private Flux<String> decode(List<String> source, boolean tokenize, int maxInMemorySize) {
        Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(
            Flux.fromIterable(source).map(this::stringBuffer),
            this.jsonFactory, this.objectMapper, tokenize, false, maxInMemorySize);
        return tokens
            .map(tokenBuffer -> {
                try {
                    TreeNode root = this.objectMapper.readTree(tokenBuffer.asParser());
                    return this.objectMapper.writeValueAsString(root);
                }
                catch (IOException ex) {
                    throw new UncheckedIOException(ex);
                }
            });
    }

    // Allocates a leak-tracked buffer holding the UTF-8 bytes of the value.
    private DataBuffer stringBuffer(String value) {
        byte[] bytes = value.getBytes(StandardCharsets.UTF_8);
        DataBuffer buffer = this.bufferFactory.allocateBuffer(bytes.length);
        buffer.write(bytes);
        return buffer;
    }

}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.erasurecode.rawcoder.util;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* Implementation of Galois field arithmetic with 2^p elements. The input must
* be unsigned integers. It's ported from HDFS-RAID, slightly adapted.
*/
@InterfaceAudience.Private
public class GaloisField {

  // Field size 256 is good for byte based system
  private static final int DEFAULT_FIELD_SIZE = 256;

  // primitive polynomial 1 + X^2 + X^3 + X^4 + X^8 (substitute 2)
  private static final int DEFAULT_PRIMITIVE_POLYNOMIAL = 285;

  // Cache of instances keyed by (fieldSize << 16 | primitivePolynomial);
  // guarded by synchronized(instances) in getInstance.
  static private final Map<Integer, GaloisField> instances =
      new HashMap<Integer, GaloisField>();

  private final int[] logTable;   // logTable[v] = discrete log base 2 of v
  private final int[] powTable;   // powTable[e] = 2^e in the field
  private final int[][] mulTable; // mulTable[a][b] = a * b
  private final int[][] divTable; // divTable[a][b] = a / b (column 0 unused)
  private final int fieldSize;
  private final int primitivePeriod;
  private final int primitivePolynomial;

  private GaloisField(int fieldSize, int primitivePolynomial) {
    assert fieldSize > 0;
    assert primitivePolynomial > 0;

    this.fieldSize = fieldSize;
    this.primitivePeriod = fieldSize - 1;
    this.primitivePolynomial = primitivePolynomial;
    logTable = new int[fieldSize];
    powTable = new int[fieldSize];
    mulTable = new int[fieldSize][fieldSize];
    divTable = new int[fieldSize][fieldSize];
    // Generate powers of 2 modulo the primitive polynomial, filling the
    // log/pow tables in one pass.
    int value = 1;
    for (int pow = 0; pow < fieldSize - 1; pow++) {
      powTable[pow] = value;
      logTable[value] = pow;
      value = value * 2;
      if (value >= fieldSize) {
        value = value ^ primitivePolynomial;
      }
    }
    // building multiplication table: a*b = 2^(log a + log b)
    for (int i = 0; i < fieldSize; i++) {
      for (int j = 0; j < fieldSize; j++) {
        if (i == 0 || j == 0) {
          mulTable[i][j] = 0;
          continue;
        }
        int z = logTable[i] + logTable[j];
        z = z >= primitivePeriod ? z - primitivePeriod : z;
        z = powTable[z];
        mulTable[i][j] = z;
      }
    }
    // building division table: a/b = 2^(log a - log b); divisor 0 is skipped
    for (int i = 0; i < fieldSize; i++) {
      for (int j = 1; j < fieldSize; j++) {
        if (i == 0) {
          divTable[i][j] = 0;
          continue;
        }
        int z = logTable[i] - logTable[j];
        z = z < 0 ? z + primitivePeriod : z;
        z = powTable[z];
        divTable[i][j] = z;
      }
    }
  }

  /**
   * Get the object performs Galois field arithmetics
   *
   * @param fieldSize size of the field
   * @param primitivePolynomial a primitive polynomial corresponds to the size
   */
  public static GaloisField getInstance(int fieldSize,
                                        int primitivePolynomial) {
    int key = ((fieldSize << 16) & 0xFFFF0000)
        + (primitivePolynomial & 0x0000FFFF);
    GaloisField gf;
    synchronized (instances) {
      gf = instances.get(key);
      if (gf == null) {
        gf = new GaloisField(fieldSize, primitivePolynomial);
        instances.put(key, gf);
      }
    }
    return gf;
  }

  /**
   * Get the object performs Galois field arithmetic with default setting
   */
  public static GaloisField getInstance() {
    return getInstance(DEFAULT_FIELD_SIZE, DEFAULT_PRIMITIVE_POLYNOMIAL);
  }

  /**
   * Return number of elements in the field
   *
   * @return number of elements in the field
   */
  public int getFieldSize() {
    return fieldSize;
  }

  /**
   * Return the primitive polynomial in GF(2)
   *
   * @return primitive polynomial as a integer
   */
  public int getPrimitivePolynomial() {
    return primitivePolynomial;
  }

  /**
   * Compute the sum of two fields (XOR in GF(2^p); addition == subtraction)
   *
   * @param x input field
   * @param y input field
   * @return result of addition
   */
  public int add(int x, int y) {
    assert (x >= 0 && x < getFieldSize() && y >= 0 && y < getFieldSize());
    return x ^ y;
  }

  /**
   * Compute the multiplication of two fields
   *
   * @param x input field
   * @param y input field
   * @return result of multiplication
   */
  public int multiply(int x, int y) {
    assert (x >= 0 && x < getFieldSize() && y >= 0 && y < getFieldSize());
    return mulTable[x][y];
  }

  /**
   * Compute the division of two fields
   *
   * @param x input field
   * @param y input field, must be nonzero
   * @return x/y
   */
  public int divide(int x, int y) {
    assert (x >= 0 && x < getFieldSize() && y > 0 && y < getFieldSize());
    return divTable[x][y];
  }

  /**
   * Compute power n of a field
   *
   * @param x input field
   * @param n power (note: logTable[x] * n is computed in int arithmetic)
   * @return x^n
   */
  public int power(int x, int n) {
    assert (x >= 0 && x < getFieldSize());
    if (n == 0) {
      return 1;
    }
    if (x == 0) {
      return 0;
    }
    x = logTable[x] * n;
    if (x < primitivePeriod) {
      return powTable[x];
    }
    x = x % primitivePeriod;
    return powTable[x];
  }

  /**
   * Given a Vandermonde matrix V[i][j]=x[j]^i and vector y, solve for z such
   * that Vz=y. The output z will be placed in y.
   *
   * @param x the vector which describe the Vandermonde matrix
   * @param y right-hand side of the Vandermonde system equation. will be
   *          replaced the output in this vector
   */
  public void solveVandermondeSystem(int[] x, int[] y) {
    solveVandermondeSystem(x, y, x.length);
  }

  /**
   * Given a Vandermonde matrix V[i][j]=x[j]^i and vector y, solve for z such
   * that Vz=y. The output z will be placed in y.
   *
   * @param x the vector which describe the Vandermonde matrix
   * @param y right-hand side of the Vandermonde system equation. will be
   *          replaced the output in this vector
   * @param len consider x and y only from 0...len-1
   */
  public void solveVandermondeSystem(int[] x, int[] y, int len) {
    assert (x.length <= len && y.length <= len);
    // Forward phase: fold in the Newton-form coefficients.
    for (int i = 0; i < len - 1; i++) {
      for (int j = len - 1; j > i; j--) {
        y[j] = y[j] ^ mulTable[x[i]][y[j - 1]];
      }
    }
    // Backward phase: convert back to the power basis.
    for (int i = len - 1; i >= 0; i--) {
      for (int j = i + 1; j < len; j++) {
        y[j] = divTable[y[j]][x[j] ^ x[j - i - 1]];
      }
      for (int j = i; j < len - 1; j++) {
        y[j] = y[j] ^ y[j + 1];
      }
    }
  }

  /**
   * A "bulk" version to the solving of Vandermonde System
   */
  public void solveVandermondeSystem(int[] x, byte[][] y, int[] outputOffsets,
                                     int len, int dataLen) {
    int idx1, idx2;
    for (int i = 0; i < len - 1; i++) {
      for (int j = len - 1; j > i; j--) {
        for (idx2 = outputOffsets[j-1], idx1 = outputOffsets[j];
             idx1 < outputOffsets[j] + dataLen; idx1++, idx2++) {
          y[j][idx1] = (byte) (y[j][idx1] ^ mulTable[x[i]][y[j - 1][idx2] &
              0x000000FF]);
        }
      }
    }
    for (int i = len - 1; i >= 0; i--) {
      for (int j = i + 1; j < len; j++) {
        for (idx1 = outputOffsets[j];
             idx1 < outputOffsets[j] + dataLen; idx1++) {
          y[j][idx1] = (byte) (divTable[y[j][idx1] & 0x000000FF][x[j] ^
              x[j - i - 1]]);
        }
      }
      for (int j = i; j < len - 1; j++) {
        for (idx2 = outputOffsets[j+1], idx1 = outputOffsets[j];
             idx1 < outputOffsets[j] + dataLen; idx1++, idx2++) {
          y[j][idx1] = (byte) (y[j][idx1] ^ y[j + 1][idx2]);
        }
      }
    }
  }

  /**
   * A "bulk" version of the solveVandermondeSystem, using ByteBuffer.
   */
  public void solveVandermondeSystem(int[] x, ByteBuffer[] y, int len) {
    ByteBuffer p;
    int idx1, idx2;
    for (int i = 0; i < len - 1; i++) {
      for (int j = len - 1; j > i; j--) {
        p = y[j];
        for (idx1 = p.position(), idx2 = y[j-1].position();
             idx1 < p.limit(); idx1++, idx2++) {
          p.put(idx1, (byte) (p.get(idx1) ^ mulTable[x[i]][y[j-1].get(idx2) &
              0x000000FF]));
        }
      }
    }
    for (int i = len - 1; i >= 0; i--) {
      for (int j = i + 1; j < len; j++) {
        p = y[j];
        for (idx1 = p.position(); idx1 < p.limit(); idx1++) {
          p.put(idx1, (byte) (divTable[p.get(idx1) &
              0x000000FF][x[j] ^ x[j - i - 1]]));
        }
      }
      for (int j = i; j < len - 1; j++) {
        p = y[j];
        for (idx1 = p.position(), idx2 = y[j+1].position();
             idx1 < p.limit(); idx1++, idx2++) {
          p.put(idx1, (byte) (p.get(idx1) ^ y[j+1].get(idx2)));
        }
      }
    }
  }

  /**
   * Compute the multiplication of two polynomials. The index in the array
   * corresponds to the power of the entry. For example p[0] is the constant
   * term of the polynomial p.
   *
   * @param p input polynomial
   * @param q input polynomial
   * @return polynomial represents p*q
   */
  public int[] multiply(int[] p, int[] q) {
    int len = p.length + q.length - 1;
    int[] result = new int[len];
    for (int i = 0; i < len; i++) {
      result[i] = 0;
    }
    for (int i = 0; i < p.length; i++) {
      for (int j = 0; j < q.length; j++) {
        result[i + j] = add(result[i + j], multiply(p[i], q[j]));
      }
    }
    return result;
  }

  /**
   * Compute the remainder of a dividend and divisor pair. The index in the
   * array corresponds to the power of the entry. For example p[0] is the
   * constant term of the polynomial p.
   *
   * @param dividend dividend polynomial, the remainder will be placed
   *                 here when return
   * @param divisor divisor polynomial
   */
  public void remainder(int[] dividend, int[] divisor) {
    for (int i = dividend.length - divisor.length; i >= 0; i--) {
      int ratio = divTable[dividend[i +
          divisor.length - 1]][divisor[divisor.length - 1]];
      for (int j = 0; j < divisor.length; j++) {
        int k = j + i;
        dividend[k] = dividend[k] ^ mulTable[ratio][divisor[j]];
      }
    }
  }

  /**
   * Compute the sum of two polynomials. The index in the array corresponds to
   * the power of the entry. For example p[0] is the constant term of the
   * polynomial p.
   *
   * @param p input polynomial
   * @param q input polynomial
   * @return polynomial represents p+q
   */
  public int[] add(int[] p, int[] q) {
    int len = Math.max(p.length, q.length);
    int[] result = new int[len];
    for (int i = 0; i < len; i++) {
      if (i < p.length && i < q.length) {
        result[i] = add(p[i], q[i]);
      } else if (i < p.length) {
        result[i] = p[i];
      } else {
        result[i] = q[i];
      }
    }
    return result;
  }

  /**
   * Substitute x into polynomial p(x).
   *
   * @param p input polynomial
   * @param x input field
   * @return p(x)
   */
  public int substitute(int[] p, int x) {
    int result = 0;
    int y = 1; // running power x^i
    for (int i = 0; i < p.length; i++) {
      result = result ^ mulTable[p[i]][y];
      y = mulTable[x][y];
    }
    return result;
  }

  /**
   * A "bulk" version of the substitute.
   * Tends to be 2X faster than the "int" substitute in a loop.
   *
   * @param p input polynomial
   * @param q store the return result
   * @param x input field
   */
  public void substitute(byte[][] p, byte[] q, int x) {
    int y = 1;
    for (int i = 0; i < p.length; i++) {
      byte[] pi = p[i];
      for (int j = 0; j < pi.length; j++) {
        int pij = pi[j] & 0x000000FF;
        q[j] = (byte) (q[j] ^ mulTable[pij][y]);
      }
      y = mulTable[x][y];
    }
  }

  /**
   * A "bulk" version of the substitute.
   * Tends to be 2X faster than the "int" substitute in a loop.
   *
   * @param p input polynomial
   * @param offsets per-coefficient start offsets into p[i]
   * @param len number of bytes processed per coefficient
   * @param q store the return result
   * @param offset start offset into q
   * @param x input field
   */
  public void substitute(byte[][] p, int[] offsets,
                         int len, byte[] q, int offset, int x) {
    int y = 1, iIdx, oIdx;
    for (int i = 0; i < p.length; i++) {
      byte[] pi = p[i];
      for (iIdx = offsets[i], oIdx = offset;
           iIdx < offsets[i] + len; iIdx++, oIdx++) {
        // A null coefficient array is treated as the zero polynomial term.
        int pij = pi != null ? pi[iIdx] & 0x000000FF : 0;
        q[oIdx] = (byte) (q[oIdx] ^ mulTable[pij][y]);
      }
      y = mulTable[x][y];
    }
  }

  /**
   * A "bulk" version of the substitute, using ByteBuffer.
   * Tends to be 2X faster than the "int" substitute in a loop.
   *
   * @param p input polynomial
   * @param q store the return result
   * @param x input field
   */
  public void substitute(ByteBuffer[] p, int len, ByteBuffer q, int x) {
    int y = 1, iIdx, oIdx;
    for (int i = 0; i < p.length; i++) {
      ByteBuffer pi = p[i];
      int pos = pi != null ? pi.position() : 0;
      int limit = pi != null ? pi.limit() : len;
      for (oIdx = q.position(), iIdx = pos;
           iIdx < limit; iIdx++, oIdx++) {
        int pij = pi != null ? pi.get(iIdx) & 0x000000FF : 0;
        q.put(oIdx, (byte) (q.get(oIdx) ^ mulTable[pij][y]));
      }
      y = mulTable[x][y];
    }
  }

  /**
   * The "bulk" version of the remainder.
   * Warning: This function will modify the "dividend" inputs.
   */
  public void remainder(byte[][] dividend, int[] divisor) {
    for (int i = dividend.length - divisor.length; i >= 0; i--) {
      for (int j = 0; j < divisor.length; j++) {
        for (int k = 0; k < dividend[i].length; k++) {
          int ratio = divTable[dividend[i + divisor.length - 1][k] &
              0x00FF][divisor[divisor.length - 1]];
          dividend[j + i][k] = (byte) ((dividend[j + i][k] & 0x00FF) ^
              mulTable[ratio][divisor[j]]);
        }
      }
    }
  }

  /**
   * The "bulk" version of the remainder.
   * Warning: This function will modify the "dividend" inputs.
   */
  public void remainder(byte[][] dividend, int[] offsets,
                        int len, int[] divisor) {
    int idx1, idx2;
    for (int i = dividend.length - divisor.length; i >= 0; i--) {
      for (int j = 0; j < divisor.length; j++) {
        for (idx2 = offsets[j + i], idx1 = offsets[i + divisor.length - 1];
             idx1 < offsets[i + divisor.length - 1] + len;
             idx1++, idx2++) {
          int ratio = divTable[dividend[i + divisor.length - 1][idx1] &
              0x00FF][divisor[divisor.length - 1]];
          dividend[j + i][idx2] = (byte) ((dividend[j + i][idx2] & 0x00FF) ^
              mulTable[ratio][divisor[j]]);
        }
      }
    }
  }

  /**
   * The "bulk" version of the remainder, using ByteBuffer.
   * Warning: This function will modify the "dividend" inputs.
   */
  public void remainder(ByteBuffer[] dividend, int[] divisor) {
    int idx1, idx2;
    ByteBuffer b1, b2;
    for (int i = dividend.length - divisor.length; i >= 0; i--) {
      for (int j = 0; j < divisor.length; j++) {
        b1 = dividend[i + divisor.length - 1];
        b2 = dividend[j + i];
        for (idx1 = b1.position(), idx2 = b2.position();
             idx1 < b1.limit(); idx1++, idx2++) {
          int ratio = divTable[b1.get(idx1) &
              0x00FF][divisor[divisor.length - 1]];
          b2.put(idx2, (byte) ((b2.get(idx2) & 0x00FF) ^
              mulTable[ratio][divisor[j]]));
        }
      }
    }
  }

  /**
   * Perform Gaussian elimination on the given matrix. This matrix has to be a
   * fat matrix (number of rows &lt; number of columns), and is reduced in
   * place to reduced row-echelon form.
   */
  public void gaussianElimination(int[][] matrix) {
    assert(matrix != null && matrix.length > 0 && matrix[0].length > 0
        && matrix.length < matrix[0].length);
    int height = matrix.length;
    int width = matrix[0].length;
    for (int i = 0; i < height; i++) {
      boolean pivotFound = false;
      // scan the column for a nonzero pivot and swap it to the diagonal
      for (int j = i; j < height; j++) {
        // BUG FIX: inspect column i of row j (matrix[j][i]), not row i
        // (matrix[i][j]); the old row scan could "find" a pivot while leaving
        // a zero on the diagonal, then divide by zero below.
        if (matrix[j][i] != 0) {
          int[] tmp = matrix[i];
          matrix[i] = matrix[j];
          matrix[j] = tmp;
          pivotFound = true;
          break;
        }
      }
      if (!pivotFound) {
        continue;
      }
      // normalize the pivot row, then eliminate the column below the pivot
      // (add == subtract in GF(2^p))
      int pivot = matrix[i][i];
      for (int j = i; j < width; j++) {
        matrix[i][j] = divide(matrix[i][j], pivot);
      }
      for (int j = i + 1; j < height; j++) {
        int lead = matrix[j][i];
        for (int k = i; k < width; k++) {
          matrix[j][k] = add(matrix[j][k], multiply(lead, matrix[i][k]));
        }
      }
    }
    // back substitution: clear entries above each pivot
    for (int i = height - 1; i >=0; i--) {
      for (int j = 0; j < i; j++) {
        int lead = matrix[j][i];
        for (int k = i; k < width; k++) {
          matrix[j][k] = add(matrix[j][k], multiply(lead, matrix[i][k]));
        }
      }
    }
  }

}
| |
package cn.yo2.aquarium.pocketvoa.parser.voa51;
import java.util.HashMap;
import cn.yo2.aquarium.pocketvoa.parser.IDataSource;
import cn.yo2.aquarium.pocketvoa.parser.IListParser;
import cn.yo2.aquarium.pocketvoa.parser.IPageParser;
/**
 * {@link IDataSource} implementation for articles hosted on www.51voa.com.
 *
 * For every supported article category — keyed by
 * {@code type + "_" + subtype} — this class registers the list-page URL
 * template, a list parser, a page parser and, where Chinese translations
 * exist, a translated-page parser.
 */
public class Voa51DataSource implements IDataSource {
    /** Joins the article type and subtype into a single map key. */
    private static final String SEPARATOR = "_";

    static final String HOST = "http://www.51voa.com";

    // standard English
    static final String URL_ENGLISH_NEWS = HOST + "/VOA_Standard_%d.html";

    // special English
    static final String URL_TECHNOLOGY_REPORT = HOST + "/Technology_Report_%d.html";
    static final String URL_THIS_IS_AMERICA = HOST + "/This_is_America_%d.html";
    static final String URL_AGRICULTURE_REPORT = HOST + "/Agriculture_Report_%d.html";
    static final String URL_SCIENCE_IN_THE_NEWS = HOST + "/Science_in_the_News_%d.html";
    static final String URL_HEALTH_REPORT = HOST + "/Health_Report_%d.html";
    static final String URL_EXPLORATIONS = HOST + "/Explorations_%d.html";
    static final String URL_EDUCATION_REPORT = HOST + "/Education_Report_%d.html";
    static final String URL_THE_MAKING_OF_A_NATION = HOST + "/The_Making_of_a_Nation_%d.html";
    static final String URL_ECONOMICS_REPORT = HOST + "/Economics_Report_%d.html";
    static final String URL_AMERICAN_MOSAIC = HOST + "/American_Mosaic_%d.html";
    static final String URL_IN_THE_NEWS = HOST + "/In_the_News_%d.html";
    static final String URL_AMERICAN_STORIES = HOST + "/American_Stories_%d.html";
    static final String URL_WORDS_AND_THEIR_STORIES = HOST + "/Words_And_Their_Stories_%d.html";
    static final String URL_PEOPLE_IN_AMERICA = HOST + "/People_in_America_%d.html";

    // English learning
    static final String URL_GO_ENGLISH = HOST + "/Go_English_%d.html";
    static final String URL_WORD_MASTER = HOST + "/Word_Master_%d.html";
    static final String URL_AMERICAN_CAFE = HOST + "/American_Cafe_%d.html";
    static final String URL_POPULAR_AMERICAN = HOST + "/Popular_American_%d.html";
    static final String URL_BUSINESS_ETIQUETTE = HOST + "/Business_Etiquette_%d.html";
    static final String URL_SPORTS_ENGLISH = HOST + "/Sports_English_%d.html";
    static final String URL_WORDS_AND_IDIOMS = HOST + "/Words_And_Idioms_%d.html";

    // key: type + SEPARATOR + subtype -> list-page URL template
    private final HashMap<String, String> mListUrls = new HashMap<String, String>();
    // key: type + SEPARATOR + subtype -> parser for the article list page
    private final HashMap<String, IListParser> mListParsers = new HashMap<String, IListParser>();
    // key: type + SEPARATOR + subtype -> parser for the article page
    private final HashMap<String, IPageParser> mPageParsers = new HashMap<String, IPageParser>();
    // key: type + SEPARATOR + subtype -> parser for the Chinese translation page
    private final HashMap<String, IPageParser> mPageZhParsers = new HashMap<String, IPageParser>();

    /**
     * Populates all four registries. Must be called before any getter is used.
     *
     * @param maxCount maximum number of list entries to fetch.
     *        NOTE(review): currently unused — it is passed to
     *        setupListParsers() but never forwarded to any parser; confirm
     *        whether the list parsers should honor it.
     */
    public void init(int maxCount) {
        setupListUrls();
        setupListParsers(maxCount);
        setupPageParsers();
        setupPageZhParsers();
    }

    /** Returns the list-parser registry (populated by {@link #init(int)}). */
    public HashMap<String, IListParser> getListParsers() {
        return mListParsers;
    }

    /** Returns the list-URL registry (populated by {@link #init(int)}). */
    public HashMap<String, String> getListUrls() {
        return mListUrls;
    }

    /** Returns the page-parser registry (populated by {@link #init(int)}). */
    public HashMap<String, IPageParser> getPageParsers() {
        return mPageParsers;
    }

    /** Returns the Chinese-page-parser registry (populated by {@link #init(int)}). */
    public HashMap<String, IPageParser> getPageZhParsers() {
        return mPageZhParsers;
    }

    /** Human-readable name of this data source (the site's host name). */
    public String getName() {
        return "www.51voa.com";
    }

    /** Registers the list-page URL template for every supported category. */
    private void setupListUrls() {
        // standard English
        mListUrls.put(STANDARD_ENGLISH + SEPARATOR + ENGLISH_NEWS, URL_ENGLISH_NEWS);
        // special English
        mListUrls.put(SPECIAL_ENGLISH + SEPARATOR + TECHNOLOGY_REPORT, URL_TECHNOLOGY_REPORT);
        mListUrls.put(SPECIAL_ENGLISH + SEPARATOR + THIS_IS_AMERICA, URL_THIS_IS_AMERICA);
        mListUrls.put(SPECIAL_ENGLISH + SEPARATOR + AGRICULTURE_REPORT, URL_AGRICULTURE_REPORT);
        mListUrls.put(SPECIAL_ENGLISH + SEPARATOR + SCIENCE_IN_THE_NEWS, URL_SCIENCE_IN_THE_NEWS);
        mListUrls.put(SPECIAL_ENGLISH + SEPARATOR + HEALTH_REPORT, URL_HEALTH_REPORT);
        mListUrls.put(SPECIAL_ENGLISH + SEPARATOR + EXPLORATIONS, URL_EXPLORATIONS);
        mListUrls.put(SPECIAL_ENGLISH + SEPARATOR + EDUCATION_REPORT, URL_EDUCATION_REPORT);
        mListUrls.put(SPECIAL_ENGLISH + SEPARATOR + THE_MAKING_OF_A_NATION, URL_THE_MAKING_OF_A_NATION);
        mListUrls.put(SPECIAL_ENGLISH + SEPARATOR + ECONOMICS_REPORT, URL_ECONOMICS_REPORT);
        mListUrls.put(SPECIAL_ENGLISH + SEPARATOR + AMERICAN_MOSAIC, URL_AMERICAN_MOSAIC);
        mListUrls.put(SPECIAL_ENGLISH + SEPARATOR + IN_THE_NEWS, URL_IN_THE_NEWS);
        mListUrls.put(SPECIAL_ENGLISH + SEPARATOR + AMERICAN_STORIES, URL_AMERICAN_STORIES);
        mListUrls.put(SPECIAL_ENGLISH + SEPARATOR + WORDS_AND_THEIR_STORIES, URL_WORDS_AND_THEIR_STORIES);
        mListUrls.put(SPECIAL_ENGLISH + SEPARATOR + PEOPLE_IN_AMERICA, URL_PEOPLE_IN_AMERICA);
        // English learning
        mListUrls.put(ENGLISH_LEARNING + SEPARATOR + GO_ENGLISH, URL_GO_ENGLISH);
        mListUrls.put(ENGLISH_LEARNING + SEPARATOR + WORD_MASTER, URL_WORD_MASTER);
        mListUrls.put(ENGLISH_LEARNING + SEPARATOR + AMERICAN_CAFE, URL_AMERICAN_CAFE);
        mListUrls.put(ENGLISH_LEARNING + SEPARATOR + POPULAR_AMERICAN, URL_POPULAR_AMERICAN);
        mListUrls.put(ENGLISH_LEARNING + SEPARATOR + BUSINESS_ETIQUETTE, URL_BUSINESS_ETIQUETTE);
        mListUrls.put(ENGLISH_LEARNING + SEPARATOR + SPORTS_ENGLISH, URL_SPORTS_ENGLISH);
        mListUrls.put(ENGLISH_LEARNING + SEPARATOR + WORDS_AND_IDIOMS, URL_WORDS_AND_IDIOMS);
    }

    /**
     * Registers the list-page parser for every supported category.
     * Standard/special English use StandardEnglishListParser; the
     * English-learning categories use PopularAmericanListParser.
     *
     * @param maxCount see {@link #init(int)} — currently unused.
     */
    private void setupListParsers(int maxCount) {
        /*
         * standard English
         */
        mListParsers.put(STANDARD_ENGLISH + SEPARATOR + ENGLISH_NEWS,
                new StandardEnglishListParser(STANDARD_ENGLISH, ENGLISH_NEWS));
        /*
         * special English
         */
        mListParsers.put(SPECIAL_ENGLISH + SEPARATOR + TECHNOLOGY_REPORT,
                new StandardEnglishListParser(SPECIAL_ENGLISH, TECHNOLOGY_REPORT));
        mListParsers.put(SPECIAL_ENGLISH + SEPARATOR + THIS_IS_AMERICA,
                new StandardEnglishListParser(SPECIAL_ENGLISH, THIS_IS_AMERICA));
        mListParsers.put(SPECIAL_ENGLISH + SEPARATOR + AGRICULTURE_REPORT,
                new StandardEnglishListParser(SPECIAL_ENGLISH, AGRICULTURE_REPORT));
        mListParsers.put(SPECIAL_ENGLISH + SEPARATOR + SCIENCE_IN_THE_NEWS,
                new StandardEnglishListParser(SPECIAL_ENGLISH, SCIENCE_IN_THE_NEWS));
        mListParsers.put(SPECIAL_ENGLISH + SEPARATOR + HEALTH_REPORT,
                new StandardEnglishListParser(SPECIAL_ENGLISH, HEALTH_REPORT));
        mListParsers.put(SPECIAL_ENGLISH + SEPARATOR + EXPLORATIONS,
                new StandardEnglishListParser(SPECIAL_ENGLISH, EXPLORATIONS));
        mListParsers.put(SPECIAL_ENGLISH + SEPARATOR + EDUCATION_REPORT,
                new StandardEnglishListParser(SPECIAL_ENGLISH, EDUCATION_REPORT));
        mListParsers.put(SPECIAL_ENGLISH + SEPARATOR + THE_MAKING_OF_A_NATION,
                new StandardEnglishListParser(SPECIAL_ENGLISH, THE_MAKING_OF_A_NATION));
        mListParsers.put(SPECIAL_ENGLISH + SEPARATOR + ECONOMICS_REPORT,
                new StandardEnglishListParser(SPECIAL_ENGLISH, ECONOMICS_REPORT));
        mListParsers.put(SPECIAL_ENGLISH + SEPARATOR + AMERICAN_MOSAIC,
                new StandardEnglishListParser(SPECIAL_ENGLISH, AMERICAN_MOSAIC));
        mListParsers.put(SPECIAL_ENGLISH + SEPARATOR + IN_THE_NEWS,
                new StandardEnglishListParser(SPECIAL_ENGLISH, IN_THE_NEWS));
        mListParsers.put(SPECIAL_ENGLISH + SEPARATOR + AMERICAN_STORIES,
                new StandardEnglishListParser(SPECIAL_ENGLISH, AMERICAN_STORIES));
        mListParsers.put(SPECIAL_ENGLISH + SEPARATOR + WORDS_AND_THEIR_STORIES,
                new StandardEnglishListParser(SPECIAL_ENGLISH, WORDS_AND_THEIR_STORIES));
        mListParsers.put(SPECIAL_ENGLISH + SEPARATOR + PEOPLE_IN_AMERICA,
                new StandardEnglishListParser(SPECIAL_ENGLISH, PEOPLE_IN_AMERICA));
        /*
         * English learning
         */
        mListParsers.put(ENGLISH_LEARNING + SEPARATOR + GO_ENGLISH,
                new PopularAmericanListParser(ENGLISH_LEARNING, GO_ENGLISH));
        mListParsers.put(ENGLISH_LEARNING + SEPARATOR + WORD_MASTER,
                new PopularAmericanListParser(ENGLISH_LEARNING, WORD_MASTER));
        mListParsers.put(ENGLISH_LEARNING + SEPARATOR + AMERICAN_CAFE,
                new PopularAmericanListParser(ENGLISH_LEARNING, AMERICAN_CAFE));
        mListParsers.put(ENGLISH_LEARNING + SEPARATOR + POPULAR_AMERICAN,
                new PopularAmericanListParser(ENGLISH_LEARNING, POPULAR_AMERICAN));
        mListParsers.put(ENGLISH_LEARNING + SEPARATOR + BUSINESS_ETIQUETTE,
                new PopularAmericanListParser(ENGLISH_LEARNING, BUSINESS_ETIQUETTE));
        mListParsers.put(ENGLISH_LEARNING + SEPARATOR + SPORTS_ENGLISH,
                new PopularAmericanListParser(ENGLISH_LEARNING, SPORTS_ENGLISH));
        mListParsers.put(ENGLISH_LEARNING + SEPARATOR + WORDS_AND_IDIOMS,
                new PopularAmericanListParser(ENGLISH_LEARNING, WORDS_AND_IDIOMS));
    }

    /** Registers the article-page parser for every supported category. */
    private void setupPageParsers() {
        /*
         * standard English
         */
        mPageParsers.put(STANDARD_ENGLISH + SEPARATOR + ENGLISH_NEWS,
                new StandardEnglishPageParser());
        /*
         * special English
         */
        mPageParsers.put(SPECIAL_ENGLISH + SEPARATOR + TECHNOLOGY_REPORT,
                new StandardEnglishPageParser());
        mPageParsers.put(SPECIAL_ENGLISH + SEPARATOR + THIS_IS_AMERICA,
                new StandardEnglishPageParser());
        mPageParsers.put(SPECIAL_ENGLISH + SEPARATOR + AGRICULTURE_REPORT,
                new StandardEnglishPageParser());
        mPageParsers.put(SPECIAL_ENGLISH + SEPARATOR + SCIENCE_IN_THE_NEWS,
                new StandardEnglishPageParser());
        mPageParsers.put(SPECIAL_ENGLISH + SEPARATOR + HEALTH_REPORT,
                new StandardEnglishPageParser());
        mPageParsers.put(SPECIAL_ENGLISH + SEPARATOR + EXPLORATIONS,
                new StandardEnglishPageParser());
        mPageParsers.put(SPECIAL_ENGLISH + SEPARATOR + EDUCATION_REPORT,
                new StandardEnglishPageParser());
        mPageParsers.put(SPECIAL_ENGLISH + SEPARATOR + THE_MAKING_OF_A_NATION,
                new StandardEnglishPageParser());
        mPageParsers.put(SPECIAL_ENGLISH + SEPARATOR + ECONOMICS_REPORT,
                new StandardEnglishPageParser());
        mPageParsers.put(SPECIAL_ENGLISH + SEPARATOR + AMERICAN_MOSAIC,
                new StandardEnglishPageParser());
        mPageParsers.put(SPECIAL_ENGLISH + SEPARATOR + IN_THE_NEWS,
                new StandardEnglishPageParser());
        mPageParsers.put(SPECIAL_ENGLISH + SEPARATOR + AMERICAN_STORIES,
                new StandardEnglishPageParser());
        mPageParsers.put(SPECIAL_ENGLISH + SEPARATOR + WORDS_AND_THEIR_STORIES,
                new StandardEnglishPageParser());
        mPageParsers.put(SPECIAL_ENGLISH + SEPARATOR + PEOPLE_IN_AMERICA,
                new StandardEnglishPageParser());
        /*
         * English learning
         */
        mPageParsers.put(ENGLISH_LEARNING + SEPARATOR + GO_ENGLISH,
                new PopularAmericanPageParser());
        mPageParsers.put(ENGLISH_LEARNING + SEPARATOR + WORD_MASTER,
                new PopularAmericanPageParser());
        mPageParsers.put(ENGLISH_LEARNING + SEPARATOR + AMERICAN_CAFE,
                new PopularAmericanPageParser());
        mPageParsers.put(ENGLISH_LEARNING + SEPARATOR + POPULAR_AMERICAN,
                new PopularAmericanPageParser());
        mPageParsers.put(ENGLISH_LEARNING + SEPARATOR + BUSINESS_ETIQUETTE,
                new PopularAmericanPageParser());
        mPageParsers.put(ENGLISH_LEARNING + SEPARATOR + SPORTS_ENGLISH,
                new PopularAmericanPageParser());
        mPageParsers.put(ENGLISH_LEARNING + SEPARATOR + WORDS_AND_IDIOMS,
                new PopularAmericanPageParser());
    }

    /**
     * Registers the Chinese-translation page parsers. Only the special
     * English categories have translations; standard English and English
     * learning intentionally register nothing here.
     */
    private void setupPageZhParsers() {
        // standard English: none
        /*=====================================================================
         * special English
         ====================================================================*/
        mPageZhParsers.put(SPECIAL_ENGLISH + SEPARATOR + TECHNOLOGY_REPORT,
                new StandardEnglishPageZhParser());
        mPageZhParsers.put(SPECIAL_ENGLISH + SEPARATOR + THIS_IS_AMERICA,
                new StandardEnglishPageZhParser());
        mPageZhParsers.put(SPECIAL_ENGLISH + SEPARATOR + AGRICULTURE_REPORT,
                new StandardEnglishPageZhParser());
        mPageZhParsers.put(SPECIAL_ENGLISH + SEPARATOR + SCIENCE_IN_THE_NEWS,
                new StandardEnglishPageZhParser());
        mPageZhParsers.put(SPECIAL_ENGLISH + SEPARATOR + HEALTH_REPORT,
                new StandardEnglishPageZhParser());
        mPageZhParsers.put(SPECIAL_ENGLISH + SEPARATOR + EXPLORATIONS,
                new StandardEnglishPageZhParser());
        mPageZhParsers.put(SPECIAL_ENGLISH + SEPARATOR + EDUCATION_REPORT,
                new StandardEnglishPageZhParser());
        mPageZhParsers.put(SPECIAL_ENGLISH + SEPARATOR + THE_MAKING_OF_A_NATION,
                new StandardEnglishPageZhParser());
        mPageZhParsers.put(SPECIAL_ENGLISH + SEPARATOR + ECONOMICS_REPORT,
                new StandardEnglishPageZhParser());
        mPageZhParsers.put(SPECIAL_ENGLISH + SEPARATOR + AMERICAN_MOSAIC,
                new StandardEnglishPageZhParser());
        mPageZhParsers.put(SPECIAL_ENGLISH + SEPARATOR + IN_THE_NEWS,
                new StandardEnglishPageZhParser());
        mPageZhParsers.put(SPECIAL_ENGLISH + SEPARATOR + AMERICAN_STORIES,
                new StandardEnglishPageZhParser());
        mPageZhParsers.put(SPECIAL_ENGLISH + SEPARATOR + WORDS_AND_THEIR_STORIES,
                new StandardEnglishPageZhParser());
        mPageZhParsers.put(SPECIAL_ENGLISH + SEPARATOR + PEOPLE_IN_AMERICA,
                new StandardEnglishPageZhParser());
        // English learning: none
    }
}
| |
/*=========================================================================
* Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* one or more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
package com.gemstone.gemfire.distributed.internal;
import java.io.File;
import java.io.IOException;
import java.net.ConnectException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.logging.log4j.Logger;
import com.gemstone.gemfire.CancelException;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.CacheFactory;
import com.gemstone.gemfire.cache.GemFireCache;
import com.gemstone.gemfire.cache.client.internal.locator.ClientConnectionRequest;
import com.gemstone.gemfire.cache.client.internal.locator.ClientReplacementRequest;
import com.gemstone.gemfire.cache.client.internal.locator.GetAllServersRequest;
import com.gemstone.gemfire.cache.client.internal.locator.LocatorListRequest;
import com.gemstone.gemfire.cache.client.internal.locator.LocatorStatusRequest;
import com.gemstone.gemfire.cache.client.internal.locator.LocatorStatusResponse;
import com.gemstone.gemfire.cache.client.internal.locator.QueueConnectionRequest;
import com.gemstone.gemfire.cache.client.internal.locator.ServerLocationRequest;
import com.gemstone.gemfire.cache.client.internal.locator.wan.LocatorMembershipListener;
//import com.gemstone.gemfire.cache.client.internal.locator.wan.LocatorDiscovery;
//import com.gemstone.gemfire.cache.client.internal.locator.wan.LocatorHelper;
//import com.gemstone.gemfire.cache.client.internal.locator.wan.LocatorJoinMessage;
//import com.gemstone.gemfire.cache.client.internal.locator.wan.LocatorMembershipListenerImpl;
//import com.gemstone.gemfire.cache.client.internal.locator.wan.RemoteLocatorJoinRequest;
//import com.gemstone.gemfire.cache.client.internal.locator.wan.RemoteLocatorJoinResponse;
//import com.gemstone.gemfire.cache.client.internal.locator.wan.RemoteLocatorPingRequest;
//import com.gemstone.gemfire.cache.client.internal.locator.wan.RemoteLocatorPingResponse;
//import com.gemstone.gemfire.cache.client.internal.locator.wan.RemoteLocatorRequest;
//import com.gemstone.gemfire.cache.client.internal.locator.wan.RemoteLocatorResponse;
import com.gemstone.gemfire.cache.wan.GatewaySenderFactory;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.distributed.Locator;
import com.gemstone.gemfire.distributed.LockServiceDestroyedException;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem.ConnectListener;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem.DisconnectListener;
import com.gemstone.gemfire.distributed.internal.membership.QuorumChecker;
import com.gemstone.gemfire.distributed.internal.membership.jgroup.GFJGBasicAdapter;
import com.gemstone.gemfire.distributed.internal.membership.jgroup.JGroupMember;
import com.gemstone.gemfire.distributed.internal.membership.jgroup.LocatorImpl;
import com.gemstone.gemfire.distributed.internal.tcpserver.TcpClient;
import com.gemstone.gemfire.distributed.internal.tcpserver.TcpHandler;
import com.gemstone.gemfire.distributed.internal.tcpserver.TcpServer;
import com.gemstone.gemfire.internal.SocketCreator;
import com.gemstone.gemfire.internal.admin.remote.DistributionLocatorId;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.cache.wan.WANServiceProvider;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.internal.logging.InternalLogWriter;
import com.gemstone.gemfire.internal.logging.LogService;
import com.gemstone.gemfire.internal.logging.LogWriterFactory;
import com.gemstone.gemfire.internal.logging.LoggingThreadGroup;
import com.gemstone.gemfire.internal.logging.log4j.LocalizedMessage;
import com.gemstone.gemfire.internal.logging.log4j.LogMarker;
import com.gemstone.gemfire.internal.logging.log4j.LogWriterAppenders;
import com.gemstone.gemfire.internal.logging.log4j.LogWriterLogger;
import com.gemstone.gemfire.management.internal.JmxManagerLocator;
import com.gemstone.gemfire.management.internal.JmxManagerLocatorRequest;
import com.gemstone.gemfire.management.internal.JmxManagerLocatorResponse;
import com.gemstone.gemfire.management.internal.cli.CliUtil;
import com.gemstone.gemfire.management.internal.configuration.domain.SharedConfigurationStatus;
import com.gemstone.gemfire.management.internal.configuration.handlers.ConfigurationRequestHandler;
import com.gemstone.gemfire.management.internal.configuration.handlers.SharedConfigurationStatusRequestHandler;
import com.gemstone.gemfire.management.internal.configuration.messages.ConfigurationRequest;
import com.gemstone.gemfire.management.internal.configuration.messages.SharedConfigurationStatusRequest;
import com.gemstone.gemfire.management.internal.configuration.messages.SharedConfigurationStatusResponse;
import com.gemstone.org.jgroups.Address;
import com.gemstone.org.jgroups.JChannel;
import com.gemstone.org.jgroups.stack.GossipData;
import com.gemstone.org.jgroups.stack.GossipServer;
import com.gemstone.org.jgroups.util.GemFireTracer;
/**
* Provides the implementation of a distribution <code>Locator</code>
* as well as internal-only functionality. Currently, a distribution
* locator is implemented using a JGroups {@link GossipServer}.
*
* This class has APIs that perform essentially three layers of
* services. At the bottom layer is the JGroups location service. On
* top of that you can start a distributed system. And then on top
* of that you can start server location services.
*
* Server Location Service
* DistributedSystem
* Peer Location Service
*
* The startLocator() methods provide a way to start all three
* services in one call. Otherwise, the services can be started
* independently
 * <code>
 * locator = createLocator();
 * locator.startPeerLocation();
 * locator.startDistributedSystem();
 * </code>
 *
* @author David Whitlock
* @since 4.0
*/
public class InternalLocator extends Locator implements ConnectListener {
private static final Logger logger = LogService.getLogger();

/** How long (in milliseconds) a member that we haven't heard from
 * in a while should live before we call it dead */
private static final long EXPIRY_MS = 60000; // one minute

/** timeout (ms) when waiting for the shared configuration status */
private static final int SHARED_CONFIG_STATUS_TIMEOUT = 10000; //10 seconds

/** system property name for forcing a locator distribution manager type */
public static final String FORCE_LOCATOR_DM_TYPE = "Locator.forceLocatorDMType";

/** system property name for inhibiting DM banner */
public static final String INHIBIT_DM_BANNER = "Locator.inhibitDMBanner";

///////////////////// Instance Fields //////////////////////

/** The tcp server responding to locator requests */
private final TcpServer server;

/** request handler installed on {@link #server}
 * @since 5.7
 */
private final PrimaryHandler handler;

/** The distributed system owned by this locator, if any.
 * Note that if a ds already exists because the locator is
 * being colocated in a normal member this field will be null.
 */
private InternalDistributedSystem myDs;
/** The cache owned by this locator, if any.
 * Note that if a cache already exists because the locator is
 * being colocated in a normal member this field will be null.
 */
private Cache myCache;

/** locator state file (used for recovery across restarts) */
private File stateFile;

/** product use logging */
private ProductUseLog productUseLog;

// true if peer location services were requested for this locator
private boolean peerLocator;

// server location service, if started (null otherwise)
private ServerLocator serverLocator;

protected volatile LocatorStats stats;

//TODO - these two properties are a waste of memory once
//the system is started.
private Properties env;

/** the gossip server used for peer location */
private LocatorImpl locatorImpl;

// distribution config for this locator (built from env if not supplied)
private DistributionConfigImpl config;

// WAN locator membership listener, if any
private LocatorMembershipListener locatorListener;

/** whether the locator was stopped during forced-disconnect processing but a reconnect will occur */
private volatile boolean stoppedForReconnect;

// ensures shutdown processing runs at most once
private final AtomicBoolean shutdownHandled = new AtomicBoolean(false);

// cluster ("shared") configuration service hosted by this locator, if any
private SharedConfiguration sharedConfig;

private volatile boolean isSharedConfigurationStarted = false;

// thread performing an asynchronous locator restart, if one is in progress
private Thread restartThread;
/** Returns true if cluster (shared) configuration is enabled in this locator's distribution config. */
public boolean isSharedConfigurationEnabled() {
  return this.config.getEnableClusterConfiguration();
}
/** Returns true if the cluster configuration should be loaded from the shared configuration directory. */
public boolean loadFromSharedConfigDir() {
  return this.config.getLoadClusterConfigFromDir();
}
/**
 * Reports whether this locator's shared (cluster) configuration service has
 * been created and is currently in the RUNNING state.
 */
public boolean isSharedConfigurationRunning() {
  SharedConfiguration sc = this.sharedConfig;
  return sc != null && sc.getStatus() == SharedConfigurationStatus.RUNNING;
}
////////////////////// Static Methods /////////////////////

/** the locator hosted by this JVM. As of 7.0 it is a singleton. */
private static InternalLocator locator; // must synchronize on locatorLock
/** guards all reads and writes of {@link #locator} */
private static final Object locatorLock = new Object();

/**
 * Returns the locator hosted by this JVM, or null if there is none.
 * Reads under locatorLock so a concurrent createLocator() is observed
 * consistently.
 */
public static InternalLocator getLocator() {
  // synchronize in order to fix #46336 (race condition in createLocator)
  synchronized (locatorLock) {
    return locator;
  }
}
/** Returns true if an InternalLocator is currently hosted by this JVM. */
public static boolean hasLocator() {
  synchronized (locatorLock) {
    return locator != null;
  }
}
/**
 * Clears the JVM-singleton locator reference if (and only if) it is the
 * given instance.
 *
 * @param l the locator expected to be the current singleton; may be null
 * @return true if the singleton was cleared, false otherwise
 */
private static boolean removeLocator(InternalLocator l) {
  if (l == null) {
    return false;
  }
  synchronized (locatorLock) {
    // equals(null) is false, so a missing singleton simply falls through
    if (l.equals(locator)) {
      locator = null;
      return true;
    }
    return false;
  }
}
/**
 * Returns the WAN locator membership listener, or null if none was supplied.
 * NOTE(review): the method name breaks camelCase ("getlocator..."); it is
 * part of the public API, so it is left unchanged here.
 */
public LocatorMembershipListener getlocatorMembershipListener() {
  return this.locatorListener;
}
/**
 * Create a locator that listens on a given port. This locator will not have
 * peer or server location services available until they are started by
 * calling startServerLocation or startPeerLocation on the locator object.
 *
 * @param port
 *                the tcp/ip port to listen on
 * @param logFile
 *                the file that log messages should be written to
 * @param stateFile
 *                the file that state should be read from / written to for recovery
 * @param logger
 *                a log writer that should be used (logFile parameter is
 *                ignored)
 * @param securityLogger
 *                the logger to be used for security related log messages
 * @param bindAddress
 *                the address to bind the locator's TCP server to
 * @param hostnameForClients
 *                the name to give to clients for connecting to this locator
 * @param distributedSystemProperties
 *                optional properties to configure the distributed system
 *                (e.g., mcast addr/port, other locators)
 * @param startDistributedSystem if true then this locator will also start its own ds
 * @return the newly created locator, also installed as the JVM singleton
 * @throws IllegalStateException if a locator already exists in this JVM
 */
public static InternalLocator createLocator(
  int port,
  File logFile,
  File stateFile,
  InternalLogWriter logger,
  InternalLogWriter securityLogger,
  InetAddress bindAddress,
  String hostnameForClients,
  java.util.Properties distributedSystemProperties, boolean startDistributedSystem) throws IOException {
  synchronized (locatorLock) {
    if (hasLocator()) {
      throw new IllegalStateException("A locator can not be created because one already exists in this JVM.");
    }
    // construct first; publish to the singleton field only on success
    InternalLocator l = new InternalLocator(port, logFile, stateFile, logger, securityLogger, bindAddress, hostnameForClients, distributedSystemProperties, null, startDistributedSystem);
    locator = l;
    return l;
  }
}
/**
 * Installs the given locator as this JVM's singleton. Installing the same
 * instance twice is a no-op.
 *
 * @throws IllegalStateException if a different locator is already installed
 */
private static void setLocator(InternalLocator l) {
  synchronized(locatorLock) {
    if (locator != null && locator != l) {
      throw new IllegalStateException("A locator can not be created because one already exists in this JVM.");
    }
    locator = l;
  }
}
/**
 * Creates a distribution locator that runs in this VM on the given
 * port and bind address and creates a distributed system.
 *
 * Convenience overload: delegates to the full startLocator() with
 * startDistributedSystem == true.
 *
 * @param port
 *        the tcp/ip port to listen on
 * @param logFile
 *        the file that log messages should be written to
 * @param stateFile
 *        the file that state should be read from / written to for recovery
 * @param logger
 *        a log writer that should be used (logFile parameter is ignored)
 * @param securityLogger
 *        the logger to be used for security related log messages
 * @param bindAddress
 *        the address to bind the locator's TCP server to
 * @param dsProperties
 *        optional properties to configure the distributed system (e.g., mcast addr/port, other locators)
 * @param peerLocator
 *        enable peer location services
 * @param enableServerLocator
 *        enable server location services
 * @param hostnameForClients
 *        the name to give to clients for connecting to this locator
 * @param loadSharedConfigFromDir
 *        load the shared configuration from the shared configuration directory
 * @return the started locator
 * @throws IOException
 * @since 7.0
 */
public static InternalLocator startLocator(
  int port,
  File logFile,
  File stateFile,
  InternalLogWriter logger,
  InternalLogWriter securityLogger,
  InetAddress bindAddress,
  java.util.Properties dsProperties,
  boolean peerLocator,
  boolean enableServerLocator,
  String hostnameForClients,
  boolean loadSharedConfigFromDir
  )
  throws IOException
{
  // startDistributedSystem is hard-wired to true for this overload
  return startLocator(port, logFile, stateFile, logger, securityLogger, bindAddress, true, dsProperties, peerLocator, enableServerLocator, hostnameForClients, loadSharedConfigFromDir);
}
/**
 * Creates a distribution locator that runs in this VM on the given
 * port and bind address.
 *
 * @param port
 *        the tcp/ip port to listen on
 * @param logFile
 *        the file that log messages should be written to
 * @param stateFile
 *        the file that state should be read from / written to for recovery
 * @param logger
 *        a log writer that should be used (logFile parameter is ignored)
 * @param securityLogger
 *        the logger to be used for security related log messages
 * @param bindAddress
 *        the address to bind the locator's TCP server to
 * @param startDistributedSystem
 *        if true, a distributed system is started
 * @param dsProperties
 *        optional properties to configure the distributed system (e.g., mcast addr/port, other locators)
 * @param peerLocator
 *        enable peer location services
 * @param enableServerLocator
 *        enable server location services
 * @param hostnameForClients
 *        the name to give to clients for connecting to this locator
 * @param loadSharedConfigFromDir TODO:CONFIG
 * @throws IOException
 * @throws IllegalArgumentException if neither peer nor server location is enabled
 * @deprecated as of 7.0 use startLocator(int, File, File, InternalLogWriter, InternalLogWriter, InetAddress, java.util.Properties, boolean, boolean, String) instead.
 */
@Deprecated // FIX: javadoc declared @deprecated but the annotation was missing
public static InternalLocator startLocator(
  int port,
  File logFile,
  File stateFile,
  InternalLogWriter logger,
  InternalLogWriter securityLogger,
  InetAddress bindAddress,
  boolean startDistributedSystem,
  java.util.Properties dsProperties,
  boolean peerLocator,
  boolean enableServerLocator,
  String hostnameForClients,
  boolean loadSharedConfigFromDir
  )
  throws IOException
{
  if (!peerLocator && !enableServerLocator) {
    throw new IllegalArgumentException(LocalizedStrings.InternalLocator_EITHER_PEER_LOCATOR_OR_SERVER_LOCATOR_MUST_BE_ENABLED.toLocalizedString());
  }
  // Force the DM type while the locator bootstraps; always cleared in finally.
  System.setProperty(FORCE_LOCATOR_DM_TYPE, "true");
  InternalLocator slocator = null;
  boolean startedLocator = false;
  try {
    slocator = createLocator(port, logFile, stateFile, logger, securityLogger, bindAddress, hostnameForClients, dsProperties, startDistributedSystem);
    if (enableServerLocator) {
      slocator.handler.willHaveServerLocator = true;
    }
    if (peerLocator) {
      slocator.startPeerLocation(startDistributedSystem);
    }
    if (startDistributedSystem) {
      slocator.startDistributedSystem();
      // fix bug #46324
      // FIX: removed redundant cast — myDs is already declared as
      // InternalDistributedSystem.
      final InternalDistributedSystem ids = slocator.myDs;
      if (ids != null) {
        ids.getDistributionManager().addHostedLocators(ids.getDistributedMember(), getLocatorStrings(), slocator.isSharedConfigurationEnabled());
      }
    }
    // during the period when the product is using only paper licenses we always
    // start server location services in order to be able to log information
    // about the use of cache servers
    //    if(enableServerLocator) {
    //      slocator.startServerLocation(InternalDistributedSystem.getConnectedInstance());
    //    }
    InternalDistributedSystem sys = InternalDistributedSystem.getConnectedInstance();
    if (sys != null) {
      slocator.startServerLocation(sys);
    }
    slocator.endStartLocator(null);
    startedLocator = true;
    return slocator;
  } finally {
    System.getProperties().remove(FORCE_LOCATOR_DM_TYPE);
    if (!startedLocator) {
      // fix for bug 46314: don't leave a half-started singleton behind
      removeLocator(slocator);
    }
  }
}
/***
 * Determines if this VM is a locator which must ignore a shutdown.
 * @return true if this VM is a locator which should ignore a shutdown,
 *         false if it is a normal member.
 */
public static boolean isDedicatedLocator() {
  InternalLocator internalLocator = getLocator();
  if (internalLocator == null) {
    return false;
  }
  // myDs is already typed InternalDistributedSystem; no cast needed
  InternalDistributedSystem ids = internalLocator.myDs;
  if (ids == null) {
    return false;
  }
  if (ids.getDistributionManager().isLoner()) {
    return false;
  }
  DistributionManager distMgr = (DistributionManager) ids.getDistributionManager();
  return distMgr.getDMType() == DistributionManager.LOCATOR_DM_TYPE;
}
/**
 * Requests the status of the locator listening on the given port and bind
 * address via a TcpClient request/response round trip.
 *
 * NOTE(review): the timeout is Integer.MAX_VALUE, i.e. effectively
 * unbounded; the commented-out line shows the previous 2-minute value.
 * Confirm the change was intentional.
 */
public static LocatorStatusResponse statusLocator(int port, InetAddress bindAddress) throws IOException {
  //final int timeout = (60 * 2 * 1000); // 2 minutes
  final int timeout = Integer.MAX_VALUE; // effectively no timeout (was 2 minutes)
  try {
    return (LocatorStatusResponse) TcpClient.requestToServer(bindAddress, port,
      new LocatorStatusRequest(), timeout, true);
  }
  catch (ClassNotFoundException e) {
    // the response class should always be on the classpath; treat as fatal
    throw new RuntimeException(e);
  }
}
/**
 * Stops the distribution locator that runs on the given port and
 * bind address.
 *
 * @throws ConnectException if no locator is listening at the given address/port
 */
public static void stopLocator(int port, InetAddress bindAddress)
  throws ConnectException {
  TcpClient.stop(bindAddress, port);
}
/**
 * Returns information about the locator running on the given host
 * and port or <code>null</code> if the information cannot be
 * obtained. Two <code>String</code>s are returned: the first
 * string is the working directory of the locator and the second
 * string is the product directory of the locator.
 *
 * @param host the host the locator is running on
 * @param port the port the locator is listening on
 * @return {working directory, product directory}, or null on failure
 */
public static String[] getLocatorInfo(InetAddress host, int port) {
  return TcpClient.getInfo(host, port);
}
/////////////////////// Constructors //////////////////////
/**
 * Creates a new <code>Locator</code> with the given port, log file, logger,
 * and bind address. Builds (or adopts) a {@link DistributionConfigImpl},
 * wires up logging, creates the request handler and the underlying
 * {@code TcpServer} -- but does not start anything yet.
 *
 * @param port
 * the tcp/ip port to listen on
 * @param logF
 * the file that log messages should be written to
 * @param stateF
 * the file that state should be read from / written to for recovery
 * @param logWriter
 * a log writer that should be used (logFile parameter is
 * ignored)
 * @param securityLogWriter
 * the log writer to be used for security related log messages
 * @param bindAddress
 * the address to bind to; null or a wildcard address means all interfaces
 * @param hostnameForClients
 * the name to give to clients for connecting to this locator
 * @param distributedSystemProperties
 * optional properties to configure the distributed system
 * (e.g., mcast addr/port, other locators)
 * @param cfg the config if being called from a distributed system; otherwise null.
 * @param startDistributedSystem if true locator will start its own distributed system
 */
private InternalLocator(
    int port,
    File logF,
    File stateF,
    InternalLogWriter logWriter, // LOG: 3 non-null sources: GemFireDistributionLocator, InternalDistributedSystem, LocatorLauncher
    InternalLogWriter securityLogWriter, // LOG: 1 non-null source: GemFireDistributionLocator(same instance as logWriter), InternalDistributedSystem
    InetAddress bindAddress,
    String hostnameForClients,
    java.util.Properties distributedSystemProperties, DistributionConfigImpl cfg, boolean startDistributedSystem) {
  this.port = port;
  this.logFile = logF;
  this.bindAddress = bindAddress;
  this.hostnameForClients = hostnameForClients;
  // default the recovery state file into the working directory if none given
  if (stateF == null) {
    this.stateFile = new File("locator" + port + "state.dat");
  }
  else {
    this.stateFile = stateF;
  }
  File productUseFile = new File("locator"+port+"views.log");
  this.productUseLog = new ProductUseLog(productUseFile);
  this.config = cfg;
  env = new Properties();
  // set bind-address explicitly only if not wildcard and let any explicit
  // value in distributedSystemProperties take precedence (#46870)
  if (bindAddress != null && !bindAddress.isAnyLocalAddress()) {
    env.setProperty(DistributionConfig.BIND_ADDRESS_NAME,
        bindAddress.getHostAddress());
  }
  if (distributedSystemProperties != null) {
    env.putAll(distributedSystemProperties);
  }
  env.setProperty(DistributionConfig.CACHE_XML_FILE_NAME, "");
  // create a DC so that all of the lookup rules, gemfire.properties, etc,
  // are considered and we have a config object we can trust
  if (this.config == null) {
    this.config = new DistributionConfigImpl(env);
    this.env.clear();
    this.env.putAll(this.config.getProps());
  }
  final boolean hasLogFileButConfigDoesNot = this.logFile != null && this.config.getLogFile().toString().equals(DistributionConfig.DEFAULT_LOG_FILE.toString());
  if (logWriter == null && hasLogFileButConfigDoesNot) {
    this.config.unsafeSetLogFile(this.logFile); // LOG: this is(was) a hack for when logFile and config don't match -- if config specifies a different log-file things will break!
  }
  // LOG: create LogWriterAppenders (these are closed at shutdown)
  final boolean hasLogFile = this.config.getLogFile() != null && !this.config.getLogFile().equals(new File(""));
  final boolean hasSecurityLogFile = this.config.getSecurityLogFile() != null && !this.config.getSecurityLogFile().equals(new File(""));
  LogService.configureLoggers(hasLogFile, hasSecurityLogFile);
  if (hasLogFile || hasSecurityLogFile) {
    if (hasLogFile) {
      // if log-file then create logWriterAppender
      LogWriterAppenders.getOrCreateAppender(LogWriterAppenders.Identifier.MAIN, true, false, this.config, !startDistributedSystem);
    }
    if (hasSecurityLogFile) {
      // if security-log-file then create securityLogWriterAppender
      LogWriterAppenders.getOrCreateAppender(LogWriterAppenders.Identifier.SECURITY, true, false, this.config, false);
    } else {
      // do not create a LogWriterAppender for security -- let it go through to logWriterAppender
    }
  }
  // LOG: create LogWriters for GemFireTracer (or use whatever was passed in)
  if (logWriter == null) {
    logWriter = LogWriterFactory.createLogWriterLogger(false, false, this.config, !startDistributedSystem);
    if (logger.isDebugEnabled()) {
      logger.debug("LogWriter for locator is created.");
    }
  }
  if (securityLogWriter == null) {
    securityLogWriter = LogWriterFactory.createLogWriterLogger(false, true, this.config, false);
    ((LogWriterLogger) logWriter).setLogWriterLevel(this.config.getSecurityLogLevel());
    securityLogWriter.fine("SecurityLogWriter for locator is created.");
  }
  // WAN locator membership listener is optional (null when WAN is absent)
  this.locatorListener = WANServiceProvider.createLocatorMembershipListener();
  if(locatorListener != null) {
    this.locatorListener.setPort(this.port);
    this.locatorListener.setConfig(this.getConfig());
  }
  this.handler = new PrimaryHandler(this.port, this, locatorListener);
  GemFireTracer tracer = GemFireTracer.getLog(InternalLocator.class);
  tracer.setLogWriter(logWriter);
  tracer.setSecurityLogWriter(securityLogWriter);
  tracer.setLogger(LogService.getLogger(GemFireTracer.class));
  ThreadGroup group = LoggingThreadGroup.createThreadGroup("Distribution locators", logger);
  stats = new LocatorStats();
  // server is created here but only started by startTcpServer()
  server = new TcpServer(this.port, this.bindAddress, null, this.config,
      this.handler, new DelayedPoolStatHelper(), group, this.toString());
}
/**
 * Starts the underlying {@code TcpServer} and then pauses briefly so the
 * server has a chance to bind before callers proceed.
 *
 * @throws IOException if the server fails to start
 */
private void startTcpServer() throws IOException {
  logger.info(LocalizedMessage.create(LocalizedStrings.InternalLocator_STARTING_0, this));
  server.start();
  try {
    Thread.sleep(1000); // give the server a second to come up
  }
  catch (InterruptedException ie) {
    // always safe to exit this thread...
    Thread.currentThread().interrupt();
    logger.warn(LocalizedMessage.create(LocalizedStrings.ONE_ARG, "Interrupted"), ie);
  }
}
/** Returns the cluster shared-configuration service held by this locator, or null if none. */
public SharedConfiguration getSharedConfiguration() {
  return sharedConfig;
}
/** Returns the distribution configuration in effect for this locator. */
public DistributionConfigImpl getConfig() {
  return this.config;
}
/**
 * Start peer location in this locator. If you plan on starting a distributed
 * system later, this method should be called first so that the distributed
 * system can use this locator.
 *
 * @param withDS true if a distributed system has been or will be started
 * @throws IOException if the underlying TcpServer fails to start
 * @throws IllegalStateException if peer location is already running
 * @since 5.7
 */
public void startPeerLocation(boolean withDS) throws IOException {
  if(isPeerLocator()) {
    throw new IllegalStateException(LocalizedStrings.InternalLocator_PEER_LOCATION_IS_ALREADY_RUNNING_FOR_0.toLocalizedString(this));
  }
  logger.info(LocalizedMessage.create(LocalizedStrings.InternalLocator_STARTING_PEER_LOCATION_FOR_0, this));
  String locatorsProp = this.config.getLocators();
  // check for settings that would require only locators to hold the
  // coordinator - e.g., security and network-partition detection
  boolean locatorsAreCoordinators = false;
  boolean networkPartitionDetectionEnabled = this.config.getEnableNetworkPartitionDetection();
  if (networkPartitionDetectionEnabled) {
    locatorsAreCoordinators = true;
  }
  else {
    // check if security is enabled
    String prop = this.config.getSecurityPeerAuthInit();
    locatorsAreCoordinators = (prop != null && prop.length() > 0);
    if (!locatorsAreCoordinators) {
      // a system property can also force coordination into locators
      locatorsAreCoordinators = Boolean.getBoolean("gemfire.disable-floating-coordinator");
    }
  }
  if (locatorsAreCoordinators) {
    // LOG: changed from config to info
    logger.info(LocalizedMessage.create(LocalizedStrings.InternalLocator_FORCING_GROUP_COORDINATION_INTO_LOCATORS));
  }
  // LOG: moved these into InternalDistributedSystem.initialize -- the only other code path constructs InternalLocator 1st which also sets these
  //com.gemstone.org.jgroups.util.GemFireTracer.setLogWriter(this.logWriter);
  //com.gemstone.org.jgroups.util.GemFireTracer
  // .setSecurityLogWriter(this.securityLogWriter);
  // install gemfire serialization and socket functions into jgroups
  JChannel.setDefaultGFFunctions(new GFJGBasicAdapter());
  this.locatorImpl = new LocatorImpl(port, EXPIRY_MS,
      this.bindAddress, this.stateFile, locatorsProp, locatorsAreCoordinators,
      networkPartitionDetectionEnabled, withDS
      );
  // route gossip traffic to the peer-location service
  this.handler.addHandler(GossipData.class, this.locatorImpl);
  peerLocator = true;
  if(!server.isAlive()) {
    startTcpServer();
  }
}
/**
 * @return the gossip server (peer-location service), or null if peer
 *         location has not been started
 */
public LocatorImpl getLocatorHandler() {
  return locatorImpl;
}
/**
 * Initializes the cluster shared-configuration service for the enclosing
 * locator, creating it first unless auto-reconnect already did. Startup
 * cancellation is only logged at debug level; any other failure is logged
 * as an error.
 */
class SharedConfigurationRunnable implements Runnable{
  private final InternalLocator locator = InternalLocator.this;
  @Override
  public void run() {
    try {
      if (locator.sharedConfig == null) {
        // locator.sharedConfig will already be created in case of auto-reconnect
        locator.sharedConfig = new SharedConfiguration((GemFireCacheImpl) locator.myCache);
      }
      locator.sharedConfig.initSharedConfiguration(locator.loadFromSharedConfigDir());
      // make the configuration available to members via the TcpServer
      locator.installSharedConfigDistribution();
      logger.info("Cluster configuration service start up completed successfully and is now running ....");
    } catch (CancelException e) {
      if (logger.isDebugEnabled()) {
        logger.debug("Cluster configuration start up was cancelled", e);
      }
    } catch (LockServiceDestroyedException e) {
      // lock service going away is treated the same as a cancelled start-up
      if (logger.isDebugEnabled()) {
        logger.debug("Cluster configuration start up was cancelled", e);
      }
    } catch (Exception e) {
      logger.error(e.getMessage(), e);
    }
  }
}
/**
 * Start a distributed system whose life cycle is managed by this locator. When
 * the locator is stopped, this distributed system will be disconnected. If a
 * distributed system already exists, this method will have no effect.
 *
 * @throws UnknownHostException
 * @since 5.7
 */
public void startDistributedSystem() throws UnknownHostException {
  InternalDistributedSystem existing = InternalDistributedSystem.getConnectedInstance();
  //TODO : For now set this property to create a PDX registry that does nothing
  // Investigate keeping the typeRegistry in the locators
  if (existing != null) {
    // LOG: changed from config to info
    logger.info(LocalizedMessage.create(LocalizedStrings.InternalLocator_USING_EXISTING_DISTRIBUTED_SYSTEM__0, existing));
    if (getLocatorHandler() != null) {
      // let the GossipServer know the system's address so they can start
      // servicing requests
      Address addr = ((JGroupMember)existing.getDistributedMember().getNetMember()).getAddress();
      getLocatorHandler().setLocalAddress(addr);
    }
    // don't set the ds variable, so it won't be closed by the locator shutting down
    startCache(existing);
  }
  else {
    if (System.getProperty("p2p.joinTimeout", "").length() == 0) {
      System.setProperty("p2p.joinTimeout", "5000");
    }
    // build the "host[port]" string that identifies this locator
    String thisLocator;
    {
      StringBuilder sb = new StringBuilder(100);
      if (bindAddress != null) {
        sb.append(bindAddress.getHostAddress());
      }
      else {
        sb.append(SocketCreator.getLocalHost().getHostAddress());
      }
      sb.append('[').append(port).append(']');
      thisLocator = sb.toString();
    }
    if(peerLocator) {
      // append this locator to the locators list from the config properties
      //this.logger.config("ensuring that this locator is in the locators list");
      boolean setLocatorsProp = false;
      String locatorsProp = this.config.getLocators();
      if (locatorsProp != null && locatorsProp.trim().length() > 0) {
        if (!locatorsProp.contains(thisLocator)) {
          locatorsProp = locatorsProp + "," + thisLocator;
          setLocatorsProp = true;
        }
      }
      else {
        locatorsProp = thisLocator;
        setLocatorsProp = true;
      }
      if (setLocatorsProp) {
        Properties updateEnv = new Properties();
        updateEnv.setProperty(DistributionConfig.LOCATORS_NAME, locatorsProp);
        this.config.setApiProps(updateEnv);
        // fix for bug 41248
        String propName = DistributionConfig.GEMFIRE_PREFIX +
            DistributionConfig.LOCATORS_NAME;
        if (System.getProperty(propName) != null) {
          System.setProperty(propName, locatorsProp);
        }
      }
      // No longer default mcast-port to zero. See 46277.
    }
    Properties connectEnv = new Properties();
    // LogWriterAppender is now shared via that class
    // using a DistributionConfig earlier in this method
    connectEnv.put(DistributionConfig.DS_CONFIG_NAME, this.config);
    logger.info(LocalizedMessage.create(LocalizedStrings.InternalLocator_STARTING_DISTRIBUTED_SYSTEM));
    // LOG:CONFIG: changed from config to info
    logger.info(LogMarker.CONFIG, LocalizedMessage.create(LocalizedStrings.InternalDistributedSystem_STARTUP_CONFIGURATIONN_0, this.config.toLoggerString()));
    myDs = (InternalDistributedSystem)DistributedSystem.connect(connectEnv);
    // stop this locator (without waiting) when the system disconnects
    myDs.addDisconnectListener(new DisconnectListener() {
      @Override
      public void onDisconnect(InternalDistributedSystem sys) {
        stop(false, false);
      }
    });
    startCache(myDs);
    logger.info(LocalizedMessage.create(LocalizedStrings.InternalLocator_LOCATOR_STARTED_ON__0, thisLocator));
    ((InternalDistributedSystem)myDs).setDependentLocator(this);
  }
}
/**
 * Ensures a cache exists for this locator (creating one if needed) and then
 * starts the JMX-manager and shared-configuration location services on it.
 *
 * @param ds the distributed system the cache should belong to
 */
private void startCache(DistributedSystem ds) {
  GemFireCacheImpl gfc = GemFireCacheImpl.getInstance();
  if (gfc == null) {
    logger.info("Creating cache for locator.");
    this.myCache = new CacheFactory(ds.getProperties()).create();
    gfc = (GemFireCacheImpl)this.myCache;
  } else {
    logger.info("Using existing cache for locator.");
    // announce this locator to the existing system
    ((InternalDistributedSystem) ds).handleResourceEvent(
        ResourceEvent.LOCATOR_START, this);
  }
  startJmxManagerLocationService(gfc);
  startSharedConfigurationService(gfc);
}
/**
 * End the initialization of the locator. This method should
 * be called once the location services and distributed
 * system are started.
 *
 * @param distributedSystem
 * The distributed system to use for the statistics. If null,
 * the currently-connected system is used; if none is connected
 * yet, this locator registers as a connect listener instead.
 *
 * @since 5.7
 *
 * @throws UnknownHostException
 */
public void endStartLocator(InternalDistributedSystem distributedSystem) throws UnknownHostException {
  // config properties are no longer needed after startup
  env = null;
  if (distributedSystem == null) {
    distributedSystem = InternalDistributedSystem.getConnectedInstance();
  }
  if(distributedSystem != null) {
    onConnect(distributedSystem);
  } else {
    // hook up stats later, when a system connects
    InternalDistributedSystem.addConnectListener(this);
  }
  // WAN locator discovery is optional (null when WAN is absent)
  WanLocatorDiscoverer s = WANServiceProvider.createLocatorDiscoverer();
  if(s != null) {
    s.discover(this.port, config, locatorListener);
  }
}
/**
 * Start server location services in this locator. Server location
 * can only be started once there is a running distributed system.
 *
 * @param distributedSystem
 * The distributed system which the server location services
 * should use. If null, the method will try to find an already
 * connected distributed system.
 * @throws IOException if the underlying TcpServer fails to start
 * @throws IllegalStateException if server location is already running, or no
 * distributed system is connected
 * @since 5.7
 */
public void startServerLocation(InternalDistributedSystem distributedSystem)
  throws IOException
{
  if(isServerLocator()) {
    throw new IllegalStateException(LocalizedStrings.InternalLocator_SERVER_LOCATION_IS_ALREADY_RUNNING_FOR_0.toLocalizedString(this));
  }
  logger.info(LocalizedMessage.create(LocalizedStrings.InternalLocator_STARTING_SERVER_LOCATION_FOR_0, this));
  if (distributedSystem == null) {
    distributedSystem = InternalDistributedSystem.getConnectedInstance();
    if (distributedSystem == null) {
      throw new IllegalStateException(LocalizedStrings.InternalLocator_SINCE_SERVER_LOCATION_IS_ENABLED_THE_DISTRIBUTED_SYSTEM_MUST_BE_CONNECTED.toLocalizedString());
    }
  }
  this.productUseLog.monitorUse(distributedSystem);
  ServerLocator sl = new ServerLocator(this.port,
      this.bindAddress,
      this.hostnameForClients,
      this.logFile,
      this.productUseLog,
      getConfig().getName(),
      distributedSystem,
      stats);
  // register all client/server discovery request types with the TcpServer
  this.handler.addHandler(LocatorListRequest.class, sl);
  this.handler.addHandler(ClientConnectionRequest.class, sl);
  this.handler.addHandler(QueueConnectionRequest.class, sl);
  this.handler.addHandler(ClientReplacementRequest.class, sl);
  this.handler.addHandler(GetAllServersRequest.class, sl);
  this.handler.addHandler(LocatorStatusRequest.class, sl);
  this.serverLocator = sl;
  if(!server.isAlive()) {
    startTcpServer();
  }
}
/**
 * Stop this locator. This is a normal (non-reconnect) stop that waits
 * for the locator to shut down completely.
 */
@Override
public void stop() {
  final boolean forReconnect = false;
  final boolean waitForDisconnect = true;
  stop(forReconnect, waitForDisconnect);
}
/**
 * Was this locator stopped during forced-disconnect processing but should
 * reconnect?
 */
public boolean getStoppedForReconnect() {
  return stoppedForReconnect;
}
/**
 * Stop this locator
 * @param stopForReconnect - stopping for distributed system reconnect
 * @param waitForDisconnect - wait up to 60 seconds for the locator to completely stop
 */
public void stop(boolean stopForReconnect, boolean waitForDisconnect) {
  final boolean isDebugEnabled = logger.isDebugEnabled();
  if (this.server.isShuttingDown()) {
    // fix for bug 46156
    // If we are already shutting down don't do all of this again.
    // But, give the server a bit of time to shut down so a new
    // locator can be created, if desired, when this method returns
    if (!stopForReconnect && waitForDisconnect) {
      long endOfWait = System.currentTimeMillis() + 60000;
      if (isDebugEnabled && this.server.isAlive()) {
        logger.debug("sleeping to wait for the locator server to shut down...");
      }
      // poll until the server dies or the 60-second budget is used up
      while (this.server.isAlive() && System.currentTimeMillis() < endOfWait) {
        try { Thread.sleep(500); } catch (InterruptedException e) {
          // preserve interrupt status and abandon the wait
          Thread.currentThread().interrupt();
          return;
        }
      }
      if (isDebugEnabled) {
        if (this.server.isAlive()) {
          logger.debug("60 seconds have elapsed waiting for the locator server to shut down - terminating wait and returning");
        } else {
          logger.debug("the locator server has shut down");
        }
      }
    }
    return;
  }
  this.stoppedForReconnect = stopForReconnect;
  if (this.server.isAlive()) {
    logger.info(LocalizedMessage.create(LocalizedStrings.InternalLocator_STOPPING__0, this));
    try {
      stopLocator(this.port, this.bindAddress);
    } catch ( ConnectException ignore ) {
      // must not be running
    }
    boolean interrupted = Thread.interrupted();
    try {
      // wait up to a minute for the server thread to exit
      this.server.join(60 * 1000);
    } catch (InterruptedException ex) {
      interrupted = true;
      logger.warn(LocalizedMessage.create(LocalizedStrings.InternalLocator_INTERRUPTED_WHILE_STOPPING__0, this), ex);
      // Continue running -- doing our best to stop everything...
    }
    finally {
      if (interrupted) {
        // restore the interrupt flag we cleared above
        Thread.currentThread().interrupt();
      }
    }
    if (this.server.isAlive()) {
      logger.fatal(LocalizedMessage.create(LocalizedStrings.InternalLocator_COULD_NOT_STOP__0__IN_60_SECONDS, this));
    }
  }
  removeLocator(this);
  handleShutdown();
  logger.info(LocalizedMessage.create(LocalizedStrings.InternalLocator_0__IS_STOPPED, this));
  if (stoppedForReconnect) {
    if (this.myDs != null) {
      // spawn a daemon thread that restarts location services after reconnect
      launchRestartThread();
    }
  }
}
/** answers whether this locator is currently stopped */
public boolean isStopped() {
  final TcpServer tcpServer = this.server;
  return tcpServer == null || !tcpServer.isAlive();
}
/**
 * Performs the one-time shutdown work for this locator: closes the product
 * use log, detaches from the distributed system, closes the locator's cache
 * and stats, and (unless stopping for reconnect) disconnects the system.
 * Safe to call more than once; only the first call does anything.
 */
private void handleShutdown() {
  // CAS guard makes this idempotent
  if (!this.shutdownHandled.compareAndSet(false, true)) {
    return; // already shutdown
  }
  productUseLog.close();
  if (myDs != null) {
    ((InternalDistributedSystem)myDs).setDependentLocator(null);
  }
  // keep the cache alive when stopping for a reconnect
  if (this.myCache != null && !this.stoppedForReconnect) {
    logger.info("Closing locator's cache");
    try {
      this.myCache.close();
    } catch (RuntimeException ex) {
      logger.info("Could not close locator's cache because: {}", ex);
    }
  }
  if(stats != null) {
    stats.close();
  }
  if(this.locatorListener != null){
    this.locatorListener.clearLocatorInfo();
  }
  this.isSharedConfigurationStarted = false;
  if (myDs != null && !this.stoppedForReconnect) {
    if (myDs.isConnected()) {
      logger.info(LocalizedMessage.create(LocalizedStrings.InternalLocator_DISCONNECTING_DISTRIBUTED_SYSTEM_FOR_0, this));
      myDs.disconnect();
    }
  }
}
/**
 * Waits for a locator to be told to stop. If the locator was stopped for a
 * reconnect, this also waits for the reconnect to complete and then keeps
 * waiting on the restarted server.
 *
 * @throws InterruptedException thrown if the thread is interrupted
 */
public void waitToStop() throws InterruptedException {
  boolean restarted;
  do {
    restarted = false;
    this.server.join();
    if (this.stoppedForReconnect) {
      // -1 means wait without a time limit for the reconnect to finish
      restarted = this.myDs.waitUntilReconnected(-1, TimeUnit.SECONDS);
    }
  } while (restarted);
}
/** launch a daemon thread that will restart location services after a forced-disconnect */
private void launchRestartThread() {
  // create a thread group having a last-chance exception-handler
  ThreadGroup group = LoggingThreadGroup.createThreadGroup("Locator restart thread group");
  this.restartThread = new Thread(group, "Location services restart thread") {
    public void run() {
      boolean restarted;
      try {
        restarted = attemptReconnect();
        logger.info("attemptReconnect returned {}", restarted);
      } catch (InterruptedException e) {
        logger.info("attempt to restart location services was interrupted", e);
      } catch (IOException e) {
        logger.info("attempt to restart location services terminated", e);
      }
      // clear the reference so isStopped()/future restarts see no pending thread
      InternalLocator.this.restartThread = null;
    }
  };
  this.restartThread.setDaemon(true);
  this.restartThread.start();
}
/**
 * reconnects the locator to a restarting DistributedSystem. If quorum checks
 * are enabled this will start peer location services before a distributed
 * system is available if the quorum check succeeds. It will then wait
 * for the system to finish reconnecting before returning. If quorum checks
 * are not being done this merely waits for the distributed system to reconnect
 * and then starts location services.
 * @return true if able to reconnect the locator to the new distributed system
 */
public boolean attemptReconnect() throws InterruptedException, IOException {
  boolean restarted = false;
  if (this.stoppedForReconnect) {
    logger.info("attempting to restart locator");
    boolean tcpServerStarted = false;
    InternalDistributedSystem ds = this.myDs;
    long waitTime = ds.getConfig().getMaxWaitTimeForReconnect()/2;
    QuorumChecker checker = null;
    // loop until the system reconnects or the reconnect is cancelled
    while (ds.getReconnectedSystem() == null &&
        !ds.isReconnectCancelled()) {
      if (checker == null) {
        checker = this.myDs.getQuorumChecker();
        if (checker != null) {
          logger.info("The distributed system returned this quorum checker: {}", checker);
        }
      }
      if (checker != null && !tcpServerStarted) {
        boolean start = checker.checkForQuorum(3*this.myDs.getConfig().getMemberTimeout());
        if (start) {
          // start up peer location. server location is started after the DS finishes
          // reconnecting
          logger.info("starting peer location");
          if(this.locatorListener != null){
            this.locatorListener.clearLocatorInfo();
          }
          this.stoppedForReconnect = false;
          this.myDs = null;
          this.myCache = null;
          restartWithoutDS();
          tcpServerStarted = true;
          setLocator(this);
        }
      }
      ds.waitUntilReconnected(waitTime, TimeUnit.MILLISECONDS);
    }
    InternalDistributedSystem newSystem = (InternalDistributedSystem)ds.getReconnectedSystem();
    // LogWriter log = new ManagerLogWriter(LogWriterImpl.FINE_LEVEL, System.out);
    if (newSystem != null) {
      // log.fine("reconnecting locator: starting location services");
      if (!tcpServerStarted) {
        if(this.locatorListener != null){
          this.locatorListener.clearLocatorInfo();
        }
        this.stoppedForReconnect = false;
      }
      restartWithDS(newSystem, GemFireCacheImpl.getInstance());
      setLocator(this);
      restarted = true;
    }
  }
  return restarted;
}
/**
 * Restarts peer-location services before a distributed system is available
 * (quorum-check path of {@link #attemptReconnect()}).
 *
 * @throws IOException if the TcpServer fails to start
 * @throws IllegalStateException if a different locator already exists in this JVM
 */
private void restartWithoutDS() throws IOException {
  synchronized (locatorLock) {
    if (locator != this && hasLocator()) {
      throw new IllegalStateException("A locator can not be created because one already exists in this JVM.");
    }
    this.myDs = null;
    this.myCache = null;
    logger.info("Locator restart: initializing TcpServer peer location services");
    this.server.restarting(null, null, null);
    if (this.productUseLog.isClosed()) {
      this.productUseLog.reopen();
    }
    if (!this.server.isAlive()) {
      logger.info("Locator restart: starting TcpServer");
      startTcpServer();
    }
  }
}
/**
 * Restarts all location services against a freshly reconnected distributed
 * system and cache, including shared configuration and the JMX manager
 * location service.
 *
 * @param newSystem the reconnected distributed system
 * @param newCache the cache belonging to the reconnected system
 * @throws IOException if the TcpServer fails to start
 * @throws IllegalStateException if a different locator already exists in this JVM
 */
private void restartWithDS(InternalDistributedSystem newSystem, GemFireCacheImpl newCache) throws IOException {
  synchronized (locatorLock) {
    if (locator != this && hasLocator()) {
      throw new IllegalStateException("A locator can not be created because one already exists in this JVM.");
    }
    this.myDs = newSystem;
    this.myCache = newCache;
    ((InternalDistributedSystem)myDs).setDependentLocator(this);
    logger.info("Locator restart: initializing TcpServer");
    if (isSharedConfigurationEnabled()) {
      this.sharedConfig = new SharedConfiguration(newCache);
    }
    this.server.restarting(newSystem, newCache, this.sharedConfig);
    if (this.productUseLog.isClosed()) {
      this.productUseLog.reopen();
    }
    this.productUseLog.monitorUse(newSystem);
    this.isSharedConfigurationStarted = true;
    if (isSharedConfigurationEnabled()) {
      // re-initialize the shared configuration asynchronously
      ExecutorService es = newCache.getDistributionManager().getThreadPool();
      es.submit(new SharedConfigurationRunnable());
    }
    if (!this.server.isAlive()) {
      logger.info("Locator restart: starting TcpServer");
      startTcpServer();
    }
    logger.info("Locator restart: initializing JMX manager");
    startJmxManagerLocationService(newCache);
    endStartLocator((InternalDistributedSystem)myDs);
    logger.info("Locator restart completed");
  }
}
// implementation of abstract method in Locator
@Override
public DistributedSystem getDistributedSystem() {
  return this.myDs;
}
/** Whether peer location services have been started in this locator. */
@Override
public boolean isPeerLocator() {
  return this.peerLocator;
}
/** Whether server location services have been started in this locator. */
@Override
public boolean isServerLocator() {
  return null != this.serverLocator;
}
/**
 * Returns null if no server locator;
 * otherwise returns the advisee that represents the server locator.
 */
public ServerLocator getServerLocatorAdvisee() {
  return serverLocator;
}
/******
 * Polls the shared-configuration service for its status, retrying (up to
 * MAX_RETRIES, sleeping SLEEPTIME ms between attempts) while the service is
 * still in a transitional state, then builds a status response.
 *
 * @author bansods
 */
class FetchSharedConfigStatus implements Callable<SharedConfigurationStatusResponse> {
  static final int SLEEPTIME = 1000;
  static final byte MAX_RETRIES = 5;
  public SharedConfigurationStatusResponse call() throws Exception {
    SharedConfigurationStatusResponse response;
    final InternalLocator locator = InternalLocator.this;
    for (int i=0; i<MAX_RETRIES; i++) {
      if (locator.sharedConfig != null) {
        SharedConfigurationStatus status = locator.sharedConfig.getStatus();
        // BUGFIX: was "status != STARTED || status != NOT_STARTED", which is
        // always true, so the loop broke immediately and never retried.
        // Stop polling only once the status has left the transitional
        // STARTED / NOT_STARTED states.
        if (status != SharedConfigurationStatus.STARTED && status != SharedConfigurationStatus.NOT_STARTED) {
          break;
        }
      }
      Thread.sleep(SLEEPTIME);
    }
    if (locator.sharedConfig != null) {
      response = locator.sharedConfig.createStatusResponse();
    } else {
      // no shared configuration service on this locator
      response = new SharedConfigurationStatusResponse();
      response.setStatus(SharedConfigurationStatus.UNDETERMINED);
    }
    return response;
  }
}
/**
 * Fetches the shared-configuration status on a waiting-pool thread, giving
 * up after 5 seconds and reporting UNDETERMINED on timeout or failure.
 *
 * @return the status response; never null
 */
public SharedConfigurationStatusResponse getSharedConfigurationStatus() {
  ExecutorService es = ((GemFireCacheImpl)myCache).getDistributionManager().getWaitingThreadPool();
  Future<SharedConfigurationStatusResponse> statusFuture = es.submit(new FetchSharedConfigStatus());
  SharedConfigurationStatusResponse response = null;
  try {
    response = statusFuture.get(5, TimeUnit.SECONDS);
  } catch (Exception e) {
    logger.info("Exception occured while fetching the status {}", CliUtil.stackTraceAsString(e));
    // fall back to an UNDETERMINED status rather than propagating
    response = new SharedConfigurationStatusResponse();
    response.setStatus(SharedConfigurationStatus.UNDETERMINED);
  }
  return response;
}
/**
 * The TcpHandler installed on this locator's TcpServer. Dispatches each
 * incoming request to the handler registered for its class; unmatched
 * requests fall through to the WAN locator listener (if any) or are logged.
 * The handler maps use a copy-on-write scheme: reads are lock-free on
 * volatile fields and mutations happen under synchronization.
 */
private static class PrimaryHandler implements TcpHandler {
  // copy-on-write: replaced wholesale in addHandler(), read without locking
  private volatile HashMap<Class, TcpHandler> handlerMapping = new HashMap<Class, TcpHandler>();
  private volatile HashSet<TcpHandler> allHandlers = new HashSet<TcpHandler>();
  private TcpServer tcpServer;
  private final LocatorMembershipListener locatorListener;
  //private final List<LocatorJoinMessage> locatorJoinMessages;
  private Object locatorJoinObject = new Object();
  InternalLocator interalLocator; // (sic) field name is misspelled but kept as-is
  boolean willHaveServerLocator; // flag to avoid warning about missing handlers during startup
  public PrimaryHandler(int port, InternalLocator locator,
      LocatorMembershipListener listener) {
    this.locatorListener = listener;
    interalLocator = locator;
    //this.locatorJoinMessages = new ArrayList<LocatorJoinMessage>();
  }
  // this method is synchronized to make sure that no new handlers are added while
  //initialization is taking place.
  public synchronized void init(TcpServer tcpServer) {
    this.tcpServer = tcpServer;
    for(Iterator itr = allHandlers.iterator(); itr.hasNext();) {
      TcpHandler handler = (TcpHandler) itr.next();
      handler.init(tcpServer);
    }
  }
  // propagate a restart notification to every registered handler
  public void restarting(DistributedSystem ds, GemFireCache cache, SharedConfiguration sharedConfig) {
    if (ds != null) {
      for (TcpHandler handler: this.allHandlers) {
        handler.restarting(ds, cache, sharedConfig);
      }
    }
  }
  // dispatch the request to its registered handler, or to the WAN listener
  public Object processRequest(Object request) throws IOException {
    TcpHandler handler = (TcpHandler)handlerMapping.get(request.getClass());
    if (handler != null) {
      Object result;
      result = handler.processRequest(request);
      return result;
    }
    else {
      Object response;
      if(locatorListener != null){
        response = locatorListener.handleRequest(request);
      }
      else {
        // suppress the warning for server-location requests that arrive
        // before the server locator is installed
        if (!(willHaveServerLocator && (request instanceof ServerLocationRequest))) {
          logger
              .warn(LocalizedMessage
                  .create(
                      LocalizedStrings.InternalLocator_EXPECTED_ONE_OF_THESE_0_BUT_RECEIVED_1,
                      new Object[] { handlerMapping.keySet(), request }));
        }
        return null;
      }
      return response;
    }
  }
  private JmxManagerLocatorResponse findJmxManager(JmxManagerLocatorRequest request) {
    JmxManagerLocatorResponse result = null;
    // NYI
    return result;
  }
  // shut down every handler, then the owning locator itself
  public void shutDown() {
    try {
      for(Iterator itr = allHandlers.iterator(); itr.hasNext(); ) {
        TcpHandler handler = (TcpHandler) itr.next();
        handler.shutDown();
      }
    } finally {
      this.interalLocator.handleShutdown();
    }
  }
  public synchronized boolean isHandled(Class clazz) {
    return this.handlerMapping.containsKey(clazz);
  }
  // copy-on-write update of the handler maps; late-added handlers are
  // initialized immediately if the server is already running
  public synchronized void addHandler(Class clazz, TcpHandler handler) {
    HashMap tmpHandlerMapping = new HashMap(handlerMapping);
    HashSet tmpAllHandlers = new HashSet(allHandlers);
    tmpHandlerMapping.put(clazz, handler);
    if(tmpAllHandlers.add(handler) && tcpServer != null ) {
      handler.init(tcpServer);
    }
    handlerMapping = tmpHandlerMapping;
    allHandlers = tmpAllHandlers;
  }
  public void endRequest(Object request,long startTime) {
    TcpHandler handler = (TcpHandler) handlerMapping.get(request.getClass());
    if(handler != null) {
      handler.endRequest(request, startTime);
    }
  }
  public void endResponse(Object request,long startTime) {
    TcpHandler handler = (TcpHandler) handlerMapping.get(request.getClass());
    if(handler != null) {
      handler.endResponse(request, startTime);
    }
  }
}
/**
 * ConnectListener callback: hooks the locator statistics up to the newly
 * connected distributed system.
 *
 * @param sys the distributed system that just connected
 */
public void onConnect(InternalDistributedSystem sys) {
  try {
    stats.hookupStats(sys, SocketCreator.getLocalHost().getCanonicalHostName() + "-" + server.getBindAddress().toString());
  } catch(UnknownHostException uhe) {
    // was uhe.printStackTrace(): report through the logger instead of stderr
    logger.warn(uhe.getMessage(), uhe);
  }
}
/**
 * Returns collection of locator strings representing every locator instance
 * hosted by this member, or null if there are none (or the host names
 * could not be resolved).
 *
 * @see #getLocators()
 */
public static Collection<String> getLocatorStrings() {
  Collection<String> locatorStrings;
  try {
    Collection<DistributionLocatorId> locatorIds =
        DistributionLocatorId.asDistributionLocatorIds(getLocators());
    locatorStrings = DistributionLocatorId.asStrings(locatorIds);
  } catch (UnknownHostException e) {
    // unresolvable hosts are treated the same as "no locators"
    locatorStrings = null;
  }
  return (locatorStrings == null || locatorStrings.isEmpty()) ? null : locatorStrings;
}
/**
 * A helper object so that the TcpServer can record
 * its stats to the proper place. Stats are only recorded
 * if a distributed system is started.
 */
protected class DelayedPoolStatHelper implements PoolStatHelper {
  public void startJob() {
    // one more request now in progress
    stats.incRequestInProgress(1);
  }
  public void endJob() {
    // request completed
    stats.incRequestInProgress(-1);
  }
}
/**
 * Starts the cluster configuration service asynchronously on the cache's
 * thread pool, if it is enabled, not already started, and this is a
 * dedicated locator.
 *
 * @param gfc the cache whose distribution manager supplies the thread pool
 */
public void startSharedConfigurationService(GemFireCacheImpl gfc){
  if (this.config.getEnableClusterConfiguration() && !this.isSharedConfigurationStarted) {
    if (!isDedicatedLocator()) {
      logger.info("Cluster configuration service is only supported in dedicated locators");
      return;
    }
    // mark started before scheduling to prevent double submission
    this.isSharedConfigurationStarted = true;
    installSharedConfigStatus();
    ExecutorService es = gfc.getDistributionManager().getThreadPool();
    es.submit(new SharedConfigurationRunnable());
  } else {
    logger.info("Cluster configuration service is disabled");
  }
}
/**
 * Registers a {@link JmxManagerLocator} with the TcpServer when the cache
 * supports JMX manager discovery and no handler is installed yet.
 */
public void startJmxManagerLocationService(GemFireCacheImpl gfc) {
  if (gfc.getJmxManagerAdvisor() == null) {
    return;
  }
  if (this.handler.isHandled(JmxManagerLocatorRequest.class)) {
    return;
  }
  this.handler.addHandler(JmxManagerLocatorRequest.class, new JmxManagerLocator(gfc));
}
/***
 * Creates and installs the handler {@link ConfigurationRequestHandler},
 * unless one is already registered.
 */
public void installSharedConfigDistribution() {
  if (this.handler.isHandled(ConfigurationRequest.class)) {
    return;
  }
  this.handler.addHandler(ConfigurationRequest.class, new ConfigurationRequestHandler(this.sharedConfig));
  logger.info("ConfigRequestHandler installed");
}
/**
 * Creates and installs the {@link SharedConfigurationStatusRequestHandler},
 * unless one is already registered.
 */
public void installSharedConfigStatus() {
  if (this.handler.isHandled(SharedConfigurationStatusRequest.class)) {
    return;
  }
  this.handler.addHandler(SharedConfigurationStatusRequest.class, new SharedConfigurationStatusRequestHandler());
  logger.info("SharedConfigStatusRequestHandler installed");
}
}
| |
package com.jetbrains.edu.coursecreator.actions;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.intellij.ide.projectView.ProjectView;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.LangDataKeys;
import com.intellij.openapi.actionSystem.Presentation;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.util.containers.HashMap;
import com.intellij.util.io.ZipUtil;
import com.jetbrains.edu.coursecreator.CCLanguageManager;
import com.jetbrains.edu.coursecreator.CCUtils;
import com.jetbrains.edu.coursecreator.ui.CreateCourseArchiveDialog;
import com.jetbrains.edu.learning.StudyTaskManager;
import com.jetbrains.edu.learning.core.EduNames;
import com.jetbrains.edu.learning.core.EduUtils;
import com.jetbrains.edu.learning.courseFormat.Course;
import com.jetbrains.edu.learning.courseFormat.Lesson;
import com.jetbrains.edu.learning.courseFormat.Task;
import com.jetbrains.edu.learning.courseFormat.TaskFile;
import org.jetbrains.annotations.NotNull;
import java.io.*;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipOutputStream;
public class CCCreateCourseArchive extends DumbAwareAction {
private static final Logger LOG = Logger.getInstance(CCCreateCourseArchive.class.getName());
private String myZipName;
private String myLocationDir;
public void setZipName(String zipName) {
myZipName = zipName;
}
public void setLocationDir(String locationDir) {
myLocationDir = locationDir;
}
public CCCreateCourseArchive() {
super("Generate Course Archive", "Generate Course Archive", null);
}
@Override
public void update(@NotNull AnActionEvent e) {
Presentation presentation = e.getPresentation();
Project project = e.getProject();
presentation.setEnabledAndVisible(project != null && CCUtils.isCourseCreator(project));
}
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
final Project project = e.getData(CommonDataKeys.PROJECT);
final Module module = e.getData(LangDataKeys.MODULE);
if (project == null || module == null) {
return;
}
createCourseArchive(project, module);
}
private void createCourseArchive(final Project project, Module module) {
final Course course = StudyTaskManager.getInstance(project).getCourse();
if (course == null) return;
CreateCourseArchiveDialog dlg = new CreateCourseArchiveDialog(project, this);
dlg.show();
if (dlg.getExitCode() != DialogWrapper.OK_EXIT_CODE) {
return;
}
final VirtualFile baseDir = project.getBaseDir();
VirtualFile archiveFolder = CCUtils.generateFolder(project, module, this, myZipName);
if (archiveFolder == null) {
return;
}
CCLanguageManager manager = CCUtils.getStudyLanguageManager(course);
if (manager == null) {
return;
}
FileFilter filter = new FileFilter() {
@Override
public boolean accept(File pathname) {
return !manager.doNotPackFile(pathname);
}
};
for (VirtualFile child : baseDir.getChildren()) {
String name = child.getName();
File fromFile = new File(child.getPath());
if (CCUtils.GENERATED_FILES_FOLDER.equals(name) || ".idea".equals(name)
|| name.contains("iml") || manager.doNotPackFile(fromFile)) {
continue;
}
copyChild(archiveFolder, filter, child, fromFile);
}
final List<Lesson> lessons = course.getLessons();
ApplicationManager.getApplication().runWriteAction(new Runnable() {
@Override
public void run() {
final Map<TaskFile, TaskFile> savedTaskFiles = new HashMap<TaskFile, TaskFile>();
replaceAnswerFilesWithTaskFiles(savedTaskFiles);
generateJson(project, archiveFolder);
resetTaskFiles(savedTaskFiles);
VirtualFileManager.getInstance().refreshWithoutFileWatcher(false);
packCourse(archiveFolder);
synchronize(project);
}
private void replaceAnswerFilesWithTaskFiles(Map<TaskFile, TaskFile> savedTaskFiles) {
for (Lesson lesson : lessons) {
final VirtualFile lessonDir = baseDir.findChild(EduNames.LESSON + String.valueOf(lesson.getIndex()));
if (lessonDir == null) continue;
for (Task task : lesson.getTaskList()) {
final VirtualFile taskDir = lessonDir.findChild(EduNames.TASK + String.valueOf(task.getIndex()));
if (taskDir == null) continue;
for (final Map.Entry<String, TaskFile> entry : task.getTaskFiles().entrySet()) {
TaskFile taskFileCopy = new TaskFile();
TaskFile taskFile = entry.getValue();
TaskFile.copy(taskFile, taskFileCopy);
savedTaskFiles.put(taskFile, taskFileCopy);
VirtualFile userFileDir = VfsUtil.findRelativeFile(archiveFolder, lessonDir.getName(), taskDir.getName());
if (userFileDir == null) {
continue;
}
String taskFileName = entry.getKey();
EduUtils.createStudentFileFromAnswer(project, userFileDir, taskDir, taskFileName, taskFile);
}
}
}
}
});
}
private static void copyChild(VirtualFile archiveFolder, FileFilter filter, VirtualFile child, File fromFile) {
File toFile = new File(archiveFolder.getPath(), child.getName());
try {
if (child.isDirectory()) {
FileUtil.copyDir(fromFile, toFile, filter);
}
else {
if (filter.accept(fromFile)) {
FileUtil.copy(fromFile, toFile);
}
}
}
catch (IOException e) {
LOG.info("Failed to copy" + fromFile.getPath(), e);
}
}
private static void resetTaskFiles(Map<TaskFile, TaskFile> savedTaskFiles) {
for (Map.Entry<TaskFile, TaskFile> entry : savedTaskFiles.entrySet()) {
entry.getKey().setAnswerPlaceholders(entry.getValue().getAnswerPlaceholders());
}
}
private static void synchronize(@NotNull final Project project) {
VirtualFileManager.getInstance().refreshWithoutFileWatcher(true);
ProjectView.getInstance(project).refresh();
}
private void packCourse(@NotNull final VirtualFile baseDir) {
try {
final File zipFile = new File(myLocationDir, myZipName + ".zip");
ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(zipFile)));
VirtualFile[] courseFiles = baseDir.getChildren();
for (VirtualFile file : courseFiles) {
ZipUtil.addFileOrDirRecursively(zos, null, new File(file.getPath()), file.getName(), null, null);
}
zos.close();
Messages.showInfoMessage("Course archive was saved to " + zipFile.getPath(), "Course Archive Was Created Successfully");
}
catch (IOException e1) {
LOG.error(e1);
}
}
@SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
private static void generateJson(@NotNull final Project project, VirtualFile parentDir) {
final Course course = StudyTaskManager.getInstance(project).getCourse();
final Gson gson = new GsonBuilder().setPrettyPrinting().excludeFieldsWithoutExposeAnnotation().create();
final String json = gson.toJson(course);
final File courseJson = new File(parentDir.getPath(), EduNames.COURSE_META_FILE);
OutputStreamWriter outputStreamWriter = null;
try {
outputStreamWriter = new OutputStreamWriter(new FileOutputStream(courseJson), "UTF-8");
outputStreamWriter.write(json);
}
catch (Exception e) {
Messages.showErrorDialog(e.getMessage(), "Failed to Generate Json");
LOG.info(e);
}
finally {
try {
if (outputStreamWriter != null) {
outputStreamWriter.close();
}
}
catch (IOException e1) {
//close silently
}
}
}
}
| |
package trikita.talalarmo.ui;
import android.content.Context;
import android.content.Intent;
import android.content.res.Configuration;
import android.net.Uri;
import android.text.format.DateFormat;
import android.view.View;
import android.widget.CompoundButton;
import android.widget.LinearLayout;
import android.widget.PopupMenu;
import static trikita.anvil.DSL.*;
import trikita.anvil.Anvil;
import trikita.jedux.Action;
import trikita.talalarmo.Actions;
import trikita.talalarmo.App;
import trikita.talalarmo.MainActivity;
import trikita.talalarmo.R;
/**
 * Declarative UI for the main alarm screen, built with the Anvil virtual-layout DSL:
 * each call below declares a view/attribute, and Anvil re-runs these methods on every
 * render, diffing against the current view tree. The order of DSL calls is significant,
 * so this class should be read top-to-bottom as the view hierarchy.
 */
public class AlarmLayout {
    /** Root layout: themed background, vertical column of header, clock area and bottom bar. */
    public static void view() {
        backgroundColor(Theme.get(App.getState().settings().theme()).backgroundColor);
        linearLayout(() -> {
            orientation(LinearLayout.VERTICAL);
            header();
            // Middle area stretches to fill the space between header and bottom bar.
            frameLayout(() -> {
                size(FILL, 0);
                weight(1f);
                if (App.getState().alarm().on()) {
                    alarmOnLayout();
                } else {
                    alarmOffLayout();
                }
            });
            bottomBar();
        });
    }

    /** Top strip: alarm glyph + app title. */
    private static void header() {
        linearLayout(() -> {
            size(FILL, WRAP);
            gravity(CENTER_VERTICAL);
            Theme.materialIcon(() -> {
                textColor(Theme.get(App.getState().settings().theme()).secondaryTextColor);
                textSize(dip(32));
                padding(dip(15));
                text("\ue855"); // "alarm" icon
            });
            textView(() -> {
                size(WRAP, WRAP);
                weight(1f);
                typeface("fonts/Roboto-Light.ttf");
                textSize(dip(20));
                textColor(Theme.get(App.getState().settings().theme()).primaryTextColor);
                text(R.string.app_name);
            });
        });
    }

    /** Shown while no alarm is set: a full-size tappable label that turns the alarm on. */
    private static void alarmOffLayout() {
        textView(() -> {
            size(FILL, FILL);
            padding(dip(20));
            gravity(LEFT | CENTER_VERTICAL);
            typeface("fonts/Roboto-Light.ttf");
            allCaps(true);
            textSize(dip(32));
            textColor(Theme.get(App.getState().settings().theme()).primaryTextColor);
            text(R.string.tv_start_alarm_text);
            onClick(v -> App.dispatch(new Action<>(Actions.Alarm.ON)));
        });
    }

    /**
     * Shown while the alarm is armed: two circular dials (hour + minute) and an AM/PM
     * switch, positioned by hand with fractional offsets of the circle sizes. The 0.62
     * factors scale each element relative to its parent circle; the x/y math places
     * the circles in an overlapping composition whose exact constants are visual tuning.
     */
    private static void alarmOnLayout() {
        frameLayout(() -> {
            size(FILL, FILL);
            // On tablets leave some margin around the clock view to avoid gigantic circles
            if ((Anvil.currentView().getResources().getConfiguration().screenLayout &
                    Configuration.SCREENLAYOUT_SIZE_MASK) >= Configuration.SCREENLAYOUT_SIZE_LARGE) {
                margin(dip(48));
            } else {
                margin(dip(8));
            }
            int w = Anvil.currentView().getWidth();
            int h = Anvil.currentView().getHeight();
            // Before the first layout pass width/height are 0; schedule another render
            // so the size-dependent math below runs with real dimensions.
            if (h == 0 || w == 0) {
                Anvil.currentView().post(Anvil::render);
            }
            int hourCircleSize;
            int minuteCircleSize;
            int amPmWidth;
            if (isPortrait()) {
                hourCircleSize = (int) (w * 1.1f * 0.62f);
                minuteCircleSize = (int) (hourCircleSize * 0.62f);
                amPmWidth = (int) (hourCircleSize * 0.62f * 0.62f);
            } else {
                hourCircleSize = (int) (h);
                minuteCircleSize = (int) (hourCircleSize * 0.62f);
                amPmWidth = (int) (hourCircleSize * 0.62f * 0.62f);
            }
            // Hour dial + centered hour label.
            frameLayout(() -> {
                size(hourCircleSize, hourCircleSize);
                if (isPortrait()) {
                    x(w / 2 - hourCircleSize * 0.21f - hourCircleSize / 2);
                    y(h / 2 + hourCircleSize * 0.19f - hourCircleSize / 2);
                } else {
                    x(w / 2 - hourCircleSize * 0.38f - hourCircleSize / 2);
                    y(h / 2 + hourCircleSize * 0.00f - hourCircleSize / 2);
                }
                gravity(CENTER);
                v(ClockView.class, () -> {
                    size(FILL, FILL);
                    progress(App.getState().alarm().hours());
                    max(12);
                    onSeekBarChange((v, progress, fromUser) -> {
                        if (fromUser) {
                            App.dispatch(new Action<>(Actions.Alarm.SET_HOUR, progress));
                        }
                    });
                    Anvil.currentView().invalidate();
                });
                textView(() -> {
                    size(WRAP, WRAP);
                    int hours = App.getState().alarm().hours();
                    if (DateFormat.is24HourFormat(Anvil.currentView().getContext())) {
                        // NOTE(review): assumes hours is in [0, 11] with am() as the half-day
                        // flag; hours == 12 would render as "12"/"24" here — confirm the
                        // range ClockView/SET_HOUR actually produces.
                        text(String.format("%02d", hours + (App.getState().alarm().am() ? 0 : 12)));
                    } else {
                        if (hours == 0) {
                            text("12");
                        } else {
                            text(String.format("%02d", hours));
                        }
                    }
                    layoutGravity(CENTER);
                    typeface("fonts/Roboto-Light.ttf");
                    textSize(hourCircleSize * 0.3f);
                    textColor(Theme.get(App.getState().settings().theme()).primaryColor);
                });
            });
            // Minute dial + centered minute label.
            frameLayout(() -> {
                size(minuteCircleSize, minuteCircleSize);
                if (isPortrait()) {
                    x(w / 2 - hourCircleSize * 0.25f + minuteCircleSize / 2);
                    y(h / 2 + hourCircleSize * 0.05f - hourCircleSize / 2 - minuteCircleSize / 2);
                } else {
                    x(w / 2 - hourCircleSize * 0.25f + minuteCircleSize / 2);
                    y(h / 2 + hourCircleSize * 0.28f - hourCircleSize / 2 - minuteCircleSize / 2);
                }
                gravity(CENTER);
                v(ClockView.class, () -> {
                    size(FILL, FILL);
                    progress(App.getState().alarm().minutes());
                    max(60);
                    onSeekBarChange((v, progress, fromUser) -> {
                        if (fromUser) {
                            // Optional "snap" setting rounds the dial to 5-minute steps.
                            if (App.getState().settings().snap()) {
                                progress = (int) (Math.round(progress / 5.0) * 5) % 60;
                            }
                            App.dispatch(new Action<>(Actions.Alarm.SET_MINUTE, progress));
                        }
                    });
                    Anvil.currentView().invalidate();
                });
                textView(() -> {
                    size(WRAP, WRAP);
                    text(String.format("%02d", App.getState().alarm().minutes()));
                    layoutGravity(CENTER);
                    typeface("fonts/Roboto-Light.ttf");
                    textSize(minuteCircleSize * 0.3f);
                    textColor(Theme.get(App.getState().settings().theme()).primaryColor);
                });
            });
            // AM/PM toggle, 3:2 aspect ratio, placed between the two dials.
            v(AmPmSwitch.class, () -> {
                size(amPmWidth, (int) (amPmWidth / 1.5f));
                if (isPortrait()) {
                    x(w / 2 - hourCircleSize * 0.21f - amPmWidth * 3 / 4);
                    y(h / 2 + hourCircleSize * 0.05f - hourCircleSize / 2 - amPmWidth / 1.5f / 2);
                } else {
                    x(w / 2 - hourCircleSize * 0.25f + minuteCircleSize - amPmWidth / 2);
                    y(h / 2 + hourCircleSize * 0.25f - amPmWidth / 1.5f / 2);
                }
                checked(App.getState().alarm().am());
                onCheckedChange((CompoundButton buttonView, boolean isChecked) -> {
                    App.dispatch(new Action<>(Actions.Alarm.SET_AM_PM, isChecked));
                });
            });
        });
    }

    /** Bottom strip: alarm-off button (only while armed), countdown text, overflow menu. */
    private static void bottomBar() {
        linearLayout(() -> {
            size(FILL, dip(62));
            backgroundColor(Theme.get(App.getState().settings().theme()).backgroundTranslucentColor);
            Theme.materialIcon(() -> {
                text("\ue857"); // ALARM OFF
                textSize(dip(32));
                textColor(Theme.get(App.getState().settings().theme()).secondaryTextColor);
                padding(dip(15));
                visibility(App.getState().alarm().on());
                onClick(v -> App.dispatch(new Action<>(Actions.Alarm.OFF)));
            });
            textView(() -> {
                size(0, FILL);
                weight(1f);
                margin(dip(10), 0);
                typeface("fonts/Roboto-Light.ttf");
                textSize(dip(16));
                textColor(Theme.get(App.getState().settings().theme()).primaryTextColor);
                gravity(CENTER | CENTER_VERTICAL);
                text(formatAlarmTime(Anvil.currentView().getContext()));
            });
            Theme.materialIcon(() -> {
                text("\ue5d4"); // "more vert"
                textSize(dip(32));
                textColor(Theme.get(App.getState().settings().theme()).secondaryTextColor);
                padding(dip(15));
                onClick(AlarmLayout::showSettingsMenu);
            });
        });
    }

    /**
     * Formats the time remaining until the next alarm (e.g. "in 2 hours 5 minutes")
     * using the pluralized resources, or "" when the alarm is off. The format chosen
     * from R.array.alarm_set is indexed by which of hours/minutes are non-zero.
     */
    private static String formatAlarmTime(Context c) {
        if (!App.getState().alarm().on()) {
            return "";
        }
        // The extra -1 ms nudges exact boundaries downward before the integer division
        // below — presumably to avoid over-reporting at a whole-minute mark; confirm.
        long t = App.getState().alarm().nextAlarm().getTimeInMillis() - System.currentTimeMillis() - 1;
        t = t / 60 / 1000; // total whole minutes remaining
        int m = (int) (t % 60);
        int h = (int) (t / 60);
        String minSeq = (m == 0) ? "" :
                (m == 1) ? c.getString(R.string.minute) :
                        c.getString(R.string.minutes, Long.toString(m));
        String hourSeq = (h == 0) ? "" :
                (h == 1) ? c.getString(R.string.hour) :
                        c.getString(R.string.hours, Long.toString(h));
        // Bitmask index: bit 0 = has hours, bit 1 = has minutes -> 4 format variants.
        int index = ((h > 0) ? 1 : 0) | ((m > 0) ? 2 : 0);
        String[] formats = c.getResources().getStringArray(R.array.alarm_set);
        return String.format(formats[index], hourSeq, minSeq);
    }

    /** Overflow menu: settings screen or a pre-addressed feedback e-mail intent. */
    private static void showSettingsMenu(View v) {
        PopupMenu menu = new PopupMenu(v.getContext(), v);
        menu.getMenuInflater().inflate(R.menu.overflow_popup, menu.getMenu());
        menu.setOnMenuItemClickListener(item -> {
            if (item.getItemId() == R.id.menu_settings) {
                ((MainActivity) v.getContext()).openSettings();
            } else if (item.getItemId() == R.id.menu_feedback) {
                Context c = v.getContext();
                Intent intent = new Intent(Intent.ACTION_SENDTO,
                        Uri.fromParts("mailto", "adm.trikita@gmail.com", null));
                intent.putExtra(Intent.EXTRA_SUBJECT, "Feedback about Talalarmo");
                v.getContext().startActivity(Intent.createChooser(intent, c.getString(R.string.leave_feedback)));
            }
            return true;
        });
        menu.show();
    }
}
| |
package com.frodo.github;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.NavigationView;
import android.support.design.widget.Snackbar;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.TextView;
import com.facebook.drawee.view.SimpleDraweeView;
import com.frodo.app.android.core.toolbox.PermissionChecker;
import com.frodo.app.android.core.toolbox.ScreenUtils;
import com.frodo.app.android.ui.FragmentScheduler;
import com.frodo.app.android.ui.activity.FragmentContainerActivity;
import com.frodo.app.framework.broadcast.LocalBroadcastManager;
import com.frodo.app.framework.log.Logger;
import com.frodo.github.bean.dto.response.User;
import com.frodo.github.business.account.AccountModel;
import com.frodo.github.business.account.LoginFragment;
import com.frodo.github.business.activity.EventsFragment;
import com.frodo.github.business.activity.NotificationsFragment;
import com.frodo.github.business.explore.ExploreFragment;
import com.frodo.github.business.repository.RepositoryIssuesFragment;
import com.frodo.github.business.repository.RepositoryListFragment;
import com.frodo.github.business.user.ProfileFragment;
import com.frodo.github.business.user.UserModel;
import com.frodo.github.common.ApiFragment;
import com.frodo.github.view.CircleProgressDialog;
import com.frodo.github.view.ViewProvider;
import com.google.android.gms.ads.AdRequest;
import com.google.android.gms.ads.InterstitialAd;
import com.mikepenz.iconics.IconicsDrawable;
import com.mikepenz.octicons_typeface_library.Octicons;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.Disposable;
import io.reactivex.functions.Action;
import io.reactivex.functions.Consumer;
import io.reactivex.schedulers.Schedulers;
/**
* Created by frodo on 2016/4/28. Main Page
*/
/**
 * Main page of the app: hosts the navigation drawer, toolbar, FAB and the fragment
 * container. Wires drawer navigation to fragments, keeps the drawer header in sync
 * with the signed-in user, and loads an interstitial ad on startup.
 */
public class MainActivity extends FragmentContainerActivity
{
    private DrawerLayout drawerLayout;
    private NavigationView navigationView;
    private View navigationHeadView;
    private Toolbar toolbar;
    private ActionBarDrawerToggle actionBarDrawerToggle;
    private FloatingActionButton fab;
    private AccountModel accountModel;
    private UserModel userModel;

    @Override protected void onCreate(Bundle savedInstanceState)
    {
        // Request storage permissions up front; the result arrives in onRequestPermissionsResult.
        PermissionChecker.verifyStoragePermissions(this);
        super.onCreate(savedInstanceState);
        loadAds();
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults)
    {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        switch (requestCode)
        {
            case PermissionChecker.REQUEST_EXTERNAL_STORAGE:
                // grantResults is empty when the permission request is interrupted
                // (e.g. the dialog is dismissed), so guard the index access.
                // Index 1 is presumably WRITE_EXTERNAL_STORAGE of the two storage
                // permissions requested by PermissionChecker — confirm its order.
                boolean writeAccepted = grantResults.length > 1
                        && grantResults[1] == PackageManager.PERMISSION_GRANTED;
                getMainController().getLogCollector()
                        .enableCollect(writeAccepted && getMainController().getConfig().isDebug());
                break;
            default:
                break;
        }
    }

    @Override public int getLayoutId()
    {
        return R.layout.activity_main;
    }

    @Override public void initView()
    {
        drawerLayout = (DrawerLayout) findViewById(R.id.id_drawer_layout);
        navigationView = (NavigationView) findViewById(R.id.id_nv_menu);
        navigationHeadView = navigationView.getHeaderView(0);
        // Push the drawer content below the translucent status bar.
        navigationView.setPadding(0, ScreenUtils.getStatusHeight(this), 0, 0);
        toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        actionBarDrawerToggle =
                new ActionBarDrawerToggle(this, drawerLayout, toolbar, R.string.drawer_open, R.string.drawer_close);
        actionBarDrawerToggle.syncState();
        fab = (FloatingActionButton) findViewById(R.id.fab);
        fab.setImageDrawable(
                new IconicsDrawable(this).icon(Octicons.Icon.oct_gist_secret).colorRes(android.R.color.black));
    }

    @Override public void registerListener()
    {
        // Toolbar "home": open the drawer at the root, otherwise pop the back stack.
        toolbar.setNavigationOnClickListener(new View.OnClickListener()
        {
            @Override public void onClick(View v)
            {
                if (getSupportFragmentManager().getBackStackEntryCount() == 0)
                {
                    drawerLayout.openDrawer(GravityCompat.START);
                }
                else
                {
                    toolbar.setTitle("GitHub");
                    MainActivity.super.onBackPressed();
                }
            }
        });
        // Drawer header: open the profile when signed in, the login flow otherwise.
        navigationHeadView.setOnClickListener(new View.OnClickListener()
        {
            @Override public void onClick(View v)
            {
                drawerLayout.closeDrawer(GravityCompat.START);
                if (accountModel.isSignIn())
                {
                    String login = accountModel.getSignInUser();
                    Bundle arguments = new Bundle();
                    arguments.putString("username", login);
                    FragmentScheduler.nextFragmentWithUniqueTag(MainActivity.this, ProfileFragment.class, arguments);
                }
                else
                {
                    // FragmentScheduler.nextFragment(MainActivity.this, LoginFragment.class);
                    FragmentScheduler.doDirect(MainActivity.this, FragmentScheduler.schema() + "/login");
                }
            }
        });
        // Drawer menu: map each item to its fragment; unknown items show a placeholder.
        navigationView.setNavigationItemSelectedListener(new NavigationView.OnNavigationItemSelectedListener()
        {
            private MenuItem mPreMenuItem;

            @Override public boolean onNavigationItemSelected(MenuItem menuItem)
            {
                if (mPreMenuItem != null)
                {
                    mPreMenuItem.setChecked(false);
                }
                menuItem.setChecked(true);
                drawerLayout.closeDrawers();
                mPreMenuItem = menuItem;
                toolbar.setTitle(menuItem.getTitle());
                Bundle bundle;
                switch (menuItem.getItemId())
                {
                    case R.id.action_notifications:
                        FragmentScheduler.nextFragment(MainActivity.this, NotificationsFragment.class);
                        break;
                    case R.id.action_sign_in:
                        // FragmentScheduler.nextFragment(MainActivity.this, LoginFragment.class);
                        FragmentScheduler.doDirect(MainActivity.this, FragmentScheduler.schema() + "/login");
                        break;
                    case R.id.action_sign_out:
                        onLogout();
                        break;
                    case R.id.action_explore:
                        FragmentScheduler.nextFragment(MainActivity.this, ExploreFragment.class);
                        break;
                    case R.id.action_news:
                        bundle = new Bundle();
                        bundle.putString("events_args", String.format("events_user_%s", accountModel.getSignInUser()));
                        FragmentScheduler.nextFragmentWithUniqueTag(MainActivity.this, EventsFragment.class, bundle);
                        break;
                    case R.id.action_issues:
                        bundle = new Bundle();
                        bundle.putString("issues_args", "issues_account");
                        FragmentScheduler
                                .nextFragmentWithUniqueTag(MainActivity.this, RepositoryIssuesFragment.class, bundle);
                        break;
                    case R.id.action_events:
                        bundle = new Bundle();
                        bundle.putString("events_args",
                                String.format("events_account_%s", accountModel.getSignInUser()));
                        FragmentScheduler.nextFragmentWithUniqueTag(MainActivity.this, EventsFragment.class, bundle);
                        break;
                    case R.id.action_repositories:
                        bundle = new Bundle();
                        bundle.putString("repos_args", String.format("repos_user_%s", accountModel.getSignInUser()));
                        FragmentScheduler
                                .nextFragmentWithUniqueTag(MainActivity.this, RepositoryListFragment.class, bundle);
                        break;
                    case R.id.action_author:
                        bundle = new Bundle();
                        bundle.putString("username", "frodoking");
                        FragmentScheduler.nextFragmentWithUniqueTag(MainActivity.this, ProfileFragment.class, bundle);
                        break;
                    case R.id.action_iconics_test:
                        bundle = new Bundle();
                        bundle.putString("api", "IconicsTest");
                        FragmentScheduler.nextFragment(MainActivity.this, ApiFragment.class, bundle);
                        break;
                    case R.id.action_icon_api:
                        bundle = new Bundle();
                        bundle.putString("api", "StaticOcticons");
                        FragmentScheduler.nextFragment(MainActivity.this, ApiFragment.class, bundle);
                        break;
                    case R.id.action_jsoup_api:
                        bundle = new Bundle();
                        bundle.putString("api", "JsoupApi");
                        FragmentScheduler.nextFragment(MainActivity.this, ApiFragment.class, bundle);
                        break;
                    default:
                        toolbar.setTitle("GitHub");
                        ViewProvider.wrapNotImplementFeature(MainActivity.this, null);
                        break;
                }
                return true;
            }
        });
        drawerLayout.addDrawerListener(actionBarDrawerToggle);
        fab.setOnClickListener(new View.OnClickListener()
        {
            @Override public void onClick(View view)
            {
                Snackbar.make(view, R.string.contribute_description, Snackbar.LENGTH_LONG).setAction("Action", null)
                        .show();
            }
        });
        // "drawer" broadcast: Boolean toggles drawer lock/indicator, User means a
        // successful login (pop the login fragment and refresh the header).
        getMainController().getLocalBroadcastManager().register("drawer", new LocalBroadcastManager.MessageInterceptor()
        {
            @Override public Boolean intercept(Object o)
            {
                if (o instanceof Boolean)
                {
                    boolean isEnableShowDrawer = (boolean) o;
                    if (isEnableShowDrawer)
                    {
                        actionBarDrawerToggle.onDrawerSlide(null, 0);
                        drawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_UNDEFINED);
                    }
                    else
                    {
                        actionBarDrawerToggle.onDrawerSlide(null, 1);
                        drawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_LOCKED_CLOSED);
                    }
                }
                else if (o instanceof User)
                {
                    MainActivity.super.onBackPressed();
                    updateUserView((User) o);
                }
                return true;
            }
        });
    }

    @Override public void initBusiness()
    {
        FragmentScheduler.replaceFragment(this, MainFragment.class);
        accountModel = getMainController().getModelFactory()
                .getOrCreateIfAbsent(AccountModel.TAG, AccountModel.class, getMainController());
        userModel = getMainController().getModelFactory()
                .getOrCreateIfAbsent(UserModel.TAG, UserModel.class, getMainController());
        if (accountModel.isSignIn())
        {
            // Load the signed-in user's profile on io, show a progress dialog while
            // subscribing, and render the result on the main thread. (Only the first
            // subscribeOn in a chain takes effect, so it is specified exactly once.)
            userModel.loadUserWithReactor(accountModel.getSignInUser()).subscribeOn(Schedulers.io())
                    .doOnSubscribe(new Consumer<Disposable>()
                    {
                        @Override public void accept(Disposable disposable) throws Exception
                        {
                            CircleProgressDialog.showLoadingDialog(MainActivity.this);
                        }
                    }).observeOn(AndroidSchedulers.mainThread())
                    .subscribe(new Consumer<User>()
                    {
                        @Override public void accept(User user)
                        {
                            updateUserView(user);
                        }
                    }, new Consumer<Throwable>()
                    {
                        @Override public void accept(Throwable throwable)
                        {
                            throwable.printStackTrace();
                        }
                    }, new Action()
                    {
                        @Override public void run()
                        {
                            CircleProgressDialog.hideLoadingDialog();
                        }
                    });
        }
        else
        {
            updateUserView(null);
        }
    }

    /** Refreshes the drawer header and menu for the given user (null = signed out). */
    private void updateUserView(User user)
    {
        SimpleDraweeView headSDV = (SimpleDraweeView) navigationHeadView.findViewById(R.id.head_sdv);
        TextView loginTV = (TextView) navigationHeadView.findViewById(R.id.id_username);
        TextView repoTV = (TextView) navigationHeadView.findViewById(R.id.id_repo);
        if (user != null)
        {
            headSDV.setImageURI(Uri.parse(user.avatar_url));
            loginTV.setText(user.login);
            repoTV.setText(user.html_url);
            navigationView.getMenu().clear();
            navigationView.inflateMenu(R.menu.menu_drawer_already_signed);
        }
        else
        {
            headSDV.setImageURI("");
            loginTV.setText("GitHub");
            repoTV.setText("https://github.com");
            navigationView.getMenu().clear();
            navigationView.inflateMenu(R.menu.menu_drawer_not_signed);
        }
        if (getMainController().getConfig().isDebug())
        {
            navigationView.inflateMenu(R.menu.menu_debug);
        }
        updateMenu(navigationView.getMenu());
    }

    /** Applies Octicons glyphs to every drawer menu item. */
    private void updateMenu(Menu menu)
    {
        ViewProvider.updateMenuItem(this, menu, R.id.action_notifications, Octicons.Icon.oct_bell);
        ViewProvider.updateMenuItem(this, menu, R.id.action_explore, Octicons.Icon.oct_telescope);
        ViewProvider.updateMenuItem(this, menu, R.id.action_sign_in, Octicons.Icon.oct_sign_in);
        ViewProvider.updateMenuItem(this, menu, R.id.action_sign_out, Octicons.Icon.oct_sign_out);
        ViewProvider.updateMenuItem(this, menu, R.id.action_news, Octicons.Icon.oct_radio_tower);
        ViewProvider.updateMenuItem(this, menu, R.id.action_issues, Octicons.Icon.oct_issue_opened);
        ViewProvider.updateMenuItem(this, menu, R.id.action_events, Octicons.Icon.oct_rss);
        ViewProvider.updateMenuItem(this, menu, R.id.action_repositories, Octicons.Icon.oct_repo);
        ViewProvider.updateMenuItem(this, menu, R.id.action_setting, Octicons.Icon.oct_gear);
        ViewProvider.updateMenuItem(this, menu, R.id.action_author, Octicons.Icon.oct_gist_secret);
        ViewProvider.updateMenuItem(this, menu, R.id.action_iconics_test, Octicons.Icon.oct_repo);
        ViewProvider.updateMenuItem(this, menu, R.id.action_icon_api, Octicons.Icon.oct_repo);
        ViewProvider.updateMenuItem(this, menu, R.id.action_jsoup_api, Octicons.Icon.oct_repo);
    }

    @Override public boolean onCreateOptionsMenu(Menu menu)
    {
        return super.onCreateOptionsMenu(menu);
    }

    @Override public boolean onOptionsItemSelected(MenuItem item)
    {
        if (actionBarDrawerToggle.onOptionsItemSelected(item))
        {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override public void onConfigurationChanged(Configuration newConfig)
    {
        super.onConfigurationChanged(newConfig);
        Logger.fLog().tag(tag()).i("onConfigurationChanged");
    }

    @Override public void onBackPressed()
    {
        // Close the drawer first; only leave the screen when it is already closed.
        if (drawerLayout.isDrawerOpen(GravityCompat.START))
        {
            drawerLayout.closeDrawer(GravityCompat.START);
        }
        else
        {
            super.onBackPressed();
        }
    }

    @Override protected void onDestroy()
    {
        getMainController().getLocalBroadcastManager().unRegisterGroup("drawer");
        super.onDestroy();
    }

    /** Logs the user out asynchronously and resets the drawer to the signed-out state. */
    private void onLogout()
    {
        accountModel.logoutUserWithReactor().subscribeOn(Schedulers.io()).doOnSubscribe(new Consumer<Disposable>()
        {
            @Override public void accept(Disposable disposable)
            {
                CircleProgressDialog.showLoadingDialog(MainActivity.this);
            }
        }).observeOn(AndroidSchedulers.mainThread())
                .subscribe(new Consumer<Void>()
                {
                    @Override public void accept(Void aVoid)
                    {
                        updateUserView(null);
                    }
                }, new Consumer<Throwable>()
                {
                    @Override public void accept(Throwable throwable)
                    {
                        throwable.printStackTrace();
                    }
                }, new Action()
                {
                    @Override public void run()
                    {
                        CircleProgressDialog.hideLoadingDialog();
                    }
                });
    }

    /**
     * Loads and shows an interstitial ad. show() is a no-op unless the ad has finished
     * loading, so the call is deferred to AdListener.onAdLoaded instead of being made
     * synchronously right after loadAd().
     */
    private void loadAds()
    {
        final InterstitialAd mInterstitialAd = new InterstitialAd(this);
        mInterstitialAd.setAdUnitId("ca-app-pub-5257007452683157/9157734222");
        AdRequest.Builder adRequestBuilder = new AdRequest.Builder();
        adRequestBuilder.addTestDevice(AdRequest.DEVICE_ID_EMULATOR);
        mInterstitialAd.setAdListener(new com.google.android.gms.ads.AdListener()
        {
            @Override public void onAdLoaded()
            {
                mInterstitialAd.show();
            }
        });
        mInterstitialAd.loadAd(adRequestBuilder.build());
    }
}
| |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.client.test.TestHazelcastFactory;
import com.hazelcast.collection.IList;
import com.hazelcast.collection.IQueue;
import com.hazelcast.collection.ISet;
import com.hazelcast.collection.ItemEvent;
import com.hazelcast.collection.ItemListener;
import com.hazelcast.config.ListenerConfig;
import com.hazelcast.core.EntryAdapter;
import com.hazelcast.core.EntryEvent;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.LifecycleEvent;
import com.hazelcast.core.LifecycleListener;
import com.hazelcast.instance.impl.Node;
import com.hazelcast.internal.serialization.Data;
import com.hazelcast.internal.serialization.SerializationService;
import com.hazelcast.map.IMap;
import com.hazelcast.map.impl.MapService;
import com.hazelcast.map.impl.operation.MergeOperation;
import com.hazelcast.map.impl.record.Record;
import com.hazelcast.map.impl.recordstore.expiry.ExpiryMetadata;
import com.hazelcast.map.listener.EntryMergedListener;
import com.hazelcast.nio.serialization.Portable;
import com.hazelcast.nio.serialization.PortableFactory;
import com.hazelcast.spi.impl.NodeEngineImpl;
import com.hazelcast.spi.impl.operationservice.Operation;
import com.hazelcast.spi.impl.operationservice.impl.OperationServiceImpl;
import com.hazelcast.spi.merge.PassThroughMergePolicy;
import com.hazelcast.spi.merge.SplitBrainMergeTypes;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import com.hazelcast.topic.ITopic;
import com.hazelcast.topic.Message;
import com.hazelcast.topic.MessageListener;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import static com.hazelcast.spi.impl.merge.MergingValueFactory.createMergingEntry;
import static com.hazelcast.test.Accessors.getNode;
import static com.hazelcast.test.Accessors.getSerializationService;
import static org.junit.Assert.assertTrue;
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelJVMTest.class})
public class ClientListenersTest extends HazelcastTestSupport {
private final TestHazelcastFactory hazelcastFactory = new TestHazelcastFactory();
HazelcastInstance client;
HazelcastInstance server;
@After
public void cleanup() {
    // Shut down every member and client created by the factory after each test.
    hazelcastFactory.terminateAll();
}
@Before
public void setup() {
    // Register the sample portable factory on the client only: the member must be
    // able to deliver events for portables it cannot deserialize itself.
    ClientConfig clientConfig = new ClientConfig();
    clientConfig.getSerializationConfig().addPortableFactory(5, new PortableFactory() {
        @Override
        public Portable create(int classId) {
            return classId == 6 ? new ClientRegressionWithMockNetworkTest.SamplePortable() : null;
        }
    });
    clientConfig.addListenerConfig(new ListenerConfig("com.hazelcast.client.ClientListenersTest$StaticListener"));
    server = hazelcastFactory.newHazelcastInstance();
    client = hazelcastFactory.newHazelcastClient(clientConfig);
}
@Test
public void testEntryListener_withPortableNotRegisteredInNode() throws Exception {
    final IMap<Object, Object> map = client.getMap(randomMapName());
    final CountDownLatch addedLatch = new CountDownLatch(1);
    // The entry event must reach the client even though the member has no portable
    // factory registered for the stored value.
    EntryAdapter<Object, Object> listener = new EntryAdapter<Object, Object>() {
        @Override
        public void entryAdded(EntryEvent<Object, Object> event) {
            addedLatch.countDown();
        }
    };
    map.addEntryListener(listener, true);
    map.put(1, new ClientRegressionWithMockNetworkTest.SamplePortable(1));
    assertOpenEventually(addedLatch);
}
@Test
public void testEntryMergeListener_withPortableNotRegisteredInNode() throws Exception {
    final IMap<Object, Object> map = client.getMap(randomMapName());
    final CountDownLatch latch = new CountDownLatch(1);
    // Merge events must be delivered to the client even though the merged value is a
    // portable whose factory is registered only on the client, not on the member.
    map.addEntryListener(new EntryMergedListener<Object, Object>() {
        @Override
        public void entryMerged(EntryEvent<Object, Object> event) {
            latch.countDown();
        }
    }, true);
    // Drive a merge directly through the member's internal operation service, since
    // there is no public API to trigger a merge on demand.
    Node node = getNode(server);
    NodeEngineImpl nodeEngine = node.nodeEngine;
    OperationServiceImpl operationService = nodeEngine.getOperationService();
    SerializationService serializationService = getSerializationService(server);
    // Key/value stay in serialized Data form; the member never needs the portable class.
    Data key = serializationService.toData(1);
    Data value = serializationService.toData(new ClientRegressionWithMockNetworkTest.SamplePortable(1));
    SplitBrainMergeTypes.MapMergeTypes mergingEntry = createMergingEntry(serializationService, key, value,
            Mockito.mock(Record.class), ExpiryMetadata.NULL);
    // PassThroughMergePolicy accepts the merging value unconditionally.
    Operation op = new MergeOperation(map.getName(), Collections.singletonList(mergingEntry),
            new PassThroughMergePolicy<>(), false);
    // The merge operation must run on the partition that owns the key.
    int partitionId = nodeEngine.getPartitionService().getPartitionId(key);
    operationService.invokeOnPartition(MapService.SERVICE_NAME, op, partitionId);
    assertOpenEventually(latch);
}
@Test
public void testItemListener_withPortableNotRegisteredInNode() throws Exception {
    final CountDownLatch added = new CountDownLatch(1);
    final IQueue<Object> queue = client.getQueue(randomMapName());
    // Portable payload is known only to the client; the member merely routes the item event.
    queue.addItemListener(new ItemListener<Object>() {
        @Override
        public void itemAdded(ItemEvent<Object> item) {
            added.countDown();
        }

        @Override
        public void itemRemoved(ItemEvent<Object> item) {
            // removal is irrelevant for this test
        }
    }, true);
    queue.offer(new ClientRegressionWithMockNetworkTest.SamplePortable(1));
    assertOpenEventually(added);
}
@Test
public void testSetListener_withPortableNotRegisteredInNode() throws Exception {
    final CountDownLatch added = new CountDownLatch(1);
    final ISet<Object> set = client.getSet(randomMapName());
    // Portable payload is known only to the client; the member merely routes the item event.
    set.addItemListener(new ItemListener<Object>() {
        @Override
        public void itemAdded(ItemEvent<Object> item) {
            added.countDown();
        }

        @Override
        public void itemRemoved(ItemEvent<Object> item) {
            // removal is irrelevant for this test
        }
    }, true);
    set.add(new ClientRegressionWithMockNetworkTest.SamplePortable(1));
    assertOpenEventually(added);
}
@Test
public void testListListener_withPortableNotRegisteredInNode() throws Exception {
    final CountDownLatch added = new CountDownLatch(1);
    final IList<Object> list = client.getList(randomMapName());
    // Portable payload is known only to the client; the member merely routes the item event.
    list.addItemListener(new ItemListener<Object>() {
        @Override
        public void itemAdded(ItemEvent<Object> item) {
            added.countDown();
        }

        @Override
        public void itemRemoved(ItemEvent<Object> item) {
            // removal is irrelevant for this test
        }
    }, true);
    list.add(new ClientRegressionWithMockNetworkTest.SamplePortable(1));
    assertOpenEventually(added);
}
@Test
public void testTopic_withPortableNotRegisteredInNode() throws Exception {
    final CountDownLatch received = new CountDownLatch(1);
    final ITopic<Object> topic = client.getTopic(randomMapName());
    // The message must be delivered even though the member cannot deserialize the payload.
    topic.addMessageListener(new MessageListener<Object>() {
        @Override
        public void onMessage(Message<Object> message) {
            received.countDown();
        }
    });
    topic.publish(new ClientRegressionWithMockNetworkTest.SamplePortable(1));
    assertOpenEventually(received);
}
@Test
public void testLifecycleListener_registeredViaClassName() {
    // setup() registered StaticListener purely by class name; once the client is up the
    // listener must have observed at least one lifecycle state change.
    boolean invoked = StaticListener.CALLED_AT_LEAST_ONCE.get();
    assertTrue(invoked);
}
/**
 * Lifecycle listener registered via its fully qualified class name in {@code setup()}.
 * The flag is static because Hazelcast instantiates this class reflectively, so the test
 * has no reference to the instance.
 */
public static class StaticListener implements LifecycleListener {

    private static final AtomicBoolean CALLED_AT_LEAST_ONCE = new AtomicBoolean();

    @Override
    public void stateChanged(LifecycleEvent event) {
        CALLED_AT_LEAST_ONCE.set(true);
    }
}
}
| |
/*
* Druid - a distributed column store.
* Copyright 2012 - 2015 Metamarkets Group Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.druid.curator.inventory;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.MapMaker;
import com.google.common.collect.Sets;
import com.metamx.common.lifecycle.LifecycleStart;
import com.metamx.common.lifecycle.LifecycleStop;
import com.metamx.common.logger.Logger;
import io.druid.curator.ShutdownNowIgnoringExecutorService;
import io.druid.curator.cache.PathChildrenCacheFactory;
import io.druid.curator.cache.SimplePathChildrenCacheFactory;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.recipes.cache.ChildData;
import org.apache.curator.framework.recipes.cache.PathChildrenCache;
import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent;
import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;
import org.apache.curator.utils.ZKPaths;
import java.io.IOException;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicReference;
/**
* An InventoryManager watches updates to inventory on Zookeeper (or some other discovery-like service publishing
* system). It is built up on two object types: containers and inventory objects.
* <p/>
* The logic of the InventoryManager just maintains a local cache of the containers and inventory it sees on ZK. It
* provides methods for getting at the container objects, which house the actual individual pieces of inventory.
* <p/>
* A Strategy is provided to the constructor of an Inventory manager, this strategy provides all of the
* object-specific logic to serialize, deserialize, compose and alter the container and inventory objects.
*/
public class CuratorInventoryManager<ContainerClass, InventoryClass>
{
  private static final Logger log = new Logger(CuratorInventoryManager.class);

  // Guards the start/stop lifecycle of childrenCache and all structural changes to
  // `containers` made by the cache listener.
  private final Object lock = new Object();

  private final CuratorFramework curatorFramework;
  private final InventoryManagerConfig config;
  private final CuratorInventoryManagerStrategy<ContainerClass, InventoryClass> strategy;

  // containerKey -> holder for every container currently seen under the container path.
  // Mutated only by ContainerCacheListener (which Curator runs serially — see comment there).
  private final ConcurrentMap<String, ContainerHolder> containers;
  // Holders whose per-container inventory cache has not yet delivered INITIALIZED.
  private final Set<ContainerHolder> uninitializedInventory;
  private final PathChildrenCacheFactory cacheFactory;

  // Non-null exactly while started; doubles as the started/stopped flag (checked under `lock`).
  private volatile PathChildrenCache childrenCache;

  public CuratorInventoryManager(
      CuratorFramework curatorFramework,
      InventoryManagerConfig config,
      ExecutorService exec,
      CuratorInventoryManagerStrategy<ContainerClass, InventoryClass> strategy
  )
  {
    this.curatorFramework = curatorFramework;
    this.config = config;
    this.strategy = strategy;

    this.containers = new MapMaker().makeMap();
    this.uninitializedInventory = Sets.newConcurrentHashSet();

    // NOTE: cacheData is temporarily set to false and we get data directly from ZK on each event.
    // This is a workaround for Curator's out-of-order events problem:
    // https://issues.apache.org/jira/browse/CURATOR-191
    this.cacheFactory = new SimplePathChildrenCacheFactory(false, true, new ShutdownNowIgnoringExecutorService(exec));
  }

  /**
   * Starts watching the container path. Idempotent: calling start() while already started is
   * a no-op. If the underlying cache fails to start, this manager is stopped again before the
   * exception is rethrown.
   */
  @LifecycleStart
  public void start() throws Exception
  {
    synchronized (lock) {
      if (childrenCache != null) {
        return;
      }

      childrenCache = cacheFactory.make(curatorFramework, config.getContainerPath());
    }

    childrenCache.getListenable().addListener(new ContainerCacheListener());

    try {
      // POST_INITIALIZED_EVENT makes Curator replay existing children as CHILD_ADDED events
      // followed by a single INITIALIZED event, which drives the "done initializing" logic below.
      childrenCache.start(PathChildrenCache.StartMode.POST_INITIALIZED_EVENT);
    }
    catch (Exception e) {
      synchronized (lock) {
        try {
          stop();
        }
        catch (IOException e1) {
          log.error(e1, "Exception when stopping InventoryManager that couldn't start.");
        }
      }
      throw e;
    }
  }

  /**
   * Stops watching and closes the container cache plus every per-container inventory cache.
   * Idempotent: calling stop() while already stopped is a no-op.
   */
  @LifecycleStop
  public void stop() throws IOException
  {
    synchronized (lock) {
      if (childrenCache == null) {
        return;
      }

      // This close() call actually calls shutdownNow() on the executor registered with the Cache object...
      childrenCache.close();
      childrenCache = null;
    }

    // Snapshot the keys first because remove() mutates the map while we iterate.
    for (String containerKey : Lists.newArrayList(containers.keySet())) {
      final ContainerHolder containerHolder = containers.remove(containerKey);
      if (containerHolder == null) {
        log.wtf("!? Got key[%s] from keySet() but it didn't have a value!?", containerKey);
      } else {
        // This close() call actually calls shutdownNow() on the executor registered with the Cache object...
        containerHolder.getCache().close();
      }
    }
  }

  public InventoryManagerConfig getConfig()
  {
    return config;
  }

  /**
   * Returns the container object stored for {@code containerKey}, or null when unknown.
   */
  public ContainerClass getInventoryValue(String containerKey)
  {
    final ContainerHolder containerHolder = containers.get(containerKey);
    return containerHolder == null ? null : containerHolder.getContainer();
  }

  /**
   * Returns a live, lazily-transformed view over all currently known container objects.
   */
  public Iterable<ContainerClass> getInventory()
  {
    return Iterables.transform(
        containers.values(),
        new Function<ContainerHolder, ContainerClass>()
        {
          @Override
          public ContainerClass apply(ContainerHolder input)
          {
            return input.getContainer();
          }
        }
    );
  }

  /**
   * Reads the (decompressed) payload of the given ZK node directly, bypassing the cache
   * (see the CURATOR-191 note in the constructor). Returns null on any failure.
   */
  private byte[] getZkDataForNode(String path) {
    try {
      return curatorFramework.getData().decompressed().forPath(path);
    } catch(Exception ex) {
      log.warn(ex, "Exception while getting data for node %s", path);
      return null;
    }
  }

  /**
   * Pairs a container object with the PathChildrenCache watching its inventory, plus an
   * "initialized" flag used to decide when the whole manager is done initializing.
   */
  private class ContainerHolder
  {
    private final AtomicReference<ContainerClass> container;
    private final PathChildrenCache cache;
    // Guarded by synchronizing on the holder instance itself.
    private boolean initialized = false;

    ContainerHolder(
        ContainerClass container,
        PathChildrenCache cache
    )
    {
      this.container = new AtomicReference<ContainerClass>(container);
      this.cache = cache;
    }

    private ContainerClass getContainer()
    {
      return container.get();
    }

    private void setContainer(ContainerClass newContainer)
    {
      container.set(newContainer);
    }

    private PathChildrenCache getCache()
    {
      return cache;
    }
  }

  /**
   * Listener on the top-level container path: creates or destroys one ContainerHolder (and
   * its inventory cache) per child node, and fires strategy.inventoryInitialized() once the
   * container cache and every inventory cache have all reported INITIALIZED.
   */
  private class ContainerCacheListener implements PathChildrenCacheListener
  {
    private volatile boolean containersInitialized = false;
    private volatile boolean doneInitializing = false;

    @Override
    public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception
    {
      switch (event.getType()) {
        case CHILD_ADDED:
          synchronized (lock) {
            final ChildData child = event.getData();
            // Fetch data straight from ZK rather than from the (disabled) cache payload.
            byte[] data = getZkDataForNode(child.getPath());
            if(data == null) {
              log.info("Ignoring event: Type - %s , Path - %s , Version - %s",
                  event.getType(),
                  child.getPath(),
                  child.getStat().getVersion());
              return;
            }

            final String containerKey = ZKPaths.getNodeFromPath(child.getPath());
            final ContainerClass container = strategy.deserializeContainer(data);

            // This would normally be a race condition, but the only thing that should be mutating the containers
            // map is this listener, which should never run concurrently. If the same container is going to disappear
            // and come back, we expect a removed event in between.
            if (containers.containsKey(containerKey)) {
              log.error("New node[%s] but there was already one. That's not good, ignoring new one.", child.getPath());
            } else {
              final String inventoryPath = String.format("%s/%s", config.getInventoryPath(), containerKey);
              PathChildrenCache inventoryCache = cacheFactory.make(curatorFramework, inventoryPath);
              // Register the listener BEFORE start() so no inventory event can be missed.
              inventoryCache.getListenable().addListener(new InventoryCacheListener(containerKey, inventoryPath));

              containers.put(containerKey, new ContainerHolder(container, inventoryCache));

              log.debug("Starting inventory cache for %s, inventoryPath %s", containerKey, inventoryPath);
              inventoryCache.start(PathChildrenCache.StartMode.POST_INITIALIZED_EVENT);
              strategy.newContainer(container);
            }

            break;
          }
        case CHILD_REMOVED:
          synchronized (lock) {
            final ChildData child = event.getData();
            final String containerKey = ZKPaths.getNodeFromPath(child.getPath());

            final ContainerHolder removed = containers.remove(containerKey);
            if (removed == null) {
              log.warn("Container[%s] removed that wasn't a container!?", child.getPath());
              break;
            }

            // This close() call actually calls shutdownNow() on the executor registered with the Cache object, it
            // better have its own executor or ignore shutdownNow() calls...
            log.debug("Closing inventory cache for %s. Also removing listeners.", containerKey);
            removed.getCache().getListenable().clear();
            removed.getCache().close();
            strategy.deadContainer(removed.getContainer());

            // Also remove the node from uninitializedInventory, in case a node gets removed
            // while we are still starting up.
            synchronized (removed) {
              markInventoryInitialized(removed);
            }

            break;
          }
        case CHILD_UPDATED:
          synchronized (lock) {
            final ChildData child = event.getData();
            byte[] data = getZkDataForNode(child.getPath());
            if (data == null) {
              log.info("Ignoring event: Type - %s , Path - %s , Version - %s",
                  event.getType(),
                  child.getPath(),
                  child.getStat().getVersion());
              return;
            }

            final String containerKey = ZKPaths.getNodeFromPath(child.getPath());
            final ContainerClass container = strategy.deserializeContainer(data);

            log.debug("Container[%s] updated.", child.getPath());
            ContainerHolder holder = containers.get(containerKey);
            if (holder == null) {
              log.warn("Container update[%s], but the old container didn't exist!? Ignoring.", child.getPath());
            } else {
              synchronized (holder) {
                holder.setContainer(strategy.updateContainer(holder.getContainer(), container));
              }
            }

            break;
          }
        case INITIALIZED:
          synchronized (lock) {
            // Must await initialization of every ContainerHolder before declaring ourselves done.
            for (ContainerHolder holder : containers.values()) {
              synchronized (holder) {
                if (!holder.initialized) {
                  uninitializedInventory.add(holder);
                }
              }
            }
            containersInitialized = true;
            maybeDoneInitializing();
            break;
          }
      }
    }

    // Must be run in a synchronized(lock) { synchronized(holder) { ... } } block.
    private void markInventoryInitialized(final ContainerHolder holder)
    {
      holder.initialized = true;
      uninitializedInventory.remove(holder);
      maybeDoneInitializing();
    }

    private void maybeDoneInitializing()
    {
      if (doneInitializing) {
        return;
      }

      // Only fire if we are done initializing the parent PathChildrenCache.
      if (containersInitialized && uninitializedInventory.isEmpty()) {
        doneInitializing = true;
        strategy.inventoryInitialized();
      }
    }

    /**
     * Listener on one container's inventory path: applies add/update/remove of individual
     * inventory nodes to the owning ContainerHolder via the strategy callbacks.
     */
    private class InventoryCacheListener implements PathChildrenCacheListener
    {
      private final String containerKey;
      private final String inventoryPath;

      public InventoryCacheListener(String containerKey, String inventoryPath)
      {
        this.containerKey = containerKey;
        this.inventoryPath = inventoryPath;

        log.info("Created new InventoryCacheListener for %s", inventoryPath);
      }

      @Override
      public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception
      {
        // The container may already have been removed; in that case there is nothing to update.
        final ContainerHolder holder = containers.get(containerKey);
        if (holder == null) {
          return;
        }

        switch (event.getType()) {
          case CHILD_ADDED: {
            final ChildData child = event.getData();
            byte[] data = getZkDataForNode(child.getPath());
            if (data == null) {
              log.info("Ignoring event: Type - %s , Path - %s , Version - %s",
                  event.getType(),
                  child.getPath(),
                  child.getStat().getVersion());
              return;
            }

            final String inventoryKey = ZKPaths.getNodeFromPath(child.getPath());
            log.debug("CHILD_ADDED[%s] with version[%s]", child.getPath(), event.getData().getStat().getVersion());
            final InventoryClass addedInventory = strategy.deserializeInventory(data);

            synchronized (holder) {
              holder.setContainer(strategy.addInventory(holder.getContainer(), inventoryKey, addedInventory));
            }

            break;
          }
          case CHILD_UPDATED: {
            final ChildData child = event.getData();
            byte[] data = getZkDataForNode(child.getPath());
            if (data == null) {
              log.info("Ignoring event: Type - %s , Path - %s , Version - %s",
                  event.getType(),
                  child.getPath(),
                  child.getStat().getVersion());
              return;
            }

            final String inventoryKey = ZKPaths.getNodeFromPath(child.getPath());
            log.debug("CHILD_UPDATED[%s] with version[%s]", child.getPath(), event.getData().getStat().getVersion());
            final InventoryClass updatedInventory = strategy.deserializeInventory(data);

            synchronized (holder) {
              holder.setContainer(strategy.updateInventory(holder.getContainer(), inventoryKey, updatedInventory));
            }

            break;
          }
          case CHILD_REMOVED: {
            final ChildData child = event.getData();
            final String inventoryKey = ZKPaths.getNodeFromPath(child.getPath());
            log.debug("CHILD_REMOVED[%s] with version[%s]", child.getPath(), event.getData().getStat().getVersion());

            synchronized (holder) {
              holder.setContainer(strategy.removeInventory(holder.getContainer(), inventoryKey));
            }

            break;
          }
          case INITIALIZED:
            // Make sure to acquire locks in (lock -> holder) order to avoid deadlock.
            synchronized (lock) {
              synchronized (holder) {
                markInventoryInitialized(holder);
              }
            }

            break;
        }
      }
    }
  }
}
| |
// Copyright 2017 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package adwords.axis.v201809.basicoperations;
import static com.google.api.ads.common.lib.utils.Builder.DEFAULT_CONFIGURATION_FILENAME;
import com.beust.jcommander.Parameter;
import com.google.api.ads.adwords.axis.factory.AdWordsServices;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroup;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupAdRotationMode;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupOperation;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupReturnValue;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupServiceInterface;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupStatus;
import com.google.api.ads.adwords.axis.v201809.cm.AdRotationMode;
import com.google.api.ads.adwords.axis.v201809.cm.ApiError;
import com.google.api.ads.adwords.axis.v201809.cm.ApiException;
import com.google.api.ads.adwords.axis.v201809.cm.BiddingStrategyConfiguration;
import com.google.api.ads.adwords.axis.v201809.cm.Bids;
import com.google.api.ads.adwords.axis.v201809.cm.CpcBid;
import com.google.api.ads.adwords.axis.v201809.cm.CriterionTypeGroup;
import com.google.api.ads.adwords.axis.v201809.cm.Money;
import com.google.api.ads.adwords.axis.v201809.cm.Operator;
import com.google.api.ads.adwords.axis.v201809.cm.Setting;
import com.google.api.ads.adwords.axis.v201809.cm.TargetingSetting;
import com.google.api.ads.adwords.axis.v201809.cm.TargetingSettingDetail;
import com.google.api.ads.adwords.lib.client.AdWordsSession;
import com.google.api.ads.adwords.lib.factory.AdWordsServicesInterface;
import com.google.api.ads.adwords.lib.utils.examples.ArgumentNames;
import com.google.api.ads.common.lib.auth.OfflineCredentials;
import com.google.api.ads.common.lib.auth.OfflineCredentials.Api;
import com.google.api.ads.common.lib.conf.ConfigurationLoadException;
import com.google.api.ads.common.lib.exception.OAuthException;
import com.google.api.ads.common.lib.exception.ValidationException;
import com.google.api.ads.common.lib.utils.examples.CodeSampleParams;
import com.google.api.client.auth.oauth2.Credential;
import java.rmi.RemoteException;
/**
* This example adds ad groups to a campaign. To get campaigns, run
* GetCampaigns.java.
*
* <p>Credentials and properties in {@code fromFile()} are pulled from the
* "ads.properties" file. See README for more info.
*/
public class AddAdGroups {

  /** Default max CPC bid for the new ad groups, in micros (10,000,000 micros = 10 units). */
  private static final long DEFAULT_CPC_BID_MICROS = 10_000_000L;

  private static class AddAdGroupsParams extends CodeSampleParams {
    @Parameter(names = ArgumentNames.CAMPAIGN_ID, required = true)
    private Long campaignId;
  }

  public static void main(String[] args) {
    AdWordsSession session;
    try {
      // Generate a refreshable OAuth2 credential.
      Credential oAuth2Credential =
          new OfflineCredentials.Builder()
              .forApi(Api.ADWORDS)
              .fromFile()
              .build()
              .generateCredential();

      // Construct an AdWordsSession.
      session =
          new AdWordsSession.Builder().fromFile().withOAuth2Credential(oAuth2Credential).build();
    } catch (ConfigurationLoadException cle) {
      System.err.printf(
          "Failed to load configuration from the %s file. Exception: %s%n",
          DEFAULT_CONFIGURATION_FILENAME, cle);
      return;
    } catch (ValidationException ve) {
      System.err.printf(
          "Invalid configuration in the %s file. Exception: %s%n",
          DEFAULT_CONFIGURATION_FILENAME, ve);
      return;
    } catch (OAuthException oe) {
      System.err.printf(
          "Failed to create OAuth credentials. Check OAuth settings in the %s file. "
              + "Exception: %s%n",
          DEFAULT_CONFIGURATION_FILENAME, oe);
      return;
    }

    AdWordsServicesInterface adWordsServices = AdWordsServices.getInstance();

    AddAdGroupsParams params = new AddAdGroupsParams();
    if (!params.parseArguments(args)) {
      // Either pass the required parameters for this example on the command line, or insert them
      // into the code here. See the parameter class definition above for descriptions.
      params.campaignId = Long.parseLong("INSERT_CAMPAIGN_ID_HERE");
    }

    try {
      runExample(adWordsServices, session, params.campaignId);
    } catch (ApiException apiException) {
      // ApiException is the base class for most exceptions thrown by an API request. Instances
      // of this exception have a message and a collection of ApiErrors that indicate the
      // type and underlying cause of the exception. Every exception object in the adwords.axis
      // packages will return a meaningful value from toString
      //
      // ApiException extends RemoteException, so this catch block must appear before the
      // catch block for RemoteException.
      System.err.println("Request failed due to ApiException. Underlying ApiErrors:");
      if (apiException.getErrors() != null) {
        int i = 0;
        for (ApiError apiError : apiException.getErrors()) {
          System.err.printf("  Error %d: %s%n", i++, apiError);
        }
      }
    } catch (RemoteException re) {
      System.err.printf(
          "Request failed unexpectedly due to RemoteException: %s%n", re);
    }
  }

  /**
   * Runs the example.
   *
   * @param adWordsServices the services factory.
   * @param session the session.
   * @param campaignId the ID of the campaign where the ad groups will be created.
   * @throws ApiException if the API request failed with one or more service errors.
   * @throws RemoteException if the API request failed due to other errors.
   */
  public static void runExample(
      AdWordsServicesInterface adWordsServices, AdWordsSession session, long campaignId)
      throws RemoteException {
    // Get the AdGroupService.
    AdGroupServiceInterface adGroupService =
        adWordsServices.get(session, AdGroupServiceInterface.class);

    // Create the first ad group, including the optional targeting and rotation settings.
    AdGroup adGroup = new AdGroup();
    adGroup.setName("Earth to Mars Cruises #" + System.currentTimeMillis());
    adGroup.setStatus(AdGroupStatus.ENABLED);
    adGroup.setCampaignId(campaignId);

    // Optional settings.

    // Targeting restriction settings. Depending on the criterionTypeGroup
    // value, most TargetingSettingDetail only affect Display campaigns.
    // However, the USER_INTEREST_AND_LIST value works for RLSA campaigns -
    // Search campaigns targeting using a remarketing list.
    TargetingSetting targeting = new TargetingSetting();

    // Restricting to serve ads that match your ad group placements.
    // This is equivalent to choosing "Target and bid" in the UI.
    TargetingSettingDetail placements = new TargetingSettingDetail();
    placements.setCriterionTypeGroup(CriterionTypeGroup.PLACEMENT);
    placements.setTargetAll(Boolean.FALSE);

    // Using your ad group verticals only for bidding. This is equivalent
    // to choosing "Bid only" in the UI.
    TargetingSettingDetail verticals = new TargetingSettingDetail();
    verticals.setCriterionTypeGroup(CriterionTypeGroup.VERTICAL);
    verticals.setTargetAll(Boolean.TRUE);

    targeting.setDetails(new TargetingSettingDetail[] {placements, verticals});
    adGroup.setSettings(new Setting[] {targeting});

    // Set the rotation mode.
    AdGroupAdRotationMode rotationMode = new AdGroupAdRotationMode(AdRotationMode.OPTIMIZE);
    adGroup.setAdGroupAdRotationMode(rotationMode);

    // Create ad group bid.
    adGroup.setBiddingStrategyConfiguration(createCpcBiddingConfiguration(DEFAULT_CPC_BID_MICROS));

    // Add as many additional ad groups as you need. This one skips the optional settings.
    AdGroup adGroup2 = new AdGroup();
    adGroup2.setName("Earth to Venus Cruises #" + System.currentTimeMillis());
    adGroup2.setStatus(AdGroupStatus.ENABLED);
    adGroup2.setCampaignId(campaignId);
    adGroup2.setBiddingStrategyConfiguration(createCpcBiddingConfiguration(DEFAULT_CPC_BID_MICROS));

    // Create operations.
    AdGroupOperation operation = new AdGroupOperation();
    operation.setOperand(adGroup);
    operation.setOperator(Operator.ADD);

    AdGroupOperation operation2 = new AdGroupOperation();
    operation2.setOperand(adGroup2);
    operation2.setOperator(Operator.ADD);

    AdGroupOperation[] operations = new AdGroupOperation[] {operation, operation2};

    // Add ad groups.
    AdGroupReturnValue result = adGroupService.mutate(operations);

    // Display new ad groups.
    for (AdGroup adGroupResult : result.getValue()) {
      System.out.printf("Ad group with name '%s' and ID %d was added.%n",
          adGroupResult.getName(), adGroupResult.getId());
    }
  }

  /**
   * Builds a bidding configuration holding a single max CPC bid.
   *
   * <p>Extracted because the original built the identical configuration twice inline.
   *
   * @param bidMicros the bid amount in micros.
   * @return a new {@link BiddingStrategyConfiguration} with one {@link CpcBid}.
   */
  private static BiddingStrategyConfiguration createCpcBiddingConfiguration(long bidMicros) {
    Money cpcBidMoney = new Money();
    cpcBidMoney.setMicroAmount(bidMicros);
    CpcBid cpcBid = new CpcBid();
    cpcBid.setBid(cpcBidMoney);
    BiddingStrategyConfiguration biddingStrategyConfiguration = new BiddingStrategyConfiguration();
    biddingStrategyConfiguration.setBids(new Bids[] {cpcBid});
    return biddingStrategyConfiguration;
  }
}
| |
package org.ripple.power.ui.editor;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.event.ActionEvent;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import javax.swing.AbstractAction;
import javax.swing.BorderFactory;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.KeyStroke;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.filechooser.FileFilter;
import javax.swing.undo.CannotRedoException;
import javax.swing.undo.CannotUndoException;
import javax.swing.undo.UndoManager;
import org.ripple.power.ui.UIRes;
import org.ripple.power.ui.graphics.LColor;
/**
 * A Swing panel hosting the ROC script text editor: scroll pane + syntax document,
 * dirty-state tracking, undo/redo, and Ctrl+S / Ctrl+O / Ctrl+Z / Ctrl+Y key bindings.
 */
public class ROCScriptEditor extends JPanel {

	/** File filter accepting directories and .txt / .roc / .script files. */
	public final static ROCFileFilter FILTER = new ROCFileFilter();

	private static class ROCFileFilter extends FileFilter {
		public boolean accept(File f) {
			if (f.isDirectory()) {
				return true;
			}
			return f.getName().endsWith(".txt") || f.getName().endsWith(".roc") || f.getName().endsWith(".script");
		}

		public String getDescription() {
			return ".txt|.roc|.script";
		}
	}

	/**
	 *
	 */
	private static final long serialVersionUID = 1L;

	// Host-UI callback used for the load/save dialogs and toolbar state.
	private ROCFileListener _listener = null;

	// Dirty flag: true once the document was modified since the last load/save.
	public boolean _isEdited = false;

	// Saved flag: true when the on-disk content matches the editor content.
	public boolean _isSaved = true;

	private SourcePaper _textEdit = new SourcePaper();

	private JScrollPane _jScrollPanel = new JScrollPane();

	private BorderLayout _borderLayout = new BorderLayout();

	private String fileName = "";

	// Currently opened file; null when the buffer has never been saved/loaded.
	private File file = null;

	private UndoManager manager = new UndoManager();

	public void setText(String str) {
		_textEdit.setText(str);
	}

	public SourcePaper getROC() {
		return _textEdit;
	}

	public String getText() {
		return _textEdit.getText();
	}

	/**
	 * Builds the editor: wires the text component into the scroll pane, installs the
	 * document/undo listeners and registers Ctrl+S (save), Ctrl+O (open), Ctrl+Z (undo)
	 * and Ctrl+Y / Ctrl+Shift+Z (redo) on both the panel and the text component.
	 */
	public ROCScriptEditor() {
		try {
			this.setLayout(_borderLayout);
			this.add(_jScrollPanel, java.awt.BorderLayout.CENTER);
			_jScrollPanel.getViewport().add(_textEdit);
			_jScrollPanel.setBackground(new Color(70, 70, 70));
			_jScrollPanel.setForeground(Color.WHITE);
			_jScrollPanel.setBorder(BorderFactory.createLineBorder(LColor.black));
			_textEdit.setDocument(new SourceDocument());
			_textEdit.getDocument().addDocumentListener(new ROCEditor_Edit_documentAdapter(this));
			_textEdit.getDocument().addUndoableEditListener(new ROCEditorListener(manager));
			this.getActionMap().put("ctrl_s", new AbstractAction("ctrl_s") {

				private static final long serialVersionUID = 1L;

				@Override
				public void actionPerformed(ActionEvent evt) {
					// BUGFIX: decide on "do we have a file yet?", matching the handler
					// installed on _textEdit below. The old check on _isEdited() opened
					// the save-as dialog for unmodified documents that already had a file.
					if (file == null) {
						saveAs();
					} else {
						saveFile();
					}
				}
			});
			this.getInputMap().put(KeyStroke.getKeyStroke("control S"), "ctrl_s");
			_textEdit.getActionMap().put("ctrl_s", new AbstractAction("ctrl_s") {

				private static final long serialVersionUID = 1L;

				@Override
				public void actionPerformed(ActionEvent evt) {
					if (file == null) {
						saveAs();
					} else {
						saveFile();
					}
				}
			});
			_textEdit.getInputMap().put(KeyStroke.getKeyStroke("control S"), "ctrl_s");
			this.getActionMap().put("ctrl_o", new AbstractAction("ctrl_o") {

				private static final long serialVersionUID = 1L;

				@Override
				public void actionPerformed(ActionEvent evt) {
					openNew();
				}
			});
			this.getInputMap().put(KeyStroke.getKeyStroke("control O"), "ctrl_o");
			_textEdit.getActionMap().put("ctrl_o", new AbstractAction("ctrl_o") {

				private static final long serialVersionUID = 1L;

				@Override
				public void actionPerformed(ActionEvent evt) {
					openNew();
				}
			});
			_textEdit.getInputMap().put(KeyStroke.getKeyStroke("control O"), "ctrl_o");
			_textEdit.setCaretPosition(0);
			_textEdit.getInputMap().put(KeyStroke.getKeyStroke("control Z"), "ctrl_z");
			_textEdit.getActionMap().put("ctrl_z", new AbstractAction("ctrl_z") {

				private static final long serialVersionUID = 1L;

				@Override
				public void actionPerformed(ActionEvent e) {
					undo();
				}
			});
			_textEdit.getInputMap().put(KeyStroke.getKeyStroke("control Y"), "ctrl_y");
			_textEdit.getActionMap().put("ctrl_y", new AbstractAction("ctrl_y") {

				private static final long serialVersionUID = 1L;

				@Override
				public void actionPerformed(ActionEvent e) {
					redo();
				}
			});
			_textEdit.getInputMap().put(KeyStroke.getKeyStroke("control shift Z"), "ctrl_shift_z");
			_textEdit.getActionMap().put("ctrl_shift_z", new AbstractAction("ctrl_shift_z") {

				private static final long serialVersionUID = 1L;

				@Override
				public void actionPerformed(ActionEvent e) {
					redo();
				}
			});
			_textEdit.setText("#ROC Script\n");
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	public File getFile() {
		return file;
	}

	public String getFileName() {
		return fileName;
	}

	public void setFileName(String filename) {
		this.fileName = filename;
	}

	/**
	 * Associates the editor with a file and resets the dirty/saved flags.
	 */
	public void setFile(File file) {
		this.file = file;
		setFileName(file.getName());
		_isSaved = true;
		_isEdited = false;
	}

	public boolean _isEdited() {
		return this._isEdited;
	}

	/**
	 * Loads the current {@link #file} into the editor and resets the dirty flags.
	 */
	public void openFile() {
		try {
			_textEdit.setText(readFile(file));
			_textEdit.setCaretPosition(0);
			_isSaved = true;
			_isEdited = false;
		} catch (IOException ex) {
			// Best-effort: leave the editor content unchanged when the file cannot be read.
		}
	}

	/**
	 * Closes the current document, offering to save pending changes first.
	 *
	 * @return true when the document was closed, false when a required save failed
	 */
	public boolean close() {
		boolean savedSuccessfully = true;
		if (_isEdited) {
			if (UIRes.showConfirmMessage(this.getParent().getParent(), "Confirm",
					"Do you want to save changes to the current file?", "YES", "NO") == 0) {
				savedSuccessfully = this.saveFile();
			}
		}
		if (savedSuccessfully) {
			setFileName("");
			file = null;
			_textEdit.setText("");
			_isSaved = true;
			_isEdited = false;
			return true;
		}
		return false;
	}

	// Delegates the save-as dialog to the host UI; false when no listener is attached.
	private boolean saveAs() {
		if (_listener != null) {
			return _listener.doSave();
		}
		return false;
	}

	// Delegates the open dialog to the host UI.
	private void openNew() {
		if (_listener != null) {
			_listener.doLoad();
		}
	}

	/**
	 * Reads the whole file, normalizing line endings to '\n'.
	 *
	 * @throws IOException if the file cannot be read
	 */
	private String readFile(File file) throws IOException {
		// try-with-resources: the old code leaked the reader when readLine() threw.
		StringBuilder fileBuffer = new StringBuilder();
		try (BufferedReader dis = new BufferedReader(new FileReader(file))) {
			String line;
			while ((line = dis.readLine()) != null) {
				fileBuffer.append(line).append('\n');
			}
		}
		return fileBuffer.toString();
	}

	/**
	 * Writes the editor content to {@link #file}, or falls back to the save-as dialog
	 * when no file has been chosen yet.
	 *
	 * @return true when the content was written (or save-as succeeded), false on failure
	 */
	public boolean saveFile() {
		try {
			if (file != null) {
				// try-with-resources closes the writer even when print() fails; PrintWriter
				// swallows IOExceptions internally, so surface them via checkError().
				boolean writeFailed;
				try (PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(file)))) {
					out.print(_textEdit.getText());
					out.flush();
					writeFailed = out.checkError();
				}
				if (writeFailed) {
					return false;
				}
				_isSaved = true;
				_isEdited = false;
				return true;
			}
			return saveAs();
		} catch (IOException e) {
			return false;
		}
	}

	// Called by the document adapter on every document mutation.
	void edit_Performed() {
		_isEdited = true;
		_isSaved = false;
		if (_listener != null) {
			_listener.setButtons();
		}
	}

	public void undo() {
		try {
			manager.undo();
		} catch (CannotUndoException e) {
			// Nothing to undo: ignore.
		}
	}

	public void redo() {
		try {
			manager.redo();
		} catch (CannotRedoException e) {
			// Nothing to redo: ignore.
		}
	}

	public ROCFileListener getListener() {
		return _listener;
	}

	public void setListener(ROCFileListener l) {
		this._listener = l;
	}

	public boolean isEdited() {
		return _isEdited;
	}

	public void setEdited(boolean e) {
		this._isEdited = e;
	}

	public boolean isSaved() {
		return _isSaved;
	}

	public void setSaved(boolean s) {
		this._isSaved = s;
	}
}
/**
 * Forwards every document mutation (insert, remove, attribute change) of the editor's text
 * component to {@link ROCScriptEditor#edit_Performed()} so the editor tracks its dirty state.
 */
class ROCEditor_Edit_documentAdapter implements DocumentListener {

	ROCScriptEditor adaptee;

	public ROCEditor_Edit_documentAdapter(ROCScriptEditor adaptee) {
		this.adaptee = adaptee;
	}

	@Override
	public void insertUpdate(DocumentEvent evt) {
		notifyEdited();
	}

	@Override
	public void removeUpdate(DocumentEvent evt) {
		notifyEdited();
	}

	@Override
	public void changedUpdate(DocumentEvent evt) {
		notifyEdited();
	}

	// Single funnel point: every kind of document change marks the editor as edited.
	private void notifyEdited() {
		adaptee.edit_Performed();
	}
}
| |
/*
*
* * Copyright 2010-2016 OrientDB LTD (http://orientdb.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://orientdb.com
*
*/
package com.orientechnologies.orient.server.hazelcast;
import com.hazelcast.core.EntryEvent;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.HazelcastInstanceNotActiveException;
import com.hazelcast.core.IMap;
import com.hazelcast.core.MapEvent;
import com.hazelcast.map.listener.EntryAddedListener;
import com.hazelcast.map.listener.EntryRemovedListener;
import com.hazelcast.map.listener.EntryUpdatedListener;
import com.hazelcast.map.listener.MapClearedListener;
import com.orientechnologies.orient.server.distributed.ODistributedServerLog;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
/**
* Optimized concurrent hash map implementation on top of Hazelcast distributed map.
*
* @author Luca Garulli (l.garulli--at--orientdb.com)
*/
/**
 * Two-tier map used for OrientDB cluster metadata: every entry is written both
 * to the Hazelcast distributed {@link IMap} and to this local
 * {@link ConcurrentHashMap}, which acts as an always-available cache.
 * Hazelcast entry events ({@code entryAdded}/{@code entryUpdated}/
 * {@code entryRemoved}/{@code mapCleared}) mirror remote changes into the
 * local copy. Reads such as {@code get()}, {@code containsKey()} and
 * {@code entrySet()} go to the distributed map; the {@code *Local*} accessors
 * bypass it.
 */
public class OHazelcastDistributedMap extends ConcurrentHashMap<String, Object>
implements EntryAddedListener<String, Object>,
EntryRemovedListener<String, Object>,
MapClearedListener,
EntryUpdatedListener<String, Object> {
private final OHazelcastClusterMetadataManager dManager;
// Backing distributed map; authoritative source for the read overrides below.
private final IMap<String, Object> hzMap;
// Registration id returned by addEntryListener(); needed to deregister in destroy().
private final String membershipListenerRegistration;
public static final String ORIENTDB_MAP = "orientdb";
/**
 * Attaches to the shared "orientdb" Hazelcast map, registers this instance as
 * an entry listener and seeds the local cache with a snapshot of the current
 * distributed content.
 */
public OHazelcastDistributedMap(
final OHazelcastClusterMetadataManager manager, final HazelcastInstance hz) {
dManager = manager;
hzMap = hz.getMap(ORIENTDB_MAP);
// 'true' requests that entry events carry the value, so the callbacks below
// can update the local cache without an extra remote read.
membershipListenerRegistration = hzMap.addEntryListener(this, true);
// Snapshot the distributed content into the local cache.
super.putAll(hzMap);
}
/** @return the underlying Hazelcast distributed map. */
public IMap<String, Object> getHazelcastMap() {
return hzMap;
}
// Reads are served by the distributed map, not the local cache.
@Override
public Object get(final Object key) {
return hzMap.get(key);
}
@Override
public boolean containsKey(final Object key) {
return hzMap.containsKey(key);
}
// Entry view over the distributed map; use localEntrySet() for the cache.
@Override
public Set<Entry<String, Object>> entrySet() {
return hzMap.entrySet();
}
/** @return the entries currently held in the local cache only. */
public Set<Entry<String, Object>> localEntrySet() {
return super.entrySet();
}
/**
 * Local-first read: returns the cached value when present, otherwise falls
 * back to the distributed map. Returns {@code null} if Hazelcast has already
 * shut down.
 */
public Object getLocalCachedValue(final Object key) {
final Object res = super.get(key);
if (res != null) return res;
try {
return hzMap.get(key);
} catch (HazelcastInstanceNotActiveException e) {
// IGNORE IT
return null;
}
}
/**
 * Writes to both tiers. If Hazelcast is no longer active the distributed
 * write is skipped (best effort) but the local cache is still updated.
 */
@Override
public Object put(final String key, final Object value) {
try {
hzMap.put(key, value);
} catch (HazelcastInstanceNotActiveException e) {
// IGNORE IT
}
return super.put(key, value);
}
// NOTE(review): the returned previous value comes from the local cache; it
// may differ from the distributed map's outcome if the two tiers diverged.
@Override
public Object putIfAbsent(final String key, final Object value) {
try {
hzMap.putIfAbsent(key, value);
} catch (HazelcastInstanceNotActiveException e) {
// IGNORE IT
}
return super.putIfAbsent(key, value);
}
/** Updates only the local cache, leaving the distributed map untouched. */
public Object putInLocalCache(final String key, final Object value) {
return super.put(key, value);
}
/** Removes from both tiers; the distributed removal is best effort. */
@Override
public Object remove(final Object key) {
try {
hzMap.remove(key);
} catch (HazelcastInstanceNotActiveException e) {
// IGNORE IT
}
return super.remove(key);
}
/** Conditional remove applied to both tiers; the result reflects the local cache. */
@Override
public boolean remove(final Object key, final Object value) {
try {
hzMap.remove(key, value);
} catch (HazelcastInstanceNotActiveException e) {
// IGNORE IT
}
return super.remove(key, value);
}
// --- Hazelcast listener callbacks: mirror remote changes into the local cache ---
@Override
public void entryAdded(final EntryEvent<String, Object> event) {
if (ODistributedServerLog.isDebugEnabled())
ODistributedServerLog.debug(
this,
dManager.getLocalNodeName(),
null,
ODistributedServerLog.DIRECTION.NONE,
"Map entry added "
+ event.getKey()
+ "="
+ event.getValue()
+ " from server "
+ dManager.getNodeName(event.getMember(), true));
super.put(event.getKey(), event.getValue());
}
@Override
public void entryUpdated(final EntryEvent<String, Object> event) {
if (ODistributedServerLog.isDebugEnabled())
ODistributedServerLog.debug(
this,
dManager.getLocalNodeName(),
null,
ODistributedServerLog.DIRECTION.NONE,
"Map entry updated "
+ event.getKey()
+ "="
+ event.getValue()
+ " from server "
+ dManager.getNodeName(event.getMember(), true));
super.put(event.getKey(), event.getValue());
}
@Override
public void entryRemoved(final EntryEvent<String, Object> event) {
if (ODistributedServerLog.isDebugEnabled())
ODistributedServerLog.debug(
this,
dManager.getLocalNodeName(),
null,
ODistributedServerLog.DIRECTION.NONE,
"Map entry removed "
+ event.getKey()
+ "="
+ event.getValue()
+ " from "
+ dManager.getNodeName(event.getMember(), true));
super.remove(event.getKey());
}
@Override
public void mapCleared(MapEvent event) {
if (ODistributedServerLog.isDebugEnabled())
ODistributedServerLog.debug(
this,
dManager.getLocalNodeName(),
null,
ODistributedServerLog.DIRECTION.NONE,
"Map cleared from server " + dManager.getNodeName(event.getMember(), true));
super.clear();
}
/**
 * Detaches from Hazelcast: empties the local cache and deregisters the entry
 * listener. NOTE(review): clear() is not overridden, so only the inherited
 * local ConcurrentHashMap is emptied — the distributed map is left intact;
 * confirm this is intended.
 */
public void destroy() {
clear();
hzMap.removeEntryListener(membershipListenerRegistration);
}
/** Empties only the local cache. */
public void clearLocalCache() {
super.clear();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.notebook.repo;
import static com.google.common.truth.Truth.assertThat;
import static junit.framework.TestCase.fail;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.contrib.nio.testing.LocalStorageHelper;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.zeppelin.conf.ZeppelinConfiguration;
import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars;
import org.apache.zeppelin.notebook.Note;
import org.apache.zeppelin.notebook.NoteInfo;
import org.apache.zeppelin.notebook.Paragraph;
import org.apache.zeppelin.scheduler.Job.Status;
import org.apache.zeppelin.user.AuthenticationInfo;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
// Exercises GCSNotebookRepo against an in-memory GCS emulator
// (LocalStorageHelper), parameterized over bucket-name / base-path / URI
// combinations so path handling is verified with and without a base path and
// with and without trailing slashes.
public class GCSNotebookRepoTest {
private static final AuthenticationInfo AUTH_INFO = AuthenticationInfo.ANONYMOUS;
private GCSNotebookRepo notebookRepo;
private Storage storage;
// Each row: { bucketName, optional basePath, full gs:// URI passed to the repo }.
@Parameters
public static Collection<Object[]> data() {
return Arrays.asList(new Object[][] {
{ "bucketname", Optional.absent(), "gs://bucketname" },
{ "bucketname-with-slash", Optional.absent(), "gs://bucketname-with-slash/" },
{ "bucketname", Optional.of("path/to/dir"), "gs://bucketname/path/to/dir" },
{ "bucketname", Optional.of("trailing/slash"), "gs://bucketname/trailing/slash/" }
});
}
@Parameter(0)
public String bucketName;
@Parameter(1)
public Optional<String> basePath;
@Parameter(2)
public String uriPath;
private Note runningNote;
// Builds a fresh in-memory storage and repo per test; the storage dir is
// injected through the ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR system property.
@Before
public void setUp() throws Exception {
this.runningNote = makeRunningNote();
this.storage = LocalStorageHelper.getOptions().getService();
System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath);
this.notebookRepo = new GCSNotebookRepo(new ZeppelinConfiguration(), storage);
}
// A note with one RUNNING paragraph — used to check that get() resets status.
private static Note makeRunningNote() {
Note note = new Note();
note.setPath("/test_note");
note.setConfig(ImmutableMap.<String, Object>of("key", "value"));
Paragraph p = new Paragraph(note, null);
p.setText("text");
p.setStatus(Status.RUNNING);
note.addParagraph(p);
return note;
}
@Test
public void testList_nonexistent() throws Exception {
assertThat(notebookRepo.list(AUTH_INFO)).isEmpty();
}
// list() must only pick up blobs matching <basePath>/<name>_<id>.zpln and
// ignore nested or directory-shaped paths.
@Test
public void testList() throws Exception {
createAt(runningNote, "note.zpln");
createAt(runningNote, "/note.zpln");
createAt(runningNote, "validid/my_12.zpln");
createAt(runningNote, "validid-2/my_123.zpln");
createAt(runningNote, "cannot-be-dir/note.json/foo");
createAt(runningNote, "cannot/be/nested/note.json");
Map<String, NoteInfo> infos = notebookRepo.list(AUTH_INFO);
List<String> noteIds = new ArrayList<>();
for (NoteInfo info : infos.values()) {
noteIds.add(info.getId());
}
// Only valid paths are gs://bucketname/path/<noteid>/note.json
assertThat(noteIds).containsExactlyElementsIn(ImmutableList.of("12", "123"));
}
@Test
public void testGet_nonexistent() throws Exception {
try {
notebookRepo.get("id", "", AUTH_INFO);
fail();
} catch (IOException e) {}
}
@Test
public void testGet() throws Exception {
create(runningNote);
// Status of saved running note is removed in get()
Note got = notebookRepo.get(runningNote.getId(), runningNote.getPath(), AUTH_INFO);
assertThat(got.getLastParagraph().getStatus()).isEqualTo(Status.ABORT);
// But otherwise equal
got.getLastParagraph().setStatus(Status.RUNNING);
assertThat(got).isEqualTo(runningNote);
}
// Invalid JSON stored at the note's blob must surface as an IOException.
@Test
public void testGet_malformed() throws Exception {
createMalformed("id", "/name");
try {
notebookRepo.get("id", "/name", AUTH_INFO);
fail();
} catch (IOException e) {}
}
@Test
public void testSave_create() throws Exception {
notebookRepo.save(runningNote, AUTH_INFO);
// Output is saved
assertThat(storage.readAllBytes(makeBlobId(runningNote.getId(), runningNote.getPath())))
.isEqualTo(runningNote.toJson().getBytes("UTF-8"));
}
@Test
public void testSave_update() throws Exception {
notebookRepo.save(runningNote, AUTH_INFO);
// Change name of runningNote
runningNote.setPath("/new-name");
notebookRepo.save(runningNote, AUTH_INFO);
assertThat(storage.readAllBytes(makeBlobId(runningNote.getId(), runningNote.getPath())))
.isEqualTo(runningNote.toJson().getBytes("UTF-8"));
}
@Test
public void testRemove_nonexistent() throws Exception {
try {
notebookRepo.remove("id", "/name", AUTH_INFO);
fail();
} catch (IOException e) {}
}
@Test
public void testRemove() throws Exception {
create(runningNote);
notebookRepo.remove(runningNote.getId(), runningNote.getPath(), AUTH_INFO);
assertThat(storage.get(makeBlobId(runningNote.getId(), runningNote.getPath()))).isNull();
}
// Prefixes relativePath with the configured base path, when one is set.
private String makeName(String relativePath) {
if (basePath.isPresent()) {
return basePath.get() + "/" + relativePath;
} else {
return relativePath;
}
}
// Blob naming scheme used by the repo: <basePath?><notePath>_<noteId>.zpln
// (notePath always starts with '/', stripped when there is no base path).
private BlobId makeBlobId(String noteId, String notePath) {
if (basePath.isPresent()) {
return BlobId.of(bucketName, basePath.get() + notePath + "_" + noteId +".zpln");
} else {
return BlobId.of(bucketName, notePath.substring(1) + "_" + noteId +".zpln");
}
}
// Stores the note's JSON at an arbitrary relative path (for list() tests).
private void createAt(Note note, String relativePath) throws IOException {
BlobId id = BlobId.of(bucketName, makeName(relativePath));
BlobInfo info = BlobInfo.newBuilder(id).setContentType("application/json").build();
storage.create(info, note.toJson().getBytes("UTF-8"));
}
// Stores the note's JSON at its canonical blob id.
private void create(Note note) throws IOException {
BlobInfo info = BlobInfo.newBuilder(makeBlobId(note.getId(), note.getPath()))
.setContentType("application/json")
.build();
storage.create(info, note.toJson().getBytes("UTF-8"));
}
// Stores deliberately invalid JSON at the note's canonical blob id.
private void createMalformed(String noteId, String notePath) throws IOException {
BlobInfo info = BlobInfo.newBuilder(makeBlobId(noteId, notePath))
.setContentType("application/json")
.build();
storage.create(info, "{ invalid-json }".getBytes("UTF-8"));
}
/* These tests test path parsing for illegal paths, and do not use the parameterized vars */
@Test
public void testInitialization_pathNotSet() throws Exception {
try {
System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), "");
new GCSNotebookRepo(new ZeppelinConfiguration(), storage);
fail();
} catch (IOException e) {}
}
@Test
public void testInitialization_malformedPath() throws Exception {
try {
System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), "foo");
new GCSNotebookRepo(new ZeppelinConfiguration(), storage);
fail();
} catch (IOException e) {}
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.intellij.codeInsight.PsiEquivalenceUtil;
import consulo.logging.Logger;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.MethodSignature;
import com.intellij.psi.util.PsiFormatUtil;
import com.intellij.psi.util.PsiFormatUtilBase;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiTypesUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.TypeConversionUtil;
/**
* User: anna
*/
// Static helpers for resolving and validating Java method reference
// expressions (Foo::bar). The "second search" terminology follows
// JLS §15.13: when the functional interface's first parameter can act as the
// receiver, the reference is treated as an invocation on that receiver with
// the remaining parameters as arguments.
public class PsiMethodReferenceUtil
{
private static final Logger LOG = Logger.getInstance(PsiMethodReferenceUtil.class);
// True when the reference may be resolved by the JLS "second search":
// at least one functional-interface parameter, a non-primitive first
// parameter, a non-constructor reference qualified by a type, and a first
// parameter assignable to the qualifier (receiver) type.
public static boolean isSecondSearchPossible(PsiType[] parameterTypes, QualifierResolveResult qualifierResolveResult, PsiMethodReferenceExpression methodRef)
{
if(parameterTypes.length > 0 &&
!(parameterTypes[0] instanceof PsiPrimitiveType) &&
!methodRef.isConstructor() &&
isStaticallyReferenced(methodRef) &&
isReceiverType(parameterTypes[0], qualifierResolveResult.getContainingClass(), qualifierResolveResult.getSubstitutor()))
{
return true;
}
return false;
}
// True when the resolved method's arity matches the functional method's
// arity minus the receiver (or matches via varargs) AND the second search
// is applicable; null signature means "not resolved" and yields false.
public static boolean isResolvedBySecondSearch(@Nonnull PsiMethodReferenceExpression methodRef, @Nullable MethodSignature signature, boolean varArgs, boolean isStatic, int parametersCount)
{
if(signature == null)
{
return false;
}
final QualifierResolveResult qualifierResolveResult = getQualifierResolveResult(methodRef);
final PsiType[] functionalMethodParameterTypes = signature.getParameterTypes();
return (parametersCount + 1 == functionalMethodParameterTypes.length && !varArgs || varArgs && functionalMethodParameterTypes.length > 0 && !isStatic) && isSecondSearchPossible
(functionalMethodParameterTypes, qualifierResolveResult, methodRef);
}
// Returns the type of the qualifier: an explicit type element if present,
// otherwise the qualifier expression's type, otherwise (for an unresolved /
// typeless reference qualifier) a class type built from the reference.
@Nullable
public static PsiType getQualifierType(PsiMethodReferenceExpression expression)
{
final PsiTypeElement typeElement = expression.getQualifierType();
if(typeElement != null)
{
return typeElement.getType();
}
else
{
PsiType qualifierType = null;
final PsiElement qualifier = expression.getQualifier();
if(qualifier instanceof PsiExpression)
{
qualifierType = ((PsiExpression) qualifier).getType();
}
if(qualifierType == null && qualifier instanceof PsiReferenceExpression)
{
return JavaPsiFacade.getElementFactory(expression.getProject()).createType((PsiReferenceExpression) qualifier);
}
return qualifierType;
}
}
public static boolean isReturnTypeCompatible(PsiMethodReferenceExpression expression, JavaResolveResult result, PsiType functionalInterfaceType)
{
return isReturnTypeCompatible(expression, result, functionalInterfaceType, null);
}
// Checks that the referenced method's (substituted) return type is
// assignable to the functional interface's return type. A void/unknown
// interface return type is always compatible. When errorMessage is
// non-null it receives a human-readable explanation on failure.
private static boolean isReturnTypeCompatible(PsiMethodReferenceExpression expression, JavaResolveResult result, PsiType functionalInterfaceType, Ref<String> errorMessage)
{
final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(functionalInterfaceType);
final PsiMethod interfaceMethod = LambdaUtil.getFunctionalInterfaceMethod(resolveResult);
if(interfaceMethod != null)
{
final PsiType interfaceReturnType = LambdaUtil.getFunctionalInterfaceReturnType(functionalInterfaceType);
if(PsiType.VOID.equals(interfaceReturnType) || interfaceReturnType == null)
{
return true;
}
PsiSubstitutor subst = result.getSubstitutor();
PsiType methodReturnType = null;
PsiClass containingClass = null;
final PsiElement resolve = result.getElement();
if(resolve instanceof PsiMethod)
{
containingClass = ((PsiMethod) resolve).getContainingClass();
methodReturnType = PsiTypesUtil.patchMethodGetClassReturnType(expression, (PsiMethod) resolve);
if(methodReturnType == null)
{
methodReturnType = ((PsiMethod) resolve).getReturnType();
if(PsiType.VOID.equals(methodReturnType))
{
// void method cannot produce the (non-void) interface return value.
return false;
}
// For a second-search (receiver) reference, re-map type parameters
// from the method's class onto the qualifier class before substituting.
PsiClass qContainingClass = getQualifierResolveResult(expression).getContainingClass();
if(qContainingClass != null && containingClass != null &&
isReceiverType(getFirstParameterType(functionalInterfaceType, expression), qContainingClass, subst))
{
subst = TypeConversionUtil.getClassSubstitutor(containingClass, qContainingClass, subst);
LOG.assertTrue(subst != null);
}
methodReturnType = subst.substitute(methodReturnType);
}
}
else if(resolve instanceof PsiClass)
{
// Constructor reference. Special-case the synthetic array "class":
// T[]::new produces an array of the single type parameter.
if(PsiEquivalenceUtil.areElementsEquivalent(resolve, JavaPsiFacade.getElementFactory(expression.getProject()).getArrayClass(PsiUtil.getLanguageLevel(expression))))
{
final PsiTypeParameter[] typeParameters = ((PsiClass) resolve).getTypeParameters();
if(typeParameters.length == 1)
{
final PsiType arrayComponentType = subst.substitute(typeParameters[0]);
if(arrayComponentType == null)
{
return false;
}
methodReturnType = arrayComponentType.createArrayType();
}
}
containingClass = (PsiClass) resolve;
}
if(methodReturnType == null)
{
if(containingClass == null)
{
return false;
}
// Default constructor reference: the produced type is the class itself.
methodReturnType = JavaPsiFacade.getElementFactory(expression.getProject()).createType(containingClass, subst);
}
methodReturnType = PsiUtil.captureToplevelWildcards(methodReturnType, expression);
if(TypeConversionUtil.isAssignable(interfaceReturnType, methodReturnType))
{
return true;
}
if(errorMessage != null)
{
errorMessage.set("Bad return type in method reference: " +
"cannot convert " + methodReturnType.getCanonicalText() + " to " + interfaceReturnType.getCanonicalText());
}
}
return false;
}
// Immutable result of resolving a method reference's qualifier: the class it
// denotes, the substitutor for its type arguments, and whether the qualifier
// was a type reference (Foo::bar) rather than an expression (foo::bar).
public static class QualifierResolveResult
{
private final PsiClass myContainingClass;
private final PsiSubstitutor mySubstitutor;
private final boolean myReferenceTypeQualified;
public QualifierResolveResult(PsiClass containingClass, PsiSubstitutor substitutor, boolean referenceTypeQualified)
{
myContainingClass = containingClass;
mySubstitutor = substitutor;
myReferenceTypeQualified = referenceTypeQualified;
}
@Nullable
public PsiClass getContainingClass()
{
return myContainingClass;
}
public PsiSubstitutor getSubstitutor()
{
return mySubstitutor;
}
public boolean isReferenceTypeQualified()
{
return myReferenceTypeQualified;
}
}
// For keyword references (e.g. Foo::new) the qualifier must be a type
// element or a reference resolving to a class.
public static boolean isValidQualifier(PsiMethodReferenceExpression expression)
{
final PsiElement referenceNameElement = expression.getReferenceNameElement();
if(referenceNameElement instanceof PsiKeyword)
{
final PsiElement qualifier = expression.getQualifier();
if(qualifier instanceof PsiTypeElement)
{
return true;
}
if(qualifier instanceof PsiReferenceExpression && ((PsiReferenceExpression) qualifier).resolve() instanceof PsiClass)
{
return true;
}
}
return false;
}
// Resolves the qualifier (expression or type element) to a class plus
// substitutor. Captured wildcards are widened to their upper bound and array
// types are replaced by the synthetic array class before resolution.
@Nonnull
public static QualifierResolveResult getQualifierResolveResult(@Nonnull PsiMethodReferenceExpression methodReferenceExpression)
{
PsiClass containingClass = null;
PsiSubstitutor substitutor = PsiSubstitutor.EMPTY;
final PsiExpression expression = methodReferenceExpression.getQualifierExpression();
if(expression != null)
{
PsiType expressionType = expression.getType();
if(expressionType instanceof PsiCapturedWildcardType)
{
expressionType = ((PsiCapturedWildcardType) expressionType).getUpperBound();
}
else
{
expressionType = replaceArrayType(expressionType, expression);
}
PsiClassType.ClassResolveResult result = PsiUtil.resolveGenericsClassInType(expressionType);
containingClass = result.getElement();
if(containingClass != null)
{
substitutor = result.getSubstitutor();
}
if(containingClass == null && expression instanceof PsiReferenceExpression)
{
// The qualifier expression has no type but names a class directly.
final JavaResolveResult resolveResult = ((PsiReferenceExpression) expression).advancedResolve(false);
final PsiElement resolve = resolveResult.getElement();
if(resolve instanceof PsiClass)
{
containingClass = (PsiClass) resolve;
substitutor = resolveResult.getSubstitutor();
return new QualifierResolveResult(containingClass, substitutor, true);
}
}
}
else
{
final PsiTypeElement typeElement = methodReferenceExpression.getQualifierType();
if(typeElement != null)
{
PsiType type = replaceArrayType(typeElement.getType(), typeElement);
PsiClassType.ClassResolveResult result = PsiUtil.resolveGenericsClassInType(type);
containingClass = result.getElement();
if(containingClass != null)
{
return new QualifierResolveResult(containingClass, result.getSubstitutor(), true);
}
}
}
return new QualifierResolveResult(containingClass, substitutor, false);
}
// True when the reference is qualified by a class name (or has no qualifier
// expression at all), i.e. the "static" form Foo::bar.
public static boolean isStaticallyReferenced(@Nonnull PsiMethodReferenceExpression methodReferenceExpression)
{
final PsiExpression qualifierExpression = methodReferenceExpression.getQualifierExpression();
if(qualifierExpression != null)
{
return qualifierExpression instanceof PsiReferenceExpression && ((PsiReferenceExpression) qualifierExpression).resolve() instanceof PsiClass;
}
return true;
}
//if P1, ..., Pn is not empty and P1 is a subtype of ReferenceType, then the method reference expression is treated as
// if it were a method invocation expression with argument expressions of types P2, ...,Pn.
public static boolean isReceiverType(@Nullable PsiType receiverType, PsiClass containingClass, PsiSubstitutor psiSubstitutor)
{
if(receiverType == null)
{
return false;
}
return TypeConversionUtil.isAssignable(JavaPsiFacade.getElementFactory(containingClass.getProject()).createType(containingClass, psiSubstitutor), replaceArrayType(receiverType,
containingClass));
}
// Substituted type of the functional interface method's first parameter
// (the potential receiver), with top-level wildcards captured; null when the
// interface has no function or no parameters.
public static PsiType getFirstParameterType(PsiType functionalInterfaceType, PsiElement context)
{
final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(functionalInterfaceType);
final MethodSignature function = LambdaUtil.getFunction(resolveResult.getElement());
if(function != null)
{
final int interfaceMethodParamsLength = function.getParameterTypes().length;
if(interfaceMethodParamsLength > 0)
{
PsiType type = resolveResult.getSubstitutor().substitute(function.getParameterTypes()[0]);
return type != null ? PsiUtil.captureToplevelWildcards(type, context) : null;
}
}
return null;
}
// Arrays have no PsiClass of their own; map T[] to the synthetic array class
// so class-based resolution/assignability checks can be applied uniformly.
private static PsiType replaceArrayType(PsiType type, @Nonnull PsiElement context)
{
if(type instanceof PsiArrayType)
{
type = JavaPsiFacade.getElementFactory(context.getProject()).getArrayClassType(((PsiArrayType) type).getComponentType(), PsiUtil.getLanguageLevel(context));
}
return type;
}
public static String checkMethodReferenceContext(PsiMethodReferenceExpression methodRef)
{
final PsiElement resolve = methodRef.resolve();
if(resolve == null)
{
return null;
}
return checkMethodReferenceContext(methodRef, resolve, methodRef.getFunctionalInterfaceType());
}
// Validates static-vs-instance context rules for the reference; returns a
// user-facing error message, or null when the reference is well-formed.
public static String checkMethodReferenceContext(PsiMethodReferenceExpression methodRef, PsiElement resolve, PsiType functionalInterfaceType)
{
final PsiClass containingClass = resolve instanceof PsiMethod ? ((PsiMethod) resolve).getContainingClass() : (PsiClass) resolve;
final boolean isStaticSelector = isStaticallyReferenced(methodRef);
final PsiElement qualifier = methodRef.getQualifier();
boolean isMethodStatic = false;
boolean receiverReferenced = false;
boolean isConstructor = true;
if(resolve instanceof PsiMethod)
{
final PsiMethod method = (PsiMethod) resolve;
isMethodStatic = method.hasModifierProperty(PsiModifier.STATIC);
isConstructor = method.isConstructor();
final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(functionalInterfaceType);
final PsiMethod interfaceMethod = LambdaUtil.getFunctionalInterfaceMethod(resolveResult);
receiverReferenced = isResolvedBySecondSearch(methodRef, interfaceMethod != null ? interfaceMethod.getSignature(LambdaUtil.getSubstitutor(interfaceMethod, resolveResult)) : null, method
.isVarArgs(), isMethodStatic, method.getParameterList().getParametersCount());
if(method.hasModifierProperty(PsiModifier.ABSTRACT) && qualifier instanceof PsiSuperExpression)
{
return "Abstract method '" + method.getName() + "' cannot be accessed directly";
}
}
if(!receiverReferenced && isStaticSelector && !isMethodStatic && !isConstructor)
{
return "Non-static method cannot be referenced from a static context";
}
if(!receiverReferenced && !isStaticSelector && isMethodStatic)
{
return "Static method referenced through non-static qualifier";
}
if(receiverReferenced && isStaticSelector && isMethodStatic && !isConstructor)
{
return "Static method referenced through receiver";
}
// JLS forbids type arguments on the qualifier of a static method reference.
if(isMethodStatic && isStaticSelector && qualifier instanceof PsiTypeElement)
{
final PsiJavaCodeReferenceElement referenceElement = PsiTreeUtil.getChildOfType(qualifier, PsiJavaCodeReferenceElement.class);
if(referenceElement != null)
{
final PsiReferenceParameterList parameterList = referenceElement.getParameterList();
if(parameterList != null && parameterList.getTypeArguments().length > 0)
{
return "Parameterized qualifier on static method reference";
}
}
}
if(isConstructor)
{
// Inner-class constructor reference needs an enclosing instance in scope.
if(containingClass != null && PsiUtil.isInnerClass(containingClass) && containingClass.isPhysical())
{
PsiClass outerClass = containingClass.getContainingClass();
if(outerClass != null && !InheritanceUtil.hasEnclosingInstanceInScope(outerClass, methodRef, true, false))
{
return "An enclosing instance of type " + PsiFormatUtil.formatClass(outerClass, PsiFormatUtilBase.SHOW_NAME) + " is not in scope";
}
}
}
return null;
}
// Wildcards are not allowed as explicit type arguments on a method
// reference qualifier; returns an error message or null.
public static String checkTypeArguments(PsiTypeElement qualifier, PsiType psiType)
{
if(psiType instanceof PsiClassType)
{
final PsiJavaCodeReferenceElement referenceElement = qualifier.getInnermostComponentReferenceElement();
if(referenceElement != null)
{
PsiType[] typeParameters = referenceElement.getTypeParameters();
for(PsiType typeParameter : typeParameters)
{
if(typeParameter instanceof PsiWildcardType)
{
return "Unexpected wildcard";
}
}
}
}
return null;
}
// Convenience wrapper: returns the return-type incompatibility message, or
// null when the return types are compatible.
public static String checkReturnType(PsiMethodReferenceExpression expression, JavaResolveResult result, PsiType functionalInterfaceType)
{
final Ref<String> errorMessage = Ref.create();
if(!isReturnTypeCompatible(expression, result, functionalInterfaceType, errorMessage))
{
return errorMessage.get();
}
return null;
}
}
| |
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.sharedpasses.opti;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.truth.StringSubject;
import com.google.template.soy.error.ErrorReporter;
import com.google.template.soy.logging.LoggingFunction;
import com.google.template.soy.shared.restricted.Signature;
import com.google.template.soy.shared.restricted.SoyFunctionSignature;
import com.google.template.soy.soytree.MsgFallbackGroupNode;
import com.google.template.soy.soytree.MsgNode;
import com.google.template.soy.soytree.MsgPlaceholderNode;
import com.google.template.soy.soytree.RawTextNode;
import com.google.template.soy.soytree.SoyFileNode;
import com.google.template.soy.soytree.SoyFileSetNode;
import com.google.template.soy.soytree.SoyNode;
import com.google.template.soy.soytree.TemplateNode;
import com.google.template.soy.testing.SoyFileSetParserBuilder;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public class SimplifyVisitorTest {
// Verifies that simplification never replaces the direct children of a {msg}
// block (placeholders must stay, for translation stability), while content
// *inside* each MsgPlaceholderNode may still be simplified.
@Test
public void testMsgBlockNodeChildrenAreNotReplaced() throws Exception {
String soyFileContent =
"{namespace boo}\n"
+ "\n"
+ "{template foo}\n"
+ "\n"
+ " {msg desc=\"\"}\n"
+ " blah\n"
+ " {'blah'}\n"
+ " blah\n"
+ " {call aaa /}\n"
+ " blah\n"
+ " <div class=\"{call aaa /}\">\n"
+ " </div>\n"
+ " blah\n"
+ " {/msg}\n"
+ "{/template}\n"
+ "\n"
+ "/***/\n"
+ "{template aaa}\n"
+ " blah\n"
+ "{/template}";
// Navigate: file set -> file -> template -> MsgFallbackGroupNode -> MsgNode.
MsgNode msgNode =
((MsgFallbackGroupNode)
((TemplateNode) simplifySoyFiles(soyFileContent).getChild(0).getChild(0))
.getChild(0))
.getChild(0);
assertThat(msgNode.numChildren()).isEqualTo(8);
// The MsgPlaceholderNode children are not replaced.
assertThat(msgNode.getChild(1)).isInstanceOf(MsgPlaceholderNode.class);
assertThat(msgNode.getChild(3)).isInstanceOf(MsgPlaceholderNode.class);
assertThat(msgNode.getChild(5)).isInstanceOf(MsgPlaceholderNode.class);
assertThat(msgNode.getChild(6)).isInstanceOf(MsgPlaceholderNode.class);
// But the contents within the MsgPlaceholderNode children can be replaced.
assertThat(((MsgPlaceholderNode) msgNode.getChild(1)).getChild(0))
.isInstanceOf(RawTextNode.class);
}
// Print nodes with constant expressions collapse to raw text; non-constant
// expressions, impure functions and non-constant directive args are kept.
@Test
public void testSimplifyPrintNode() throws Exception {
assertSimplification("{'foo'}").isEqualTo("foo");
assertSimplification("{'abcdefgh' |insertWordBreaks:5}").isEqualTo("abcde<wbr>fgh");
// Doesn't simplify PrintNode with non-constant expression (but expression is simplified).
assertSimplification("{@param boo : ?}\n" + "{1 + 3 + $boo}")
.isEqualTo("{@param boo: ?}\n{4 + $boo}");
// formatNum is not annotated as a SoyPureFunction, so it should not be simplified.
assertSimplification("{formatNum(5)}").isEqualTo("{formatNum(5)}");
// Doesn't simplify PrintNode with non-constant directive arg.
assertSimplification("{@param boo : ?}\n" + "{'0123456789' |insertWordBreaks:$boo}")
.isEqualTo("{@param boo: ?}\n{'0123456789' |insertWordBreaks:$boo}");
}
// Constant {if}/{elseif} conditions are folded away: statically-true branches
// are inlined, statically-false branches dropped, and only branches guarded
// by non-constant conditions (e.g. $boo) survive — including when nested.
@Test
public void testSimplifyIfNode() throws Exception {
assertSimplification(
"{if not false}", " 111", "{/if}", "{if true and false}", " 222", "{/if}")
.isEqualTo("111");
assertSimplification("{if ''}", " 111", "{elseif not 1}", " 222", "{else}", " 333", "{/if}")
.isEqualTo("333");
assertSimplification(
"{if false}", " 111", "{elseif true}", " 222", "{else}", " 333", "{/if}")
.isEqualTo("222");
assertSimplification(
"{@param boo : ?}",
"{if false}",
" 111",
"{elseif $boo}",
" 222",
"{elseif true}",
" 333",
"{else}",
" 444",
"{/if}")
.isEqualTo("{@param boo: ?}\n{if $boo}222{else}333{/if}");
assertSimplification(
"{@param boo : ?}",
"{if 0}",
" 111",
"{elseif 1}",
" {if true}",
" {if $boo}",
" 222",
" {elseif ''}",
" 333",
" {elseif 'blah'}",
" 444",
" {else}",
" 555",
" {/if}",
" {else}",
" 666",
" {/if}",
"{else}",
" 777",
"{/if}")
.isEqualTo("{@param boo: ?}\n{if $boo}222{else}444{/if}");
}
// {switch} on a constant expression: a matching constant case is inlined,
// the default is used when no constant case matches, and cases with
// non-constant expressions (e.g. {case $boo}) keep the switch alive.
@Test
public void testSimplifySwitchNode() throws Exception {
assertSimplification(
"{@param boo : ?}",
"{switch 1 + 2}",
" {case 1}111",
" {case 2, 3}222333",
" {case $boo}444",
" {default}goo",
"{/switch}")
.isEqualTo("{@param boo: ?}\n222333");
assertSimplification(
"{switch 1 + 2}", " {case 1}111", " {case 2}222", " {default}333", "{/switch}")
.isEqualTo("333");
assertSimplification(
"{@param boo : ?}",
"{switch 1 + 2}",
" {case $boo}111",
" {case 2}222",
" {case 3}333",
" {default}444",
"{/switch}")
.isEqualTo("{@param boo: ?}\n{switch 3}{case $boo}111{default}333{/switch}");
}
// Block-form {let ...}{/let} nodes are rewritten to expression-form
// {let $x : expr /} when their content can be expressed as a concatenation.
@Test
public void testRewriteContentNodes_let() {
assertSimplification("{let $foo kind='text'}hello{/let}{$foo}{$foo}")
.isEqualTo("{let $foo : 'hello' /}{$foo}{$foo}");
assertSimplification("{let $foo kind='text'}{xid('foo')}:{xid('bar')}{/let}{$foo}{$foo}")
.isEqualTo("{let $foo : '' + xid('foo') + ':' + xid('bar') /}{$foo}{$foo}");
}
  /**
   * Verifies that a block-form {@code {param ... kind='text'}} inside a {@code {call}} is
   * rewritten into the self-closing expression form, like block-form lets.
   */
  @Test
  public void testRewriteContentNodes_callParam() {
    assertSimplification(
            "{@param p: ?}",
            "{call t}",
            " {param p kind='text'}",
            " hello world {$p}",
            " {/param}",
            "{/call}")
        .isEqualTo("{@param p: ?}\n{call t}{param p: 'hello world ' + $p /}{/call}");
  }
  /**
   * Verifies that a call param whose content contains a {@link LoggingFunction} (here
   * {@code currentVed()}) is NOT rewritten into an expression param — logging functions must
   * stay in a content block so placeholder substitution still happens at render time.
   */
  @Test
  public void testCallParamWithLoggingFunctionNotRewritten() {
    assertSimplification(
            "<{t2()} data-ved=\"{currentVed()}\"></>",
            "{/template}",
            "{template t2 kind=\"html<?>\"}",
            " {@attribute? data-ved: string}",
            " <div @data-ved></div>")
        .isEqualTo(
            "{call t2}{param ssk: null /}{param dataVed kind=\"text\"}{currentVed()"
                + " |escapeHtmlAttribute}{/param}{/call}");
  }
  /**
   * Verifies that {@code {call $tpl.bind(record(...))}} is flattened into a plain
   * {@code {call $tpl}} whose bound record fields become ordinary {@code {param}}s,
   * merged with any explicitly passed params.
   */
  @Test
  public void testCallBind() {
    // Partial bind plus an explicit param.
    assertSimplification(
            "{@param tpl: (a: string, b: string) => html<?>}",
            "{call $tpl.bind(record(a:'anA'))}",
            " {param b: 'aB' /}",
            "{/call}")
        .isEqualTo(
            "{@param tpl: (a: string, b: string) => html<?>}\n"
                + "{call $tpl}{param a: 'anA' /}{param b: 'aB' /}{/call}");
    // Full bind with a self-closing call.
    assertSimplification(
            "{@param tpl: (a: string, b: string) => html<?>}",
            "{call $tpl.bind(record(a:'anA', b:'aB')) /}")
        .isEqualTo(
            "{@param tpl: (a: string, b: string) => html<?>}\n"
                + "{call $tpl}{param a: 'anA' /}{param b: 'aB' /}{/call}");
  }
  /**
   * Verifies {@code {let}} inlining rules: single-use trivial lets are inlined into their
   * use site, lets used more than once are kept, and a non-trivial let defined outside a
   * loop is not moved into the loop body (it would be re-evaluated per iteration).
   */
  @Test
  public void testInlineLets() {
    // Chained aliases collapse down to the original param reference.
    assertSimplification("{@param p: ?}", "{let $a : $p /}", "{let $b : $a /}", "{$b}")
        .isEqualTo(normalized("{@param p: ?}", "{$p}"));
    // Used twice: not inlined.
    assertNoOp("{@param p: ?}", "{let $b : $p /}{$b + $b}");
    assertSimplification(
            "{@param p: ?}",
            "{let $b : $p + 1 /}",
            "{for $i in range(10)}",
            "{let $c : $i + 1 /}",
            "{$b + $c}",
            "{/for}")
        .isEqualTo(
            normalized(
                "{@param p: ?}",
                "{let $b : $p + 1 /}", // b doesn't move inside the loop
                "{for $i in range(10)}",
                "{$b + ($i + 1)}", // c does because it is defined inside the loop
                "{/for}"));
  }
  /**
   * Verifies that a trivial (constant-valued) let defined outside a loop IS inlined into the
   * loop body — re-evaluating a constant per iteration costs nothing.
   */
  @Test
  public void testInlineLets_trivialValuesMoveInsideLoops() {
    assertSimplification(
            "{let $b : 1 /}", "{for $i in range(10)}", "{let $c : $i + 1 /}", "{$b + $c}", "{/for}")
        .isEqualTo(normalized("{for $i in range(10)}", "{1 + ($i + 1)}", "{/for}"));
  }
  /**
   * Verifies that simplification passes cascade: inlining one construct exposes further
   * constant-folding opportunities until a fixed point is reached.
   */
  @Test
  public void testInliningUnlocksFurtherOptimization() {
    // First the two lets should get turned into letvaluenodes
    // Then foo will be inlined
    // Then the if will be evaluated, deleting the else branch
    // then bar will be inlined
    // Then the print node will be eliminated.
    assertSimplification(
            "{let $foo kind='text'}foo{/let}",
            "{let $bar kind='text'}bar{/let}",
            "{if $foo}Hello {$bar}{else}Goodbye {$bar}{/if}")
        .isEqualTo(normalized("Hello bar"));
  }
  /**
   * Verifies that constant print nodes (direct or via an inlined let) are folded into the
   * surrounding {@code {msg}} as raw text, removing the placeholder.
   */
  @Test
  public void testInlineIntoMsg() {
    assertSimplification("{msg desc='...'}Hello {'foo' phname=\"FOO\"}{/msg}")
        .isEqualTo(normalized("{msg desc=\"...\"}Hello foo{/msg}"));
    assertSimplification("{let $foo kind='text'}foo{/let}", "{msg desc='...'}Hello {$foo}{/msg}")
        .isEqualTo(normalized("{msg desc=\"...\"}Hello foo{/msg}"));
  }
private static StringSubject assertSimplification(String... input) {
SoyFileSetNode node = parse(join(input));
SimplifyVisitor.create(
node.getNodeIdGenerator(),
ImmutableList.copyOf(node.getChildren()),
ErrorReporter.exploding())
.simplify(node.getChild(0));
return assertThat(toString(node.getChild(0).getChild(0)));
}
private static void assertNoOp(String... input) {
SoyFileSetNode node = parse(join(input));
String original = toString(node.getChild(0).getChild(0));
SimplifyVisitor.create(
node.getNodeIdGenerator(),
ImmutableList.copyOf(node.getChildren()),
ErrorReporter.exploding())
.simplify(node.getChild(0));
String rewritten = toString(node.getChild(0).getChild(0));
assertThat(rewritten).isEqualTo(original);
}
  /**
   * Parses {@code input} wrapped in a synthetic {@code ns} namespace and {@code t} template,
   * with the optimizer disabled so {@link SimplifyVisitor} can be exercised in isolation.
   * Registers {@link CurrentVedFunction} for the logging-function tests.
   */
  private static SoyFileSetNode parse(String input) {
    return SoyFileSetParserBuilder.forFileContents(
            join("{namespace ns}", "{template t}", input, "{/template}"))
        .runOptimizer(false)
        .addSoySourceFunction(new CurrentVedFunction())
        .parse()
        .fileSet();
  }
private static String toString(SoyNode node) {
String string = node.toSourceString();
return string.replace("{template t}\n", "").replace("\n{/template}", "").trim();
}
  /**
   * Parses {@code args} WITHOUT simplification and returns the canonical source string of
   * the template body — used to build expected values that match the printer's formatting.
   */
  private static String normalized(String... args) {
    return toString(parse(join(args)).getChild(0).getChild(0));
  }
  /** Joins the lines with '\n' (equivalent to {@code String.join("\n", args)} for non-null args). */
  private static String join(String... args) {
    return Joiner.on('\n').join(args);
  }
private SoyFileSetNode simplifySoyFiles(String... soyFileContents) throws Exception {
SoyFileSetNode fileSet =
SoyFileSetParserBuilder.forFileContents(soyFileContents).parse().fileSet();
SimplifyVisitor simplifyVisitor =
SimplifyVisitor.create(
fileSet.getNodeIdGenerator(),
ImmutableList.copyOf(fileSet.getChildren()),
ErrorReporter.exploding());
for (SoyFileNode file : fileSet.getChildren()) {
simplifyVisitor.simplify(file);
}
return fileSet;
}
  /**
   * Minimal {@link LoggingFunction} registered by {@link #parse}; its mere presence in a
   * param marks that param as non-rewritable (exercised by the logging-function test above).
   */
  @SoyFunctionSignature(name = "currentVed", value = @Signature(returnType = "string"))
  private static final class CurrentVedFunction implements LoggingFunction {
    @Override
    public String getPlaceholder() {
      // Placeholder text used when logging is not active.
      return "";
    }
  }
}
| |
/*
* All rights reserved. (C) Copyright 2009, Trinity College Dublin
*/
package com.mind_era.knime.common.util.swing.colour;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.SortedMap;
import java.util.TreeMap;
import javax.annotation.CheckReturnValue;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JColorChooser;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JSpinner;
import javax.swing.JSpinner.NumberEditor;
import javax.swing.SpinnerNumberModel;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.knime.core.util.Pair;
import com.mind_era.knime.common.util.interval.Interval;
import com.mind_era.knime.common.util.interval.Interval.DefaultInterval;
import com.mind_era.knime.common.util.swing.colour.ComplexLegend.ComplexSample;
import com.mind_era.knime.common.view.StatTypes;
/**
 * A slightly complex control to handle {@link ComplexModel}
 * {@link ColourComputer}s.
 *
 * <p>The control itself is a button showing a {@link ComplexLegend} preview; clicking it
 * opens a modal editor dialog ({@link ComplexColourPanel}) with one spinner row per
 * colour interval.
 *
 * @author <a href="mailto:bakosg@tcd.ie">Gabor Bakos</a>
 */
@Nonnull
@CheckReturnValue
public class ComplexControl extends JButton implements
		ColourControl<ComplexModel> {
	private static final long serialVersionUID = 8142060029147780548L;
	/** The no (visible) text label. */
	private static final String NO_TEXT = "\u00a0";
	/** The colour model backing this control. */
	private ComplexModel model;
	/** Small live preview of the current model. */
	private final ComplexLegend sample = new ComplexLegend();
	/** Check box (placed in the editor dialog) to link neighbouring intervals. */
	private final JCheckBox connectButton;
	/**
	 * The editor panel shown in the modal dialog: a {@link ComplexSample} preview on the
	 * left and one {@link Spinners} row per interval on the right. Rebuilds itself from
	 * scratch on every model change (see {@link #setModel(ComplexModel)}).
	 */
	private static final class ComplexColourPanel extends JPanel implements
			ActionListener {
		private static final long serialVersionUID = 7049084436247735940L;
		/**
		 * The column number for the spinners.
		 */
		private static final int SPINNER_COLUMNS = 7;
		/** The model currently being edited; committed only when the dialog's OK is pressed. */
		private ComplexModel mod;
		/** One entry per interval row, in the (descending) display order. */
		private final List<Spinners<?>> spinners = new ArrayList<Spinners<?>>();
		private final ComplexControl parent;
		/** A row editing one interval: low/high bound spinners plus its colour state. */
		private static interface Spinners<Colours> {
			/** @return The low values {@link JSpinner} */
			public JSpinner getLowSpinner();
			/** @return The high values {@link JSpinner} */
			public JSpinner getHighSpinner();
			/** @return The actual state of the implementation. */
			public Pair<Interval<Double>, Colours> getState();
		}
		/**
		 * Opens a {@link JColorChooser} and, on selection, pushes the new colour into the
		 * owning row at the given position (the button's background acts as the model).
		 */
		private static class ColourButtonAction extends AbstractAction {
			private static final long serialVersionUID = -1459471211823151265L;
			private final SpinnersCommon parent;
			private final JButton button;
			private final Positions pos;
			/**
			 * @param parent
			 *            The parent component.
			 * @param button
			 *            The button to listen. (It's background colour works as
			 *            a model.)
			 * @param pos
			 *            The position of the {@code button}.
			 */
			public ColourButtonAction(final SpinnersCommon parent,
					final JButton button, final Positions pos) {
				// Full-block characters so the button face shows the colour.
				super("\u2588\u2588\u2588\u2588");
				this.parent = parent;
				this.button = button;
				this.pos = pos;
			}
			@Override
			public void actionPerformed(final ActionEvent e) {
				final Color newColour = JColorChooser.showDialog(parent,
						"Select a color", button.getBackground());
				// null means the chooser was cancelled — keep the old colour.
				if (newColour != null) {
					parent.setColour(pos, newColour);
					parent.fireModelChange();
				}
			}
		}
		/** Row for a single-colour (discrete) interval: two spinners and one colour button. */
		private static class DiscreteControl extends SpinnersCommon implements
				Spinners<Color> {
			private static final long serialVersionUID = 4240875805367342933L;
			private final JButton colourButton;
			/**
			 * @param key
			 *            initial values
			 * @param colour
			 *            initial colour
			 */
			public DiscreteControl(final Interval<Double> key,
					final Color colour) {
				super(key);
				final GridBagLayout gbl = new GridBagLayout();
				setLayout(gbl);
				final GridBagConstraints highConstraints = new GridBagConstraints();
				highConstraints.gridx = 0;
				highConstraints.gridy = 0;
				highConstraints.fill = GridBagConstraints.HORIZONTAL;
				gbl.setConstraints(getHighSpinner(), highConstraints);
				add(getHighSpinner(), highConstraints);
				final GridBagConstraints colourConstraint = new GridBagConstraints();
				// The single colour button spans both spinner rows.
				colourConstraint.gridheight = 2;
				colourConstraint.gridx = 1;
				colourConstraint.gridy = 0;
				colourConstraint.fill = GridBagConstraints.HORIZONTAL;
				colourButton = new JButton();
				// Background AND foreground set so the block glyphs render in the colour.
				colourButton.setBackground(colour);
				colourButton.setForeground(colour);
				gbl.setConstraints(colourButton, colourConstraint);
				colourButton.setAction(new ColourButtonAction(this,
						colourButton, Positions.Middle));
				add(colourButton, colourConstraint);
				final GridBagConstraints lowSpinnerConstraint = new GridBagConstraints();
				lowSpinnerConstraint.gridx = 0;
				lowSpinnerConstraint.gridy = 1;
				lowSpinnerConstraint.fill = GridBagConstraints.HORIZONTAL;
				add(getLowSpinner(), lowSpinnerConstraint);
			}
			@Override
			public Pair<Interval<Double>, Color> getState() {
				// Interval is [low, high): left-closed, right-open.
				return new Pair<Interval<Double>, Color>(
						new DefaultInterval<Double>((Double) getLowSpinner()
								.getValue(), (Double) getHighSpinner()
								.getValue(), true, false), colourButton
								.getBackground());
			}
			@Override
			protected void setColour(final Positions pos, final Color newColour) {
				// Only one colour in a discrete row; pos is ignored.
				colourButton.setBackground(newColour);
				colourButton.setForeground(newColour);
			}
		}
		/**
		 * Row for a gradient (continuous) interval: two spinners and a low/high colour
		 * button pair.
		 * NOTE(review): non-static inner class — it keeps an (unused) reference to the
		 * enclosing panel; could be static like {@link DiscreteControl}.
		 */
		private class ContinuousControl extends SpinnersCommon implements
				Spinners<Pair<Color, Color>> {
			private static final long serialVersionUID = -5070099376158524580L;
			private final JButton highColourButton;
			private final JButton lowColourButton;
			/**
			 * @param key
			 *            initial values
			 * @param lowColour
			 *            initial low colour
			 * @param highColour
			 *            initial high colour
			 */
			public ContinuousControl(final Interval<Double> key,
					final Color lowColour, final Color highColour) {
				super(key);
				setLayout(new GridBagLayout());
				add(getHighSpinner(), new GridBagConstraints());
				highColourButton = new JButton();
				highColourButton.setBackground(highColour);
				highColourButton.setForeground(highColour);
				final GridBagConstraints hbcc = new GridBagConstraints();
				hbcc.gridx = 1;
				hbcc.gridy = 0;
				add(highColourButton, hbcc);
				highColourButton.setAction(new ColourButtonAction(this,
						highColourButton, Positions.Up));
				final GridBagConstraints lsc = new GridBagConstraints();
				lsc.gridx = 0;
				lsc.gridy = 1;
				add(getLowSpinner(), lsc);
				lowColourButton = new JButton();
				lowColourButton.setBackground(lowColour);
				lowColourButton.setForeground(lowColour);
				lowColourButton.setAction(new ColourButtonAction(this,
						lowColourButton, Positions.Down));
				final GridBagConstraints lbcc = new GridBagConstraints();
				lbcc.gridx = 1;
				lbcc.gridy = 1;
				add(lowColourButton, lbcc);
			}
			@Override
			public Pair<Interval<Double>, Pair<Color, Color>> getState() {
				// Interval is [low, high); colours are ordered (low, high).
				return new Pair<Interval<Double>, Pair<Color, Color>>(
						new DefaultInterval<Double>((Double) getLowSpinner()
								.getValue(), (Double) getHighSpinner()
								.getValue(), true, false),
						new Pair<Color, Color>(lowColourButton.getBackground(),
								highColourButton.getBackground()));
			}
			@Override
			protected void setColour(final Positions pos, final Color newColour) {
				switch (pos) {
				case Down:
					lowColourButton.setBackground(newColour);
					lowColourButton.setForeground(newColour);
					break;
				case Up:
					highColourButton.setBackground(newColour);
					highColourButton.setForeground(newColour);
					break;
				default:
					// Middle is not meaningful for a gradient row.
					break;
				}
			}
		}
		/**
		 * Shared base of the interval rows: owns the low/high {@link JSpinner}s and keeps
		 * the invariant {@code low <= high} by clamping the edited spinner to the other.
		 */
		private static abstract class SpinnersCommon extends ListenablePanel
				implements ActionListener {
			private static final long serialVersionUID = 6896425032410426881L;
			private final JSpinner lowSpinner;
			private final JSpinner highSpinner;
			/**
			 * Commons constructor to the {@link Spinners} implementations.
			 *
			 * @param key
			 *            The initial {@link Interval}.
			 */
			protected SpinnersCommon(final Interval<Double> key) {
				super();
				lowSpinner = new JSpinner(new SpinnerNumberModel(key.getLow()
						.doubleValue(), Double.NEGATIVE_INFINITY,
						Double.POSITIVE_INFINITY, .1));
				highSpinner = new JSpinner(new SpinnerNumberModel(key.getHigh()
						.doubleValue(), Double.NEGATIVE_INFINITY,
						Double.POSITIVE_INFINITY, .1));
				((NumberEditor) lowSpinner.getEditor()).getTextField()
						.setColumns(ComplexColourPanel.SPINNER_COLUMNS);
				((NumberEditor) highSpinner.getEditor()).getTextField()
						.setColumns(ComplexColourPanel.SPINNER_COLUMNS);
				lowSpinner.addChangeListener(new ChangeListener() {
					@Override
					public void stateChanged(final ChangeEvent e) {
						// If low was raised above high, drag it back down to high.
						if (((Double) lowSpinner.getValue())
								.compareTo((Double) highSpinner.getValue()) > 0) {
							lowSpinner.setValue(highSpinner.getValue());
						}
						fireModelChange();
					}
				});
				highSpinner.addChangeListener(new ChangeListener() {
					@Override
					public void stateChanged(final ChangeEvent e) {
						// If high was lowered below low, drag it back up to low.
						if (((Double) lowSpinner.getValue())
								.compareTo((Double) highSpinner.getValue()) > 0) {
							highSpinner.setValue(lowSpinner.getValue());
						}
						fireModelChange();
					}
				});
			}
			/**
			 * Changes the colour value at {@code pos}.
			 *
			 * @param pos
			 *            The position where the {@code newColour} belongs to.
			 * @param newColour
			 *            The new {@link Color} at that positon.
			 */
			protected abstract void setColour(Positions pos, Color newColour);
			/**
			 * @return the low spinner
			 * @see Spinners#getLowSpinner()
			 */
			public final JSpinner getLowSpinner() {
				return lowSpinner;
			}
			/**
			 * @return the high spinner
			 * @see Spinners#getHighSpinner()
			 */
			public final JSpinner getHighSpinner() {
				return highSpinner;
			}
			@Override
			public void actionPerformed(final ActionEvent e) {
				fireModelChange();
			}
		}
		/**
		 * @param parent
		 *            The parent component.
		 * @param model
		 *            initial model
		 */
		public ComplexColourPanel(final ComplexControl parent,
				final ComplexModel model) {
			super();
			this.parent = parent;
			setModel(model);
		}
		/**
		 * @param spinner
		 *            The actual {@link Spinners}.
		 * @param pos
		 *            The position to add new control.
		 * @param isDiscrete
		 *            The type of control should be {@link DiscreteControl} (
		 *            {@code true}), or {@link ContinuousControl} ({@code false}
		 *            ).
		 * @return The add action for that position and type.
		 */
		public Action createAddAction(final Spinners<?> spinner,
				final Positions pos, final boolean isDiscrete) {
			return new AbstractAction(isDiscrete ? "single colour"
					: "linear gradient") {
				private static final long serialVersionUID = -5090340712099813802L;
				@Override
				public void actionPerformed(final ActionEvent e) {
					// Work on mutable copies of both interval maps, then rebuild.
					final SortedMap<Interval<Double>, Color> discretes = new TreeMap<Interval<Double>, Color>(
							mod.getDiscretes());
					final SortedMap<Interval<Double>, Pair<Color, Color>> continuouses = new TreeMap<Interval<Double>, Pair<Color, Color>>(
							mod.getContinuouses());
					final Pair<Interval<Double>, ?> state = spinner.getState();
					// New entries start as a degenerate [bound, bound) interval touching
					// the neighbour row; the user widens it with the spinners afterwards.
					if (isDiscrete) {
						switch (pos) {
						case Up:
							discretes.put(new DefaultInterval<Double>(state
									.getFirst().getHigh(), state.getFirst()
									.getHigh(), true, false), Color.BLACK);
							break;
						case Down:
							discretes.put(new DefaultInterval<Double>(state
									.getFirst().getLow(), state.getFirst()
									.getLow(), true, false), Color.BLACK);
							break;
						default:
							break;
						}
					} else {
						switch (pos) {
						case Up:
							continuouses.put(new DefaultInterval<Double>(state
									.getFirst().getHigh(), state.getFirst()
									.getHigh(), true, false),
									new Pair<Color, Color>(Color.WHITE,
											Color.BLACK));
							break;
						case Down:
							continuouses.put(new DefaultInterval<Double>(state
									.getFirst().getLow(), state.getFirst()
									.getLow(), true, false),
									new Pair<Color, Color>(Color.WHITE,
											Color.BLACK));
							break;
						default:
							break;
						}
					}
					setModel(new ComplexModel(continuouses, discretes));
				}
			};
		}
		/**
		 * Updates this {@link ComplexControl} with the new model.
		 *
		 * <p>Rebuilds the whole panel: preview sample on the left, one row per interval
		 * (discrete and continuous merged, displayed in descending key order) on the right.
		 *
		 * @param model
		 *            The new {@link ComplexModel}.
		 */
		private void setModel(final ComplexModel model) {
			mod = model;
			removeAll();
			spinners.clear();
			setLayout(new GridLayout(1, 2));
			final ComplexSample samp = ComplexSample.create(true);
			samp.setModel(mod);
			add(samp);
			final JPanel panel = new JPanel(new GridLayout(0, 1));
			// Merge both maps; values are either Color (discrete) or Pair (continuous),
			// distinguished again by instanceof below.
			final NavigableMap<Interval<Double>, Object> union = new TreeMap<Interval<Double>, Object>(
					mod.getDiscretes());
			union.putAll(mod.getContinuouses());
			for (final Entry<Interval<Double>, ?> entry : union.descendingMap()
					.entrySet()) {
				final Spinners<?> spinner;
				if (entry.getValue() instanceof Pair<?, ?>) {
					final Pair<?, ?> pair = (Pair<?, ?>) entry.getValue();
					if (pair.getFirst() instanceof Color
							&& pair.getSecond() instanceof Color) {
						@SuppressWarnings("unchecked")
						final Pair<Color, Color> colours = (Pair<Color, Color>) pair;
						spinner = new ContinuousControl(entry.getKey(), colours
								.getFirst(), colours.getSecond());
					} else {
						throw new IllegalStateException(
								"Only colours are supported: "
										+ pair.getFirst().getClass() + " "
										+ pair.getSecond().getClass());
					}
				} else if (entry.getValue() instanceof Color) {
					final Color colour = (Color) entry.getValue();
					spinner = new DiscreteControl(entry.getKey(), colour);
				} else {
					throw new IllegalStateException(
							"Only gradient and single colours are supported."
									+ entry.getValue().getClass());
				}
				spinners.add(spinner);
				final JPopupMenu popup = createPopupMenu(spinner);
				// Both row implementations extend ListenablePanel; the assert guards
				// against a future implementation that does not.
				if (spinner instanceof ListenablePanel) {
					final ListenablePanel p = (ListenablePanel) spinner;
					panel.add(p);
					p.addActionListener(this);
					p.setComponentPopupMenu(popup);
				} else {
					assert false;
				}
			}
			add(panel);
			// ~70px per row keeps the spinners readable.
			setPreferredSize(new Dimension(300, union.size() * 70));
			connectControls(parent.connectButton.isSelected());
			revalidate();
			repaint();
		}
		/**
		 * @param spinner
		 *            A {@link DiscreteControl} of a {@link ContinuousControl}.
		 * @return The popup menu belonging to the {@code spinner}.
		 */
		private JPopupMenu createPopupMenu(final Spinners<?> spinner) {
			final JPopupMenu popup = new JPopupMenu("Add/Remove");
			popup.add(new JMenuItem(new AbstractAction("Remove") {
				private static final long serialVersionUID = -6291744466377161856L;
				@Override
				public void actionPerformed(final ActionEvent e) {
					final SortedMap<Interval<Double>, Pair<Color, Color>> continuouses = new TreeMap<Interval<Double>, Pair<Color, Color>>(
							mod.getContinuouses());
					final SortedMap<Interval<Double>, Color> discretes = new TreeMap<Interval<Double>, Color>(
							mod.getDiscretes());
					final Pair<Interval<Double>, ?> state = spinner.getState();
					// The row's interval may live in either map; match by colour value
					// to decide which map to remove it from.
					final Color discreteColour = discretes.get(state.getFirst());
					if (discreteColour != null
							&& discreteColour.equals(state.getSecond())) {
						discretes.remove(state.getFirst());
					} else {
						final Pair<Color, Color> continuousColours = continuouses
								.get(state.getFirst());
						if (continuousColours != null
								&& continuousColours.equals(state.getSecond())) {
							continuouses.remove(state.getFirst());
						} else {
							assert false;
						}
					}
					setModel(new ComplexModel(continuouses, discretes));
				}
			}));
			final JMenu above = new JMenu("Add above");
			popup.add(above);
			above.add(new JMenuItem(
					createAddAction(spinner, Positions.Up, true)));
			above.add(new JMenuItem(createAddAction(spinner, Positions.Up,
					false)));
			final JMenu below = new JMenu("Add below");
			popup.add(below);
			below.add(new JMenuItem(createAddAction(spinner, Positions.Down,
					true)));
			below.add(new JMenuItem(createAddAction(spinner, Positions.Down,
					false)));
			return popup;
		}
		/** Marker type so connect listeners can be found and removed again. */
		private static interface NeighbourChangeListener extends ChangeListener {
		}
		/**
		 * @param connect
		 *            Connects the neighbour controls if possible.
		 *
		 * NOTE(review): calling this repeatedly with {@code connect == true} adds
		 * duplicate {@link NeighbourChangeListener}s — they are only removed on a
		 * {@code false} call. Confirm callers always toggle rather than re-connect.
		 */
		public void connectControls(final boolean connect) {
			if (connect) {
				Spinners<?> last = null;
				// Rows are stored top-to-bottom, so "prev" is the row above "spinner":
				// prev's low bound is kept equal to spinner's high bound, both ways.
				for (final Spinners<?> spinner : spinners) {
					if (last != null) {
						final Spinners<?> prev = last;
						prev.getLowSpinner().addChangeListener(
								new NeighbourChangeListener() {
									public void stateChanged(final ChangeEvent e) {
										if (!prev
												.getLowSpinner()
												.getValue()
												.equals(
														spinner
																.getHighSpinner()
																.getValue())) {
											spinner.getHighSpinner().setValue(
													prev.getLowSpinner()
															.getValue());
										}
									}
								});
						spinner.getHighSpinner().addChangeListener(
								new NeighbourChangeListener() {
									public void stateChanged(final ChangeEvent e) {
										if (!prev
												.getLowSpinner()
												.getValue()
												.equals(
														spinner
																.getHighSpinner()
																.getValue())) {
											prev.getLowSpinner().setValue(
													spinner.getHighSpinner()
															.getValue());
										}
									}
								});
					}
					last = spinner;
				}
			} else {
				// Remove only the connect listeners; clone() guards against
				// concurrent modification while removing.
				for (final Spinners<?> spinner : spinners) {
					for (final ChangeListener listener : spinner
							.getLowSpinner().getChangeListeners().clone()) {
						if (listener instanceof NeighbourChangeListener) {
							spinner.getLowSpinner().removeChangeListener(
									listener);
						}
					}
					for (final ChangeListener listener : spinner
							.getHighSpinner().getChangeListeners().clone()) {
						if (listener instanceof NeighbourChangeListener) {
							spinner.getHighSpinner().removeChangeListener(
									listener);
						}
					}
				}
			}
		}
		@Override
		public void actionPerformed(final ActionEvent e) {
			// Any row change: rebuild the model from the rows' current state.
			final NavigableMap<Interval<Double>, Color> discretes = new TreeMap<Interval<Double>, Color>();
			final NavigableMap<Interval<Double>, Pair<Color, Color>> continuouses = new TreeMap<Interval<Double>, Pair<Color, Color>>();
			for (final Spinners<?> spinner : spinners) {
				final Pair<Interval<Double>, ?> state = spinner.getState();
				if (state.getSecond() instanceof Color) {
					final Color col = (Color) state.getSecond();
					discretes.put(state.getFirst(), col);
				}
				if (state.getSecond() instanceof Pair<?, ?>) {
					@SuppressWarnings("unchecked")
					final Pair<Color, Color> pair = (Pair<Color, Color>) state
							.getSecond();
					continuouses.put(state.getFirst(), pair);
				}
			}
			setModel(new ComplexModel(continuouses, discretes));
		}
	}
	/**
	 * Constructs a control with the default settings.
	 *
	 * @param parameter
	 *            The associated parameter.
	 * @param stat
	 *            The associated statistics type.
	 */
	public ComplexControl(final String parameter, @Nullable final StatTypes stat) {
		this(new ComplexModelFactory().getDefaultModel(), parameter, stat);
	}
	/**
	 * Constructs a control with {@code model} initial positions.
	 *
	 * @param model
	 *            The initial model.
	 * @param parameter
	 *            The associated parameter.
	 * @param stat
	 *            The associated statistics type.
	 */
	public ComplexControl(final ComplexModel model, final String parameter,
			@Nullable final StatTypes stat) {
		super();
		connectButton = new JCheckBox();
		// Clicking the (blank-labelled) button opens the modal editor dialog.
		final Action complexAction = new AbstractAction(ComplexControl.NO_TEXT) {
			private static final long serialVersionUID = -5519318869307880427L;
			@Override
			public void actionPerformed(final ActionEvent e) {
				// Guard: only react to the button's own action command.
				if (ComplexControl.NO_TEXT.equals(e.getActionCommand())) {
					final ComplexColourPanel ccpanel = new ComplexColourPanel(
							ComplexControl.this, getColourModel());
					final JDialog dialog = new JDialog(JOptionPane
							.getRootFrame(), "Adjust colour ranges ("
							+ parameter + (stat == null ? "" : ", " + stat)
							+ ")", true);
					final JComponent optionPane = new JPanel(new BorderLayout());
					optionPane.add(ccpanel, BorderLayout.CENTER);
					final JPanel buttonPanel = new JPanel();
					// OK commits the edited model and notifies listeners.
					buttonPanel.add(new JButton(new AbstractAction("OK") {
						private static final long serialVersionUID = 4629170869596602196L;
						@Override
						public void actionPerformed(final ActionEvent e) {
							ComplexControl.this.setModel(ccpanel.mod);
							ComplexControl.this.fireActionPerformed(e);
							dialog.dispose();
						}
					}));
					// Cancel just closes the dialog; edits are discarded.
					buttonPanel.add(new JButton(new AbstractAction("Cancel") {
						private static final long serialVersionUID = -3808194145044700722L;
						@Override
						public void actionPerformed(final ActionEvent e) {
							dialog.dispose();
						}
					}));
					final AbstractAction connectAction = new AbstractAction(
							"Connect neighbours") {
						private static final long serialVersionUID = 361372379394476430L;
						@Override
						public void actionPerformed(final ActionEvent e) {
							final boolean connect = connectButton.isSelected();
							ccpanel.connectControls(connect);
						}
					};
					connectButton.setAction(connectAction);
					// Default the "connect" option to on (doClick also fires the action).
					if (!connectButton.isSelected()) {
						connectButton.doClick();
					}
					buttonPanel.add(connectButton);
					optionPane.add(buttonPanel, BorderLayout.SOUTH);
					dialog.setContentPane(optionPane);
					dialog.setPreferredSize(new Dimension(ccpanel
							.getPreferredSize().width + 10, ccpanel
							.getPreferredSize().height + 30));
					dialog.setSize(new Dimension(
							ccpanel.getPreferredSize().width + 10, ccpanel
									.getPreferredSize().height + 70));
					dialog.setVisible(true);
				}
			}
		};
		setAction(complexAction);
		setText(ComplexControl.NO_TEXT);
		setPreferredSize(new Dimension(30, 70));
		final GridBagLayout gbl = new GridBagLayout();
		setLayout(gbl);
		setModel(model);
		final GridBagConstraints sampleConstraint = new GridBagConstraints();
		sampleConstraint.fill = GridBagConstraints.VERTICAL;
		sample.setPreferredSize(new Dimension(70, 70));
		sample.setMinimumSize(new Dimension(60, 50));
		add(sample, sampleConstraint);
	}
	@Override
	public void setModel(final ComplexModel model) {
		this.model = model;
		update();
		// NOTE(review): 0xffffffff is the int literal -1, so the mask is a no-op on the
		// long; the cast simply truncates the timestamp to an int event id.
		fireActionPerformed(new ActionEvent(this, (int) (System
				.currentTimeMillis() & 0xffffffff), "modelChanged"));
	}
	/** Repaints the preview legend from the current model. */
	private void update() {
		sample.setModel(model, Orientation.East);
		sample.repaint();
	}
	@Override
	public ComplexModel getColourModel() {
		return model;
	}
}
| |
/*
* Copyright (C) 2018 The DNA Authors
* This file is part of The DNA library.
*
* The DNA is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The DNA is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with The DNA. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.github.DNAProject.network.websocket;
import com.alibaba.fastjson.JSON;
import com.github.DNAProject.core.block.Block;
import com.github.DNAProject.core.transaction.Transaction;
import com.github.DNAProject.network.connect.AbstractConnector;
import com.github.DNAProject.network.exception.ConnectorException;
import okhttp3.*;

import java.io.IOException;
import java.util.*;
import java.util.concurrent.ThreadLocalRandom;
/**
*
*/
public class WebsocketClient extends AbstractConnector {
    // Underlying OkHttp socket; set once the connection is established (null before that).
    private WebSocket mWebSocket = null;
    // Monitor object shared with the consumer of websocket responses.
    private Object lock;
    // Verbose-logging switch.
    private boolean logFlag;
    // Fixed request id; 0 means "generate a random id per request".
    private long reqId = 0;
    // NOTE(review): mutable public static field shared by ALL instances — confirm the SDK
    // only ever uses a single websocket client at a time.
    public static String wsUrl = "";
    // Self-reference captured by the connection thread in startWebsocketThread().
    private WebsocketClient wsClient = null;
    /**
     * @param url  websocket endpoint URL (stored in the static {@link #wsUrl})
     * @param lock monitor shared with the response consumer
     */
    public WebsocketClient(String url,Object lock) {
        wsUrl = url;
        this.lock = lock;
        wsClient = this;
    }
public void setLog(boolean b) {
logFlag = b;
}
public void startWebsocketThread(boolean log) {
this.logFlag = log;
Thread thread = new Thread(
new Runnable() {
@Override
public void run() {
wsClient.wsStart();
}
});
thread.start();
}
    /** @return the (static) websocket endpoint URL this client connects to */
    @Override
    public String getUrl(){
        return wsUrl;
    }
public void sendHeartBeat() {
Map map = new HashMap<>();
map.put("Action", "heartbeat");
map.put("Version", "V1.0.0");
map.put("Id", generateReqId());
mWebSocket.send(JSON.toJSONString(map));
}
    /**
     * Sends a subscription request built from the caller-supplied fields.
     * NOTE: mutates the caller's {@code map} in place (adds Action/Version/Id).
     * The parameter stays a raw {@code Map} for source compatibility with existing callers.
     */
    public void sendSubscribe(Map map) {
        map.put("Action", "subscribe");
        map.put("Version", "V1.0.0");
        map.put("Id", generateReqId());
        mWebSocket.send(JSON.toJSONString(map));
    }
    /** Serializes {@code map} to JSON and sends it verbatim over the websocket. */
    public void send(Map map) {
        mWebSocket.send(JSON.toJSONString(map));
    }
    /** Fixes the request id used for all messages; pass 0 to go back to random ids. */
    public void setReqId(long reqId){
        this.reqId = reqId;
    }
private long generateReqId(){
if(reqId == 0) {
return new Random().nextInt() & Integer.MAX_VALUE;
}
return reqId;
}
@Override
public Object sendRawTransaction(boolean preExec,String userid,String hexData) throws ConnectorException, IOException{
Map map = new HashMap<>();
map.put("Action", "sendrawtransaction");
map.put("Version", "1.0.0");
map.put("Data", hexData);
map.put("Id", generateReqId());
if(preExec){
map.put("PreExec", "1");
}
mWebSocket.send(JSON.toJSONString(map));
if(preExec){
return "0";
}
return "";
}
@Override
public Object sendRawTransaction(String hexData) throws ConnectorException, IOException{
Map map = new HashMap<>();
map.put("Action", "sendrawtransaction");
map.put("Version", "1.0.0");
map.put("Data", hexData);
map.put("Id", generateReqId());
mWebSocket.send(JSON.toJSONString(map));
return "";
}
@Override
public Transaction getRawTransaction(String txhash) throws ConnectorException, IOException{
Map map = new HashMap<>();
map.put("Action", "gettransaction");
map.put("Version", "1.0.0");
map.put("Hash", txhash);
map.put("Raw", "1");
map.put("Id", generateReqId());
mWebSocket.send(JSON.toJSONString(map));
return null;
}
@Override
public Object getRawTransactionJson(String txhash) throws ConnectorException, IOException{
Map map = new HashMap<>();
map.put("Action", "gettransaction");
map.put("Version", "1.0.0");
map.put("Hash", txhash);
map.put("Raw", "0");
map.put("Id", generateReqId());
mWebSocket.send(JSON.toJSONString(map));
return null;
}
@Override
public int getNodeCount() throws ConnectorException, IOException{
Map map = new HashMap<>();
map.put("Action", "getconnectioncount");
map.put("Version", "1.0.0");
map.put("Id", generateReqId());
mWebSocket.send(JSON.toJSONString(map));
return 0;
}
@Override
public int getBlockHeight() throws ConnectorException, IOException{
Map map = new HashMap<>();
map.put("Action", "getblockheight");
map.put("Version", "1.0.0");
map.put("Id", generateReqId());
mWebSocket.send(JSON.toJSONString(map));
return 0;
}
@Override
public Block getBlock(int height) throws ConnectorException, IOException{
Map map = new HashMap<>();
map.put("Action", "getblockbyheight");
map.put("Version", "1.0.0");
map.put("Height",height);
map.put("Raw","1");
map.put("Id", generateReqId());
mWebSocket.send(JSON.toJSONString(map));
return null;
}
@Override
public Block getBlock(String hash) throws ConnectorException, IOException{
Map map = new HashMap<>();
map.put("Action", "getblockbyhash");
map.put("Version", "1.0.0");
map.put("Hash",hash);
map.put("Raw","1");
map.put("Id", generateReqId());
mWebSocket.send(JSON.toJSONString(map));
return null;
}
@Override
public Block getBlockJson(int height) throws ConnectorException, IOException{
Map map = new HashMap<>();
map.put("Action", "getblockbyheight");
map.put("Version", "1.0.0");
map.put("Height",height);
map.put("Id", generateReqId());
mWebSocket.send(JSON.toJSONString(map));
return null;
}
@Override
public Block getBlockJson(String hash) throws ConnectorException, IOException{
Map map = new HashMap<>();
map.put("Action", "getblockbyhash");
map.put("Version", "1.0.0");
map.put("Hash",hash);
map.put("Id", generateReqId());
mWebSocket.send(JSON.toJSONString(map));
return null;
}
@Override
public Object getBalance(String address) throws ConnectorException, IOException{
Map map = new HashMap<>();
map.put("Action", "getbalance");
map.put("Version", "1.0.0");
map.put("Addr",address);
map.put("Id", generateReqId());
mWebSocket.send(JSON.toJSONString(map));
return null;
}
@Override
public Object getContract(String hash) throws ConnectorException, IOException{
Map map = new HashMap<>();
map.put("Action", "getcontract");
map.put("Version", "1.0.0");
map.put("Raw","1");
map.put("Hash", hash);
map.put("Id", generateReqId());
return mWebSocket.send(JSON.toJSONString(map));
}
@Override
public Object getContractJson(String hash) throws ConnectorException, IOException{
Map map = new HashMap<>();
map.put("Action", "getcontract");
map.put("Version", "1.0.0");
map.put("Raw","0");
map.put("Hash", hash);
map.put("Id", generateReqId());
return mWebSocket.send(JSON.toJSONString(map));
}
@Override
public Object getSmartCodeEvent(int height) throws ConnectorException, IOException{
Map map = new HashMap<>();
map.put("Action", "getsmartcodeeventbyheight");
map.put("Version", "1.0.0");
map.put("Height", height);
map.put("Id", generateReqId());
return mWebSocket.send(JSON.toJSONString(map));
}
/**
 * Requests the smart-contract event for the given transaction hash over the
 * websocket; the event is delivered asynchronously.
 *
 * @param hash transaction hash to query
 * @return the result of the websocket send call (not the event itself)
 * @throws ConnectorException on connector errors
 * @throws IOException on I/O errors
 */
@Override
public Object getSmartCodeEvent(String hash) throws ConnectorException, IOException{
    // Typed map instead of the raw type to avoid unchecked warnings.
    Map<String, Object> map = new HashMap<>();
    map.put("Action", "getsmartcodeeventbyhash");
    map.put("Version", "1.0.0");
    map.put("Hash", hash);
    // Fix: every sibling request carries a request id so the asynchronous
    // reply can be correlated; this method was the only one missing it.
    map.put("Id", generateReqId());
    return mWebSocket.send(JSON.toJSONString(map));
}
/**
 * Requests the height of the block containing the given transaction over the
 * websocket. The real height arrives asynchronously via the listener, so
 * this method itself always returns 0.
 *
 * @param hash transaction hash to query
 * @return always 0; the result is delivered asynchronously
 * @throws ConnectorException on connector errors
 * @throws IOException on I/O errors
 */
@Override
public int getBlockHeightByTxHash(String hash) throws ConnectorException, IOException{
    // Typed map instead of the raw type to avoid unchecked warnings.
    Map<String, Object> map = new HashMap<>();
    map.put("Action", "getblockheightbytxhash");
    map.put("Version", "1.0.0");
    map.put("Hash", hash);
    map.put("Id", generateReqId());
    mWebSocket.send(JSON.toJSONString(map));
    return 0;
}
/**
 * Requests a contract storage entry over the websocket. The value arrives
 * asynchronously via the listener, so this method itself returns "".
 *
 * @param codehash contract code hash owning the storage
 * @param key storage key to read
 * @return always the empty string; the result is delivered asynchronously
 * @throws ConnectorException on connector errors
 * @throws IOException on I/O errors
 */
@Override
public String getStorage(String codehash, String key) throws ConnectorException, IOException {
    // Typed map instead of the raw type to avoid unchecked warnings.
    Map<String, Object> map = new HashMap<>();
    map.put("Action", "getstorage");
    map.put("Version", "1.0.0");
    map.put("Hash", codehash);
    map.put("Key", key);
    map.put("Id", generateReqId());
    mWebSocket.send(JSON.toJSONString(map));
    return "";
}
/**
 * Requests the merkle proof for the given transaction hash over the
 * websocket; the proof is delivered asynchronously via the listener.
 *
 * @param hash transaction hash to query
 * @return always the empty string; the result is delivered asynchronously
 * @throws ConnectorException on connector errors
 * @throws IOException on I/O errors
 */
@Override
public Object getMerkleProof(String hash) throws ConnectorException, IOException{
    // Typed map instead of the raw type to avoid unchecked warnings.
    Map<String, Object> map = new HashMap<>();
    map.put("Action", "getmerkleproof");
    map.put("Version", "1.0.0");
    map.put("Hash", hash);
    map.put("Id", generateReqId());
    mWebSocket.send(JSON.toJSONString(map));
    return "";
}
/**
 * Requests the transfer allowance of an asset between two addresses over the
 * websocket; the value is delivered asynchronously via the listener.
 *
 * @param asset asset name (e.g. ont/ong)
 * @param from address granting the allowance
 * @param to address receiving the allowance
 * @return always the empty string; the result is delivered asynchronously
 * @throws ConnectorException on connector errors
 * @throws IOException on I/O errors
 */
@Override
public String getAllowance(String asset, String from, String to) throws ConnectorException, IOException{
    // Typed map instead of the raw type to avoid unchecked warnings.
    Map<String, Object> map = new HashMap<>();
    map.put("Action", "getallowance");
    map.put("Version", "1.0.0");
    map.put("Asset", asset);
    map.put("From", from);
    map.put("To", to);
    map.put("Id", generateReqId());
    mWebSocket.send(JSON.toJSONString(map));
    return "";
}
/**
 * Requests the number of transactions currently in the mempool over the
 * websocket; the count is delivered asynchronously via the listener.
 *
 * @return always the empty string; the result is delivered asynchronously
 * @throws ConnectorException on connector errors
 * @throws IOException on I/O errors
 */
@Override
public Object getMemPoolTxCount() throws ConnectorException, IOException{
    // Typed map instead of the raw type to avoid unchecked warnings.
    Map<String, Object> map = new HashMap<>();
    map.put("Action", "getmempooltxcount");
    map.put("Version", "1.0.0");
    map.put("Id", generateReqId());
    mWebSocket.send(JSON.toJSONString(map));
    return "";
}
/**
 * Requests the mempool state of the given transaction over the websocket;
 * the state is delivered asynchronously via the listener.
 *
 * @param hash transaction hash to query
 * @return always the empty string; the result is delivered asynchronously
 * @throws ConnectorException on connector errors
 * @throws IOException on I/O errors
 */
@Override
public Object getMemPoolTxState(String hash) throws ConnectorException, IOException{
    // Typed map instead of the raw type to avoid unchecked warnings.
    Map<String, Object> map = new HashMap<>();
    map.put("Action", "getmempooltxstate");
    map.put("Version", "1.0.0");
    map.put("Hash", hash);
    map.put("Id", generateReqId());
    mWebSocket.send(JSON.toJSONString(map));
    return "";
}
/**
 * Requests the node's version string over the websocket; the version is
 * delivered asynchronously via the listener.
 *
 * @return always the empty string; the result is delivered asynchronously
 * @throws ConnectorException on connector errors
 * @throws IOException on I/O errors
 */
@Override
public String getVersion() throws ConnectorException, IOException{
    // Typed map instead of the raw type to avoid unchecked warnings.
    Map<String, Object> map = new HashMap<>();
    map.put("Action", "getversion");
    map.put("Version", "1.0.0");
    map.put("Id", generateReqId());
    mWebSocket.send(JSON.toJSONString(map));
    return "";
}
/**
 * Requests the unclaimed (grant) ONG of an address over the websocket; the
 * amount is delivered asynchronously via the listener.
 *
 * @param address account address to query
 * @return always the empty string; the result is delivered asynchronously
 * @throws ConnectorException on connector errors
 * @throws IOException on I/O errors
 */
@Override
public String getGrantOng(String address) throws ConnectorException, IOException {
    // Typed map instead of the raw type to avoid unchecked warnings.
    Map<String, Object> map = new HashMap<>();
    map.put("Action", "getgrantong");
    map.put("Version", "1.0.0");
    map.put("Id", generateReqId());
    map.put("Addr", address);
    mWebSocket.send(JSON.toJSONString(map));
    return "";
}
/**
 * Requests the node's network id over the websocket. The real id arrives
 * asynchronously via the listener, so this method itself always returns 0.
 *
 * @return always 0; the result is delivered asynchronously
 * @throws ConnectorException on connector errors
 * @throws IOException on I/O errors
 */
@Override
public int getNetworkId() throws ConnectorException, IOException {
    // Typed map instead of the raw type to avoid unchecked warnings.
    Map<String, Object> map = new HashMap<>();
    map.put("Action", "getnetworkid");
    map.put("Version", "1.0.0");
    map.put("Id", generateReqId());
    mWebSocket.send(JSON.toJSONString(map));
    return 0;
}
/**
 * Requests the node's synchronisation status over the websocket; the status
 * is delivered asynchronously via the listener.
 *
 * @return always 0; the result is delivered asynchronously
 * @throws ConnectorException on connector errors
 * @throws IOException on I/O errors
 */
@Override
public Object getNodeSyncStatus() throws ConnectorException, IOException {
    // Typed map instead of the raw type to avoid unchecked warnings.
    Map<String, Object> map = new HashMap<>();
    map.put("Action", "getsyncstatus");
    map.put("Version", "1.0.0");
    map.put("Id", generateReqId());
    mWebSocket.send(JSON.toJSONString(map));
    return 0;
}
/**
 * Not supported by this websocket connector: no request is sent and null is
 * always returned. NOTE(review): unlike the other queries, this stub does
 * not even issue a websocket message — confirm whether side-chain queries
 * are intentionally unsupported here.
 *
 * @param sideChainID identifier of the side chain (unused)
 * @return always null
 */
@Override
public String getSideChainData(String sideChainID) throws ConnectorException, IOException {
return null;
}
/**
 * Opens the websocket connection and installs the listener that drives this
 * connector: on open it starts a 30-second heartbeat, on each message it
 * queues the parsed {@code Result} and notifies waiters on {@code lock},
 * and on failure it reconnects by calling itself again.
 */
public void wsStart() {
//request = new Request.Builder().url(WS_URL).build();
// Derive the matching http(s) URL from the ws(s) URL for the Origin header.
String httpUrl = null;
if (wsUrl.contains("wss")) {
httpUrl = "https://" + wsUrl.split("://")[1];
} else {
httpUrl = "http://" + wsUrl.split("://")[1];
}
Request request = new Request.Builder().url(wsUrl).addHeader("Origin", httpUrl).build();
OkHttpClient mClient = new OkHttpClient.Builder().build();
mWebSocket = mClient.newWebSocket(request, new WebSocketListener() {
@Override
public void onOpen(WebSocket webSocket, Response response) {
System.out.println("opened websocket connection");
sendHeartBeat();
// Heartbeat every 30s (first after 1s) to keep the connection alive.
// NOTE(review): each reconnect via onFailure -> wsStart() creates a new
// Timer without cancelling the old one — confirm timers don't accumulate.
new Timer().schedule(new TimerTask() {
@Override
public void run() {
sendHeartBeat();
}
}, 1000, 30000);
}
@Override
public void onMessage(WebSocket webSocket, String s) {
if (logFlag) {
System.out.println("websoket onMessage:" + s);
}
Result result = JSON.parseObject(s, Result.class);
try {
// Hand the parsed result to whoever is blocked waiting on `lock`.
synchronized (lock) {
MsgQueue.addResult(result);
lock.notify();
}
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void onClosing(WebSocket webSocket, int code, String reason) {
System.out.println(reason);
}
@Override
public void onClosed(WebSocket webSocket, int code, String reason) {
System.out.println("close:" + reason);
}
@Override
public void onFailure(WebSocket webSocket, Throwable t, Response response) {
System.out.println("onFailure:" + response);
// Simple reconnect strategy: rebuild the whole client and socket.
wsStart();
}
});
}
}
| |
package CSTime;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Comparator;
import java.util.Date;
/*
Copyright 2017 Tarek Mohamed
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* Created by tarek on 5/26/17.
*/
/**
 * A convenience wrapper around {@link Calendar} offering simple construction,
 * formatted output, arithmetic and comparison of date/time values.
 *
 * Not thread-safe: the backing calendar is mutable.
 */
public class DateTime implements Comparable<DateTime>, Comparator<DateTime>, Cloneable {
    public final static String DEFAULT_DATE_SHORT_FORMAT = "dd/MM/yyyy";
    /**
     * Template for {@link #getLongDate()}; the quoted {@code %s} placeholders
     * are filled later with the ordinal suffix and month name. (Fixed: no
     * space between the day and its suffix, so "17th" rather than "17 th".)
     */
    public final static String DEFAULT_DATE_LONG_FORMAT = "dd'%s of %s' yyyy";
    // Fixed: milliseconds are 'SSS' in SimpleDateFormat patterns; the former
    // lowercase 'sss' printed the seconds field a second time.
    public final static String DEFAULT_DATE_TIME_12_HOUR = "dd/MM/yyyy hh:mm:ss,SSS a";
    public final static String DEFAULT_DATE_TIME_12_HOUR_NO_MILLIS = "dd/MM/yyyy hh:mm:ss a";
    public final static String DEFAULT_DATE_TIME_24_HOUR = "dd/MM/yyyy HH:mm:ss,SSS";
    public final static String DEFAULT_DATE_TIME_24_HOUR_NO_MILLIS = "dd/MM/yyyy HH:mm:ss";
    public final static String DEFAULT_TIME_12_HOUR = "hh:mm:ss,SSS a";
    public final static String DEFAULT_TIME_12_HOUR_NO_MILLIS = "hh:mm:ss a";
    public final static String DEFAULT_TIME_24_HOUR = "HH:mm:ss,SSS";
    public final static String DEFAULT_TIME_24_HOUR_NO_MILLIS = "HH:mm:ss";
    public final static String DEFAULT_DATE = "dd/MM/yyyy";
    public final static String DEFAULT_DATE_REVERSE = "yyyy/MM/dd";
    /** Backing store for the full date/time state. */
    private Calendar calendar;
    /** English month names, indexed by Calendar.MONTH (0 = January). */
    String[] MONTH_NAME = new String[]{
            "January"
            , "February"
            , "March"
            , "April"
            , "May"
            , "June"
            , "July"
            , "August"
            , "September"
            , "October"
            , "November"
            , "December"
    };
    // Ordered so that Calendar.DAY_OF_WEEK % 7 maps correctly:
    // Sunday(1)..Friday(6) hit indices 1..6, Saturday(7) wraps to index 0.
    String[] DAY_NAME = new String[]{
            "Saturday"
            , "Sunday"
            , "Monday"
            , "Tuesday"
            , "Wednesday"
            , "Thursday"
            , "Friday"
    };
    /** Granularity selector for {@link #compareTo(DateTime, CompareDateBy)}. */
    enum CompareDateBy {
        Years,
        Months,
        Days,
        Hours,
        Minutes,
        Seconds,
        Milliseconds,
        Date,
        Time
    }
    /**
     * Constructor for the DateTime class.
     * @param date the date and time as a {@link Date} instance
     */
    public DateTime(Date date){
        calendar = Calendar.getInstance();
        calendar.setTime(date);
    }
    /**
     * Constructor for the DateTime class.
     * On a parse failure the error is printed and the instance keeps "now".
     *
     * @param dateString the date and time in String form
     * @param format the SimpleDateFormat pattern describing dateString
     */
    public DateTime(String dateString, String format) {
        calendar = Calendar.getInstance();
        try {
            calendar.setTime(new SimpleDateFormat(format).parse(dateString));
        } catch (ParseException e) {
            e.printStackTrace();
        }
    }
    /**
     * Constructor for the DateTime class.
     * @param milliseconds the total date and time in epoch milliseconds
     */
    public DateTime(long milliseconds){
        calendar = Calendar.getInstance();
        calendar.setTimeInMillis(milliseconds);
    }
    /**
     * Constructor for the DateTime class.
     * @param calendar the calendar object to be used (kept by reference)
     */
    public DateTime(Calendar calendar) {
        this.calendar = calendar;
    }
    /**
     * Constructor for the DateTime class; time-of-day is zeroed.
     * @param years the year of the date
     * @param months the month of the date (1 = January)
     * @param daysOfMonth the day in month of the date
     */
    public DateTime(int years, int months, int daysOfMonth){
        this(years, months, daysOfMonth, 0, 0, 0, 0);
    }
    /**
     * Constructor for the DateTime class; milliseconds are zeroed.
     * @param years the year of the date
     * @param months the month of the date (1 = January)
     * @param daysOfMonth the day in month of the date
     * @param hours24 the hours in the 24-hour system
     * @param minutes the minutes of the time
     * @param seconds the seconds of the time
     */
    public DateTime(int years, int months, int daysOfMonth, int hours24, int minutes, int seconds){
        this(years, months, daysOfMonth, hours24, minutes, seconds, 0);
    }
    /**
     * Constructor for the DateTime class.
     * @param years the year of the date
     * @param months the month of the date (1 = January)
     * @param daysOfMonth the day in month of the date
     * @param hours24 the hours in the 24-hour system
     * @param minutes the minutes of the time
     * @param seconds the seconds of the time
     * @param milliseconds the milliseconds of the time
     */
    public DateTime(int years, int months, int daysOfMonth, int hours24, int minutes, int seconds, int milliseconds){
        this.calendar = Calendar.getInstance();
        this.calendar.set(Calendar.YEAR, years);
        // Calendar months are 0-based; the public API is 1-based.
        this.calendar.set(Calendar.MONTH, months - 1);
        this.calendar.set(Calendar.DAY_OF_MONTH, daysOfMonth);
        this.calendar.set(Calendar.HOUR_OF_DAY, hours24);
        this.calendar.set(Calendar.MINUTE, minutes);
        this.calendar.set(Calendar.SECOND, seconds);
        this.calendar.set(Calendar.MILLISECOND, milliseconds);
    }
    /**
     * @return a new DateTime instance holding the current date and time
     */
    public static DateTime getNow(){
        return new DateTime(new Date());
    }
    /**
     * @return a defensive copy of the backing Calendar instance
     */
    public Calendar getCalendar(){
        return ((Calendar)calendar.clone());
    }
    /**
     * @return the total epoch milliseconds of this DateTime
     */
    public Long getTimeInMilliseconds(){
        return calendar.getTimeInMillis();
    }
    /**
     * @return the year of this DateTime
     */
    public Integer getYears(){
        return calendar.get(Calendar.YEAR);
    }
    /**
     * @return the 1-based month number (1 = January)
     */
    public Integer getMonthNumeric(){
        return calendar.get(Calendar.MONTH) + 1;
    }
    /**
     * @return the English name of the current month
     */
    public String getMonthName(){
        return MONTH_NAME[calendar.get(Calendar.MONTH)];
    }
    /**
     * @return the day number within the current month
     */
    public Integer getDayOfMonth(){
        return calendar.get(Calendar.DAY_OF_MONTH);
    }
    /**
     * @return the number of days in the current year (365 or 366)
     */
    public Integer getMaxDayNumberOfYear(){
        return calendar.getActualMaximum(Calendar.DAY_OF_YEAR);
    }
    /**
     * @return the day number within the current year
     */
    public Integer getDayOfYear(){
        return calendar.get(Calendar.DAY_OF_YEAR);
    }
    /**
     * @return the day of week as Calendar numbers it: 1 = Sunday .. 7 = Saturday.
     * (Fixed: the former "+ 1" produced the impossible range 2..8.)
     */
    public Integer getDayOfWeekNumeric(){
        return calendar.get(Calendar.DAY_OF_WEEK);
    }
    /**
     * @return the English name of the current day of week
     */
    public String getDayOfWeekName(){
        // DAY_OF_WEEK is 1 (Sunday) .. 7 (Saturday); DAY_NAME lines up with
        // indices 1..6 for Sunday..Friday, and the modulo maps Saturday (7)
        // onto index 0 instead of overflowing the array (former AIOOBE).
        return DAY_NAME[calendar.get(Calendar.DAY_OF_WEEK) % 7];
    }
    /**
     * @return the hours in 24-hour format (0..23)
     */
    public Integer getHours24(){
        return calendar.get(Calendar.HOUR_OF_DAY);
    }
    /**
     * @return the hours in 12-hour format (0..11)
     */
    public Integer getHours12(){
        return calendar.get(Calendar.HOUR);
    }
    /**
     * @return the locale's AM/PM marker for the current time
     */
    public String getAM_PM(){
        return new SimpleDateFormat("a").format(calendar.getTime());
    }
    /**
     * @return the minutes of the current time
     */
    public Integer getMinutes(){
        return calendar.get(Calendar.MINUTE);
    }
    /**
     * @return the seconds of the current time
     */
    public Integer getSeconds(){
        return calendar.get(Calendar.SECOND);
    }
    /**
     * @return the milliseconds of the current time
     */
    public Integer getMilliseconds(){
        return calendar.get(Calendar.MILLISECOND);
    }
    /**
     * Formats the time-of-day portion of this DateTime.
     * @param hasMillis should the output string contain milliseconds?
     * @param is24Hours should the output string use the 24-hour format?
     * @return a string representation of the time
     */
    public String getTime(boolean hasMillis, boolean is24Hours){
        if (hasMillis && is24Hours) return new SimpleDateFormat(DEFAULT_TIME_24_HOUR).format(calendar.getTime());
        else if (!hasMillis && is24Hours) return new SimpleDateFormat(DEFAULT_TIME_24_HOUR_NO_MILLIS).format(calendar.getTime());
        else if (hasMillis && !is24Hours) return new SimpleDateFormat(DEFAULT_TIME_12_HOUR).format(calendar.getTime());
        else return new SimpleDateFormat(DEFAULT_TIME_12_HOUR_NO_MILLIS).format(calendar.getTime());
    }
    /**
     * @return the date formatted as day/month/year
     */
    public String getShortDate() {
        return new SimpleDateFormat(DEFAULT_DATE_SHORT_FORMAT).format(calendar.getTime());
    }
    /**
     * @return the date in long form, e.g. "17th of May 2020"
     */
    public String getLongDate(){
        int day = getDayOfMonth();
        String post;
        if (day >= 11 && day <= 13) {
            // 11th, 12th, 13th are irregular in English.
            post = "th";
        } else {
            // Fixed: suffix now depends on the last digit, so 21/22/23/31
            // get st/nd/rd instead of always "th".
            switch (day % 10) {
                case 1:
                    post = "st";
                    break;
                case 2:
                    post = "nd";
                    break;
                case 3:
                    post = "rd";
                    break;
                default:
                    post = "th";
                    break;
            }
        }
        return String.format(new SimpleDateFormat(DEFAULT_DATE_LONG_FORMAT).format(calendar.getTime())
                , post
                , getMonthName());
    }
    /**
     * Formats both the date and the time-of-day of this DateTime.
     * @param hasMillis should the output string contain milliseconds?
     * @param is24Hours should the output string use the 24-hour format?
     * @return a string representation of the date and time
     */
    public String getDateTime(boolean hasMillis, boolean is24Hours){
        if (hasMillis && is24Hours) return new SimpleDateFormat(DEFAULT_DATE_TIME_24_HOUR).format(calendar.getTime());
        else if (!hasMillis && is24Hours) return new SimpleDateFormat(DEFAULT_DATE_TIME_24_HOUR_NO_MILLIS).format(calendar.getTime());
        else if (hasMillis && !is24Hours) return new SimpleDateFormat(DEFAULT_DATE_TIME_12_HOUR).format(calendar.getTime());
        else return new SimpleDateFormat(DEFAULT_DATE_TIME_12_HOUR_NO_MILLIS).format(calendar.getTime());
    }
    /**
     * Formats this DateTime with an arbitrary SimpleDateFormat pattern.
     * @param format the pattern required
     * @return a string representation of the date and time
     */
    public String getDateTime(String format) {
        return new SimpleDateFormat(format).format(calendar.getTime());
    }
    /**
     * Adds years to this DateTime (negative values subtract).
     * @param years the number of years to be added
     */
    public void addYears(int years){
        calendar.add(Calendar.YEAR, years);
    }
    /**
     * Adds months to this DateTime (negative values subtract).
     * @param months the number of months to be added
     */
    public void addMonths(int months){
        calendar.add(Calendar.MONTH, months);
    }
    /**
     * Adds days to this DateTime (negative values subtract).
     * @param days the number of days to be added
     */
    public void addDays(int days){
        calendar.add(Calendar.DAY_OF_YEAR, days);
    }
    /**
     * Adds hours to this DateTime (negative values subtract).
     * @param hours the number of hours to be added
     */
    public void addHours(int hours){
        calendar.add(Calendar.HOUR, hours);
    }
    /**
     * Adds minutes to this DateTime (negative values subtract).
     * @param minutes the number of minutes to be added
     */
    public void addMinutes(int minutes){
        calendar.add(Calendar.MINUTE, minutes);
    }
    /**
     * Adds seconds to this DateTime (negative values subtract).
     * @param seconds the number of seconds to be added
     */
    public void addSeconds(int seconds){
        calendar.add(Calendar.SECOND, seconds);
    }
    /**
     * Adds milliseconds to this DateTime (negative values subtract).
     * @param milliseconds the number of milliseconds to be added
     */
    public void addMilliseconds(int milliseconds){
        calendar.add(Calendar.MILLISECOND, milliseconds);
    }
    /**
     * Compares this DateTime field by field and encodes which field differed:
     *
     * 7 : Years
     * 6 : Months
     * 5 : Days
     * 4 : Hours
     * 3 : Minutes
     * 2 : Seconds
     * 1 : Milliseconds
     * 0 : Identical
     *
     * @param dateTime the other DateTime instance
     * @return positive if this instance is more recent, negative if older,
     *         zero if every field matches; magnitude names the first
     *         differing field as listed above
     */
    @Override
    public int compareTo(DateTime dateTime) {
        if (this.getYears() > dateTime.getYears()) return 7;
        else if (this.getYears() < dateTime.getYears()) return -7;
        if (this.getMonthNumeric() > dateTime.getMonthNumeric()) return 6;
        else if (this.getMonthNumeric() < dateTime.getMonthNumeric()) return -6;
        if (this.getDayOfMonth() > dateTime.getDayOfMonth()) return 5;
        else if (this.getDayOfMonth() < dateTime.getDayOfMonth()) return -5;
        if (this.getHours24() > dateTime.getHours24()) return 4;
        else if (this.getHours24() < dateTime.getHours24()) return -4;
        if (this.getMinutes() > dateTime.getMinutes()) return 3;
        else if (this.getMinutes() < dateTime.getMinutes()) return -3;
        if (this.getSeconds() > dateTime.getSeconds()) return 2;
        else if (this.getSeconds() < dateTime.getSeconds()) return -2;
        if (this.getMilliseconds() > dateTime.getMilliseconds()) return 1;
        else if (this.getMilliseconds() < dateTime.getMilliseconds()) return -1;
        return 0;
    }
    /**
     * Compares this DateTime with another on a single unit, or on the whole
     * date or the whole time-of-day.
     * (Fixed: equal values now return 0 instead of -1, and the Date case no
     * longer falls through into the Time comparison.)
     *
     * @param dateTime the other DateTime instance
     * @param compareDateBy the comparison granularity
     * @return positive if this instance is greater, negative if smaller,
     *         zero if equal on the requested granularity
     */
    public int compareTo(DateTime dateTime, CompareDateBy compareDateBy) {
        switch (compareDateBy){
            case Years:
                return Integer.compare(this.getYears(), dateTime.getYears());
            case Months:
                return Integer.compare(this.getMonthNumeric(), dateTime.getMonthNumeric());
            case Days:
                return Integer.compare(this.getDayOfMonth(), dateTime.getDayOfMonth());
            case Hours:
                return Integer.compare(this.getHours24(), dateTime.getHours24());
            case Minutes:
                return Integer.compare(this.getMinutes(), dateTime.getMinutes());
            case Seconds:
                return Integer.compare(this.getSeconds(), dateTime.getSeconds());
            case Milliseconds:
                return Integer.compare(this.getMilliseconds(), dateTime.getMilliseconds());
            case Date:
                int dateCmp = Integer.compare(this.getYears(), dateTime.getYears());
                if (dateCmp == 0) dateCmp = Integer.compare(this.getMonthNumeric(), dateTime.getMonthNumeric());
                if (dateCmp == 0) dateCmp = Integer.compare(this.getDayOfMonth(), dateTime.getDayOfMonth());
                return dateCmp;
            case Time:
                int timeCmp = Integer.compare(this.getHours24(), dateTime.getHours24());
                if (timeCmp == 0) timeCmp = Integer.compare(this.getMinutes(), dateTime.getMinutes());
                if (timeCmp == 0) timeCmp = Integer.compare(this.getSeconds(), dateTime.getSeconds());
                if (timeCmp == 0) timeCmp = Integer.compare(this.getMilliseconds(), dateTime.getMilliseconds());
                return timeCmp;
        }
        return 0;
    }
    /**
     * Comparator implementation so collections of DateTime can be sorted.
     * @param dateTime1 the first DateTime instance
     * @param dateTime2 the second DateTime instance
     * @return the result of comparing the first instance to the second
     */
    @Override
    public int compare(DateTime dateTime1, DateTime dateTime2) {
        return dateTime1.compareTo(dateTime2);
    }
    /**
     * Creates a new DateTime clone of this instance.
     * @return a new DateTime instance with the same millisecond value
     */
    @Override
    protected DateTime clone() {
        return new DateTime(calendar.getTimeInMillis());
    }
}
| |
/**
* Copyright 2016, 2017 Peter Zybrick and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @author Pete Zybrick
* @version 1.0.0, 2017-09
*
*/
package com.pzybrick.iote2e.tests.simws;
import java.net.URI;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentLinkedQueue;
import javax.websocket.ContainerProvider;
import javax.websocket.Session;
import javax.websocket.WebSocketContainer;
import org.apache.avro.util.Utf8;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.eclipse.jetty.util.component.LifeCycle;
import com.pzybrick.iote2e.common.utils.Iote2eUtils;
import com.pzybrick.iote2e.schema.avro.Iote2eRequest;
import com.pzybrick.iote2e.schema.avro.Iote2eResult;
import com.pzybrick.iote2e.schema.avro.OPERATION;
import com.pzybrick.iote2e.schema.util.Iote2eRequestReuseItem;
import com.pzybrick.iote2e.schema.util.Iote2eResultReuseItem;
import com.pzybrick.iote2e.ws.security.LoginVo;
/**
* The Class ClientBasicInjector.
*/
/**
 * The Class ClientBasicInjector.
 *
 * Test driver: connects one websocket client thread to an iote2e server,
 * logs in, injects a fixed series of sensor-value requests, then drains and
 * logs any queued results before shutting the container down.
 */
public class ClientBasicInjector {
/** The Constant logger. */
private static final Logger logger = LogManager.getLogger(ClientBasicInjector.class);
/** The uri. */
private URI uri;
/** The container. */
private WebSocketContainer container;
/** The test source login. */
private static Utf8 TEST_SOURCE_LOGIN = new Utf8("pzybrick1");
/** The test source name. */
private static Utf8 TEST_SOURCE_NAME = new Utf8("local_t001");
/** The test source type. */
private static Utf8 TEST_SOURCE_TYPE = new Utf8("testSourceType");
/** The test sensor name. */
private static Utf8 TEST_SENSOR_NAME = new Utf8("testSensorName"); // fan
/**
 * The main method.
 *
 * @param args the arguments; args[0] is the websocket server URL
 */
public static void main(String[] args) {
// "ws://localhost:8090/iote2e/"
try {
ClientBasicInjector clientBasicInjector = new ClientBasicInjector();
clientBasicInjector.process(args[0]);
} catch (Exception e) {
logger.info(e);
e.printStackTrace();
}
}
/**
 * Process: starts the client thread(s), waits for them to finish, then
 * stops the websocket container.
 *
 * @param url the websocket server url
 * @throws Exception the exception
 */
public void process(String url) throws Exception {
try {
uri = URI.create(url);
container = ContainerProvider.getWebSocketContainer();
try {
List<IotClientSocketThread> iotClientSocketThreads = new ArrayList<IotClientSocketThread>();
// NOTE(review): loop bounds (1..2) start exactly one client thread;
// widen the range to simulate multiple concurrent clients.
for (int i = 1; i < 2; i++) {
IotClientSocketThread iotClientSocketThread = new IotClientSocketThread().setLogin( TEST_SOURCE_LOGIN.toString() )
.setUri(uri).setContainer(container);
iotClientSocketThreads.add(iotClientSocketThread);
iotClientSocketThread.start();
}
// Block until every injector thread has completed its run() loop.
for (IotClientSocketThread iotClientSocketThread : iotClientSocketThreads) {
iotClientSocketThread.join();
}
} finally {
// Force lifecycle stop when done with container.
// This is to free up threads and resources that the
// JSR-356 container allocates. But unfortunately
// the JSR-356 spec does not handle lifecycles (yet)
if (container instanceof LifeCycle) {
((LifeCycle) container).stop();
}
}
} catch (Throwable t) {
t.printStackTrace(System.err);
}
}
/**
 * The Class IotClientSocketThread.
 *
 * One websocket session: sends a JSON login, then Avro-encoded sensor
 * values 45..55 one second apart, then logs any results queued by the
 * socket listener before closing the session.
 */
private static class IotClientSocketThread extends Thread {
/** The login. */
private String login;
/** The uri. */
private URI uri;
/** The container. */
private WebSocketContainer container;
/** The shutdown flag. NOTE(review): set by shutdown() but never read in
 * run(); the interrupt alone ends the sleeps — confirm the flag is
 * intentionally unused. */
private boolean shutdown;
/** The iote 2 e request reuse item (reusable Avro encoder). */
private Iote2eRequestReuseItem iote2eRequestReuseItem = new Iote2eRequestReuseItem();
/** The iote 2 e result reuse item (reusable Avro decoder). */
private Iote2eResultReuseItem iote2eResultReuseItem = new Iote2eResultReuseItem();
/**
 * Instantiates a new iot client socket thread.
 *
 * @throws Exception the exception
 */
public IotClientSocketThread() throws Exception {
}
/* (non-Javadoc)
 * @see java.lang.Thread#run()
 */
@Override
public void run() {
Session session = null;
try {
//LoginVo loginVo = new LoginVo().setLogin(TEST_SOURCE_LOGIN.toString()).setSourceName(TEST_SOURCE_NAME.toString()).setOptionalFilterSensorName(TEST_SENSOR_NAME.toString());
LoginVo loginVo = new LoginVo().setLoginName(TEST_SOURCE_LOGIN.toString()).setSourceName(TEST_SOURCE_NAME.toString());
// Results queued by the ClientSocketAvro listener as they arrive.
ConcurrentLinkedQueue<byte[]> iote2eResultBytes = new ConcurrentLinkedQueue<byte[]>();
ClientSocketAvro iotClientSocketAvro = new ClientSocketAvro(this,iote2eResultBytes);
session = container.connectToServer(iotClientSocketAvro, uri);
// Login must be the first frame, sent as JSON text.
session.getBasicRemote().sendText( Iote2eUtils.getGsonInstance().toJson(loginVo));
// Inject sensor values 45..55, one per second, as binary Avro frames.
for (int i = 45; i < 56; i++) {
Map<CharSequence, CharSequence> pairs = new HashMap<CharSequence, CharSequence>();
pairs.put( TEST_SENSOR_NAME, new Utf8(String.valueOf(i)));
Iote2eRequest iote2eRequest = Iote2eRequest.newBuilder().setLoginName(TEST_SOURCE_LOGIN)
.setSourceName(TEST_SOURCE_NAME)
.setSourceType(TEST_SOURCE_TYPE)
.setRequestUuid(UUID.randomUUID().toString())
.setRequestTimestamp(Iote2eUtils.getDateNowUtc8601()).setOperation(OPERATION.SENSORS_VALUES)
.setPairs(pairs).build();
session.getBasicRemote().sendBinary(ByteBuffer.wrap(iote2eRequestReuseItem.toByteArray(iote2eRequest)));
try {
sleep(1000L);
} catch (InterruptedException e) { }
}
logger.info("Rcvd Messages:");
// Drain and log whatever results arrived while injecting.
while (!iote2eResultBytes.isEmpty()) {
byte[] bytes = iote2eResultBytes.poll();
if( bytes != null ) {
Iote2eResult iote2eResult = iote2eResultReuseItem.fromByteArray(bytes);
logger.info(iote2eResult.toString());
}
}
// Grace period for any late results before closing the session.
try {
sleep(2000L);
} catch (InterruptedException e) { }
} catch (Exception e) {
logger.error(e.getMessage(),e);
} finally {
if (session != null && session.isOpen()) {
try {
session.close();
} catch (Exception e) {
}
}
}
}
/**
 * Shutdown: flags the thread and interrupts any in-progress sleep.
 */
public void shutdown() {
shutdown = true;
interrupt();
}
/**
 * Gets the login.
 *
 * @return the login
 */
public String getLogin() {
return login;
}
/**
 * Sets the login.
 *
 * @param login the login
 * @return the iot client socket thread
 */
public IotClientSocketThread setLogin(String login) {
this.login = login;
return this;
}
/**
 * Gets the uri.
 *
 * @return the uri
 */
public URI getUri() {
return uri;
}
/**
 * Sets the uri.
 *
 * @param uri the uri
 * @return the iot client socket thread
 */
public IotClientSocketThread setUri(URI uri) {
this.uri = uri;
return this;
}
/**
 * Gets the container.
 *
 * @return the container
 */
public WebSocketContainer getContainer() {
return container;
}
/**
 * Sets the container.
 *
 * @param container the container
 * @return the iot client socket thread
 */
public IotClientSocketThread setContainer(WebSocketContainer container) {
this.container = container;
return this;
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* various Parser tests for INSERT/UPDATE/DELETE
*/
public class TestIUD {
private static HiveConf conf;
private ParseDriver pd;
private SemanticAnalyzer sA;
/** Builds the shared HiveConf once for the whole class and starts a session. */
@BeforeClass
public static void initialize() {
    HiveConf hiveConf = new HiveConf(SemanticAnalyzer.class);
    conf = hiveConf;
    SessionState.start(hiveConf);
}
/** Creates a fresh parser and analyzer before each test so no state leaks. */
@Before
public void setup() throws SemanticException {
    this.pd = new ParseDriver();
    this.sA = new CalcitePlanner(conf);
}
/** Parses the statement and returns the first child of the parser's root node. */
ASTNode parse(String query) throws ParseException {
    return (ASTNode) pd.parse(query).getChild(0);
}
/** DELETE without a WHERE clause parses to a bare TOK_DELETE_FROM node. */
@Test
public void testDeleteNoWhere() throws ParseException {
    final String expected = "(TOK_DELETE_FROM (TOK_TABNAME src))";
    ASTNode ast = parse("DELETE FROM src");
    Assert.assertEquals("AST doesn't match", expected, ast.toStringTree());
}
/** DELETE with a compound WHERE clause keeps the predicate under TOK_WHERE. */
@Test
public void testDeleteWithWhere() throws ParseException {
    final String expected =
            "(TOK_DELETE_FROM (TOK_TABNAME src) (TOK_WHERE (AND "
            + "(TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL key)) "
            + "(< (. (TOK_TABLE_OR_COL src) value) 0))))";
    ASTNode ast = parse("DELETE FROM src WHERE key IS NOT NULL AND src.value < 0");
    Assert.assertEquals("AST doesn't match", expected, ast.toStringTree());
}
/** UPDATE with one SET assignment and no WHERE clause. */
@Test
public void testUpdateNoWhereSingleSet() throws ParseException {
    final String expected =
            "(TOK_UPDATE_TABLE (TOK_TABNAME src) "
            + "(TOK_SET_COLUMNS_CLAUSE (= (TOK_TABLE_OR_COL key) 3)))";
    ASTNode ast = parse("UPDATE src set key = 3");
    Assert.assertEquals("AST doesn't match", expected, ast.toStringTree());
}
/** UPDATE with two SET assignments and no WHERE clause. */
@Test
public void testUpdateNoWhereMultiSet() throws ParseException {
    final String expected =
            "(TOK_UPDATE_TABLE (TOK_TABNAME src) "
            + "(TOK_SET_COLUMNS_CLAUSE "
            + "(= (TOK_TABLE_OR_COL key) 3) "
            + "(= (TOK_TABLE_OR_COL value) 8)))";
    ASTNode ast = parse("UPDATE src set key = 3, value = 8");
    Assert.assertEquals("AST doesn't match", expected, ast.toStringTree());
}
/** UPDATE with one SET assignment plus a WHERE clause. */
@Test
public void testUpdateWithWhereSingleSet() throws ParseException {
    final String expected =
            "(TOK_UPDATE_TABLE (TOK_TABNAME src) "
            + "(TOK_SET_COLUMNS_CLAUSE (= (TOK_TABLE_OR_COL key) 3)) "
            + "(TOK_WHERE (TOK_FUNCTION TOK_ISNULL (TOK_TABLE_OR_COL value))))";
    ASTNode ast = parse("UPDATE src SET key = 3 WHERE value IS NULL");
    Assert.assertEquals("AST doesn't match", expected, ast.toStringTree());
}
/** UPDATE where the SET values are arithmetic/cast expressions, not literals. */
@Test
public void testUpdateWithWhereSingleSetExpr() throws ParseException {
    final String expected =
            "(TOK_UPDATE_TABLE (TOK_TABNAME src) "
            + "(TOK_SET_COLUMNS_CLAUSE "
            + "(= (TOK_TABLE_OR_COL key) (+ (- 3) (% (* 5 9) 8))) "
            + "(= (TOK_TABLE_OR_COL val) (TOK_FUNCTION TOK_INT (+ 6.1 (TOK_TABLE_OR_COL c)))) "
            + "(= (TOK_TABLE_OR_COL d) (- (TOK_TABLE_OR_COL d) 1))) "
            + "(TOK_WHERE (TOK_FUNCTION TOK_ISNULL (TOK_TABLE_OR_COL value))))";
    ASTNode ast = parse("UPDATE src SET key = -3+(5*9)%8, val = cast(6.1 + c as INT), d = d - 1 WHERE value IS NULL");
    Assert.assertEquals("AST doesn't match", expected, ast.toStringTree());
}
/** UPDATE with two SET assignments plus a WHERE clause. */
@Test
public void testUpdateWithWhereMultiSet() throws ParseException {
    final String expected =
            "(TOK_UPDATE_TABLE (TOK_TABNAME src) "
            + "(TOK_SET_COLUMNS_CLAUSE "
            + "(= (TOK_TABLE_OR_COL key) 3) "
            + "(= (TOK_TABLE_OR_COL value) 8)) "
            + "(TOK_WHERE (= (TOK_TABLE_OR_COL VALUE) 1230997)))";
    ASTNode ast = parse("UPDATE src SET key = 3, value = 8 WHERE VALUE = 1230997");
    Assert.assertEquals("AST doesn't match", expected, ast.toStringTree());
}
/** INSERT INTO ... SELECT parses to a full TOK_QUERY with an INSERT_INTO target. */
@Test
public void testStandardInsertIntoTable() throws ParseException {
    final String expected =
            "(TOK_QUERY "
            + "(TOK_FROM (TOK_TABREF (TOK_TABNAME page_view_stg) pvs)) "
            + "(TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME page_view))) "
            + "(TOK_SELECT "
            + "(TOK_SELEXPR (. (TOK_TABLE_OR_COL pvs) viewTime)) "
            + "(TOK_SELEXPR (. (TOK_TABLE_OR_COL pvs) userid))) "
            + "(TOK_WHERE (TOK_FUNCTION TOK_ISNULL (. (TOK_TABLE_OR_COL pvs) userid)))))";
    ASTNode ast = parse("INSERT into TABLE page_view SELECT pvs.viewTime, pvs.userid from page_view_stg pvs where pvs.userid is null");
    Assert.assertEquals("AST doesn't match", expected, ast.toStringTree());
}
@Test
public void testSelectStarFromAnonymousVirtTable1Row() throws ParseException {
    // A bare "values (...)" (no surrounding parentheses/alias) is not a valid
    // table reference, so the parse must fail with a specific error message.
    try {
        parse("select * from values (3,4)");
        // Use Assert.fail rather than the obfuscated assertFalse(..., true).
        Assert.fail("Expected ParseException");
    }
    catch(ParseException ex) {
        Assert.assertEquals("Failure didn't match.", "line 1:21 missing EOF at '(' near 'values'",ex.getMessage());
    }
}
@Test
public void testSelectStarFromVirtTable1Row() throws ParseException {
    // A one-row VALUES table with an explicit alias and column names.
    final String expected =
        "(TOK_QUERY " +
            "(TOK_FROM " +
            "(TOK_VIRTUAL_TABLE " +
            "(TOK_VIRTUAL_TABREF (TOK_TABNAME VC) (TOK_COL_NAME a b)) " +
            "(TOK_VALUES_TABLE (TOK_VALUE_ROW 3 4)))) " +
            "(TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))";
    ASTNode tree = parse("select * from (values (3,4)) as VC(a,b)");
    Assert.assertEquals("AST doesn't match", expected, tree.toStringTree());
}
@Test
public void testSelectStarFromVirtTable2Row() throws ParseException {
    // A two-row VALUES table; each row becomes its own TOK_VALUE_ROW.
    final String expected =
        "(TOK_QUERY " +
            "(TOK_FROM " +
            "(TOK_VIRTUAL_TABLE " +
            "(TOK_VIRTUAL_TABREF (TOK_TABNAME VC) (TOK_COL_NAME a b)) " +
            "(TOK_VALUES_TABLE (TOK_VALUE_ROW 1 2) (TOK_VALUE_ROW 3 4)))) " +
            "(TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))";
    ASTNode tree = parse("select * from (values (1,2),(3,4)) as VC(a,b)");
    Assert.assertEquals("AST doesn't match", expected, tree.toStringTree());
}
@Test
public void testSelectStarFromVirtTable2RowNamedProjections() throws ParseException {
    // Column aliases in the projection (a as c, b as d) over a VALUES table.
    final String expected =
        "(TOK_QUERY " +
            "(TOK_FROM " +
            "(TOK_VIRTUAL_TABLE " +
            "(TOK_VIRTUAL_TABREF (TOK_TABNAME VC) (TOK_COL_NAME a b)) " +
            "(TOK_VALUES_TABLE (TOK_VALUE_ROW 1 2) (TOK_VALUE_ROW 3 4)))) " +
            "(TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) " +
            "(TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL a) c) (TOK_SELEXPR (TOK_TABLE_OR_COL b) d))))";
    ASTNode tree = parse("select a as c, b as d from (values (1,2),(3,4)) as VC(a,b)");
    Assert.assertEquals("AST doesn't match", expected, tree.toStringTree());
}
@Test
public void testInsertIntoTableAsSelectFromNamedVirtTable() throws ParseException {
    // INSERT INTO fed by a SELECT over a named VALUES table, with a WHERE clause.
    final String expected =
        "(TOK_QUERY " +
            "(TOK_FROM " +
            "(TOK_VIRTUAL_TABLE " +
            "(TOK_VIRTUAL_TABREF (TOK_TABNAME VC) (TOK_COL_NAME a b)) " +
            "(TOK_VALUES_TABLE (TOK_VALUE_ROW 1 2) (TOK_VALUE_ROW 3 4)))) " +
            "(TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME page_view))) " +
            "(TOK_SELECT " +
            "(TOK_SELEXPR (TOK_TABLE_OR_COL a)) " +
            "(TOK_SELEXPR (TOK_TABLE_OR_COL b) c)) " +
            "(TOK_WHERE (= (TOK_TABLE_OR_COL b) 9))))";
    ASTNode tree = parse("insert into page_view select a,b as c from (values (1,2),(3,4)) as VC(a,b) where b = 9");
    Assert.assertEquals("AST doesn't match", expected, tree.toStringTree());
}
@Test
public void testInsertIntoTableFromAnonymousTable1Row() throws ParseException {
    // INSERT ... VALUES with no alias is modelled as an anonymous virtual table.
    final String expected =
        "(TOK_QUERY " +
            "(TOK_FROM " +
            "(TOK_VIRTUAL_TABLE " +
            "(TOK_VIRTUAL_TABREF TOK_ANONYMOUS) " +
            "(TOK_VALUES_TABLE (TOK_VALUE_ROW 1 2)))) " +
            "(TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME page_view))) " +
            "(TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))";
    ASTNode tree = parse("insert into page_view values(1,2)");
    Assert.assertEquals("AST doesn't match", expected, tree.toStringTree());
}
@Test
public void testInsertIntoTableFromAnonymousTable() throws ParseException {
    // Both spellings (with and without the optional "table" keyword, KW_TABLE)
    // must yield the same tree; signed literals become unary +/- nodes.
    final String expected =
        "(TOK_QUERY " +
            "(TOK_FROM " +
            "(TOK_VIRTUAL_TABLE " +
            "(TOK_VIRTUAL_TABREF TOK_ANONYMOUS) " +
            "(TOK_VALUES_TABLE (TOK_VALUE_ROW (- 1) 2) (TOK_VALUE_ROW 3 (+ 4))))) " +
            "(TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME page_view))) " +
            "(TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))";
    ASTNode tree = parse("insert into table page_view values(-1,2),(3,+4)");
    Assert.assertEquals("AST doesn't match", expected, tree.toStringTree());
    //same query as above less the "table" keyword KW_TABLE
    tree = parse("insert into page_view values(-1,2),(3,+4)");
    Assert.assertEquals("AST doesn't match", expected, tree.toStringTree());
}
@Test
public void testMultiInsert() throws ParseException {
    // A single FROM feeding two INSERT branches (multi-insert syntax).
    final String expected =
        "(TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME S))) " +
            "(TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME T1))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL a)) (TOK_SELEXPR (TOK_TABLE_OR_COL b)))) " +
            "(TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME T2))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL c)) (TOK_SELEXPR (TOK_TABLE_OR_COL d)))))";
    ASTNode tree = parse("from S insert into T1 select a, b insert into T2 select c, d");
    Assert.assertEquals("AST doesn't match", expected, tree.toStringTree());
}
}
| |
/*
* Copyright (c) 2007 BUSINESS OBJECTS SOFTWARE LIMITED
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of Business Objects nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/*
* GemEntity.java
* Creation date: Oct 3, 2002.
* By: Edward Lam
*/
package org.openquark.cal.services;
import java.util.List;
import java.util.Locale;
import org.openquark.cal.compiler.DataConstructor;
import org.openquark.cal.compiler.FunctionalAgent;
import org.openquark.cal.compiler.QualifiedName;
import org.openquark.cal.compiler.Scope;
import org.openquark.cal.compiler.ScopedEntityNamingPolicy;
import org.openquark.cal.compiler.TypeExpr;
import org.openquark.cal.metadata.FunctionalAgentMetadata;
import org.openquark.cal.metadata.MetadataManager;
/**
* A GemEntity encapsulates information for an envEntity, including its source, design, and metadata.
* @author Edward Lam
*/
public class GemEntity {

    /** The FunctionalAgent encapsulated by this GemEntity. */
    private final FunctionalAgent functionalAgent;

    /**
     * The type of the entity, cached for fast lookup.
     * FunctionalAgent.getTypeExpr() always returns a copy, but its type doesn't change when type
     * checking is done, so a single copy can be cached and shared.
     *
     * This field is lazily computed on first access; access is guarded by the
     * synchronization on getTypeExpr().
     */
    private TypeExpr entityType;

    /** The resource manager to provide metadata and designs for this entity. */
    private final VirtualResourceManager virtualResourceManager;

    /**
     * Constructor for a GemEntity.
     * @param envEntity the functional agent backing this entity. Must not be null.
     * @param virtualResourceManager provides metadata and designs for this entity.
     */
    public GemEntity(FunctionalAgent envEntity, VirtualResourceManager virtualResourceManager) {
        // Fail fast: validate arguments before assigning any state so a bad
        // argument can never leave behind a partially-initialized instance.
        Assert.isNotNullArgument(envEntity, "envEntity");
        this.functionalAgent = envEntity;
        this.virtualResourceManager = virtualResourceManager;
    }

    /**
     * Return the FunctionalAgent backing this GemEntity
     * @return FunctionalAgent
     */
    public FunctionalAgent getFunctionalAgent() {
        return functionalAgent;
    }

    /**
     * Get the name of this entity.
     * @return QualifiedName the name of the entity.
     */
    public final QualifiedName getName() {
        return functionalAgent.getName();
    }

    /**
     * Get the metadata for a scoped entity in this module.
     * @param locale the locale associated with the metadata.
     * @return the metadata for this entity. If the entity has no metadata (or no metadata manager
     *   is registered for the entity's module), then default (empty) metadata is returned.
     */
    public FunctionalAgentMetadata getMetadata(Locale locale) {
        MetadataManager metadataManager = virtualResourceManager.getMetadataManager(functionalAgent.getName().getModuleName());
        if (metadataManager == null) {
            return (FunctionalAgentMetadata)MetadataManager.getEmptyMetadata(functionalAgent, locale);
        }
        return (FunctionalAgentMetadata)metadataManager.getMetadata(functionalAgent, locale);
    }

    /**
     * @return an array of the metadata objects for this gem, across all locales. Empty if no
     *   metadata manager is registered for the entity's module.
     */
    public FunctionalAgentMetadata[] getMetadataForAllLocales() {
        MetadataManager metadataManager = virtualResourceManager.getMetadataManager(functionalAgent.getName().getModuleName());
        if (metadataManager == null) {
            return new FunctionalAgentMetadata[0];
        }
        CALFeatureName featureName = CALFeatureName.getScopedEntityFeatureName(functionalAgent);
        List<ResourceName> listOfResourceNames = metadataManager.getMetadataResourceNamesForAllLocales(featureName);

        int n = listOfResourceNames.size();
        FunctionalAgentMetadata[] result = new FunctionalAgentMetadata[n];
        for (int i = 0; i < n; i++) {
            // Each resource name corresponds to one localized metadata variant.
            ResourceName resourceName = listOfResourceNames.get(i);
            result[i] = (FunctionalAgentMetadata)metadataManager.getMetadata(featureName, LocalizedResourceName.localeOf(resourceName));
        }
        return result;
    }

    /**
     * @return true if there is a saved design associated with this entity.
     */
    public boolean hasDesign() {
        GemDesignManager designManager = virtualResourceManager.getDesignManager(functionalAgent.getName().getModuleName());
        if (designManager == null) {
            return false;
        }
        return designManager.hasGemDesign(this);
    }

    /**
     * @param loadStatus the tracking object for the status of the load operation.
     * @return the gem design for this entity, or null if the entity's module has no design manager.
     */
    public GemDesign getDesign(Status loadStatus) {
        GemDesignManager designManager = virtualResourceManager.getDesignManager(functionalAgent.getName().getModuleName());
        if (designManager == null) {
            return null;
        }
        return designManager.getGemDesign(getName(), loadStatus);
    }

    /**
     * Returns the scope of the entity.
     * @return FunctionalAgent.Scope
     */
    public Scope getScope() {
        return functionalAgent.getScope();
    }

    /**
     * The type arity is defined to be the number of arguments that this
     * gem appears to accept when placed on the GemCutter tabletop. This is equal to the
     * number of application nodes in the type of the gem. It is one of several notion of what the
     * "arity" of a gem means. Note, because of hidden overloaded arguments, the actual number
     * of arguments required to fully saturate and evaluate a gem can be greater than this
     * number. For example, the add gem has type Num a => a -> a -> a. Its type arity is 2,
     * but 3 arguments are required to fully saturate it because of the hidden dictionary argument.
     * Also, there is yet another notion of arity: If we say "f = sin" then f has runtime arity 0
     * since it is a CAF, even though the nApplications (type arity) is 1.
     *
     * @return int
     */
    public int getTypeArity() {
        return getTypeExpr().getArity();
    }

    /**
     * Returns the TypeExpr of this entity. For example, if the TypeExpr held
     * by the entity is (a -> (Int, b)) -> (a, b) then the returned TypeExpr is
     * (a' -> (Int, b')) -> (a', b').
     *
     * NOTE(review): the type is computed once and the same cached instance is
     * returned on every subsequent call (it is NOT a fresh copy per call, unlike
     * FunctionalAgent.getTypeExpr()). Callers should treat the returned value as
     * read-only -- confirm no caller mutates it.
     *
     * @return TypeExpr
     */
    public synchronized TypeExpr getTypeExpr() {
        if (entityType == null) {
            entityType = functionalAgent.getTypeExpr();
        }
        return entityType;
    }

    /**
     * Return whether this entity is a data constructor.
     * @return boolean true if this is a data constructor, false otherwise.
     */
    public boolean isDataConstructor() {
        return functionalAgent instanceof DataConstructor;
    }

    /**
     * Returns the number of arguments of the entity that have names, for example, as specified
     * in the CAL source. The number of named arguments will be less than or equal to the number
     * of actual arguments for the entity. If an entity (such as a function) takes 5 arguments,
     * and has 3 named arguments, then the 4th and 5th argument are unnamed.
     * @return int number of named arguments
     */
    public int getNNamedArguments() {
        return functionalAgent.getNArgumentNames();
    }

    /**
     * Returns the name of the given named argument.
     * @param argN int index into the named arguments.
     * @return unqualified name of the named argument
     */
    public String getNamedArgument(int argN) {
        return functionalAgent.getArgumentName(argN);
    }

    /**
     * Returns a String representing the name of the entity with respect to a particular naming policy.
     * For example, one such naming policy might be to always return the fully qualified name.
     * @param namingPolicy the policy determining how the name is rendered.
     * @return String the adapted name.
     */
    public String getAdaptedName(ScopedEntityNamingPolicy namingPolicy) {
        return functionalAgent.getAdaptedName(namingPolicy);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString() {
        return "GemEntity: " + functionalAgent.toString();
    }
}
| |
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.pkgcache;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.analysis.BlazeDirectories;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.util.AnalysisMock;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.flags.InvocationPolicyEnforcer;
import com.google.devtools.build.lib.packages.BuildFileContainsErrorsException;
import com.google.devtools.build.lib.packages.NoSuchPackageException;
import com.google.devtools.build.lib.packages.NoSuchTargetException;
import com.google.devtools.build.lib.packages.Package;
import com.google.devtools.build.lib.packages.Preprocessor;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.skyframe.DiffAwareness;
import com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy;
import com.google.devtools.build.lib.skyframe.PackageLookupValue.BuildFileName;
import com.google.devtools.build.lib.skyframe.PrecomputedValue;
import com.google.devtools.build.lib.skyframe.SequencedSkyframeExecutor;
import com.google.devtools.build.lib.skyframe.SkyValueDirtinessChecker;
import com.google.devtools.build.lib.skyframe.SkyframeExecutor;
import com.google.devtools.build.lib.syntax.BuildFileAST;
import com.google.devtools.build.lib.testutil.FoundationTestCase;
import com.google.devtools.build.lib.testutil.MoreAsserts;
import com.google.devtools.build.lib.util.BlazeClock;
import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor;
import com.google.devtools.build.lib.vfs.ModifiedFileSet;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.common.options.OptionsParser;
import com.google.devtools.common.options.OptionsParsingException;
import java.io.IOException;
import java.util.List;
import java.util.UUID;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests for package loading.
*/
@RunWith(JUnit4.class)
public class PackageCacheTest extends FoundationTestCase {
private AnalysisMock analysisMock;
private ConfiguredRuleClassProvider ruleClassProvider;
private SkyframeExecutor skyframeExecutor;
@Before
public final void initializeSkyframeExecutor() throws Exception {
analysisMock = AnalysisMock.get();
ruleClassProvider = analysisMock.createRuleClassProvider();
BlazeDirectories directories =
new BlazeDirectories(outputBase, outputBase, rootDirectory, analysisMock.getProductName());
skyframeExecutor =
SequencedSkyframeExecutor.create(
analysisMock
.getPackageFactoryForTesting()
.create(ruleClassProvider, scratch.getFileSystem()),
directories,
null, /* BinTools */
null, /* workspaceStatusActionFactory */
ruleClassProvider.getBuildInfoFactories(),
ImmutableList.<DiffAwareness.Factory>of(),
Predicates.<PathFragment>alwaysFalse(),
Preprocessor.Factory.Supplier.NullSupplier.INSTANCE,
AnalysisMock.get().getSkyFunctions(),
ImmutableList.<PrecomputedValue.Injected>of(),
ImmutableList.<SkyValueDirtinessChecker>of(),
analysisMock.getProductName(),
CrossRepositoryLabelViolationStrategy.ERROR,
ImmutableList.of(BuildFileName.BUILD_DOT_BAZEL, BuildFileName.BUILD));
setUpSkyframe(parsePackageCacheOptions());
}
private void setUpSkyframe(PackageCacheOptions packageCacheOptions) {
PathPackageLocator pkgLocator = PathPackageLocator.create(
null, packageCacheOptions.packagePath, reporter, rootDirectory, rootDirectory);
packageCacheOptions.showLoadingProgress = true;
packageCacheOptions.globbingThreads = 7;
skyframeExecutor.preparePackageLoading(
pkgLocator,
packageCacheOptions,
analysisMock.getDefaultsPackageContent(),
UUID.randomUUID(),
ImmutableMap.<String, String>of(),
new TimestampGranularityMonitor(BlazeClock.instance()));
skyframeExecutor.setDeletedPackages(
ImmutableSet.copyOf(packageCacheOptions.getDeletedPackages()));
}
private PackageCacheOptions parsePackageCacheOptions(String... options) throws Exception {
OptionsParser parser = OptionsParser.newOptionsParser(PackageCacheOptions.class);
parser.parse(new String[] { "--default_visibility=public" });
parser.parse(options);
InvocationPolicyEnforcer optionsPolicyEnforcer = analysisMock.getInvocationPolicyEnforcer();
try {
optionsPolicyEnforcer.enforce(parser);
} catch (OptionsParsingException e) {
throw new IllegalStateException(e);
}
return parser.getOptions(PackageCacheOptions.class);
}
protected void setOptions(String... options) throws Exception {
setUpSkyframe(parsePackageCacheOptions(options));
}
private PackageManager getPackageManager() {
return skyframeExecutor.getPackageManager();
}
private void invalidatePackages() throws InterruptedException {
skyframeExecutor.invalidateFilesUnderPathForTesting(
reporter, ModifiedFileSet.EVERYTHING_MODIFIED, rootDirectory);
}
private Package getPackage(String packageName)
throws NoSuchPackageException, InterruptedException {
return getPackageManager().getPackage(reporter,
PackageIdentifier.createInMainRepo(packageName));
}
private Target getTarget(Label label)
throws NoSuchPackageException, NoSuchTargetException, InterruptedException {
return getPackageManager().getTarget(reporter, label);
}
private Target getTarget(String label) throws Exception {
return getTarget(Label.parseAbsolute(label));
}
private void createPkg1() throws IOException {
scratch.file("pkg1/BUILD", "cc_library(name = 'foo') # a BUILD file");
}
// Check that a substring is present in an error message.
private void checkGetPackageFails(String packageName, String expectedMessage) throws Exception {
try {
getPackage(packageName);
fail();
} catch (NoSuchPackageException e) {
assertThat(e.getMessage()).contains(expectedMessage);
}
}
@Test
public void testGetPackage() throws Exception {
createPkg1();
Package pkg1 = getPackage("pkg1");
assertEquals("pkg1", pkg1.getName());
assertEquals("/workspace/pkg1/BUILD",
pkg1.getFilename().toString());
assertSame(pkg1, getPackageManager().getPackage(reporter,
PackageIdentifier.createInMainRepo("pkg1")));
}
@Test
public void testASTIsNotRetained() throws Exception {
createPkg1();
Package pkg1 = getPackage("pkg1");
MoreAsserts.assertInstanceOfNotReachable(pkg1, BuildFileAST.class);
}
@Test
public void testGetNonexistentPackage() throws Exception {
checkGetPackageFails("not-there",
"no such package 'not-there': "
+ "BUILD file not found on package path");
}
@Test
public void testGetPackageWithInvalidName() throws Exception {
scratch.file("invalidpackagename&42/BUILD", "cc_library(name = 'foo') # a BUILD file");
checkGetPackageFails(
"invalidpackagename&42",
"no such package 'invalidpackagename&42': Invalid package name 'invalidpackagename&42'");
}
@Test
public void testGetTarget() throws Exception {
createPkg1();
Label label = Label.parseAbsolute("//pkg1:foo");
Target target = getTarget(label);
assertEquals(label, target.getLabel());
}
@Test
public void testGetNonexistentTarget() throws Exception {
createPkg1();
try {
getTarget("//pkg1:not-there");
fail();
} catch (NoSuchTargetException e) {
assertThat(e).hasMessage("no such target '//pkg1:not-there': target 'not-there' "
+ "not declared in package 'pkg1' defined by /workspace/pkg1/BUILD");
}
}
/**
* A missing package is one for which no BUILD file can be found. The
* PackageCache caches failures of this kind until the next sync.
*/
@Test
public void testRepeatedAttemptsToParseMissingPackage() throws Exception {
checkGetPackageFails("missing",
"no such package 'missing': "
+ "BUILD file not found on package path");
// Still missing:
checkGetPackageFails("missing",
"no such package 'missing': "
+ "BUILD file not found on package path");
// Update the BUILD file on disk so "missing" is no longer missing:
scratch.file("missing/BUILD",
"# an ok build file");
// Still missing:
checkGetPackageFails("missing",
"no such package 'missing': "
+ "BUILD file not found on package path");
invalidatePackages();
// Found:
Package missing = getPackage("missing");
assertEquals("missing", missing.getName());
}
/**
* A broken package is one that exists but contains lexer/parser/evaluator errors. The
* PackageCache only makes one attempt to parse each package once found.
*
* <p>Depending on the strictness of the PackageFactory, parsing a broken package may cause a
* Package object to be returned (possibly missing some rules) or an exception to be thrown. For
* this test we need that strict behavior.
*
* <p>Note: since the PackageCache.setStrictPackageCreation method was deleted (since it wasn't
* used by any significant clients) creating a "broken" build file got trickier--syntax errors are
* not enough. For now, we create an unreadable BUILD file, which will cause an IOException to be
* thrown. This test seems less valuable than it once did.
*/
@Test
public void testParseBrokenPackage() throws Exception {
reporter.removeHandler(failFastHandler);
Path brokenBuildFile = scratch.file("broken/BUILD");
brokenBuildFile.setReadable(false);
try {
getPackage("broken");
fail();
} catch (BuildFileContainsErrorsException e) {
assertThat(e.getMessage()).contains("/workspace/broken/BUILD (Permission denied)");
}
eventCollector.clear();
// Update the BUILD file on disk so "broken" is no longer broken:
scratch.overwriteFile("broken/BUILD",
"# an ok build file");
invalidatePackages(); // resets cache of failures
Package broken = getPackage("broken");
assertEquals("broken", broken.getName());
assertNoEvents();
}
@Test
public void testPackageInErrorReloadedWhenFixed() throws Exception {
reporter.removeHandler(failFastHandler);
Path build = scratch.file("a/BUILD", "cc_library(name='a', feet='stinky')");
build.setLastModifiedTime(1);
Package a1 = getPackage("a");
assertTrue(a1.containsErrors());
assertContainsEvent("//a:a: no such attribute 'feet'");
eventCollector.clear();
build.delete();
build = scratch.file("a/BUILD", "cc_library(name='a', srcs=['a.cc'])");
build.setLastModifiedTime(2);
invalidatePackages();
Package a2 = getPackage("a");
assertNotSame(a1, a2);
assertFalse(a2.containsErrors());
assertNoEvents();
}
@Test
public void testModifiedBuildFileCausesReloadAfterSync() throws Exception {
Path path = scratch.file("pkg/BUILD",
"cc_library(name = 'foo')");
path.setLastModifiedTime(1000);
Package oldPkg = getPackage("pkg");
// modify BUILD file (and change its timestamp)
path.delete();
scratch.file("pkg/BUILD", "cc_library(name = 'bar')");
path.setLastModifiedTime(999); // earlier; mtime doesn't have to advance
assertSame(oldPkg, getPackage("pkg")); // change not yet visible
invalidatePackages();
Package newPkg = getPackage("pkg");
assertNotSame(oldPkg, newPkg);
assertNotNull(newPkg.getTarget("bar"));
}
@Test
public void testTouchedBuildFileCausesReloadAfterSync() throws Exception {
Path path = scratch.file("pkg/BUILD",
"cc_library(name = 'foo')");
path.setLastModifiedTime(1000);
Package oldPkg = getPackage("pkg");
path.setLastModifiedTime(1001);
assertSame(oldPkg, getPackage("pkg")); // change not yet visible
invalidatePackages();
Package newPkg = getPackage("pkg");
assertNotSame(oldPkg, newPkg);
}
@Test
public void testMovedBuildFileCausesReloadAfterSync() throws Exception {
Path buildFile1 = scratch.file("pkg/BUILD",
"cc_library(name = 'foo')");
Path buildFile2 = scratch.file("/otherroot/pkg/BUILD",
"cc_library(name = 'bar')");
setOptions("--package_path=/workspace:/otherroot");
Package oldPkg = getPackage("pkg");
assertSame(oldPkg, getPackage("pkg")); // change not yet visible
assertEquals(buildFile1, oldPkg.getFilename());
assertEquals(rootDirectory, oldPkg.getSourceRoot());
buildFile1.delete();
invalidatePackages();
Package newPkg = getPackage("pkg");
assertNotSame(oldPkg, newPkg);
assertEquals(buildFile2, newPkg.getFilename());
assertEquals(scratch.dir("/otherroot"), newPkg.getSourceRoot());
// TODO(bazel-team): (2009) test BUILD file moves in the other direction too.
}
private Path rootDir1;
private Path rootDir2;
private void setUpCacheWithTwoRootLocator() throws Exception {
// Root 1:
// /a/BUILD
// /b/BUILD
// /c/d
// /c/e
//
// Root 2:
// /b/BUILD
// /c/BUILD
// /c/d/BUILD
// /f/BUILD
// /f/g
// /f/g/h/BUILD
rootDir1 = scratch.dir("/workspace");
rootDir2 = scratch.dir("/otherroot");
createBuildFile(rootDir1, "a", "foo.txt", "bar/foo.txt");
createBuildFile(rootDir1, "b", "foo.txt", "bar/foo.txt");
rootDir1.getRelative("c").createDirectory();
rootDir1.getRelative("c/d").createDirectory();
rootDir1.getRelative("c/e").createDirectory();
createBuildFile(rootDir2, "c", "d", "d/foo.txt", "foo.txt", "bar/foo.txt", "e", "e/foo.txt");
createBuildFile(rootDir2, "c/d", "foo.txt");
createBuildFile(rootDir2, "f", "g/foo.txt", "g/h", "g/h/foo.txt", "foo.txt");
createBuildFile(rootDir2, "f/g/h", "foo.txt");
setOptions("--package_path=/workspace:/otherroot");
}
protected Path createBuildFile(Path workspace, String packageName,
String... targets) throws IOException {
String[] lines = new String[targets.length];
for (int i = 0; i < targets.length; i++) {
lines[i] = "sh_library(name='" + targets[i] + "')";
}
return scratch.file(workspace + "/" + packageName + "/BUILD", lines);
}
private void assertLabelValidity(boolean expected, String labelString)
throws Exception {
Label label = Label.parseAbsolute(labelString);
boolean actual = false;
String error = null;
try {
getTarget(label);
actual = true;
} catch (NoSuchPackageException | NoSuchTargetException e) {
error = e.getMessage();
}
if (actual != expected) {
fail("assertLabelValidity(" + label + ") "
+ actual + ", not equal to expected value " + expected
+ " (error=" + error + ")");
}
}
private void assertPackageLoadingFails(String pkgName, String expectedError) throws Exception {
Package pkg = getPackage(pkgName);
assertTrue(pkg.containsErrors());
assertContainsEvent(expectedError);
}
@Test
public void testLocationForLabelCrossingSubpackage() throws Exception {
scratch.file("e/f/BUILD");
scratch.file("e/BUILD",
"# Whatever",
"filegroup(name='fg', srcs=['f/g'])");
reporter.removeHandler(failFastHandler);
List<Event> events = getPackage("e").getEvents();
assertThat(events).hasSize(1);
assertEquals(2, events.get(0).getLocation().getStartLineAndColumn().getLine());
}
/** Static tests (i.e. no changes to filesystem, nor calls to sync). */
@Test
public void testLabelValidity() throws Exception {
reporter.removeHandler(failFastHandler);
setUpCacheWithTwoRootLocator();
scratch.file(rootDir2 + "/c/d/foo.txt");
assertLabelValidity(true, "//a:foo.txt");
assertLabelValidity(true, "//a:bar/foo.txt");
assertLabelValidity(false, "//a/bar:foo.txt"); // no such package a/bar
assertLabelValidity(true, "//b:foo.txt");
assertLabelValidity(true, "//b:bar/foo.txt");
assertLabelValidity(false, "//b/bar:foo.txt"); // no such package b/bar
assertLabelValidity(true, "//c:foo.txt");
assertLabelValidity(true, "//c:bar/foo.txt");
assertLabelValidity(false, "//c/bar:foo.txt"); // no such package c/bar
assertLabelValidity(true, "//c:foo.txt");
assertLabelValidity(false, "//c:d/foo.txt"); // crosses boundary of c/d
assertLabelValidity(true, "//c/d:foo.txt");
assertLabelValidity(true, "//c:foo.txt");
assertLabelValidity(true, "//c:e");
assertLabelValidity(true, "//c:e/foo.txt");
assertLabelValidity(false, "//c/e:foo.txt"); // no such package c/e
assertLabelValidity(true, "//f:foo.txt");
assertLabelValidity(true, "//f:g/foo.txt");
assertLabelValidity(false, "//f/g:foo.txt"); // no such package f/g
assertLabelValidity(false, "//f:g/h/foo.txt"); // crosses boundary of f/g/h
assertLabelValidity(false, "//f/g:h/foo.txt"); // no such package f/g
assertLabelValidity(true, "//f/g/h:foo.txt");
}
/** Dynamic tests of label validity. */
@Test
public void testAddedBuildFileCausesLabelToBecomeInvalid() throws Exception {
reporter.removeHandler(failFastHandler);
scratch.file("pkg/BUILD",
" cc_library(name = 'foo', ",
" srcs = ['x/y.cc'])");
assertLabelValidity(true, "//pkg:x/y.cc");
// The existence of this file makes 'x/y.cc' an invalid reference.
scratch.file("pkg/x/BUILD");
// but not yet...
assertLabelValidity(true, "//pkg:x/y.cc");
invalidatePackages();
// now:
assertPackageLoadingFails("pkg",
"Label '//pkg:x/y.cc' crosses boundary of subpackage 'pkg/x' "
+ "(perhaps you meant to put the colon here: '//pkg/x:y.cc'?)");
}
@Test
public void testDeletedPackages() throws Exception {
  // Tolerate the expected loading error instead of failing fast on it.
  reporter.removeHandler(failFastHandler);
  setUpCacheWithTwoRootLocator();
  createBuildFile(rootDir1, "c", "d/x");
  // Now package c exists in both roots, and c/d exists only in the second
  // root. It's as if we've merged c and c/d in the first root.
  // c/d is still a subpackage--found in the second root:
  assertEquals(rootDir2.getRelative("c/d/BUILD"),
      getPackage("c/d").getFilename());
  // Subpackage labels are still valid...
  assertLabelValidity(true, "//c/d:foo.txt");
  // ...and this crosses package boundaries:
  assertLabelValidity(false, "//c:d/x");
  assertPackageLoadingFails("c",
      "Label '//c:d/x' crosses boundary of subpackage 'c/d' (have you deleted c/d/BUILD? "
      + "If so, use the --deleted_packages=c/d option)");
  assertTrue(getPackageManager().isPackage(
      reporter, PackageIdentifier.createInMainRepo("c/d")));
  // Mark c/d as deleted and re-evaluate.
  setOptions("--deleted_packages=c/d");
  invalidatePackages();
  assertFalse(getPackageManager().isPackage(
      reporter, PackageIdentifier.createInMainRepo("c/d")));
  // c/d is no longer a subpackage--even though there's a BUILD file in the
  // second root:
  try {
    getPackage("c/d");
    fail();
  } catch (NoSuchPackageException e) {
    assertThat(e).hasMessage(
        "no such package 'c/d': Package is considered deleted due to --deleted_packages");
  }
  // Labels in the subpackage are no longer valid...
  assertLabelValidity(false, "//c/d:x");
  // ...and now d is just a subdirectory of c:
  assertLabelValidity(true, "//c:d/x");
}
/** Package-level features declared via package() are exposed on rules in that package. */
@Test
public void testPackageFeatures() throws Exception {
  scratch.file("peach/BUILD",
      "package(features = ['crosstool_default_false'])",
      "cc_library(name = 'cc', srcs = ['cc.cc'])");
  Rule cc = (Rule) getTarget("//peach:cc");
  // The single feature from package() is visible on the rule.
  assertThat(cc.getFeatures()).hasSize(1);
}
/** A broken package is still reported as broken when it appears on multiple package-path entries. */
@Test
public void testBrokenPackageOnMultiplePackagePathEntries() throws Exception {
  // Tolerate the expected loading error instead of failing fast on it.
  reporter.removeHandler(failFastHandler);
  // The same root listed twice, so every package is found on two package-path entries.
  setOptions("--package_path=.:.");
  scratch.file("x/y/BUILD");
  // The out 'y/z.h' crosses the boundary of subpackage x/y, which is an error in package x.
  scratch.file("x/BUILD",
      "genrule(name = 'x',",
      "srcs = [],",
      "outs = ['y/z.h'],",
      "cmd = '')");
  Package p = getPackage("x");
  assertTrue(p.containsErrors());
}
}
| |
// Copyright 2019 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.buildtool.util;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Throwables.throwIfUnchecked;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static com.google.common.truth.Truth.assertWithMessage;
import static org.junit.Assert.fail;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.eventbus.SubscriberExceptionContext;
import com.google.common.eventbus.SubscriberExceptionHandler;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.ActionAnalysisMetadata;
import com.google.devtools.build.lib.actions.ActionGraph;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.ExecException;
import com.google.devtools.build.lib.actions.util.ActionsTestUtil;
import com.google.devtools.build.lib.analysis.BlazeDirectories;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.FileProvider;
import com.google.devtools.build.lib.analysis.FilesToRunProvider;
import com.google.devtools.build.lib.analysis.ServerDirectories;
import com.google.devtools.build.lib.analysis.TopLevelArtifactContext;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.WorkspaceStatusAction;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildConfigurationCollection;
import com.google.devtools.build.lib.analysis.config.InvalidConfigurationException;
import com.google.devtools.build.lib.analysis.starlark.StarlarkTransition.TransitionException;
import com.google.devtools.build.lib.analysis.util.AnalysisMock;
import com.google.devtools.build.lib.analysis.util.AnalysisTestUtil;
import com.google.devtools.build.lib.analysis.util.AnalysisTestUtil.DummyWorkspaceStatusActionContext;
import com.google.devtools.build.lib.buildtool.BuildRequest;
import com.google.devtools.build.lib.buildtool.BuildResult;
import com.google.devtools.build.lib.buildtool.BuildTool;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.LabelSyntaxException;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.ExtendedEventHandler;
import com.google.devtools.build.lib.events.NullEventHandler;
import com.google.devtools.build.lib.events.util.EventCollectionApparatus;
import com.google.devtools.build.lib.exec.BinTools;
import com.google.devtools.build.lib.exec.ModuleActionContextRegistry;
import com.google.devtools.build.lib.includescanning.IncludeScanningModule;
import com.google.devtools.build.lib.integration.util.IntegrationMock;
import com.google.devtools.build.lib.network.ConnectivityStatusProvider;
import com.google.devtools.build.lib.network.NoOpConnectivityModule;
import com.google.devtools.build.lib.packages.NoSuchPackageException;
import com.google.devtools.build.lib.packages.NoSuchTargetException;
import com.google.devtools.build.lib.packages.util.MockToolsConfig;
import com.google.devtools.build.lib.pkgcache.PackageManager;
import com.google.devtools.build.lib.rules.repository.RepositoryDelegatorFunction;
import com.google.devtools.build.lib.runtime.BlazeModule;
import com.google.devtools.build.lib.runtime.BlazeRuntime;
import com.google.devtools.build.lib.runtime.BlazeServerStartupOptions;
import com.google.devtools.build.lib.runtime.BlazeWorkspace;
import com.google.devtools.build.lib.runtime.CommandEnvironment;
import com.google.devtools.build.lib.runtime.WorkspaceBuilder;
import com.google.devtools.build.lib.server.FailureDetails.FailureDetail;
import com.google.devtools.build.lib.server.FailureDetails.Spawn;
import com.google.devtools.build.lib.server.FailureDetails.Spawn.Code;
import com.google.devtools.build.lib.shell.AbnormalTerminationException;
import com.google.devtools.build.lib.shell.Command;
import com.google.devtools.build.lib.shell.CommandException;
import com.google.devtools.build.lib.skyframe.ConfiguredTargetAndData;
import com.google.devtools.build.lib.skyframe.PrecomputedValue;
import com.google.devtools.build.lib.skyframe.PrecomputedValue.Injected;
import com.google.devtools.build.lib.skyframe.SkyframeExecutor;
import com.google.devtools.build.lib.skyframe.util.SkyframeExecutorTestUtils;
import com.google.devtools.build.lib.standalone.StandaloneModule;
import com.google.devtools.build.lib.testutil.Suite;
import com.google.devtools.build.lib.testutil.TestConstants;
import com.google.devtools.build.lib.testutil.TestConstants.InternalTestExecutionMode;
import com.google.devtools.build.lib.testutil.TestFileOutErr;
import com.google.devtools.build.lib.testutil.TestSpec;
import com.google.devtools.build.lib.testutil.TestUtils;
import com.google.devtools.build.lib.util.CommandBuilder;
import com.google.devtools.build.lib.util.CommandUtils;
import com.google.devtools.build.lib.util.LoggingUtil;
import com.google.devtools.build.lib.util.io.FileOutErr;
import com.google.devtools.build.lib.util.io.OutErr;
import com.google.devtools.build.lib.util.io.RecordingOutErr;
import com.google.devtools.build.lib.vfs.DigestHashFunction;
import com.google.devtools.build.lib.vfs.FileSystem;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.util.FileSystems;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsParser;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
/**
* A base class for integration tests that use the {@link BuildTool}. These tests basically run a
* little build and check what happens.
*
* <p>All integration tests are at least size medium.
*/
@TestSpec(size = Suite.MEDIUM_TESTS)
public abstract class BuildIntegrationTestCase {
/** Thrown when an integration test case fails. */
@Ignore
public static class IntegrationTestExecException extends ExecException {
  public IntegrationTestExecException(String message) {
    super(message);
  }

  public IntegrationTestExecException(String message, Throwable cause) {
    super(message, cause);
  }

  // Reports the failure as a non-zero-exit spawn failure in the structured FailureDetail.
  @Override
  protected FailureDetail getFailureDetail(String message) {
    return FailureDetail.newBuilder()
        .setSpawn(Spawn.newBuilder().setCode(Code.NON_ZERO_EXIT))
        .setMessage(message)
        .build();
  }
}
// File system under test; created in createFilesAndMocks() via createFileSystem().
protected FileSystem fileSystem;
// Collects events (errors, warnings, ...) reported during the build for assertions.
protected EventCollectionApparatus events = new EventCollectionApparatus();
// Where build output goes during buildTarget(); defaults to the real stdout/stderr.
protected OutErr outErr = OutErr.SYSTEM_OUT_ERR;
// Root under which all per-test files (output base, workspace) are created.
protected Path testRoot;
protected ServerDirectories serverDirectories;
protected BlazeDirectories directories;
protected MockToolsConfig mockToolsConfig;
protected BinTools binTools;
protected BlazeRuntimeWrapper runtimeWrapper;
protected Path outputBase;
protected String outputBaseName = "outputBase";
private Path workspace;
// Records exceptions thrown on the event bus so cleanUp() can rethrow them.
protected RecordingExceptionHandler subscriberException = new RecordingExceptionHandler();
/**
 * Creates the test file system, output base, workspace, directories, mock tools, and runtime
 * wrapper before each test. Note the ordering: {@link #beforeCreatingWorkspace} fires after the
 * test root exists but before the workspace directory is created.
 */
@Before
public final void createFilesAndMocks() throws Exception {
  runPriorToBeforeMethods();
  events.setFailFast(false);
  // TODO(mschaller): This will ignore any attempt by Blaze modules to provide a filesystem;
  // consider something better.
  this.fileSystem = createFileSystem();
  this.testRoot = createTestRoot(fileSystem);
  outputBase = testRoot.getRelative(outputBaseName);
  outputBase.createDirectoryAndParents();
  workspace = testRoot.getRelative(getDesiredWorkspaceRelative());
  beforeCreatingWorkspace(workspace);
  workspace.createDirectoryAndParents();
  serverDirectories = createServerDirectories();
  directories =
      new BlazeDirectories(
          serverDirectories,
          workspace,
          /* defaultSystemJavabase= */ null,
          TestConstants.PRODUCT_NAME);
  binTools =
      IntegrationMock.get().getIntegrationBinTools(fileSystem, directories);
  mockToolsConfig = new MockToolsConfig(workspace, realFileSystem());
  setupMockTools();
  createRuntimeWrapper();
}
/** Creates the server directory layout; everything is rooted at or below {@code outputBase}. */
protected ServerDirectories createServerDirectories() {
  return new ServerDirectories(
      /*installBase=*/ outputBase,
      /*outputBase=*/ outputBase,
      /*outputUserRoot=*/ outputBase,
      /*execRootBase=*/ outputBase.getRelative("execroot"),
      // Arbitrary install base hash.
      /*installMD5=*/ "83bc4458738962b9b77480bac76164a9");
}
/**
 * (Re)creates {@link #runtimeWrapper}, hooking {@link #finishBuildResult} into build
 * finalization, and then re-applies the default options via {@link #setupOptions}.
 */
protected void createRuntimeWrapper() throws Exception {
  runtimeWrapper =
      new BlazeRuntimeWrapper(
          events,
          serverDirectories,
          directories,
          binTools,
          getRuntimeBuilder().setEventBusExceptionHandler(subscriberException)) {
        @Override
        protected void finalizeBuildResult(BuildResult result) {
          finishBuildResult(result);
        }
      };
  setupOptions();
}
/** Hook for subclasses that need to run code before any of this class's {@code @Before} logic. */
protected void runPriorToBeforeMethods() throws Exception {
  // Allows tests such as SkyframeIntegrationInvalidationTest to execute code before all @Before
  // methods are being run.
}

/**
 * Fails the test if an event-bus subscriber threw during the build, then resets remote logging
 * and deletes the test tree.
 */
@After
public final void cleanUp() throws Exception {
  if (subscriberException.getException() != null) {
    // Unchecked exceptions propagate as-is; checked ones are wrapped in RuntimeException.
    throwIfUnchecked(subscriberException.getException());
    throw new RuntimeException(subscriberException.getException());
  }
  LoggingUtil.installRemoteLoggerForTesting(null);
  testRoot.deleteTreesBelow(); // (comment out during debugging)
}
/**
 * A helper class that can be used to record exceptions that occur on the event bus, by passing an
 * instance of it to BlazeRuntime#setEventBusExceptionHandler.
 */
@Ignore
public static final class RecordingExceptionHandler implements SubscriberExceptionHandler {
  private Throwable exception;

  @Override
  public void handleException(Throwable exception, SubscriberExceptionContext context) {
    System.err.println("subscriber exception: ");
    exception.printStackTrace();
    // Only the first exception is recorded; later ones are printed but dropped.
    if (this.exception == null) {
      this.exception = exception;
    }
  }

  /** Returns the first recorded exception, or null if none occurred. */
  public Throwable getException() {
    return exception;
  }
}
/**
 * Returns the relative path (from {@code testRoot}) to the desired workspace. This method may be
 * called in {@link #createFilesAndMocks}, so overrides this method should not use any variables
 * that may not have been initialized yet.
 */
protected PathFragment getDesiredWorkspaceRelative() {
  return PathFragment.create(TestConstants.WORKSPACE_NAME);
}

/** Execution mode for this test; subclasses may override. */
protected InternalTestExecutionMode getInternalTestExecutionMode() {
  return InternalTestExecutionMode.NORMAL;
}

/**
 * Called in #setUp before creating the workspace directory. Subclasses should override this
 * if they want a non-standard filesystem setup, e.g. introduce symlinked directories.
 */
protected void beforeCreatingWorkspace(@SuppressWarnings("unused") Path workspace)
    throws Exception {}

/** Hook invoked when a build result is finalized; see {@link #createRuntimeWrapper}. */
protected void finishBuildResult(@SuppressWarnings("unused") BuildResult result) {}

/** Whether the test runs against the real (native) file system. */
protected boolean realFileSystem() {
  return true;
}

/** Creates the file system under test; defaults to the native file system. */
protected FileSystem createFileSystem() throws Exception {
  return FileSystems.getNativeFileSystem(getDigestHashFunction());
}

protected DigestHashFunction getDigestHashFunction() {
  return DigestHashFunction.SHA256;
}

/** Returns the root directory under which all per-test files are created. */
protected Path createTestRoot(FileSystem fileSystem) {
  return fileSystem.getPath(TestUtils.tmpDir());
}

// This is only here to support HaskellNonIntegrationTest. You should not call or override this
// method.
protected void setupMockTools() throws IOException {
  // (Almost) every integration test calls BuildView.doLoadingPhase, which loads the default
  // crosstool, etc. So we create these packages here.
  AnalysisMock.get().setupMockClient(mockToolsConfig);
}

protected FileSystem getFileSystem() {
  return fileSystem;
}
/** Returns a module that installs a dummy workspace-status action factory and context. */
protected BlazeModule getBuildInfoModule() {
  return new BlazeModule() {
    @Override
    public void workspaceInit(
        BlazeRuntime runtime, BlazeDirectories directories, WorkspaceBuilder builder) {
      builder.setWorkspaceStatusActionFactory(
          new AnalysisTestUtil.DummyWorkspaceStatusActionFactory());
    }

    @Override
    public void registerActionContexts(
        ModuleActionContextRegistry.Builder registryBuilder,
        CommandEnvironment env,
        BuildRequest buildRequest) {
      registryBuilder.register(
          WorkspaceStatusAction.Context.class, new DummyWorkspaceStatusActionContext());
    }
  };
}

// There should be one getSpawnModule at a time, as we lack infrastructure to decide from
// which Module to take the SpawnActionContext for specific actions.
protected BlazeModule getSpawnModule() {
  return new StandaloneModule();
}

/** Gets a module containing rules (by default, using the TestRuleClassProvider) */
protected BlazeModule getRulesModule() {
  return TestRuleModule.getModule();
}

/** Gets a module to set up the strategies. */
protected BlazeModule getStrategyModule() {
  return TestStrategyModule.getModule();
}

/** Returns a module that disables resolved-file substitution for the WORKSPACE file. */
private static BlazeModule getNoResolvedFileModule() {
  return new BlazeModule() {
    @Override
    public ImmutableList<Injected> getPrecomputedValues() {
      return ImmutableList.of(
          PrecomputedValue.injected(
              RepositoryDelegatorFunction.RESOLVED_FILE_INSTEAD_OF_WORKSPACE, Optional.empty()));
    }
  };
}

/**
 * Gets a module that returns a connectivity status.
 *
 * @return a Blaze module that implements {@link ConnectivityStatusProvider}
 */
protected BlazeModule getConnectivityModule() {
  return new NoOpConnectivityModule();
}
/**
 * Builds the {@link BlazeRuntime.Builder} with the standard set of test modules. Subclasses can
 * override the individual get*Module() hooks to swap modules in or out.
 */
protected BlazeRuntime.Builder getRuntimeBuilder() throws Exception {
  OptionsParser startupOptionsParser =
      OptionsParser.builder().optionsClasses(getStartupOptionClasses()).build();
  startupOptionsParser.parse(getStartupOptions());
  BlazeModule connectivityModule = getConnectivityModule();
  // Enforced here because the builder itself does not check this contract.
  checkState(
      connectivityModule instanceof ConnectivityStatusProvider,
      "Module returned by getConnectivityModule() does not implement ConnectivityStatusProvider");
  return new BlazeRuntime.Builder()
      .setFileSystem(fileSystem)
      .setProductName(TestConstants.PRODUCT_NAME)
      .setStartupOptionsProvider(startupOptionsParser)
      .addBlazeModule(connectivityModule)
      .addBlazeModule(getNoResolvedFileModule())
      .addBlazeModule(getSpawnModule())
      .addBlazeModule(new IncludeScanningModule())
      .addBlazeModule(getBuildInfoModule())
      .addBlazeModule(getRulesModule())
      .addBlazeModule(getStrategyModule());
}

/** Startup options passed to the startup options parser; empty by default. */
protected List<String> getStartupOptions() {
  return ImmutableList.of();
}

protected ImmutableList<Class<? extends OptionsBase>> getStartupOptionClasses() {
  return ImmutableList.of(BlazeServerStartupOptions.class);
}
/** Resets and installs the default command-line options used by every test build. */
protected void setupOptions() throws Exception {
  runtimeWrapper.resetOptions();
  runtimeWrapper.addOptions(
      // Set visibility to public so that test cases don't have to bother
      // with visibility declarations
      "--default_visibility=public",
      // Don't show progress messages unless we need to, to keep the noise down.
      "--noshow_progress",
      // Don't use ijars, because we don't have the executable in these tests
      "--nouse_ijars");
  runtimeWrapper.addOptions("--experimental_extended_sanity_checks");
  runtimeWrapper.addOptions(TestConstants.PRODUCT_SPECIFIC_FLAGS);
  // TODO(rosica): Remove this once g3 is migrated.
  runtimeWrapper.addOptions("--noincompatible_use_specific_tool_files");
  // TODO(rosica): Remove this once g3 is migrated.
  runtimeWrapper.addOptions("--noincompatible_make_thinlto_command_lines_standalone");
}

/** Clears all options previously added, including the defaults from {@link #setupOptions}. */
protected void resetOptions() {
  runtimeWrapper.resetOptions();
}

/** Adds command-line options for subsequent builds. */
protected void addOptions(String... args) {
  runtimeWrapper.addOptions(args);
}

protected OptionsParser createOptionsParser() {
  return runtimeWrapper.createOptionsParser();
}
/**
 * Returns the {@link Action} that generates the given artifact, or null if the action graph has
 * no generating action for it. Fails if the generating action is not a proper {@link Action}.
 */
protected Action getGeneratingAction(Artifact artifact) {
  ActionAnalysisMetadata metadata = getActionGraph().getGeneratingAction(artifact);
  if (metadata == null) {
    return null;
  }
  checkState(
      metadata instanceof Action, "%s is not a proper Action object", metadata.prettyPrint());
  return (Action) metadata;
}
/**
 * Returns the path to the executable that label "target" identifies.
 *
 * <p>Assumes that the specified target is executable, i.e. defines getExecutable; use {@link
 * #getArtifacts} instead if this is not the case.
 *
 * @param target the label of the target whose executable location is requested.
 */
protected Path getExecutableLocation(String target)
    throws LabelSyntaxException, NoSuchPackageException, NoSuchTargetException,
    InterruptedException, TransitionException, InvalidConfigurationException {
  return getExecutable(getConfiguredTarget(target)).getPath();
}

/**
 * Given a label (which has typically, but not necessarily, just been built), returns the
 * collection of files that it produces.
 *
 * @param target the label of the target whose artifacts are requested.
 */
protected Iterable<Artifact> getArtifacts(String target)
    throws LabelSyntaxException, NoSuchPackageException, NoSuchTargetException,
    InterruptedException, TransitionException, InvalidConfigurationException {
  return getFilesToBuild(getConfiguredTarget(target)).toList();
}

/**
 * Given a label (which has typically, but not necessarily, just been built), returns the
 * configured target for it using the request configuration.
 *
 * @param target the label of the requested target.
 */
protected ConfiguredTarget getConfiguredTarget(String target)
    throws LabelSyntaxException, NoSuchPackageException, NoSuchTargetException,
    InterruptedException, TransitionException, InvalidConfigurationException {
  // Look up the target first (result discarded); presumably so that package/target
  // errors surface as the declared exceptions — TODO(review): confirm.
  getPackageManager()
      .getTarget(events.reporter(), Label.parseAbsolute(target, ImmutableMap.of()));
  return getSkyframeExecutor()
      .getConfiguredTargetForTesting(events.reporter(), label(target), getTargetConfiguration());
}

/** Returns the configured target for the given label in the given configuration. */
protected ConfiguredTarget getConfiguredTarget(
    ExtendedEventHandler eventHandler, Label label, BuildConfiguration config)
    throws TransitionException, InvalidConfigurationException, InterruptedException {
  return getSkyframeExecutor().getConfiguredTargetForTesting(eventHandler, label, config);
}
/**
 * Gets all the already computed configured targets.
 */
protected Iterable<ConfiguredTarget> getAllConfiguredTargets() {
  return SkyframeExecutorTestUtils.getAllExistingConfiguredTargets(getSkyframeExecutor());
}

/** Gets an existing configured target; fails the test (never returns null) if it is absent. */
protected ConfiguredTarget getExistingConfiguredTarget(String target)
    throws InterruptedException, LabelSyntaxException {
  ConfiguredTarget existingConfiguredTarget =
      SkyframeExecutorTestUtils.getExistingConfiguredTarget(
          getSkyframeExecutor(), label(target), getTargetConfiguration());
  // Includes the label in the failure message for easier debugging.
  assertWithMessage(target).that(existingConfiguredTarget).isNotNull();
  return existingConfiguredTarget;
}

protected BuildConfigurationCollection getConfigurationCollection() {
  return runtimeWrapper.getConfigurationCollection();
}
/**
 * Returns the target configuration for the most recent build, as created in Blaze's master
 * configuration creation phase.
 *
 * <p>Tries to find the single configuration shared by all top-level targets of the last
 * invocation; if they used several different configurations, or none of them carried one, falls
 * back to the base top-level configuration.
 */
protected BuildConfiguration getTargetConfiguration() {
  BuildConfiguration baseConfiguration =
      Iterables.getOnlyElement(getConfigurationCollection().getTargetConfigurations());
  BuildResult result = getResult();
  if (result == null) {
    // No build has run yet; only the base configuration is available.
    return baseConfiguration;
  }
  Set<BuildConfiguration> topLevelConfigurations =
      result.getActualTargets().stream()
          .map(this::getConfiguration)
          .filter(config -> config != null)
          .collect(toImmutableSet());
  return topLevelConfigurations.size() == 1
      ? Iterables.getOnlyElement(topLevelConfigurations)
      : baseConfiguration;
}
protected BuildConfiguration getHostConfiguration() {
  return getConfigurationCollection().getHostConfiguration();
}

/** Returns the top-level artifact context of the most recent build request. */
protected TopLevelArtifactContext getTopLevelArtifactContext() {
  return getRequest().getTopLevelArtifactContext();
}
/**
 * Convenience wrapper around buildTool.syncPackageCache() and buildTool.build() that creates and
 * executes a BuildRequest. Returns the BuildResult on success (the BuildRequest is subsequently
 * accessible via {@link #getRequest}, even in case of abnormal termination). Also redirects the
 * output from the reporter's event handler to go to this.outErr during the build, and redirects
 * System.out/System.err to go via the reporter (and hence to this.outErr) during the build.
 */
public BuildResult buildTarget(String... targets) throws Exception {
  events.setOutErr(this.outErr);
  runtimeWrapper.executeBuild(Arrays.asList(targets));
  return runtimeWrapper.getLastResult();
}

/**
 * Creates a BuildRequest for either blaze build or blaze analyze, using the
 * currently-installed request options.
 *
 * @param commandName blaze build or analyze command
 * @param targets the targets to be built
 */
protected BuildRequest createNewRequest(String commandName, String... targets) throws Exception {
  runtimeWrapper.initializeOptionsParser();
  return runtimeWrapper.createRequest(commandName, Arrays.asList(targets));
}

/**
 * Utility function: parse a string as a label.
 */
protected static Label label(String labelString) throws LabelSyntaxException {
  return Label.parseAbsolute(labelString, ImmutableMap.of());
}
/** Runs the given executable artifact with an inherited environment; returns captured stdout. */
protected String run(Artifact executable, String... arguments) throws Exception {
  Map<String, String> environment = null; // null = inherit; also disambiguates the overload
  return run(executable.getPath(), null, environment, arguments);
}

/**
 * This runs an executable using the executor instance configured for
 * this test.
 */
protected String run(Path executable, String... arguments) throws Exception {
  Map<String, String> environment = null; // null = inherit; also disambiguates the overload
  return run(executable, null, environment, arguments);
}

protected String run(Path executable, Path workingDirectory, String... arguments)
    throws ExecException, InterruptedException {
  return run(executable, workingDirectory, null, arguments);
}

/**
 * Runs the executable and returns its captured stdout (Latin-1 decoded). On failure, rethrows as
 * an {@link IntegrationTestExecException} whose message embeds the captured stdout/stderr.
 */
protected String run(
    Path executable, Path workingDirectory, Map<String, String> environment, String... arguments)
    throws ExecException, InterruptedException {
  RecordingOutErr outErr = new RecordingOutErr();
  try {
    run(executable, workingDirectory, outErr, environment, arguments);
  } catch (ExecException e) {
    throw new IntegrationTestExecException(
        "failed to execute '"
            + executable.getPathString()
            + "'\n----- captured stdout:\n"
            + outErr.outAsLatin1()
            + "\n----- captured stderr:"
            + outErr.errAsLatin1()
            + "\n----- Reason",
        e.getCause());
  }
  return outErr.outAsLatin1();
}

protected void run(Path executable, OutErr outErr, String... arguments) throws Exception {
  run(executable, null, outErr, null, arguments);
}
/**
 * Executes the program. The working directory defaults to the workspace and the environment to
 * the target configuration's local shell environment; all captured output is copied into
 * {@code outErr}, even if execution throws.
 */
private void run(
    Path executable,
    Path workingDirectory,
    OutErr outErr,
    Map<String, String> environment,
    String... arguments)
    throws ExecException, InterruptedException {
  if (workingDirectory == null) {
    workingDirectory = directories.getWorkspace();
  }
  List<String> argv = Lists.newArrayList(arguments);
  argv.add(0, executable.toString());
  Map<String, String> env =
      (environment != null ? environment : getTargetConfiguration().getLocalShellEnvironment());
  TestFileOutErr testOutErr = new TestFileOutErr();
  try {
    execute(workingDirectory, env, argv, testOutErr, /* verboseFailures= */ false);
  } finally {
    // Always forward whatever was captured, even when execute() threw.
    testOutErr.dumpOutAsLatin1(outErr.getOutputStream());
    testOutErr.dumpErrAsLatin1(outErr.getErrorStream());
  }
}
/**
 * Writes a number of lines of text to a source file using Latin-1 encoding.
 *
 * @param relativePath the path relative to the workspace root.
 * @param lines the lines of text to write to the file.
 * @return the path of the created file.
 * @throws IOException if the file could not be written.
 */
public Path write(String relativePath, String... lines) throws IOException {
  Path path = getWorkspace().getRelative(relativePath);
  return writeAbsolute(path, lines);
}

/**
 * Same as {@link #write}, but with an absolute path.
 */
protected Path writeAbsolute(Path path, String... lines) throws IOException {
  FileSystemUtils.writeIsoLatin1(path, lines);
  return path;
}

/**
 * Creates folders on the path to {@code relativeLinkPath} and a symlink to {@code target} at
 * {@code relativeLinkPath} (equivalent to {@code ln -s <target> <relativeLinkPath>}).
 */
protected void createSymlink(String target, String relativeLinkPath) throws IOException {
  Path path = getWorkspace().getRelative(relativeLinkPath);
  path.getParentDirectory().createDirectoryAndParents();
  path.createSymbolicLink(PathFragment.create(target));
}
/**
 * The TimestampGranularityMonitor operates on the files created by the
 * request and thus does not help here. Calling this method ensures that files
 * we modify as part of the test environment are considered as changed.
 */
protected static void waitForTimestampGranularity() throws Exception {
  // Ext4 has a nanosecond granularity. Empirically, tmpfs supports ~5ms increments on
  // Ubuntu Trusty.
  Thread.sleep(10 /*ms*/);
}
/**
 * Performs a local direct spawn execution given spawn information broken out into individual
 * arguments. Directs standard out/err to {@code outErr}.
 *
 * @param workingDirectory the directory from which to execute the subprocess
 * @param environment the environment map to provide to the subprocess. If null, the environment
 *     is inherited from the parent process.
 * @param argv the argument vector including the command itself
 * @param outErr the out+err stream pair to receive stdout and stderr from the subprocess
 * @param verboseFailures whether to include extra detail in the failure description
 * @throws ExecException if any kind of abnormal termination or command exception occurs
 */
public static void execute(
    Path workingDirectory,
    Map<String, String> environment,
    List<String> argv,
    FileOutErr outErr,
    boolean verboseFailures)
    throws ExecException, InterruptedException {
  Command command =
      new CommandBuilder()
          .addArgs(argv)
          .setEnv(environment)
          .setWorkingDir(workingDirectory)
          .build();
  try {
    command.execute(outErr.getOutputStream(), outErr.getErrorStream());
  } catch (AbnormalTerminationException e) { // non-zero exit or signal or I/O problem
    throw toExecException(verboseFailures, e);
  } catch (CommandException e) {
    throw toExecException(verboseFailures, e);
  }
}

/**
 * Wraps a {@link CommandException} in an {@link IntegrationTestExecException} whose message is
 * the human-readable command failure description. Shared by both catch clauses above, which
 * previously duplicated this code verbatim.
 */
private static IntegrationTestExecException toExecException(
    boolean verboseFailures, CommandException e) {
  IntegrationTestExecException wrapped =
      new IntegrationTestExecException(CommandUtils.describeCommandFailure(verboseFailures, e));
  // We don't pass cause=e to the ExecException constructor since we don't want it to
  // contribute to the exception message again; it's already in describeCommandFailure().
  wrapped.initCause(e);
  return wrapped;
}
/**
 * Returns the content of the given artifact's file as a Latin-1-decoded string.
 *
 * <p>NOTE(review): the outer {@code new String(...)} specifies no charset; if
 * readContentAsLatin1 returns bytes, this decodes with the platform default charset rather than
 * ISO-8859-1 — verify against FileSystemUtils and pass an explicit charset if so.
 */
protected String readContentAsLatin1String(Artifact artifact) throws IOException {
  return new String(FileSystemUtils.readContentAsLatin1(artifact.getPath()));
}

/**
 * Given a collection of Artifacts, returns a corresponding set of strings of the form "<root>
 * <relpath>", such as "bin x/libx.a". Such strings make assertions easier to write.
 *
 * <p>The returned set preserves the order of the input.
 */
protected Set<String> artifactsToStrings(NestedSet<Artifact> artifacts) {
  return AnalysisTestUtil.artifactsToStrings(getConfigurationCollection(), artifacts.toList());
}
/** Returns a fresh test-utility view over the current action graph. */
protected ActionsTestUtil actionsTestUtil() {
  return new ActionsTestUtil(getActionGraph());
}

/** Returns the executable artifact of the given target's FilesToRunProvider. */
protected Artifact getExecutable(TransitiveInfoCollection target) {
  return target.getProvider(FilesToRunProvider.class).getExecutable();
}

/** Returns the set of files the given target builds, from its FileProvider. */
protected NestedSet<Artifact> getFilesToBuild(TransitiveInfoCollection target) {
  return target.getProvider(FileProvider.class).getFilesToBuild();
}

/** Resolves the configuration the given configured target was built in. */
protected final BuildConfiguration getConfiguration(ConfiguredTarget ct) {
  return getSkyframeExecutor()
      .getConfiguration(NullEventHandler.INSTANCE, ct.getConfigurationKey());
}
/**
 * Returns the BuildRequest of the last call to buildTarget().
 */
protected BuildRequest getRequest() {
  return runtimeWrapper.getLastRequest();
}

/**
 * Returns the BuildResult of the last call to buildTarget().
 */
protected BuildResult getResult() {
  return runtimeWrapper.getLastResult();
}

/**
 * Returns the {@link BlazeRuntime} in use.
 */
protected BlazeRuntime getRuntime() {
  return runtimeWrapper.getRuntime();
}

protected BlazeWorkspace getBlazeWorkspace() {
  return runtimeWrapper.getRuntime().getWorkspace();
}

/** Returns the configured target together with its underlying target data. */
protected ConfiguredTargetAndData getConfiguredTargetAndTarget(
    ExtendedEventHandler eventHandler, Label label, BuildConfiguration config)
    throws TransitionException, InvalidConfigurationException, InterruptedException {
  return getSkyframeExecutor().getConfiguredTargetAndDataForTesting(eventHandler, label, config);
}

protected ActionGraph getActionGraph() {
  return getSkyframeExecutor().getActionGraph(events.reporter());
}

protected CommandEnvironment getCommandEnvironment() {
  return runtimeWrapper.getCommandEnvironment();
}

public SkyframeExecutor getSkyframeExecutor() {
  return runtimeWrapper.getSkyframeExecutor();
}

protected PackageManager getPackageManager() {
  return getSkyframeExecutor().getPackageManager();
}

protected Path getOutputBase() {
  return outputBase;
}

protected Path getWorkspace() {
  return workspace;
}
/**
 * Assertion-checks that the expected error was reported.
 */
protected void assertContainsError(String expectedError) {
  // Substring match against every reported error event.
  for (Event error : events.errors()) {
    if (error.getMessage().contains(expectedError)) {
      return;
    }
  }
  fail("didn't find expected error: \"" + expectedError + "\"");
}
}
| |
package app;
import java.math.BigDecimal;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Scanner;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;
import app.entities.Address;
import app.entities.City;
import app.entities.Employee;
import app.entities.EmployeeDto;
import app.entities.EmployeeNamesDto;
import app.entities.EmployeeNamesInterface;
import app.entities.EmployeeWithManagerDto;
import app.entities.ManagedEmployeeDto;
import app.entities.ManagerDto;
import app.services.api.EmployeeService;
import app.utils.dto.ConverterDTO;
@Component
public class ConsoleRunner implements CommandLineRunner{
// NOTE(review): SimpleDateFormat is not thread-safe and this instance is built with the default
// locale-dependent pattern; it is not used in the visible code — verify usages and consider
// java.time.format.DateTimeFormatter instead.
private SimpleDateFormat sdf = new SimpleDateFormat();
// Service backing all console menu commands; injected via the constructor.
private EmployeeService employeeService;
/** Constructor injection of the employee service used by the console menu. */
@Autowired
public ConsoleRunner(EmployeeService employeeService) {
  this.employeeService = employeeService;
}
@Override
public void run(String... args) throws Exception {
List<Employee> employees;
List<ManagerDto> managersDto;
List<EmployeeWithManagerDto> employeeWithManagerDtos;
String command;
Long id;
Employee employee;
Employee employeeFromDto;
EmployeeDto employeeDto;
Scanner scanner = new Scanner(System.in);
while(true){
printMenu();
command = scanner.nextLine();
if (command.equals("e")) {
break;
}
switch (command) {
case "0":
initializeDatabase();
break;
case "1":
System.out.println("Enter Employee Id:");
id = Long.parseLong(scanner.nextLine());
employee = employeeService.findById(id);
System.out.println("Employee Data Before Convert:");
System.out.println(employee.toString());
employeeDto = ConverterDTO.convert(employee, EmployeeDto.class);
System.out.println("EmployeeDto Data:");
System.out.println(employeeDto.toString());
employeeFromDto = ConverterDTO.convert(employeeDto, Employee.class);
System.out.println("Employee Data From Dto:");
System.out.println(employeeFromDto.toString());
System.out.println("Try to save new employee, created from the Dto'? (y/n)");
command = scanner.nextLine().toLowerCase();
if (command.equals("y")) {
employeeService.save(employeeFromDto);
}
break;
case "2":
employees = employeeService.findAll();
managersDto = new ArrayList<>();
for (Employee tempEmployee : employees) {
if (tempEmployee.getManagedEmployees() != null
&& tempEmployee.getManagedEmployees().size() > 0) {
managersDto.add(ConverterDTO.convert(tempEmployee, ManagerDto.class));
}
}
for (ManagerDto managerDto : managersDto) {
System.out.print(managerDto);
for (ManagedEmployeeDto managedEmployeeDto : managerDto.getManagedEmployees()) {
System.out.print(" - " + managedEmployeeDto);
}
}
break;
case "3":
employees = employeeService.findAll();
employeeWithManagerDtos = ConverterDTO.convertToEmployeeWithManager(employees, EmployeeWithManagerDto.class);
for (EmployeeWithManagerDto employeeWithManagerDto : employeeWithManagerDtos) {
System.out.print(employeeWithManagerDto);
}
break;
case "4":
List<EmployeeNamesInterface> employeeNames = employeeService.findAllEmployeeNames();
List<EmployeeNamesDto> employeeNamesDtos = ConverterDTO.convert(employeeNames, EmployeeNamesDto.class);
for (EmployeeNamesDto e : employeeNamesDtos) {
System.out.print(e);
}
break;
default:
break;
}
}
scanner.close();
}
private void printMenu() {
System.out.println("e = Exit");
System.out.println("0 = Initialize Database");
System.out.println("1 = Create EmployeeDto From Employee and again in Employee (check data trace)");
System.out.println("2 = Print ManagerDto and managed employeeDto list");
System.out.println("3 = Print employeeWithManagerDto List");
System.out.println("4 = Fetch names of employees and convert them in DTOs");
}
public void initializeDatabase() throws ParseException {
sdf.applyPattern("dd MM yyyy");
City city1 = new City("Sofia");
City city2 = new City("Varna");
City city3 = new City("Lom");
Address address1 = new Address("1 Sofiiska Str.", city1);
Address address2 = new Address("1 Varnenska Str.", city2);
Address address3 = new Address("1 Lomska Str.", city3);
Address address4 = new Address("2 Varnenska Str.", city2);
Address address5 = new Address("2 Sofiiska Str.", city1);
Address address6 = new Address("2 Lomska Str.", city3);
Employee employee1 = new Employee("Gosho", "Goshansky", new BigDecimal("1024.24"), sdf.parse("01 01 1981"), address1);
Employee employee2 = new Employee("Pesho", "Peshansky", new BigDecimal("500.24"), sdf.parse("11 02 1982"), address2);
Employee employee3 = new Employee("Tosho", "Toshansky", new BigDecimal("300.24"), sdf.parse("21 03 1983"), address3);
Employee employee4 = new Employee("Goshka", "Goshanska", new BigDecimal("1023.24"), sdf.parse("01 01 1984"), address1);
Employee employee5 = new Employee("Pesha", "Peshanska", new BigDecimal("508.24"), sdf.parse("11 02 1985"), address2);
Employee employee6 = new Employee("Rasho", "Rashovsky", new BigDecimal("558.74"), sdf.parse("11 02 1995"), address4);
Employee employee7 = new Employee("Gesho", "Geshansky", new BigDecimal("548.24"), sdf.parse("11 02 1986"), address5);
Employee employee8 = new Employee("Masha", "Mashanska", new BigDecimal("638.84"), sdf.parse("19 03 1985"), address6);
Employee employee9 = new Employee("Chasha", "Chashanska", new BigDecimal("908.25"), sdf.parse("21 11 1996"), address6);
Employee employee10 = new Employee("Vesha", "Geshanska", new BigDecimal("708.44"), sdf.parse("11 02 1985"), address5);
employee2.setManager(employee1);
employee3.setManager(employee1);
employee4.setManager(employee2);
employee5.setManager(employee2);
employee6.setManager(employee3);
employee7.setManager(employee4);
employee8.setManager(employee2);
employee9.setManager(employee2);
employee10.setManager(employee1);
Set<Employee> managedEmployees1 = new HashSet<>();
managedEmployees1.add(employee2);
managedEmployees1.add(employee3);
managedEmployees1.add(employee10);
employee1.setManagedEmployees(managedEmployees1);
employee2.addManagedEmployee(employee4);
employee2.addManagedEmployee(employee5);
employee2.addManagedEmployee(employee8);
employee2.addManagedEmployee(employee9);
employee3.addManagedEmployee(employee6);
employee4.addManagedEmployee(employee7);
List<Employee> employees = new ArrayList<>();
employees.add(employee1);
employees.add(employee2);
employees.add(employee3);
employees.add(employee4);
employees.add(employee5);
employees.add(employee6);
employees.add(employee7);
employees.add(employee8);
employees.add(employee9);
employees.add(employee10);
employeeService.save(employees);
}
}
| |
/*
* Copyright 2015, The Querydsl Team (http://www.querydsl.com/team)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.querydsl.sql.dml;
import java.sql.*;
import java.util.*;
import java.util.logging.Logger;
import java.util.function.Supplier;
import org.jetbrains.annotations.Nullable;
import com.querydsl.core.util.CollectionUtils;
import com.querydsl.core.*;
import com.querydsl.core.QueryFlag.Position;
import com.querydsl.core.dml.StoreClause;
import com.querydsl.core.types.*;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.util.ResultSetAdapter;
import com.querydsl.sql.*;
import com.querydsl.sql.types.Null;
/**
 * {@code SQLMergeClause} defines a MERGE INTO clause
 *
 * @author tiwe
 *
 */
public class SQLMergeClause extends AbstractSQLClause<SQLMergeClause> implements StoreClause<SQLMergeClause> {
protected static final Logger logger = Logger.getLogger(SQLMergeClause.class.getName());
// columns/values are parallel lists: values.get(i) is the value bound to columns.get(i)
protected final List<Path<?>> columns = new ArrayList<Path<?>>();
// target table of the merge
protected final RelationalPath<?> entity;
protected final QueryMetadata metadata = new DefaultQueryMetadata();
// explicit merge keys; when empty, the entity's primary key is used (see getKeys())
protected final List<Path<?>> keys = new ArrayList<Path<?>>();
@Nullable
protected SubQueryExpression<?> subQuery;
// accumulated batch items; populated via addBatch()
protected final List<SQLMergeBatch> batches = new ArrayList<SQLMergeBatch>();
protected final List<Expression<?>> values = new ArrayList<Expression<?>>();
// last rendered SQL and constants, kept for error reporting in translate(...)
protected transient String queryString;
protected transient List<Object> constants;
public SQLMergeClause(Connection connection, SQLTemplates templates, RelationalPath<?> entity) {
this(connection, new Configuration(templates), entity);
}
public SQLMergeClause(Connection connection, Configuration configuration, RelationalPath<?> entity) {
super(configuration, connection);
this.entity = entity;
metadata.addJoin(JoinType.DEFAULT, entity);
}
public SQLMergeClause(Supplier<Connection> connection, Configuration configuration, RelationalPath<?> entity) {
super(configuration, connection);
this.entity = entity;
metadata.addJoin(JoinType.DEFAULT, entity);
}
/**
 * Add the given String literal at the given position as a query flag
 *
 * @param position position
 * @param flag query flag
 * @return the current object
 */
public SQLMergeClause addFlag(Position position, String flag) {
metadata.addFlag(new QueryFlag(position, flag));
return this;
}
/**
 * Add the given Expression at the given position as a query flag
 *
 * @param position position
 * @param flag query flag
 * @return the current object
 */
public SQLMergeClause addFlag(Position position, Expression<?> flag) {
metadata.addFlag(new QueryFlag(position, flag));
return this;
}
/**
 * Returns the merge keys: the explicitly set keys if any, otherwise the
 * entity's primary key columns.
 *
 * @throws IllegalStateException if no keys were set and the entity has no primary key
 */
protected List<? extends Path<?>> getKeys() {
if (!keys.isEmpty()) {
return keys;
} else if (entity.getPrimaryKey() != null) {
return entity.getPrimaryKey().getLocalColumns();
} else {
throw new IllegalStateException("No keys were defined, invoke keys(..) to add keys");
}
}
/**
 * Add the current state of bindings as a batch item
 *
 * @return the current object
 */
public SQLMergeClause addBatch() {
if (!configuration.getTemplates().isNativeMerge()) {
throw new IllegalStateException("batch only supported for databases that support native merge");
}
batches.add(new SQLMergeBatch(keys, columns, values, subQuery));
// reset the current bindings so the next batch item starts clean
columns.clear();
values.clear();
keys.clear();
subQuery = null;
return this;
}
// Clears all state, including accumulated batches.
@Override
public void clear() {
batches.clear();
columns.clear();
values.clear();
keys.clear();
subQuery = null;
}
/** Adds the given columns; values are supplied separately via values(...) or select(...). */
public SQLMergeClause columns(Path<?>... columns) {
this.columns.addAll(Arrays.asList(columns));
return this;
}
/**
 * Execute the clause and return the generated key with the type of the given path.
 * If no rows were created, null is returned, otherwise the key of the first row is returned.
 *
 * @param <T>
 * @param path path for key
 * @return generated key
 */
@SuppressWarnings("unchecked")
@Nullable
public <T> T executeWithKey(Path<T> path) {
return executeWithKey((Class<T>) path.getType(), path);
}
/**
 * Execute the clause and return the generated key cast to the given type.
 * If no rows were created, null is returned, otherwise the key of the first row is returned.
 *
 * @param <T>
 * @param type type of key
 * @return generated key
 */
public <T> T executeWithKey(Class<T> type) {
return executeWithKey(type, null);
}
// Executes and reads the first generated key from column 1; null when no key row exists.
protected <T> T executeWithKey(Class<T> type, @Nullable Path<T> path) {
ResultSet rs = executeWithKeys();
try {
if (rs.next()) {
return configuration.get(rs, path, 1, type);
} else {
return null;
}
} catch (SQLException e) {
throw configuration.translate(e);
} finally {
close(rs);
}
}
/**
 * Execute the clause and return the generated key with the type of the given path.
 * If no rows were created, or the referenced column is not a generated key, null is returned.
 * Otherwise, the key of the first row is returned.
 *
 * @param <T>
 * @param path path for key
 * @return generated keys
 */
@SuppressWarnings("unchecked")
public <T> List<T> executeWithKeys(Path<T> path) {
return executeWithKeys((Class<T>) path.getType(), path);
}
/** Execute the clause and return all generated keys cast to the given type. */
public <T> List<T> executeWithKeys(Class<T> type) {
return executeWithKeys(type, null);
}
// Executes and collects all generated keys from column 1 of the key result set.
protected <T> List<T> executeWithKeys(Class<T> type, @Nullable Path<T> path) {
ResultSet rs = null;
try {
rs = executeWithKeys();
List<T> rv = new ArrayList<T>();
while (rs.next()) {
rv.add(configuration.get(rs, path, 1, type));
}
return rv;
} catch (SQLException e) {
throw configuration.translate(e);
} finally {
if (rs != null) {
close(rs);
}
reset();
}
}
/**
 * Execute the clause and return the generated keys as a ResultSet
 *
 * @return result set with generated keys
 */
public ResultSet executeWithKeys() {
context = startContext(connection(), metadata, entity);
try {
if (configuration.getTemplates().isNativeMerge()) {
PreparedStatement stmt = null;
if (batches.isEmpty()) {
stmt = createStatement(true);
listeners.notifyMerge(entity, metadata, keys, columns, values, subQuery);
listeners.preExecute(context);
stmt.executeUpdate();
listeners.executed(context);
} else {
Collection<PreparedStatement> stmts = createStatements(true);
// generated keys can only be read from a single statement
if (stmts != null && stmts.size() > 1) {
throw new IllegalStateException("executeWithKeys called with batch statement and multiple SQL strings");
}
stmt = stmts.iterator().next();
listeners.notifyMerges(entity, metadata, batches);
listeners.preExecute(context);
stmt.executeBatch();
listeners.executed(context);
}
final Statement stmt2 = stmt;
ResultSet rs = stmt.getGeneratedKeys();
// the adapter defers statement cleanup and context teardown until the
// caller closes the returned ResultSet
return new ResultSetAdapter(rs) {
@Override
public void close() throws SQLException {
try {
super.close();
} finally {
stmt2.close();
reset();
endContext(context);
}
}
};
} else {
// composite merge emulation: update when a matching row exists, insert otherwise
if (hasRow()) {
// update
SQLUpdateClause update = new SQLUpdateClause(connection(), configuration, entity);
update.addListener(listeners);
populate(update);
addKeyConditions(update);
reset();
endContext(context);
// updates produce no generated keys
return EmptyResultSet.DEFAULT;
} else {
// insert
SQLInsertClause insert = new SQLInsertClause(connection(), configuration, entity);
insert.addListener(listeners);
populate(insert);
return insert.executeWithKeys();
}
}
} catch (SQLException e) {
onException(context,e);
reset();
endContext(context);
throw configuration.translate(queryString, constants, e);
}
}
// Dispatches to native MERGE or the update/insert emulation, depending on the dialect.
@Override
public long execute() {
if (configuration.getTemplates().isNativeMerge()) {
return executeNativeMerge();
} else {
return executeCompositeMerge();
}
}
// Renders the SQL for this clause (one entry per batch item when batched) without executing it.
@Override
public List<SQLBindings> getSQL() {
if (batches.isEmpty()) {
SQLSerializer serializer = createSerializer();
serializer.serializeMerge(metadata, entity, keys, columns, values, subQuery);
return Collections.singletonList(createBindings(metadata, serializer));
} else {
List<SQLBindings> builder = new ArrayList<>();
for (SQLMergeBatch batch : batches) {
SQLSerializer serializer = createSerializer();
serializer.serializeMerge(metadata, entity, batch.getKeys(), batch.getColumns(), batch.getValues(), batch.getSubQuery());
builder.add(createBindings(metadata, serializer));
}
return CollectionUtils.unmodifiableList(builder);
}
}
/** Returns true if a row matching the key conditions already exists in the target table. */
protected boolean hasRow() {
SQLQuery<?> query = new SQLQuery<Void>(connection(), configuration).from(entity);
for (SQLListener listener : listeners.getListeners()) {
query.addListener(listener);
}
// keep the connection open for the subsequent update/insert
query.addListener(SQLNoCloseListener.DEFAULT);
addKeyConditions(query);
return query.select(Expressions.ONE).fetchFirst() != null;
}
// Adds a where condition for every bound column that is a merge key;
// NULL values become IS NULL predicates rather than equality comparisons.
@SuppressWarnings("unchecked")
protected void addKeyConditions(FilteredClause<?> query) {
List<? extends Path<?>> keys = getKeys();
for (int i = 0; i < columns.size(); i++) {
if (keys.contains(columns.get(i))) {
if (values.get(i) instanceof NullExpression) {
query.where(ExpressionUtils.isNull(columns.get(i)));
} else {
query.where(ExpressionUtils.eq(columns.get(i),(Expression) values.get(i)));
}
}
}
}
// Emulates MERGE for dialects without native support: update if the keyed row
// exists, insert otherwise. NOTE(review): check-then-act is not atomic — a
// concurrent insert between hasRow() and the update/insert can race.
@SuppressWarnings("unchecked")
protected long executeCompositeMerge() {
if (hasRow()) {
// update
SQLUpdateClause update = new SQLUpdateClause(connection(), configuration, entity);
populate(update);
addListeners(update);
addKeyConditions(update);
return update.execute();
} else {
// insert
SQLInsertClause insert = new SQLInsertClause(connection(), configuration, entity);
addListeners(insert);
populate(insert);
return insert.execute();
}
}
/** Copies this clause's listeners onto the given delegate clause. */
protected void addListeners(AbstractSQLClause<?> clause) {
for (SQLListener listener : listeners.getListeners()) {
clause.addListener(listener);
}
}
/** Copies the column/value bindings of this clause onto the given store clause. */
@SuppressWarnings("unchecked")
protected void populate(StoreClause<?> clause) {
for (int i = 0; i < columns.size(); i++) {
clause.set((Path) columns.get(i), (Object) values.get(i));
}
}
/**
 * Creates a single PreparedStatement for this clause. When batches are
 * present, all batch items are assumed to render to the same SQL string
 * and are added via addBatch() (unless literals are inlined).
 *
 * @param withKeys whether the statement should return generated keys
 */
protected PreparedStatement createStatement(boolean withKeys) throws SQLException {
// with inlined literals every item is a distinct SQL string, so JDBC batching is skipped
boolean addBatches = !configuration.getUseLiterals();
listeners.preRender(context);
SQLSerializer serializer = createSerializer();
PreparedStatement stmt = null;
if (batches.isEmpty()) {
serializer.serializeMerge(metadata, entity, keys, columns, values, subQuery);
context.addSQL(createBindings(metadata, serializer));
listeners.rendered(context);
listeners.prePrepare(context);
stmt = prepareStatementAndSetParameters(serializer, withKeys);
context.addPreparedStatement(stmt);
listeners.prepared(context);
} else {
serializer.serializeMerge(metadata, entity,
batches.get(0).getKeys(), batches.get(0).getColumns(),
batches.get(0).getValues(), batches.get(0).getSubQuery());
context.addSQL(createBindings(metadata, serializer));
listeners.rendered(context);
stmt = prepareStatementAndSetParameters(serializer, withKeys);
// add first batch
if (addBatches) {
stmt.addBatch();
}
// add other batches
for (int i = 1; i < batches.size(); i++) {
SQLMergeBatch batch = batches.get(i);
listeners.preRender(context);
serializer = createSerializer();
serializer.serializeMerge(metadata, entity, batch.getKeys(), batch.getColumns(), batch.getValues(), batch.getSubQuery());
context.addSQL(createBindings(metadata, serializer));
listeners.rendered(context);
setParameters(stmt, serializer.getConstants(), serializer.getConstantPaths(), metadata.getParams());
if (addBatches) {
stmt.addBatch();
}
}
}
return stmt;
}
/**
 * Creates one PreparedStatement per distinct SQL string across all batch
 * items; items that render to the same SQL share a statement via addBatch().
 *
 * @param withKeys whether the statements should return generated keys
 */
protected Collection<PreparedStatement> createStatements(boolean withKeys) throws SQLException {
boolean addBatches = !configuration.getUseLiterals();
// keyed by rendered SQL so identical statements are batched together
Map<String, PreparedStatement> stmts = new HashMap<>();
// add first batch
listeners.preRender(context);
SQLSerializer serializer = createSerializer();
serializer.serializeMerge(metadata, entity,
batches.get(0).getKeys(), batches.get(0).getColumns(),
batches.get(0).getValues(), batches.get(0).getSubQuery());
context.addSQL(createBindings(metadata, serializer));
listeners.rendered(context);
PreparedStatement stmt = prepareStatementAndSetParameters(serializer, withKeys);
stmts.put(serializer.toString(), stmt);
if (addBatches) {
stmt.addBatch();
}
// add other batches
for (int i = 1; i < batches.size(); i++) {
SQLMergeBatch batch = batches.get(i);
serializer = createSerializer();
serializer.serializeMerge(metadata, entity,
batch.getKeys(), batch.getColumns(), batch.getValues(), batch.getSubQuery());
stmt = stmts.get(serializer.toString());
if (stmt == null) {
stmt = prepareStatementAndSetParameters(serializer, withKeys);
stmts.put(serializer.toString(), stmt);
} else {
setParameters(stmt, serializer.getConstants(), serializer.getConstantPaths(), metadata.getParams());
}
if (addBatches) {
stmt.addBatch();
}
}
return stmts.values();
}
// Prepares a statement from the serializer's SQL (optionally requesting
// generated keys for the merge key columns) and binds its parameters.
// Also records queryString/constants for exception translation.
protected PreparedStatement prepareStatementAndSetParameters(SQLSerializer serializer,
boolean withKeys) throws SQLException {
listeners.prePrepare(context);
queryString = serializer.toString();
constants = serializer.getConstants();
logQuery(logger, queryString, constants);
PreparedStatement stmt;
if (withKeys) {
String[] target = new String[keys.size()];
for (int i = 0; i < target.length; i++) {
target[i] = ColumnMetadata.getName(getKeys().get(i));
}
stmt = connection().prepareStatement(queryString, target);
} else {
stmt = connection().prepareStatement(queryString);
}
setParameters(stmt, serializer.getConstants(), serializer.getConstantPaths(), metadata.getParams());
context.addPreparedStatement(stmt);
listeners.prepared(context);
return stmt;
}
// Executes a native MERGE statement (single or batched), returning the affected row count.
protected long executeNativeMerge() {
context = startContext(connection(), metadata, entity);
PreparedStatement stmt = null;
Collection<PreparedStatement> stmts = null;
try {
if (batches.isEmpty()) {
stmt = createStatement(false);
listeners.notifyMerge(entity, metadata, keys, columns, values, subQuery);
listeners.preExecute(context);
int rc = stmt.executeUpdate();
listeners.executed(context);
return rc;
} else {
stmts = createStatements(false);
listeners.notifyMerges(entity, metadata, batches);
listeners.preExecute(context);
long rc = executeBatch(stmts);
listeners.executed(context);
return rc;
}
} catch (SQLException e) {
onException(context,e);
throw configuration.translate(queryString, constants, e);
} finally {
if (stmt != null) {
close(stmt);
}
if (stmts != null) {
close(stmts);
}
reset();
endContext(context);
}
}
/**
 * Set the keys to be used in the MERGE clause
 *
 * @param paths keys
 * @return the current object
 */
public SQLMergeClause keys(Path<?>... paths) {
keys.addAll(Arrays.asList(paths));
return this;
}
/** Use the given subquery as the source of the merge instead of column/value bindings. */
public SQLMergeClause select(SubQueryExpression<?> subQuery) {
this.subQuery = subQuery;
return this;
}
// Binds a constant value; null maps to the Null.CONSTANT marker expression.
@Override
public <T> SQLMergeClause set(Path<T> path, @Nullable T value) {
columns.add(path);
if (value != null) {
values.add(ConstantImpl.create(value));
} else {
values.add(Null.CONSTANT);
}
return this;
}
// Binds an expression value for the given column.
@Override
public <T> SQLMergeClause set(Path<T> path, Expression<? extends T> expression) {
columns.add(path);
values.add(expression);
return this;
}
// Binds an explicit NULL for the given column.
@Override
public <T> SQLMergeClause setNull(Path<T> path) {
columns.add(path);
values.add(Null.CONSTANT);
return this;
}
/** Returns the serialized MERGE statement for the current (non-batched) bindings. */
@Override
public String toString() {
SQLSerializer serializer = createSerializer();
serializer.serializeMerge(metadata, entity, keys, columns, values, subQuery);
return serializer.toString();
}
/** Adds values positionally for the previously added columns; nulls become Null.CONSTANT. */
public SQLMergeClause values(Object... v) {
for (Object value : v) {
if (value instanceof Expression<?>) {
values.add((Expression<?>) value);
} else if (value != null) {
values.add(ConstantImpl.create(value));
} else {
values.add(Null.CONSTANT);
}
}
return this;
}
// Empty means nothing bound and nothing batched.
@Override
public boolean isEmpty() {
return values.isEmpty() && batches.isEmpty();
}
@Override
public int getBatchCount() {
return batches.size();
}
}
| |
package microbrowser;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ComponentAdapter;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.logging.Logger;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTabbedPane;
import microbrowser.ui.AskQuestionDialog;
import microbrowser.ui.CreatePatternDialog;
import microbrowser.ui.DiscussionDetailsPane;
import microbrowser.ui.DiscussionOverviewByPatternPane;
import microbrowser.ui.DiscussionOverviewBySimilarityPane;
import microbrowser.ui.DiscussionOverviewPane;
import microbrowser.ui.LeaderBoardDialog;
import microbrowser.ui.LegendPanel;
import microbrowser.ui.PatternsDetailsPane;
import microbrowser.util.TraceService;
import prefuse.data.Node;
import prefuse.util.ui.UILib;
public class MicroBrowserApplication extends JPanel {
private static final long serialVersionUID = -2310029621999732548L;
private static Logger logger = Logger.getLogger(MicroBrowserApplication.class.getName());
private static MicroBrowserApplication _instance = null;
private static JFrame _frame = null;
public JMenuBar menubar = new JMenuBar();
public JTabbedPane tabbedPane = new JTabbedPane();
public DiscussionOverviewPane discussionOverviewPane;
public MicroBrowserApplication() {
super();
_instance = this;
Logger.getLogger("prefuse").setLevel(VisualDBConfig.LOGGING_LEVEL_PREFUSE);
Logger.getLogger("microbrowser").setLevel(VisualDBConfig.LOGGING_LEVEL_MICROPROBE);
}
public static void main(String[] argv)
{
TraceService.log(TraceService.EVENT_APPLICATION_START);
// Prompt for options
// 1 - prompt for experiment mode
// Object[] possibilities = {
// VisualDBConstants.EXPERIMENT_MODE_PATTERN_LEADERBOARD,
// VisualDBConstants.EXPERIMENT_MODE_PATTERN_ONLY,
// VisualDBConstants.EXPERIMENT_MODE_LEADERBOARD_ONLY,
// VisualDBConstants.EXPERIMENT_MODE_NO_CONDITION};
Object[] possibilities = {"S01", "S02", "S03", "S04", "S05", "S06", "S07", "S08", "S09", "S10", "S11", "S12", ""};
// initialize the experiment mode
//VisualDBConfig.EXPERIMENT_MODE = (String)JOptionPane.showInputDialog(
VisualDBConfig.EXPERIMENT_SUBJECT = (String)JOptionPane.showInputDialog(
null,
"Select the experiment mode:",
"Experiment Mode Selection",
JOptionPane.PLAIN_MESSAGE,
null,
possibilities,
"ham");
_frame = new JFrame("MicroBrowser - A visual browser of discussion threads");
_frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
final MicroBrowserApplication panel = createVisualization();
//frame.setSize(new Dimension(VisualDBConfig.WINDOW_SIZE_WIDTH, VisualDBConfig.WINDOW_SIZE_HEIGHT));
_frame.setContentPane(panel);
_frame.setJMenuBar(panel.menubar);
_frame.setResizable(true);
_frame.addComponentListener(new ComponentAdapter() {
public void componentResized(java.awt.event.ComponentEvent evt){
JFrame f = (JFrame)evt.getSource();
logger.info("Window size=" + f.getSize());
VisualDBConfig.DISPLAY_SIZE_WIDTH = f.getWidth() > 480 ? f.getWidth() - 480 : 200;
VisualDBConfig.DISPLAY_SIZE_HEIGHT = f.getHeight() > 400 ? f.getHeight() - 175 : 50;
// VisualDBConfig.DISPLAY_SIZE_WIDTH = f.getWidth() > 480 ? f.getWidth() - 500 : 600;
// VisualDBConfig.DISPLAY_SIZE_HEIGHT = f.getHeight() > 400 ? f.getHeight() - 300 : 400;
}
});
_frame.addWindowListener(new WindowAdapter() {
@Override
public void windowClosing(WindowEvent e)
{
e.getWindow().dispose();
TraceService.log(TraceService.EVENT_APPLICATION_STOP);
}
});
_frame.pack();
_frame.setVisible(true);
_frame.setLocationRelativeTo(null);
_frame.setExtendedState(java.awt.Frame.MAXIMIZED_BOTH);
}
public void setupUI() {
this.setLayout(new BorderLayout());
// create center panel
if ( VisualDBConfig.DISPLAY_MODE.equals(VisualDBConstants.DISPLAY_MODE_PATTERN)) {
this.discussionOverviewPane = DiscussionOverviewByPatternPane.demo(this);
} else {
this.discussionOverviewPane = DiscussionOverviewBySimilarityPane.demo(this);
}
tabbedPane.addTab("Overview", this.discussionOverviewPane);
// create main window layout
this.add(BorderLayout.NORTH, UILib.getBox(new Component[] { new LegendPanel()}, true, 0, 10, 25));
this.add(BorderLayout.CENTER, tabbedPane); // place the visualization in the center
JMenu fileMenu = new JMenu("File");
JMenu createMenu = new JMenu("Create");
JMenu viewMenu = new JMenu("View");
JMenuItem fileMenu_exit = new JMenuItem("Exit");
fileMenu_exit.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
System.exit(0);
}
});
fileMenu.add(fileMenu_exit);
JMenuItem askQuestionActionButton = new JMenuItem("New Question");
createMenu.add(askQuestionActionButton);
final AskQuestionDialog askQuestionDialog = new AskQuestionDialog((JFrame) this.getParent(), discussionOverviewPane.m_vis, discussionOverviewPane.m_graph);
askQuestionDialog.addWindowListener(new WindowAdapter() {
@Override
public void windowDeactivated(WindowEvent e) {
TraceService.log(TraceService.EVENT_QUESTION_CREATE_CLOSE);
}
});
askQuestionActionButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
TraceService.log(TraceService.EVENT_QUESTION_CREATE_OPEN);
askQuestionDialog.reset();
askQuestionDialog.setVisible(true);
// reload the graph by reopening the panel
//openDiscussionOverview();
discussionOverviewPane.m_vis.run("filter");
discussionOverviewPane.m_vis.run("updateList");
}
});
JMenuItem createPatternActionButton = new JMenuItem("New Pattern");
final CreatePatternDialog createPatternDialog = new CreatePatternDialog((JFrame) this.getParent(), discussionOverviewPane.m_vis, discussionOverviewPane.m_graph);
createPatternDialog.addWindowListener(new WindowAdapter() {
@Override
public void windowDeactivated(WindowEvent e) {
TraceService.log(TraceService.EVENT_PATTERN_CREATE_CLOSE);
// reload the graph by reopening the panel
//openDiscussionOverview();
}
});
createPatternDialog.addPropertyChangeListener("patternName", new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
System.out.println("complete");
TraceService.log("TESTING");
openDiscussionOverview();
}
});
createPatternActionButton.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent e) {
TraceService.log(TraceService.EVENT_PATTERN_CREATE_OPEN);
createPatternDialog.reset();
createPatternDialog.setVisible(true);
}
});
JMenuItem viewLeaderboardActionButton = new JMenuItem("Leaderboard");
final LeaderBoardDialog leaderBoardDialog = new LeaderBoardDialog((JFrame) this.getParent());
leaderBoardDialog.addWindowListener(new WindowAdapter() {
@Override
public void windowDeactivated(WindowEvent e) {
TraceService.log(TraceService.EVENT_LEADERBOARD_CLOSE);
}
});
viewLeaderboardActionButton.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent e) {
leaderBoardDialog.updateRankings(/*userid*/ 893);
leaderBoardDialog.setVisible(true);
TraceService.log(TraceService.EVENT_LEADERBOARD_OPEN);
}
});
JMenuItem viewRadialLayoutActionButton = new JMenuItem("Discussions by pattern");
viewRadialLayoutActionButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
logger.info("Display discussions by pattern");
VisualDBConfig.DISPLAY_MODE = VisualDBConstants.DISPLAY_MODE_PATTERN;
_instance.discussionOverviewPane = null;
_instance.discussionOverviewPane = DiscussionOverviewByPatternPane.demo(_instance);
tabbedPane.setComponentAt(0, _instance.discussionOverviewPane);
_frame.pack();
}
});
JMenuItem viewSimilarityLayoutActionButton = new JMenuItem("Discussions by similarity");
viewSimilarityLayoutActionButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
logger.info("Display discussions by similarity");
VisualDBConfig.DISPLAY_MODE = VisualDBConstants.DISPLAY_MODE_SIMILARITY;
_instance.discussionOverviewPane = null;
_instance.discussionOverviewPane = DiscussionOverviewBySimilarityPane.demo(_instance);
tabbedPane.setComponentAt(0, _instance.discussionOverviewPane);
_frame.pack();
}
});
// build the menubar
menubar.add(fileMenu);
menubar.add(createMenu);
if ( VisualDBConfig.EXPERIMENT_MODE == VisualDBConstants.EXPERIMENT_MODE_LEADERBOARD_ONLY ||
VisualDBConfig.EXPERIMENT_MODE == VisualDBConstants.EXPERIMENT_MODE_PATTERN_LEADERBOARD) {
viewMenu.add(viewRadialLayoutActionButton);
viewMenu.add(viewSimilarityLayoutActionButton);
viewMenu.add(viewLeaderboardActionButton);
menubar.add(viewMenu);
}
if ( VisualDBConfig.EXPERIMENT_MODE == VisualDBConstants.EXPERIMENT_MODE_PATTERN_ONLY ||
VisualDBConfig.EXPERIMENT_MODE == VisualDBConstants.EXPERIMENT_MODE_PATTERN_LEADERBOARD) {
createMenu.add(createPatternActionButton);
}
return;
}
public void openDiscussionDetails(Node item) {
if ( tabbedPane.getTabCount() > 1 ) {
tabbedPane.remove(1);
}
int type = item.getInt("type");
String event = "";
switch ( type ) {
case VisualDBConstants.NODE_TYPE_DISCUSSION:
event = TraceService.EVENT_DISCUSSION_OPEN;
break;
case VisualDBConstants.NODE_TYPE_PATTERN:
event = TraceService.EVENT_PATTERN_OPEN;
break;
}
TraceService.log(event, item.getString("id"));
if ( VisualDBConstants.NODE_TYPE_PATTERN == type ) {
tabbedPane.addTab(item.getString("title"), PatternsDetailsPane.demo(this,item.getInt("id")));
} else {
tabbedPane.addTab(item.getString("title"), DiscussionDetailsPane.demo(this,item.getInt("id")));
}
tabbedPane.setSelectedIndex(tabbedPane.getTabCount()-1);
return;
}
public void openDiscussionOverview() {
openDiscussionOverview(true);
}
public void openDiscussionOverview(boolean showOverview) {
tabbedPane.remove(0);
if ( VisualDBConfig.DISPLAY_MODE.equals(VisualDBConstants.DISPLAY_MODE_PATTERN)) {
this.discussionOverviewPane = DiscussionOverviewByPatternPane.demo(this);
} else {
this.discussionOverviewPane = DiscussionOverviewBySimilarityPane.demo(this);
}
tabbedPane.insertTab("Overview", null, this.discussionOverviewPane , "", 0);
if ( showOverview ) {
tabbedPane.setSelectedIndex(0);
}
}
/**********************************************
* Initialize the demo so it can be tested
* from stand alone or embedded in Applet
*
* @return VisualDBPanel object with visualization information
*/
public static MicroBrowserApplication createVisualization() {
MicroBrowserApplication visualDBPanel = new MicroBrowserApplication();
visualDBPanel.setupUI();
return visualDBPanel;
}
/**
 * Launches the visualization window: creates a top-level Swing frame,
 * sizes it to fit its contents and makes it visible.
 */
public void run() {
    // The constructor argument becomes the window title.
    JFrame frame = new JFrame("prefuse example");
    // Size the window to its preferred layout.
    frame.pack();
    // Show the window on screen.
    frame.setVisible(true);
}
}
| |
/*
* Copyright (c) 2019 Evolveum and contributors
*
* This work is dual-licensed under the Apache License 2.0
* and European Union Public License. See LICENSE file for details.
*/
package com.evolveum.midpoint.prism.impl.item;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;
import javax.xml.namespace.QName;
import com.evolveum.midpoint.prism.CloneStrategy;
import com.evolveum.midpoint.prism.ConsistencyCheckScope;
import com.evolveum.midpoint.prism.Item;
import com.evolveum.midpoint.prism.ItemDefinition;
import com.evolveum.midpoint.prism.Itemable;
import com.evolveum.midpoint.prism.PartiallyResolvedItem;
import com.evolveum.midpoint.prism.PrismContainerValue;
import com.evolveum.midpoint.prism.PrismContext;
import com.evolveum.midpoint.prism.PrismProperty;
import com.evolveum.midpoint.prism.PrismPropertyDefinition;
import com.evolveum.midpoint.prism.PrismPropertyValue;
import com.evolveum.midpoint.prism.PrismValue;
import com.evolveum.midpoint.prism.Visitor;
import com.evolveum.midpoint.prism.delta.ItemDelta;
import com.evolveum.midpoint.prism.delta.PropertyDelta;
import com.evolveum.midpoint.prism.equivalence.EquivalenceStrategy;
import com.evolveum.midpoint.prism.equivalence.ParameterizedEquivalenceStrategy;
import com.evolveum.midpoint.prism.path.ItemName;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.util.exception.SchemaException;
import org.jetbrains.annotations.NotNull;
/**
 * A delegating {@link PrismProperty} implementation: every operation is forwarded
 * verbatim to a backing "real" property, with two exceptions visible in this class:
 * {@link #getPath()} reports a caller-supplied path instead of the real property's
 * own path, and parent access ({@link #getParent()}/{@link #setParent}) is
 * unsupported.
 *
 * @author semancik
 *
 */
public class DummyPropertyImpl<T> implements PrismProperty<T> {
private static final long serialVersionUID = 1L;
// The path reported by getPath() in place of the real property's path.
@NotNull private final ItemPath path;
// Target of all delegated calls.
private final PrismProperty<T> realProperty;
/**
 * Creates the wrapper.
 *
 * @param realProperty property that receives all delegated calls
 * @param path path this wrapper reports as its own
 */
public DummyPropertyImpl(PrismProperty<T> realProperty, @NotNull ItemPath path) {
super();
this.path = path;
this.realProperty = realProperty;
}
// --- From here on, each method is a straight one-line delegation to realProperty,
// --- unless a comment says otherwise.
public void accept(Visitor visitor) {
realProperty.accept(visitor);
}
public void accept(Visitor visitor, ItemPath path, boolean recursive) {
realProperty.accept(visitor, path, recursive);
}
public PrismPropertyDefinition<T> getDefinition() {
return realProperty.getDefinition();
}
public boolean hasCompleteDefinition() {
return realProperty.hasCompleteDefinition();
}
public void setDefinition(PrismPropertyDefinition<T> definition) {
realProperty.setDefinition(definition);
}
public ItemName getElementName() {
return realProperty.getElementName();
}
public <X> List<PrismPropertyValue<X>> getValues(Class<X> type) {
return realProperty.getValues(type);
}
public PrismPropertyValue<T> getValue() {
return realProperty.getValue();
}
@NotNull
public Collection<T> getRealValues() {
return realProperty.getRealValues();
}
public void setElementName(QName elementName) {
realProperty.setElementName(elementName);
}
public <X> Collection<X> getRealValues(Class<X> type) {
return realProperty.getRealValues(type);
}
public T getAnyRealValue() {
return realProperty.getAnyRealValue();
}
public T getRealValue() {
return realProperty.getRealValue();
}
public <X> X getRealValue(Class<X> type) {
return realProperty.getRealValue(type);
}
public <X> X[] getRealValuesArray(Class<X> type) {
return realProperty.getRealValuesArray(type);
}
public <X> PrismPropertyValue<X> getValue(Class<X> type) {
return realProperty.getValue(type);
}
public void setValue(PrismPropertyValue<T> value) {
realProperty.setValue(value);
}
public String getDisplayName() {
return realProperty.getDisplayName();
}
public void setRealValue(T realValue) {
realProperty.setRealValue(realValue);
}
public void setRealValues(T... realValues) {
realProperty.setRealValues(realValues);
}
public void addValues(Collection<PrismPropertyValue<T>> pValuesToAdd) {
realProperty.addValues(pValuesToAdd);
}
public void addValue(PrismPropertyValue<T> pValueToAdd) {
realProperty.addValue(pValueToAdd);
}
public void addRealValue(T valueToAdd) {
realProperty.addRealValue(valueToAdd);
}
@Override
public void addRealValueSkipUniquenessCheck(T valueToAdd) {
realProperty.addRealValueSkipUniquenessCheck(valueToAdd);
}
public void addRealValues(T... valuesToAdd) {
realProperty.addRealValues(valuesToAdd);
}
public String getHelp() {
return realProperty.getHelp();
}
public boolean deleteValues(Collection<PrismPropertyValue<T>> pValuesToDelete) {
return realProperty.deleteValues(pValuesToDelete);
}
public boolean deleteValue(PrismPropertyValue<T> pValueToDelete) {
return realProperty.deleteValue(pValueToDelete);
}
public void replaceValues(Collection<PrismPropertyValue<T>> valuesToReplace) {
realProperty.replaceValues(valuesToReplace);
}
public boolean hasRealValue(PrismPropertyValue<T> value) {
return realProperty.hasRealValue(value);
}
public Class<T> getValueClass() {
return realProperty.getValueClass();
}
public PropertyDelta<T> createDelta() {
return realProperty.createDelta();
}
public PropertyDelta<T> createDelta(ItemPath path) {
return realProperty.createDelta(path);
}
public boolean isIncomplete() {
return realProperty.isIncomplete();
}
public Object find(ItemPath path) {
return realProperty.find(path);
}
public <IV extends PrismValue, ID extends ItemDefinition> PartiallyResolvedItem<IV, ID> findPartial(
ItemPath path) {
return realProperty.findPartial(path);
}
public PropertyDelta<T> diff(PrismProperty<T> other) {
return realProperty.diff(other);
}
public PropertyDelta<T> diff(PrismProperty<T> other, ParameterizedEquivalenceStrategy strategy) {
return realProperty.diff(other, strategy);
}
// NOTE(review): clone() returns a clone of the REAL property, not a DummyPropertyImpl;
// the wrapper (and its overridden path) is lost on clone. Presumably intentional — confirm.
public PrismProperty<T> clone() {
return realProperty.clone();
}
public PrismProperty<T> cloneComplex(CloneStrategy strategy) {
return realProperty.cloneComplex(strategy);
}
// Marks the output so dumps reveal the wrapper is in play.
public String toString() {
return "Dummy" + realProperty.toString();
}
public String debugDump(int indent) {
return realProperty.debugDump(indent);
}
public String toHumanReadableString() {
return realProperty.toHumanReadableString();
}
public void setIncomplete(boolean incomplete) {
realProperty.setIncomplete(incomplete);
}
// Parent navigation is deliberately unsupported on the dummy wrapper.
public PrismContainerValue<?> getParent() {
throw new UnsupportedOperationException();
}
public void setParent(PrismContainerValue<?> parentValue) {
throw new UnsupportedOperationException();
}
// The one piece of state owned by this wrapper: the overriding path.
@NotNull
public ItemPath getPath() {
return path;
}
@NotNull
public Map<String, Object> getUserData() {
return realProperty.getUserData();
}
// NOTE(review): the method-level <T> shadows the class type parameter T here;
// it matches the delegated interface signature, so it is left as-is.
public <T> T getUserData(String key) {
return realProperty.getUserData(key);
}
public void setUserData(String key, Object value) {
realProperty.setUserData(key, value);
}
@NotNull
public List<PrismPropertyValue<T>> getValues() {
return realProperty.getValues();
}
public int size() {
return realProperty.size();
}
public PrismPropertyValue<T> getAnyValue() {
return realProperty.getAnyValue();
}
public boolean isSingleValue() {
return realProperty.isSingleValue();
}
public boolean add(@NotNull PrismPropertyValue<T> newValue, boolean checkUniqueness) throws SchemaException {
return realProperty.add(newValue, checkUniqueness);
}
public boolean add(@NotNull PrismPropertyValue<T> newValue) throws SchemaException {
return realProperty.add(newValue);
}
public boolean add(@NotNull PrismPropertyValue<T> newValue, @NotNull EquivalenceStrategy equivalenceStrategy)
throws SchemaException {
return realProperty.add(newValue, equivalenceStrategy);
}
public boolean addAll(Collection<PrismPropertyValue<T>> newValues) throws SchemaException {
return realProperty.addAll(newValues);
}
public boolean addAll(Collection<PrismPropertyValue<T>> newValues, EquivalenceStrategy strategy)
throws SchemaException {
return realProperty.addAll(newValues, strategy);
}
@Override
public boolean addAll(Collection<PrismPropertyValue<T>> newValues, boolean checkUniqueness, EquivalenceStrategy strategy)
throws SchemaException {
return realProperty.addAll(newValues, checkUniqueness, strategy);
}
public boolean remove(PrismPropertyValue<T> value) {
return realProperty.remove(value);
}
public boolean remove(PrismPropertyValue<T> value, @NotNull EquivalenceStrategy strategy) {
return realProperty.remove(value, strategy);
}
public boolean removeAll(Collection<PrismPropertyValue<T>> values) {
return realProperty.removeAll(values);
}
public void clear() {
realProperty.clear();
}
public void replaceAll(Collection<PrismPropertyValue<T>> newValues, EquivalenceStrategy strategy)
throws SchemaException {
realProperty.replaceAll(newValues, strategy);
}
public void replace(PrismPropertyValue<T> newValue) throws SchemaException {
realProperty.replace(newValue);
}
// equals/hashCode are also delegated, so a dummy compares equal to whatever
// its real property compares equal to (the overridden path does not participate).
public boolean equals(Object obj) {
return realProperty.equals(obj);
}
public boolean equals(Object obj, @NotNull EquivalenceStrategy equivalenceStrategy) {
return realProperty.equals(obj, equivalenceStrategy);
}
public boolean equals(Object obj, @NotNull ParameterizedEquivalenceStrategy equivalenceStrategy) {
return realProperty.equals(obj, equivalenceStrategy);
}
public int hashCode() {
return realProperty.hashCode();
}
public int hashCode(@NotNull EquivalenceStrategy equivalenceStrategy) {
return realProperty.hashCode(equivalenceStrategy);
}
public int hashCode(@NotNull ParameterizedEquivalenceStrategy equivalenceStrategy) {
return realProperty.hashCode(equivalenceStrategy);
}
public boolean contains(PrismPropertyValue<T> value) {
return realProperty.contains(value);
}
public boolean contains(PrismPropertyValue<T> value, @NotNull EquivalenceStrategy strategy) {
return realProperty.contains(value, strategy);
}
public boolean contains(PrismPropertyValue<T> value, EquivalenceStrategy strategy,
Comparator<PrismPropertyValue<T>> comparator) {
return realProperty.contains(value, strategy, comparator);
}
public boolean containsEquivalentValue(PrismPropertyValue<T> value) {
return realProperty.containsEquivalentValue(value);
}
public boolean containsEquivalentValue(PrismPropertyValue<T> value,
Comparator<PrismPropertyValue<T>> comparator) {
return realProperty.containsEquivalentValue(value, comparator);
}
public PrismPropertyValue<T> findValue(PrismPropertyValue<T> value, @NotNull EquivalenceStrategy strategy) {
return realProperty.findValue(value, strategy);
}
public boolean valuesEqual(Collection<PrismPropertyValue<T>> matchValues,
Comparator<PrismPropertyValue<T>> comparator) {
return realProperty.valuesEqual(matchValues, comparator);
}
public ItemDelta<PrismPropertyValue<T>, PrismPropertyDefinition<T>> diff(
Item<PrismPropertyValue<T>, PrismPropertyDefinition<T>> other) {
return realProperty.diff(other);
}
public ItemDelta<PrismPropertyValue<T>, PrismPropertyDefinition<T>> diff(
Item<PrismPropertyValue<T>, PrismPropertyDefinition<T>> other,
@NotNull ParameterizedEquivalenceStrategy strategy) {
return realProperty.diff(other, strategy);
}
public Collection<PrismPropertyValue<T>> getClonedValues() {
return realProperty.getClonedValues();
}
public void normalize() {
realProperty.normalize();
}
public void merge(Item<PrismPropertyValue<T>, PrismPropertyDefinition<T>> otherItem)
throws SchemaException {
realProperty.merge(otherItem);
}
public void acceptParentVisitor(@NotNull Visitor visitor) {
realProperty.acceptParentVisitor(visitor);
}
public void recomputeAllValues() {
realProperty.recomputeAllValues();
}
public void filterValues(Function<PrismPropertyValue<T>, Boolean> function) {
realProperty.filterValues(function);
}
public void applyDefinition(PrismPropertyDefinition<T> definition) throws SchemaException {
realProperty.applyDefinition(definition);
}
public void applyDefinition(PrismPropertyDefinition<T> definition, boolean force) throws SchemaException {
realProperty.applyDefinition(definition, force);
}
public void revive(PrismContext prismContext) throws SchemaException {
realProperty.revive(prismContext);
}
public void checkConsistence(boolean requireDefinitions, ConsistencyCheckScope scope) {
realProperty.checkConsistence(requireDefinitions, scope);
}
public void checkConsistence(boolean requireDefinitions, boolean prohibitRaw) {
realProperty.checkConsistence(requireDefinitions, prohibitRaw);
}
public void checkConsistence(boolean requireDefinitions, boolean prohibitRaw,
ConsistencyCheckScope scope) {
realProperty.checkConsistence(requireDefinitions, prohibitRaw, scope);
}
public void checkConsistence() {
realProperty.checkConsistence();
}
public void checkConsistence(ConsistencyCheckScope scope) {
realProperty.checkConsistence(scope);
}
public void checkConsistenceInternal(Itemable rootItem, boolean requireDefinitions, boolean prohibitRaw,
ConsistencyCheckScope scope) {
realProperty.checkConsistenceInternal(rootItem, requireDefinitions, prohibitRaw, scope);
}
public void assertDefinitions() throws SchemaException {
realProperty.assertDefinitions();
}
public void assertDefinitions(String sourceDescription) throws SchemaException {
realProperty.assertDefinitions(sourceDescription);
}
public void assertDefinitions(boolean tolerateRawValues, String sourceDescription)
throws SchemaException {
realProperty.assertDefinitions(tolerateRawValues, sourceDescription);
}
public boolean isRaw() {
return realProperty.isRaw();
}
public boolean hasRaw() {
return realProperty.hasRaw();
}
public boolean isEmpty() {
return realProperty.isEmpty();
}
public boolean hasNoValues() {
return realProperty.hasNoValues();
}
public boolean isOperational() {
return realProperty.isOperational();
}
public boolean isImmutable() {
return realProperty.isImmutable();
}
public void setImmutable(boolean immutable) {
realProperty.setImmutable(immutable);
}
public void checkImmutability() {
realProperty.checkImmutability();
}
public void modifyUnfrozen(Runnable mutator) {
realProperty.modifyUnfrozen(mutator);
}
public void modifyUnfrozen(Consumer<Item<PrismPropertyValue<T>, PrismPropertyDefinition<T>>> mutator) {
realProperty.modifyUnfrozen(mutator);
}
@NotNull
public Collection<PrismValue> getAllValues(ItemPath path) {
return realProperty.getAllValues(path);
}
public PrismContext getPrismContext() {
return realProperty.getPrismContext();
}
public PrismContext getPrismContextLocal() {
return realProperty.getPrismContextLocal();
}
public void setPrismContext(PrismContext prismContext) {
realProperty.setPrismContext(prismContext);
}
public Long getHighestId() {
return realProperty.getHighestId();
}
}
| |
package s3.entities;
//import java.awt.Rectangle;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import s3.base.S3;
import s3.base.S3Action;
/**
 * Base class for game entities, identified by a numeric ID and an owner name.
 * Provides reflection-based field comparison (greater/lesser/equals) and a
 * reflection-based "feature" mechanism built on the entity's getter methods.
 */
public abstract class S3Entity
{
    public int entityID;
    public String owner;

    public S3Entity(int iEntityID, String iOwner)
    {
        entityID = iEntityID;
        owner = iOwner;
    }

    public S3Entity()
    {
    }

    public String getOwner() {
        return owner;
    }

    public void setOwner(String owner) {
        this.owner = owner;
    }

    public int getEntityID() {
        return entityID;
    }

    public void setEntityID(int entityID) {
        this.entityID = entityID;
    }

    /** Copy constructor: duplicates the identity fields of another entity. */
    public S3Entity( S3Entity incoming)
    {
        this.entityID = incoming.entityID;
        this.owner = incoming.owner;
    }

    public abstract Object clone();

    /**
     * Reflectively compares every public field of this entity against the same
     * field of {@code incoming}. Returns {@code true} only when every comparable
     * field of this entity is strictly greater. String and boolean fields are
     * skipped; remaining primitives are widened to double for comparison.
     * If the classes differ, or reflection fails, {@code true} is returned
     * (legacy behavior, preserved).
     */
    public boolean greater(S3Entity incoming)
    {
        boolean allGreater = true;
        try
        {
            if ( this.getClass().equals(incoming.getClass()) )
            {
                for ( Field field : this.getClass().getFields() )
                {
                    String fieldType = field.getType().toString();
                    if ( fieldType.equals("class java.lang.String") )
                        continue; // Strings carry no meaningful ordering here.
                    if ( fieldType.equals("boolean") )
                        continue; // Booleans are not ordered.
                    if ( fieldType.equals("char") )
                    {
                        if ( field.getChar(this) <= field.getChar(incoming) )
                            allGreater = false;
                        continue;
                    }
                    // Numeric primitives: widen to double and compare.
                    if ( field.getDouble(this) <= field.getDouble(incoming) )
                    {
                        allGreater = false;
                    }
                }
            }
            return allGreater;
        }
        catch ( Exception e )
        {
            System.out.println("Now you're screwed! " + e);
        }
        // Reflection failure: legacy code reports the comparison as satisfied.
        return true;
    }

    /**
     * Mirror of {@link #greater}: {@code true} only when every comparable public
     * field of this entity is strictly smaller than the corresponding field of
     * {@code incoming}. Same skip rules and failure behavior as {@link #greater}.
     */
    public boolean lesser(S3Entity incoming)
    {
        boolean allLesser = true;
        try
        {
            if ( this.getClass().equals(incoming.getClass()) )
            {
                for ( Field field : this.getClass().getFields() )
                {
                    String fieldType = field.getType().toString();
                    if ( fieldType.equals("class java.lang.String") )
                        continue; // Strings carry no meaningful ordering here.
                    if ( fieldType.equals("boolean") )
                        continue; // Booleans are not ordered.
                    if ( fieldType.equals("char") )
                    {
                        if ( field.getChar(this) >= field.getChar(incoming) )
                            allLesser = false;
                        continue;
                    }
                    // Numeric primitives: widen to double and compare.
                    if ( field.getDouble(this) >= field.getDouble(incoming) )
                    {
                        allLesser = false;
                    }
                }
            }
            return allLesser;
        }
        catch ( Exception e )
        {
            System.out.println("Now you're screwed! " + e);
        }
        // Reflection failure: legacy code reports the comparison as satisfied.
        return true;
    }

    /**
     * Field-by-field reflective equality over all public fields. Strings are
     * compared with equals(), booleans are skipped, chars and numeric primitives
     * are compared by value. If the classes differ, or reflection fails,
     * {@code true} is returned (legacy behavior, preserved).
     *
     * NOTE(review): this OVERLOADS rather than overrides Object.equals(Object);
     * callers holding an Object reference get identity equality instead.
     */
    public boolean equals(S3Entity incoming)
    {
        boolean allEqual = true;
        try
        {
            if ( this.getClass().equals(incoming.getClass()) )
            {
                for ( Field field : this.getClass().getFields() )
                {
                    String fieldType = field.getType().toString();
                    if ( fieldType.equals("class java.lang.String") )
                    {
                        if ( !field.get(this).equals(field.get(incoming)) )
                            allEqual = false;
                        continue;
                    }
                    if ( fieldType.equals("boolean") )
                        continue; // Booleans are deliberately ignored (legacy behavior).
                    if ( fieldType.equals("char") )
                    {
                        if ( field.getChar(this) != field.getChar(incoming) )
                            allEqual = false;
                        continue;
                    }
                    // Numeric primitives: widen to double and compare.
                    if ( field.getDouble(this) != field.getDouble(incoming) )
                    {
                        allEqual = false;
                    }
                }
            }
            return allEqual;
        }
        catch ( Exception e )
        {
            System.out.println("Now you're screwed! " + e);
        }
        // Reflection failure: legacy code reports the entities as equal.
        return true;
    }

    // Per-class cache of feature names, keyed by fully qualified class name.
    static private HashMap<String,List<String>> m_listOfFeaturesHash = new HashMap<String,List<String>>();

    /**
     * Returns the names of this entity's "features": every no-argument getter
     * declared in the class hierarchy (excluding getAllowedUnits/getActionList),
     * with the "get" prefix stripped. The result is cached per concrete class.
     */
    public List<String> listOfFeatures() {
        Class<?> c = getClass();
        String className = c.getName();
        List<String> features = m_listOfFeaturesHash.get(className);
        if (features == null) {
            features = new LinkedList<String>();
            do {
                for (Method m : c.getDeclaredMethods()) {
                    if (m.getName().startsWith("get") && m.getParameterTypes().length == 0 &&
                        !m.getName().equals("getAllowedUnits") &&
                        !m.getName().equals("getActionList")) {
                        features.add(m.getName().substring(3));
                    }
                }
                c = c.getSuperclass();
            } while (c != null && !c.getSimpleName().equals("Object"));
            m_listOfFeaturesHash.put(className, features);
        }
        return features;
    }

    /**
     * Resolves a feature value by invoking the matching getter via reflection.
     * "type" and "id" are handled specially. Returns {@code null} when the
     * getter is missing or throws.
     */
    public Object featureValue(String feature) {
        if (feature.equals("type")) return getClass().getSimpleName();
        if (feature.equals("id")) return entityID;
        Method m;
        try {
            // Capitalize the first letter to form the getter name.
            feature = feature.substring(0, 1).toUpperCase() + feature.substring(1);
            m = getClass().getMethod("get" + feature, (Class[]) null);
            if (m != null) return m.invoke(this, (Object[]) null);
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
        return null;
    }

    /**
     * Sets a feature by invoking the matching single-String setter via
     * reflection. Failures are printed and otherwise ignored.
     */
    public void setfeatureValue(String feature, String value) {
        Method m;
        try {
            m = getClass().getMethod("set" + feature, String.class);
            m.invoke(this, value);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Two entities are equivalent when they share a concrete class and agree on
     * every feature except the entity ID.
     */
    public boolean equivalents(S3Entity e) {
        if (!getClass().equals(e.getClass())) return false;
        for (String f : listOfFeatures()) {
            // We require them to be the same in all features except the ID:
            if (!f.equals("entityID")) {
                Object v = featureValue(f);
                if (v == null) {
                    if (e.featureValue(f) != null) return false;
                } else {
                    if (!v.equals(e.featureValue(f))) return false;
                }
            }
        }
        return true;
    }

    /** Human-readable dump: class name plus every feature/value pair. */
    public String toString() {
        String out = "Entity(" + entityID +"): " + getClass().getSimpleName() + " [ ";
        for (String f : listOfFeatures())
            out += "(" + f + " = " + featureValue(f) + ") ";
        return out + "]";
    }

    /** Per-tick game hook; the base implementation does nothing. */
    public void cycle(int m_cycle, S3 m_game, List<S3Action> failedActions) {
    }

    /**
     * Instantiates an entity subclass by simple name (looked up in
     * {@code s3.entities}). Returns {@code null} if the class cannot be found
     * or instantiated.
     */
    public S3Entity newEntity(String type) {
        S3Entity ent = null;
        try {
            Class<?> c = Class.forName("s3.entities." + type);
            if (c == null) return null;
            ent = (S3Entity) c.newInstance();
        } catch (Exception e) {
            // Unknown type or instantiation failure: fall through and return null.
        }
        return ent;
    }

    public abstract gatech.mmpm.Entity toD2Entity();
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package org.jetbrains.java.decompiler.main;
import org.jetbrains.java.decompiler.code.CodeConstants;
import org.jetbrains.java.decompiler.main.ClassesProcessor.ClassNode;
import org.jetbrains.java.decompiler.main.collectors.BytecodeMappingTracer;
import org.jetbrains.java.decompiler.main.extern.IFernflowerLogger;
import org.jetbrains.java.decompiler.main.extern.IFernflowerPreferences;
import org.jetbrains.java.decompiler.main.rels.ClassWrapper;
import org.jetbrains.java.decompiler.main.rels.MethodWrapper;
import org.jetbrains.java.decompiler.modules.decompiler.typeann.FormalParameterTarget;
import org.jetbrains.java.decompiler.modules.decompiler.typeann.TargetInfo;
import org.jetbrains.java.decompiler.modules.decompiler.typeann.TypePathWriteProgress;
import org.jetbrains.java.decompiler.modules.decompiler.ExprProcessor;
import org.jetbrains.java.decompiler.modules.decompiler.exps.*;
import org.jetbrains.java.decompiler.modules.decompiler.stats.RootStatement;
import org.jetbrains.java.decompiler.modules.decompiler.typeann.TypeAnnotation;
import org.jetbrains.java.decompiler.modules.decompiler.vars.VarTypeProcessor;
import org.jetbrains.java.decompiler.modules.decompiler.vars.VarVersionPair;
import org.jetbrains.java.decompiler.modules.renamer.PoolInterceptor;
import org.jetbrains.java.decompiler.struct.*;
import org.jetbrains.java.decompiler.struct.attr.*;
import org.jetbrains.java.decompiler.struct.consts.PrimitiveConstant;
import org.jetbrains.java.decompiler.struct.gen.FieldDescriptor;
import org.jetbrains.java.decompiler.struct.gen.MethodDescriptor;
import org.jetbrains.java.decompiler.struct.gen.VarType;
import org.jetbrains.java.decompiler.struct.gen.generics.*;
import org.jetbrains.java.decompiler.util.InterpreterUtil;
import org.jetbrains.java.decompiler.util.TextBuffer;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class ClassWriter {
private final PoolInterceptor interceptor;
/** Creates a writer bound to the current decompiler context's renaming interceptor (may be null). */
public ClassWriter() {
interceptor = DecompilerContext.getPoolInterceptor();
}
/**
 * Runs the option-gated "last minute" processors over a class before writing it:
 * initializer extraction always, then 1.4 class-reference rewriting, enum body
 * clearing and assertion reconstruction depending on decompiler preferences.
 * Order matters: initializers are extracted before the other processors run.
 */
private static void invokeProcessors(ClassNode node) {
ClassWrapper wrapper = node.getWrapper();
StructClass cl = wrapper.getClassStruct();
InitializerProcessor.extractInitializers(wrapper);
// 1.4-style Class-literal rewriting only applies to pre-Java-5 root classes.
if (node.type == ClassNode.CLASS_ROOT &&
!cl.isVersion5() &&
DecompilerContext.getOption(IFernflowerPreferences.DECOMPILE_CLASS_1_4)) {
ClassReference14Processor.processClassReferences(node);
}
if (cl.hasModifier(CodeConstants.ACC_ENUM) && DecompilerContext.getOption(IFernflowerPreferences.DECOMPILE_ENUM)) {
EnumProcessor.clearEnum(wrapper);
}
if (DecompilerContext.getOption(IFernflowerPreferences.DECOMPILE_ASSERTIONS)) {
AssertProcessor.buildAssertions(node);
}
}
/**
 * Renders a lambda class node as Java source into {@code buffer}: either a
 * method reference ({@code recv::name} / {@code Type::name} / {@code Type::new})
 * or an explicit lambda expression with a parameter list and a body.
 * The current-class-node context property is swapped to {@code node} for the
 * duration and restored in the finally block.
 *
 * @param node          the lambda class to render
 * @param buffer        output buffer
 * @param method_object receiver expression for a virtual method reference; may be null
 * @param indent        current indentation level
 * @param origTracer    tracer supplying the current source line for line mapping
 */
public void classLambdaToJava(ClassNode node, TextBuffer buffer, Exprent method_object, int indent, BytecodeMappingTracer origTracer) {
ClassWrapper wrapper = node.getWrapper();
if (wrapper == null) {
return;
}
boolean lambdaToAnonymous = DecompilerContext.getOption(IFernflowerPreferences.LAMBDA_TO_ANONYMOUS_CLASS);
ClassNode outerNode = (ClassNode)DecompilerContext.getProperty(DecompilerContext.CURRENT_CLASS_NODE);
DecompilerContext.setProperty(DecompilerContext.CURRENT_CLASS_NODE, node);
BytecodeMappingTracer tracer = new BytecodeMappingTracer(origTracer.getCurrentSourceLine());
try {
StructClass cl = wrapper.getClassStruct();
DecompilerContext.getLogger().startWriteClass(node.simpleName);
if (node.lambdaInformation.is_method_reference) {
if (!node.lambdaInformation.is_content_method_static && method_object != null) {
// reference to a virtual method
buffer.append(method_object.toJava(indent, tracer));
}
else {
// reference to a static method
buffer.append(ExprProcessor.getCastTypeName(new VarType(node.lambdaInformation.content_class_name, true), Collections.emptyList()));
}
// Constructor references render as "::new".
buffer.append("::")
.append(CodeConstants.INIT_NAME.equals(node.lambdaInformation.content_method_name) ? "new" : node.lambdaInformation.content_method_name);
}
else {
// lambda method
StructMethod mt = cl.getMethod(node.lambdaInformation.content_method_key);
MethodWrapper methodWrapper = wrapper.getMethodWrapper(mt.getName(), mt.getDescriptor());
MethodDescriptor md_content = MethodDescriptor.parseDescriptor(node.lambdaInformation.content_method_descriptor);
MethodDescriptor md_lambda = MethodDescriptor.parseDescriptor(node.lambdaInformation.method_descriptor);
if (!lambdaToAnonymous) {
buffer.append('(');
boolean firstParameter = true;
// Slot 0 is 'this' for instance content methods; captured variables occupy
// the leading content-descriptor slots before the lambda's own parameters.
int index = node.lambdaInformation.is_content_method_static ? 0 : 1;
int start_index = md_content.params.length - md_lambda.params.length;
for (int i = 0; i < md_content.params.length; i++) {
if (i >= start_index) {
if (!firstParameter) {
buffer.append(", ");
}
String parameterName = methodWrapper.varproc.getVarName(new VarVersionPair(index, 0));
buffer.append(parameterName == null ? "param" + index : parameterName); // null iff decompiled with errors
firstParameter = false;
}
index += md_content.params[i].stackSize;
}
buffer.append(") ->");
}
buffer.append(" {").appendLineSeparator();
tracer.incrementCurrentSourceLine();
methodLambdaToJava(node, wrapper, mt, buffer, indent + 1, !lambdaToAnonymous, tracer);
buffer.appendIndent(indent).append("}");
addTracer(cl, mt, tracer);
}
}
finally {
DecompilerContext.setProperty(DecompilerContext.CURRENT_CLASS_NODE, outerNode);
}
DecompilerContext.getLogger().endWriteClass();
}
/**
 * Renders a complete class (definition, fields, methods, nested member classes)
 * as Java source into {@code buffer}, maintaining bytecode-to-source line
 * mappings as it goes. The current-class-node context property is swapped to
 * {@code node} for the duration and restored in the finally block.
 *
 * @param node   the class to render
 * @param buffer output buffer
 * @param indent current indentation level
 * @param tracer supplies the starting source line; may be null (start at 0)
 */
public void classToJava(ClassNode node, TextBuffer buffer, int indent, BytecodeMappingTracer tracer) {
ClassNode outerNode = (ClassNode)DecompilerContext.getProperty(DecompilerContext.CURRENT_CLASS_NODE);
DecompilerContext.setProperty(DecompilerContext.CURRENT_CLASS_NODE, node);
int startLine = tracer != null ? tracer.getCurrentSourceLine() : 0;
BytecodeMappingTracer dummy_tracer = new BytecodeMappingTracer(startLine);
try {
// last minute processing
invokeProcessors(node);
ClassWrapper wrapper = node.getWrapper();
StructClass cl = wrapper.getClassStruct();
DecompilerContext.getLogger().startWriteClass(cl.qualifiedName);
// write class definition
int start_class_def = buffer.length();
writeClassDefinition(node, buffer, indent);
boolean hasContent = false;
boolean enumFields = false;
dummy_tracer.incrementCurrentSourceLine(buffer.countLines(start_class_def));
List<StructRecordComponent> components = cl.getRecordComponents();
// Fields: synthetic and hidden ones are skipped; enum constants are joined
// with commas and closed with a semicolon; record component fields are omitted
// because they are implied by the record header.
for (StructField fd : cl.getFields()) {
boolean hide = fd.isSynthetic() && DecompilerContext.getOption(IFernflowerPreferences.REMOVE_SYNTHETIC) ||
wrapper.getHiddenMembers().contains(InterpreterUtil.makeUniqueKey(fd.getName(), fd.getDescriptor()));
if (hide) continue;
if (components != null && fd.getAccessFlags() == (CodeConstants.ACC_FINAL | CodeConstants.ACC_PRIVATE) &&
components.stream().anyMatch(c -> c.getName().equals(fd.getName()) && c.getDescriptor().equals(fd.getDescriptor()))) {
// Record component field: skip it
continue;
}
boolean isEnum = fd.hasModifier(CodeConstants.ACC_ENUM) && DecompilerContext.getOption(IFernflowerPreferences.DECOMPILE_ENUM);
if (isEnum) {
if (enumFields) {
buffer.append(',').appendLineSeparator();
dummy_tracer.incrementCurrentSourceLine();
}
enumFields = true;
}
else if (enumFields) {
// First non-enum field after the constants: terminate the constant list.
buffer.append(';');
buffer.appendLineSeparator();
buffer.appendLineSeparator();
dummy_tracer.incrementCurrentSourceLine(2);
enumFields = false;
}
fieldToJava(wrapper, cl, fd, buffer, indent + 1, dummy_tracer); // FIXME: insert real tracer
hasContent = true;
}
if (enumFields) {
buffer.append(';').appendLineSeparator();
dummy_tracer.incrementCurrentSourceLine();
}
// FIXME: fields don't matter at the moment
startLine += buffer.countLines(start_class_def);
// methods
for (StructMethod mt : cl.getMethods()) {
boolean hide = mt.isSynthetic() && DecompilerContext.getOption(IFernflowerPreferences.REMOVE_SYNTHETIC) ||
mt.hasModifier(CodeConstants.ACC_BRIDGE) && DecompilerContext.getOption(IFernflowerPreferences.REMOVE_BRIDGE) ||
wrapper.getHiddenMembers().contains(InterpreterUtil.makeUniqueKey(mt.getName(), mt.getDescriptor()));
if (hide) continue;
// Remember the buffer/line state so a skipped method can be rolled back.
int position = buffer.length();
int storedLine = startLine;
if (hasContent) {
buffer.appendLineSeparator();
startLine++;
}
BytecodeMappingTracer method_tracer = new BytecodeMappingTracer(startLine);
boolean methodSkipped = !methodToJava(node, mt, buffer, indent + 1, method_tracer);
if (!methodSkipped) {
hasContent = true;
addTracer(cl, mt, method_tracer);
startLine = method_tracer.getCurrentSourceLine();
}
else {
buffer.setLength(position);
startLine = storedLine;
}
}
// member classes
for (ClassNode inner : node.nested) {
if (inner.type == ClassNode.CLASS_MEMBER) {
StructClass innerCl = inner.classStruct;
boolean isSynthetic = (inner.access & CodeConstants.ACC_SYNTHETIC) != 0 || innerCl.isSynthetic();
boolean hide = isSynthetic && DecompilerContext.getOption(IFernflowerPreferences.REMOVE_SYNTHETIC) ||
wrapper.getHiddenMembers().contains(innerCl.qualifiedName);
if (hide) continue;
if (hasContent) {
buffer.appendLineSeparator();
startLine++;
}
BytecodeMappingTracer class_tracer = new BytecodeMappingTracer(startLine);
classToJava(inner, buffer, indent + 1, class_tracer);
startLine = buffer.countLines();
hasContent = true;
}
}
buffer.appendIndent(indent).append('}');
// Anonymous classes are embedded in an expression; no trailing newline.
if (node.type != ClassNode.CLASS_ANONYMOUS) {
buffer.appendLineSeparator();
}
}
finally {
DecompilerContext.setProperty(DecompilerContext.CURRENT_CLASS_NODE, outerNode);
}
DecompilerContext.getLogger().endWriteClass();
}
@SuppressWarnings("SpellCheckingInspection")
/**
 * Detects the compiler-generated equals/hashCode/toString of a record: a
 * one-line body delegating through an invokedynamic call.
 */
private static boolean isSyntheticRecordMethod(StructClass cl, StructMethod mt, TextBuffer code) {
    // Only records carry compiler-generated bodies of this shape.
    if (cl.getRecordComponents() == null) {
        return false;
    }
    String name = mt.getName();
    String descriptor = mt.getDescriptor();
    boolean candidate =
        name.equals("equals") && descriptor.equals("(Ljava/lang/Object;)Z") ||
        name.equals("hashCode") && descriptor.equals("()I") ||
        name.equals("toString") && descriptor.equals("()Ljava/lang/String;");
    if (!candidate || code.countLines() != 1) {
        return false;
    }
    // The generated body is a single invokedynamic-based delegation.
    return code.toString().trim().startsWith("return this." + name + "<invokedynamic>(this");
}
/**
 * Renders a package-info.java source: package-level annotations followed by
 * the package declaration and a blank line.
 */
public static void packageInfoToJava(StructClass cl, TextBuffer buffer) {
    appendAnnotations(buffer, 0, 0, cl, -1);
    // Derive the dotted package name from the internal qualified name.
    int lastSlash = cl.qualifiedName.lastIndexOf('/');
    String packageName = cl.qualifiedName.substring(0, lastSlash).replace('/', '.');
    buffer.append("package ").append(packageName).append(';').appendLineSeparator().appendLineSeparator();
}
/**
 * Emits the contents of a {@code module-info} class: annotations, the
 * (possibly {@code open}) module header, the directive body, and the closing brace.
 */
public static void moduleInfoToJava(StructClass cl, TextBuffer buffer) {
  appendAnnotations(buffer, 0, 0, cl, -1);

  StructModuleAttribute module = cl.getAttribute(StructGeneralAttribute.ATTRIBUTE_MODULE);

  boolean isOpen = (module.moduleFlags & CodeConstants.ACC_OPEN) != 0;
  if (isOpen) {
    buffer.append("open ");
  }

  buffer.append("module ").append(module.moduleName).append(" {").appendLineSeparator();
  writeModuleInfoBody(buffer, module);
  buffer.append('}').appendLineSeparator();
}
/**
 * Writes the directives of a module declaration in the canonical order:
 * requires, exports, opens, uses, provides. Sections are separated by one
 * blank line; compiler-generated (synthetic/mandated) entries are suppressed.
 */
private static void writeModuleInfoBody(TextBuffer buffer, StructModuleAttribute moduleAttribute) {
  boolean separatorPending = false;

  // 'requires' directives.
  List<StructModuleAttribute.RequiresEntry> requires = moduleAttribute.requires;
  if (!requires.isEmpty()) {
    for (StructModuleAttribute.RequiresEntry entry : requires) {
      if (isGenerated(entry.flags)) continue;
      buffer.appendIndent(1).append("requires ").append(entry.moduleName.replace('/', '.')).append(';').appendLineSeparator();
      separatorPending = true;
    }
  }

  // 'exports' directives, each optionally qualified by a "to" list.
  List<StructModuleAttribute.ExportsEntry> exports = moduleAttribute.exports;
  if (!exports.isEmpty()) {
    if (separatorPending) buffer.appendLineSeparator();
    for (StructModuleAttribute.ExportsEntry entry : exports) {
      if (isGenerated(entry.flags)) continue;
      buffer.appendIndent(1).append("exports ").append(entry.packageName.replace('/', '.'));
      List<String> targets = entry.exportToModules;
      if (!targets.isEmpty()) {
        buffer.append(" to").appendLineSeparator();
        appendFQClassNames(buffer, targets);
      }
      buffer.append(';').appendLineSeparator();
      separatorPending = true;
    }
  }

  // 'opens' directives, mirroring the 'exports' shape.
  List<StructModuleAttribute.OpensEntry> opens = moduleAttribute.opens;
  if (!opens.isEmpty()) {
    if (separatorPending) buffer.appendLineSeparator();
    for (StructModuleAttribute.OpensEntry entry : opens) {
      if (isGenerated(entry.flags)) continue;
      buffer.appendIndent(1).append("opens ").append(entry.packageName.replace('/', '.'));
      List<String> targets = entry.opensToModules;
      if (!targets.isEmpty()) {
        buffer.append(" to").appendLineSeparator();
        appendFQClassNames(buffer, targets);
      }
      buffer.append(';').appendLineSeparator();
      separatorPending = true;
    }
  }

  // 'uses' directives (service consumption).
  List<String> uses = moduleAttribute.uses;
  if (!uses.isEmpty()) {
    if (separatorPending) buffer.appendLineSeparator();
    for (String serviceName : uses) {
      buffer.appendIndent(1).append("uses ").append(ExprProcessor.buildJavaClassName(serviceName)).append(';').appendLineSeparator();
    }
    separatorPending = true;
  }

  // 'provides ... with ...' directives (service implementations).
  List<StructModuleAttribute.ProvidesEntry> provides = moduleAttribute.provides;
  if (!provides.isEmpty()) {
    if (separatorPending) buffer.appendLineSeparator();
    for (StructModuleAttribute.ProvidesEntry entry : provides) {
      buffer.appendIndent(1).append("provides ").append(ExprProcessor.buildJavaClassName(entry.interfaceName)).append(" with").appendLineSeparator();
      appendFQClassNames(buffer, entry.implementationNames.stream().map(ExprProcessor::buildJavaClassName).collect(Collectors.toList()));
      buffer.append(';').appendLineSeparator();
    }
  }
}
/** Returns true if the module directive carrying {@code flags} was compiler-generated (synthetic or mandated). */
private static boolean isGenerated(int flags) {
  int generatedMask = CodeConstants.ACC_SYNTHETIC | CodeConstants.ACC_MANDATED;
  return (flags & generatedMask) != 0;
}
/**
 * Registers {@code tracer} for the given method with the global bytecode-to-source
 * mapper, after attaching the method's LineNumberTable (may be absent).
 */
private static void addTracer(StructClass cls, StructMethod method, BytecodeMappingTracer tracer) {
  StructLineNumberTableAttribute lineNumberTable = method.getAttribute(StructGeneralAttribute.ATTRIBUTE_LINE_NUMBER_TABLE);
  tracer.setLineNumberTable(lineNumberTable);
  String methodKey = InterpreterUtil.makeUniqueKey(method.getName(), method.getDescriptor());
  DecompilerContext.getBytecodeSourceMapper().addTracer(cls.qualifiedName, methodKey, tracer);
}
/**
 * Writes the class header up to and including the opening brace: deprecation and
 * rename comments, annotations, modifiers, the kind keyword (class/interface/@interface/
 * enum/record), type parameters, the record component list, extends/implements clauses,
 * and the sealed-class permits clause.
 *
 * Fix: the permits-clause separator was keyed to the list index, so when the first
 * permitted subclass was nested (and therefore skipped) the output began with
 * "permits , Foo". The separator is now keyed to the number of entries actually emitted.
 */
private void writeClassDefinition(ClassNode node, TextBuffer buffer, int indent) {
  if (node.type == ClassNode.CLASS_ANONYMOUS) {
    // Anonymous classes have no header of their own; the "new Type(...)" expression precedes this.
    buffer.append(" {").appendLineSeparator();
    return;
  }

  ClassWrapper wrapper = node.getWrapper();
  StructClass cl = wrapper.getClassStruct();

  // For the root class use the class-file flags; for nested classes use the InnerClasses flags.
  int flags = node.type == ClassNode.CLASS_ROOT ? cl.getAccessFlags() : node.access;
  boolean isDeprecated = cl.hasAttribute(StructGeneralAttribute.ATTRIBUTE_DEPRECATED);
  boolean isSynthetic = (flags & CodeConstants.ACC_SYNTHETIC) != 0 || cl.hasAttribute(StructGeneralAttribute.ATTRIBUTE_SYNTHETIC);
  boolean isEnum = DecompilerContext.getOption(IFernflowerPreferences.DECOMPILE_ENUM) && (flags & CodeConstants.ACC_ENUM) != 0;
  boolean isInterface = (flags & CodeConstants.ACC_INTERFACE) != 0;
  boolean isAnnotation = (flags & CodeConstants.ACC_ANNOTATION) != 0;

  if (isDeprecated) {
    appendDeprecation(buffer, indent);
  }

  if (interceptor != null) {
    String oldName = interceptor.getOldName(cl.qualifiedName);
    appendRenameComment(buffer, oldName, MType.CLASS, indent);
  }

  if (isSynthetic) {
    appendComment(buffer, "synthetic class", indent);
  }

  appendAnnotations(buffer, 0, indent, cl, -1);

  buffer.appendIndent(indent);

  if (isEnum) {
    // remove abstract and final flags (JLS 8.9 Enums)
    flags &= ~CodeConstants.ACC_ABSTRACT;
    flags &= ~CodeConstants.ACC_FINAL;
  }

  List<StructRecordComponent> components = cl.getRecordComponents();
  List<String> permittedSubclassQualifiedNames = cl.getPermittedSubclasses();

  if (components != null) {
    // records are implicitly final
    flags &= ~CodeConstants.ACC_FINAL;
  }

  appendModifiers(buffer, flags, CLASS_ALLOWED, isInterface, CLASS_EXCLUDED);

  // A non-null PermittedSubclasses attribute marks the class sealed, even if the list is empty.
  if (permittedSubclassQualifiedNames != null) {
    buffer.append("sealed ");
  }
  else if (node.isNonSealed()) {
    buffer.append("non-sealed ");
  }

  if (isEnum) {
    buffer.append("enum ");
  }
  else if (isInterface) {
    if (isAnnotation) {
      buffer.append('@');
    }
    buffer.append("interface ");
  }
  else if (components != null) {
    buffer.append("record ");
  }
  else {
    buffer.append("class ");
  }
  buffer.append(node.simpleName);

  GenericClassDescriptor descriptor = getGenericClassDescriptor(cl);
  if (descriptor != null && !descriptor.fparameters.isEmpty()) {
    appendTypeParameters(buffer, descriptor.fparameters, descriptor.fbounds);
  }

  if (components != null) {
    // Record header: "(Type name, ...)" — the last component may be printed as varargs.
    buffer.append('(');
    for (int i = 0; i < components.size(); i++) {
      StructRecordComponent cd = components.get(i);
      if (i > 0) {
        buffer.append(", ");
      }
      boolean varArgComponent = i == components.size() - 1 && isVarArgRecord(cl);
      recordComponentToJava(cd, buffer, varArgComponent);
    }
    buffer.append(')');
  }

  buffer.append(' ');

  // Superclass clause — suppressed for enums, interfaces, records, and java.lang.Object.
  if (!isEnum && !isInterface && components == null && cl.superClass != null) {
    VarType supertype = new VarType(cl.superClass.getString(), true);
    if (!VarType.VARTYPE_OBJECT.equals(supertype)) {
      buffer.append("extends ");
      if (descriptor != null) {
        buffer.append(GenericMain.getGenericCastTypeName(descriptor.superclass, Collections.emptyList()));
      }
      else {
        buffer.append(ExprProcessor.getCastTypeName(supertype, Collections.emptyList()));
      }
      buffer.append(' ');
    }
  }

  // Interface clause — "extends" for interfaces, "implements" otherwise.
  if (!isAnnotation) {
    int[] interfaces = cl.getInterfaces();
    if (interfaces.length > 0) {
      buffer.append(isInterface ? "extends " : "implements ");
      for (int i = 0; i < interfaces.length; i++) {
        if (i > 0) {
          buffer.append(", ");
        }
        if (descriptor != null) {
          buffer.append(GenericMain.getGenericCastTypeName(descriptor.superinterfaces.get(i), Collections.emptyList()));
        }
        else {
          buffer.append(ExprProcessor.getCastTypeName(new VarType(cl.getInterface(i), true), Collections.emptyList()));
        }
      }
      buffer.append(' ');
    }
  }

  if (permittedSubclassQualifiedNames != null && !permittedSubclassQualifiedNames.isEmpty()) {
    Set<String> qualifiedNested = node.nested.stream()
      .map(nestedNode -> nestedNode.classStruct.qualifiedName)
      .collect(Collectors.toSet());
    boolean allSubClassesAreNested = qualifiedNested.containsAll(permittedSubclassQualifiedNames);
    if (!allSubClassesAreNested) { // only generate permits lists for non-nested classes
      buffer.append("permits ");
      // FIX: track emitted entries instead of the raw list index so a skipped
      // (nested) leading entry does not produce a dangling ", " separator.
      boolean firstPermitted = true;
      for (String qualifiedName : permittedSubclassQualifiedNames) {
        boolean isNested = qualifiedNested.contains(qualifiedName);
        if (!isNested) {
          if (!firstPermitted) {
            buffer.append(", ");
          }
          firstPermitted = false;
          // Register the name with the import collector; the return value is intentionally unused.
          DecompilerContext.getImportCollector().getShortName(qualifiedName);
          String simpleName = qualifiedName.substring(qualifiedName.lastIndexOf('/') + 1);
          buffer.append(simpleName);
        }
      }
      buffer.append(' ');
    }
  }

  buffer.append('{').appendLineSeparator();
}
/**
 * Returns true if the record's canonical constructor (whose descriptor is the
 * concatenation of all component descriptors) is declared varargs.
 */
private static boolean isVarArgRecord(StructClass cl) {
  StringBuilder descriptor = new StringBuilder("(");
  for (StructRecordComponent component : cl.getRecordComponents()) {
    descriptor.append(component.getDescriptor());
  }
  descriptor.append(")V");
  StructMethod canonicalConstructor = cl.getMethod(CodeConstants.INIT_NAME, descriptor.toString());
  return canonicalConstructor != null && canonicalConstructor.hasModifier(CodeConstants.ACC_VARARGS);
}
/**
 * Renders a single field declaration (or enum constant) into {@code buffer}.
 * Emits deprecation/rename/synthetic comments, annotations, modifiers, the field
 * type and name, and — when available — an initializer taken from the class or
 * instance initializer analysis, or from the ConstantValue attribute for
 * static final fields. Keeps {@code tracer}'s source-line counter in sync with
 * the lines appended.
 */
private void fieldToJava(ClassWrapper wrapper, StructClass cl, StructField fd, TextBuffer buffer, int indent, BytecodeMappingTracer tracer) {
  int start = buffer.length();
  boolean isInterface = cl.hasModifier(CodeConstants.ACC_INTERFACE);
  boolean isDeprecated = fd.hasAttribute(StructGeneralAttribute.ATTRIBUTE_DEPRECATED);
  boolean isEnum = fd.hasModifier(CodeConstants.ACC_ENUM) && DecompilerContext.getOption(IFernflowerPreferences.DECOMPILE_ENUM);

  if (isDeprecated) {
    appendDeprecation(buffer, indent);
  }

  if (interceptor != null) {
    // The interceptor keys renames by "<class> <name> <descriptor>".
    String oldName = interceptor.getOldName(cl.qualifiedName + " " + fd.getName() + " " + fd.getDescriptor());
    appendRenameComment(buffer, oldName, MType.FIELD, indent);
  }

  if (fd.isSynthetic()) {
    appendComment(buffer, "synthetic field", indent);
  }

  Map.Entry<VarType, GenericFieldDescriptor> fieldTypeData = getFieldTypeData(fd);
  VarType fieldType = fieldTypeData.getKey();

  appendAnnotations(buffer, fieldType.arrayDim, indent, fd, TypeAnnotation.FIELD);

  buffer.appendIndent(indent);

  // Enum constants carry neither explicit modifiers nor an explicit type.
  if (!isEnum) {
    appendModifiers(buffer, fd.getAccessFlags(), FIELD_ALLOWED, isInterface, FIELD_EXCLUDED);
  }

  GenericFieldDescriptor descriptor = fieldTypeData.getValue();
  final List<TypePathWriteProgress> typeAnnWriteProgress = createTypeAnnWriteProgress(fd);
  if (!isEnum) {
    // Prefer the generic signature when one was parsed; fall back to the raw descriptor type.
    if (descriptor != null) {
      buffer.append(GenericMain.getGenericCastTypeName(descriptor.type, typeAnnWriteProgress));
    }
    else {
      buffer.append(ExprProcessor.getCastTypeName(fieldType, typeAnnWriteProgress));
    }
    buffer.append(' ');
  }

  buffer.append(fd.getName());
  tracer.incrementCurrentSourceLine(buffer.countLines(start));

  // Initializers are recovered from <clinit> (static) or constructor (instance) analysis.
  Exprent initializer;
  if (fd.hasModifier(CodeConstants.ACC_STATIC)) {
    initializer = wrapper.getStaticFieldInitializers().getWithKey(InterpreterUtil.makeUniqueKey(fd.getName(), fd.getDescriptor()));
  }
  else {
    initializer = wrapper.getDynamicFieldInitializers().getWithKey(InterpreterUtil.makeUniqueKey(fd.getName(), fd.getDescriptor()));
  }
  if (initializer != null) {
    if (isEnum && initializer.type == Exprent.EXPRENT_NEW) {
      // Enum constant: print the constructor call in enum-constant form ("NAME(args)").
      NewExprent expr = (NewExprent)initializer;
      expr.setEnumConst(true);
      buffer.append(expr.toJava(indent, tracer));
    }
    else {
      buffer.append(" = ");

      if (initializer.type == Exprent.EXPRENT_CONST) {
        // Make the literal's printed type match the declared field type (e.g. 1 vs 1L).
        ((ConstExprent) initializer).adjustConstType(fieldType);
      }

      // FIXME: special case field initializer. Can map to more than one method (constructor) and bytecode instruction
      buffer.append(initializer.toJava(indent, tracer));
    }
  }
  else if (fd.hasModifier(CodeConstants.ACC_FINAL) && fd.hasModifier(CodeConstants.ACC_STATIC)) {
    // No recovered initializer: static finals may still carry a ConstantValue attribute.
    StructConstantValueAttribute attr = fd.getAttribute(StructGeneralAttribute.ATTRIBUTE_CONSTANT_VALUE);
    if (attr != null) {
      PrimitiveConstant constant = cl.getPool().getPrimitiveConstant(attr.getIndex());
      buffer.append(" = ");
      buffer.append(new ConstExprent(fieldType, constant.value, null, fd).toJava(indent, tracer));
    }
  }

  // Enum constants are separated by commas elsewhere, so no trailing ';' here.
  if (!isEnum) {
    buffer.append(';').appendLineSeparator();
    tracer.incrementCurrentSourceLine();
  }
}
/**
 * Renders one record header component as "annotations Type name", printing the
 * last component as "Type... name" when the canonical constructor is varargs.
 */
private static void recordComponentToJava(StructRecordComponent cd, TextBuffer buffer, boolean varArgComponent) {
  // indent == -1 makes the annotations render inline, separated by spaces.
  appendAnnotations(buffer, 0, -1, cd, TypeAnnotation.FIELD);

  Map.Entry<VarType, GenericFieldDescriptor> typeData = getFieldTypeData(cd);
  VarType componentType = typeData.getKey();
  GenericFieldDescriptor generics = typeData.getValue();
  final List<TypePathWriteProgress> annProgress = createTypeAnnWriteProgress(cd);

  // For a varargs component the array dimension is dropped and "..." printed instead.
  if (generics != null) {
    buffer.append(GenericMain.getGenericCastTypeName(varArgComponent ? generics.type.decreaseArrayDim() : generics.type, annProgress));
  }
  else {
    buffer.append(ExprProcessor.getCastTypeName(varArgComponent ? componentType.decreaseArrayDim() : componentType, annProgress));
  }
  if (varArgComponent) {
    buffer.append("...");
  }

  buffer.append(' ');
  buffer.append(cd.getName());
}
/**
 * Renders the desugared body of a lambda/method reference. When {@code codeOnly}
 * is false the synthetic functional-interface method header and braces are
 * emitted as well; otherwise only the statements are written.
 *
 * Fix: the root statement was fetched via a second, identical
 * {@code classWrapper.getMethodWrapper(name, descriptor)} lookup even though the
 * same wrapper had already been resolved into {@code methodWrapper}; the
 * redundant lookup is removed.
 */
private static void methodLambdaToJava(ClassNode lambdaNode,
                                       ClassWrapper classWrapper,
                                       StructMethod mt,
                                       TextBuffer buffer,
                                       int indent,
                                       boolean codeOnly, BytecodeMappingTracer tracer) {
  MethodWrapper methodWrapper = classWrapper.getMethodWrapper(mt.getName(), mt.getDescriptor());

  // The current-method property must be restored afterwards; writing is reentrant.
  MethodWrapper outerWrapper = (MethodWrapper)DecompilerContext.getProperty(DecompilerContext.CURRENT_METHOD_WRAPPER);
  DecompilerContext.setProperty(DecompilerContext.CURRENT_METHOD_WRAPPER, methodWrapper);

  try {
    String method_name = lambdaNode.lambdaInformation.method_name;
    MethodDescriptor md_content = MethodDescriptor.parseDescriptor(lambdaNode.lambdaInformation.content_method_descriptor);
    MethodDescriptor md_lambda = MethodDescriptor.parseDescriptor(lambdaNode.lambdaInformation.method_descriptor);

    if (!codeOnly) {
      buffer.appendIndent(indent);
      buffer.append("public ");
      buffer.append(method_name);
      buffer.append("(");

      boolean firstParameter = true;
      // Non-static content methods have 'this' in slot 0.
      int index = lambdaNode.lambdaInformation.is_content_method_static ? 0 : 1;
      // Captured variables occupy the leading parameter slots; only the trailing
      // md_lambda.params.length parameters belong to the functional signature.
      int start_index = md_content.params.length - md_lambda.params.length;

      for (int i = 0; i < md_content.params.length; i++) {
        if (i >= start_index) {
          if (!firstParameter) {
            buffer.append(", ");
          }

          String typeName = ExprProcessor.getCastTypeName(md_content.params[i].copy(), Collections.emptyList());
          if (ExprProcessor.UNDEFINED_TYPE_STRING.equals(typeName) &&
              DecompilerContext.getOption(IFernflowerPreferences.UNDEFINED_PARAM_TYPE_OBJECT)) {
            typeName = ExprProcessor.getCastTypeName(VarType.VARTYPE_OBJECT, Collections.emptyList());
          }

          buffer.append(typeName);
          buffer.append(" ");

          String parameterName = methodWrapper.varproc.getVarName(new VarVersionPair(index, 0));
          buffer.append(parameterName == null ? "param" + index : parameterName); // null iff decompiled with errors

          firstParameter = false;
        }

        index += md_content.params[i].stackSize; // long and double occupy two slots
      }

      buffer.append(") {").appendLineSeparator();
      indent += 1;
    }

    RootStatement root = methodWrapper.root;
    if (!methodWrapper.decompiledWithErrors) {
      if (root != null) { // check for existence
        try {
          buffer.append(root.toJava(indent, tracer));
        }
        catch (Throwable t) {
          String message = "Method " + mt.getName() + " " + mt.getDescriptor() + " couldn't be written.";
          DecompilerContext.getLogger().writeMessage(message, IFernflowerLogger.Severity.WARN, t);
          methodWrapper.decompiledWithErrors = true;
        }
      }
    }

    if (methodWrapper.decompiledWithErrors) {
      buffer.appendIndent(indent);
      buffer.append("// $FF: Couldn't be decompiled");
      buffer.appendLineSeparator();
    }

    if (root != null) {
      tracer.addMapping(root.getDummyExit().bytecode);
    }

    if (!codeOnly) {
      indent -= 1;
      buffer.appendIndent(indent).append('}').appendLineSeparator();
    }
  }
  finally {
    DecompilerContext.setProperty(DecompilerContext.CURRENT_METHOD_WRAPPER, outerWrapper);
  }
}
/**
 * Replaces every character that is not legal in a Java identifier with '_'.
 * If anything was replaced, the original name is preserved in a trailing
 * "$FF was:" block comment; otherwise the input is returned unchanged.
 * Null and empty inputs are returned as-is.
 */
private static String toValidJavaIdentifier(String name) {
  if (name == null || name.isEmpty()) return name;

  StringBuilder sanitized = new StringBuilder(name.length());
  boolean modified = false;
  for (int i = 0; i < name.length(); i++) {
    char c = name.charAt(i);
    // The first character has a stricter rule than the rest (JLS identifier syntax).
    boolean valid = i == 0 ? Character.isJavaIdentifierStart(c) : Character.isJavaIdentifierPart(c);
    if (valid) {
      sanitized.append(c);
    }
    else {
      sanitized.append('_');
      modified = true;
    }
  }

  if (!modified) {
    return name;
  }
  return sanitized.append("/* $FF was: ").append(name).append("*/").toString();
}
/**
 * Renders one method, constructor, or static initializer of {@code node} into
 * {@code buffer}: comments, annotations, modifiers, signature (with generic
 * signature reconciliation), parameter list, throws clause, and body.
 *
 * Fixes:
 * - The throws clause iterated over {@code attr.getThrowsExceptions()} even when
 *   the Exceptions attribute was absent and only the generic signature declared
 *   exception types, which would throw a NullPointerException. The loop bound now
 *   falls back to the signature's exception count.
 * - The root statement was re-fetched via a second identical
 *   {@code getMethodWrapper} lookup; it now reuses {@code methodWrapper}.
 *
 * @return {@code true} if the method should appear in the output; {@code false}
 *         if it should be hidden (empty default constructor, empty initializer,
 *         or synthetic record method)
 */
private boolean methodToJava(ClassNode node, StructMethod mt, TextBuffer buffer, int indent, BytecodeMappingTracer tracer) {
  ClassWrapper wrapper = node.getWrapper();
  StructClass cl = wrapper.getClassStruct();
  MethodWrapper methodWrapper = wrapper.getMethodWrapper(mt.getName(), mt.getDescriptor());

  boolean hideMethod = false;
  int start_index_method = buffer.length();

  // Save and later restore the current-method property; writing is reentrant.
  MethodWrapper outerWrapper = (MethodWrapper)DecompilerContext.getProperty(DecompilerContext.CURRENT_METHOD_WRAPPER);
  DecompilerContext.setProperty(DecompilerContext.CURRENT_METHOD_WRAPPER, methodWrapper);

  try {
    boolean isInterface = cl.hasModifier(CodeConstants.ACC_INTERFACE);
    boolean isAnnotation = cl.hasModifier(CodeConstants.ACC_ANNOTATION);
    boolean isEnum = cl.hasModifier(CodeConstants.ACC_ENUM) && DecompilerContext.getOption(IFernflowerPreferences.DECOMPILE_ENUM);
    boolean isDeprecated = mt.hasAttribute(StructGeneralAttribute.ATTRIBUTE_DEPRECATED);
    boolean clInit = false, init = false, dInit = false;

    MethodDescriptor md = MethodDescriptor.parseDescriptor(mt.getDescriptor());

    int flags = mt.getAccessFlags();
    if ((flags & CodeConstants.ACC_NATIVE) != 0) {
      flags &= ~CodeConstants.ACC_STRICT; // compiler bug: a strictfp class sets all methods to strictfp
    }
    if (CodeConstants.CLINIT_NAME.equals(mt.getName())) {
      flags &= CodeConstants.ACC_STATIC; // ignore all modifiers except 'static' in a static initializer
    }

    if (isDeprecated) {
      appendDeprecation(buffer, indent);
    }

    if (interceptor != null) {
      String oldName = interceptor.getOldName(cl.qualifiedName + " " + mt.getName() + " " + mt.getDescriptor());
      appendRenameComment(buffer, oldName, MType.METHOD, indent);
    }

    boolean isSynthetic = (flags & CodeConstants.ACC_SYNTHETIC) != 0 || mt.hasAttribute(StructGeneralAttribute.ATTRIBUTE_SYNTHETIC);
    boolean isBridge = (flags & CodeConstants.ACC_BRIDGE) != 0;
    if (isSynthetic) {
      appendComment(buffer, "synthetic method", indent);
    }
    if (isBridge) {
      appendComment(buffer, "bridge method", indent);
    }

    appendAnnotations(buffer, 0, indent, mt, TypeAnnotation.METHOD_RETURN_TYPE);

    buffer.appendIndent(indent);

    appendModifiers(buffer, flags, METHOD_ALLOWED, isInterface, METHOD_EXCLUDED);

    if (isInterface && !mt.hasModifier(CodeConstants.ACC_STATIC) && !mt.hasModifier(CodeConstants.ACC_PRIVATE) && mt.containsCode()) {
      // 'default' modifier (Java 8)
      buffer.append("default ");
    }

    String name = mt.getName();
    if (CodeConstants.INIT_NAME.equals(name)) {
      if (node.type == ClassNode.CLASS_ANONYMOUS) {
        // Anonymous-class constructors are printed as bare initializer blocks.
        name = "";
        dInit = true;
      }
      else {
        name = node.simpleName;
        init = true;
      }
    }
    else if (CodeConstants.CLINIT_NAME.equals(name)) {
      name = "";
      clInit = true;
    }

    GenericMethodDescriptor descriptor = null;
    if (DecompilerContext.getOption(IFernflowerPreferences.DECOMPILE_GENERIC_SIGNATURES)) {
      StructGenericSignatureAttribute attr = mt.getAttribute(StructGeneralAttribute.ATTRIBUTE_SIGNATURE);
      if (attr != null) {
        descriptor = GenericMain.parseMethodSignature(attr.getSignature());
        if (descriptor != null) {
          // Cross-check the signature's arity against the descriptor; synthetic
          // parameters (masked) and the two hidden enum-constructor parameters
          // do not appear in the generic signature.
          long actualParams = md.params.length;
          List<VarVersionPair> mask = methodWrapper.synthParameters;
          if (mask != null) {
            actualParams = mask.stream().filter(Objects::isNull).count();
          }
          else if (isEnum && init) {
            actualParams -= 2;
          }
          if (actualParams != descriptor.parameterTypes.size()) {
            String message = "Inconsistent generic signature in method " + mt.getName() + " " + mt.getDescriptor() + " in " + cl.qualifiedName;
            DecompilerContext.getLogger().writeMessage(message, IFernflowerLogger.Severity.WARN);
            descriptor = null;
          }
        }
      }
    }

    boolean throwsExceptions = false;
    int paramCount = 0;

    if (!clInit && !dInit) {
      if (descriptor != null && !descriptor.typeParameters.isEmpty()) {
        appendTypeParameters(buffer, descriptor.typeParameters, descriptor.typeParameterBounds);
        buffer.append(' ');
      }

      if (!init) {
        List<TypePathWriteProgress> typePathWriteProgresses = createTypeAnnWriteProgress(mt);
        if (descriptor != null) {
          buffer.append(GenericMain.getGenericCastTypeName(descriptor.returnType, typePathWriteProgresses));
        }
        else {
          buffer.append(ExprProcessor.getCastTypeName(md.ret, typePathWriteProgresses));
        }
        buffer.append(' ');
      }

      buffer.append(toValidJavaIdentifier(name));
      buffer.append('(');

      List<VarVersionPair> mask = methodWrapper.synthParameters;

      // Used to decide which parameter may be printed as varargs.
      int lastVisibleParameterIndex = -1;
      for (int i = 0; i < md.params.length; i++) {
        if (mask == null || mask.get(i) == null) {
          lastVisibleParameterIndex = i;
        }
      }

      int index = methodWrapper.varproc.getFirstParameterVarIndex();
      for (int i = methodWrapper.varproc.getFirstParameterPosition(); i < md.params.length; i++) {
        if (mask == null || mask.get(i) == null) {
          if (paramCount > 0) {
            buffer.append(", ");
          }

          appendParameterAnnotations(buffer, mt, paramCount);

          VarVersionPair pair = new VarVersionPair(index, 0);
          if (methodWrapper.varproc.isParameterFinal(pair) ||
              methodWrapper.varproc.getVarFinal(pair) == VarTypeProcessor.VAR_EXPLICIT_FINAL) {
            buffer.append("final ");
          }

          String typeName;
          boolean isVarArg = i == lastVisibleParameterIndex && mt.hasModifier(CodeConstants.ACC_VARARGS);

          // Keep only the type annotations targeting this formal parameter.
          final int it = i;
          final List<TypePathWriteProgress> typeAnnWriteProgress = createTypeAnnWriteProgress(mt).stream().filter(typePathWriteProgress ->
            typePathWriteProgress.getAnnotation().getTargetType() == TypeAnnotation.METHOD_PARAMETER &&
            ((FormalParameterTarget) typePathWriteProgress.getAnnotation().getTargetInfo()).getFormalParameterIndex() == it
          ).collect(Collectors.toList());

          if (descriptor != null) {
            GenericType parameterType = descriptor.parameterTypes.get(paramCount);

            isVarArg &= parameterType.arrayDim > 0;
            if (isVarArg) {
              parameterType = parameterType.decreaseArrayDim();
            }

            typeName = GenericMain.getGenericCastTypeName(parameterType, typeAnnWriteProgress);
          }
          else {
            VarType parameterType = md.params[i];

            isVarArg &= parameterType.arrayDim > 0;
            if (isVarArg) {
              parameterType = parameterType.decreaseArrayDim();
            }

            typeName = ExprProcessor.getCastTypeName(parameterType, typeAnnWriteProgress);
          }

          if (ExprProcessor.UNDEFINED_TYPE_STRING.equals(typeName) &&
              DecompilerContext.getOption(IFernflowerPreferences.UNDEFINED_PARAM_TYPE_OBJECT)) {
            typeName = ExprProcessor.getCastTypeName(VarType.VARTYPE_OBJECT, typeAnnWriteProgress);
          }

          buffer.append(typeName);
          if (isVarArg) {
            buffer.append("...");
          }

          buffer.append(' ');

          String parameterName = methodWrapper.varproc.getVarName(pair);
          buffer.append(parameterName == null ? "param" + index : parameterName); // null iff decompiled with errors

          paramCount++;
        }

        index += md.params[i].stackSize; // long and double occupy two slots
      }

      buffer.append(')');

      StructExceptionsAttribute attr = mt.getAttribute(StructGeneralAttribute.ATTRIBUTE_EXCEPTIONS);
      if ((descriptor != null && !descriptor.exceptionTypes.isEmpty()) || attr != null) {
        throwsExceptions = true;
        buffer.append(" throws ");

        // FIX: 'attr' may be null when only the generic signature declares throws
        // types; dereferencing it unconditionally caused a NullPointerException.
        int exceptionCount = attr != null ? attr.getThrowsExceptions().size() : descriptor.exceptionTypes.size();
        for (int i = 0; i < exceptionCount; i++) {
          if (i > 0) {
            buffer.append(", ");
          }
          if (descriptor != null && !descriptor.exceptionTypes.isEmpty()) {
            GenericType type = descriptor.exceptionTypes.get(i);
            buffer.append(GenericMain.getGenericCastTypeName(type, Collections.emptyList()));
          }
          else {
            // Only reachable when 'attr' is non-null (see the enclosing condition).
            VarType type = new VarType(attr.getExcClassname(i, cl.getPool()), true);
            buffer.append(ExprProcessor.getCastTypeName(type, Collections.emptyList()));
          }
        }
      }
    }

    tracer.incrementCurrentSourceLine(buffer.countLines(start_index_method));

    if ((flags & (CodeConstants.ACC_ABSTRACT | CodeConstants.ACC_NATIVE)) != 0) { // native or abstract method (explicit or interface)
      if (isAnnotation) {
        StructAnnDefaultAttribute attr = mt.getAttribute(StructGeneralAttribute.ATTRIBUTE_ANNOTATION_DEFAULT);
        if (attr != null) {
          buffer.append(" default ");
          buffer.append(attr.getDefaultValue().toJava(0, BytecodeMappingTracer.DUMMY));
        }
      }

      buffer.append(';');
      buffer.appendLineSeparator();
    }
    else {
      if (!clInit && !dInit) {
        buffer.append(' ');
      }

      // We do not have line information for method start, lets have it here for now
      buffer.append('{').appendLineSeparator();
      tracer.incrementCurrentSourceLine();

      RootStatement root = methodWrapper.root;

      if (root != null && !methodWrapper.decompiledWithErrors) { // check for existence
        try {
          // to restore in case of an exception
          BytecodeMappingTracer codeTracer = new BytecodeMappingTracer(tracer.getCurrentSourceLine());
          TextBuffer code = root.toJava(indent + 1, codeTracer);

          // Hide empty default constructors/initializers and synthetic record methods.
          hideMethod = code.length() == 0 && (clInit || dInit || hideConstructor(node, init, throwsExceptions, paramCount, flags)) ||
                       isSyntheticRecordMethod(cl, mt, code);

          buffer.append(code);

          tracer.setCurrentSourceLine(codeTracer.getCurrentSourceLine());
          tracer.addTracer(codeTracer);
        }
        catch (Throwable t) {
          String message = "Method " + mt.getName() + " " + mt.getDescriptor() + " couldn't be written.";
          DecompilerContext.getLogger().writeMessage(message, IFernflowerLogger.Severity.WARN, t);
          methodWrapper.decompiledWithErrors = true;
        }
      }

      if (methodWrapper.decompiledWithErrors) {
        buffer.appendIndent(indent + 1);
        buffer.append("// $FF: Couldn't be decompiled");
        buffer.appendLineSeparator();
        tracer.incrementCurrentSourceLine();
      }
      else if (root != null) {
        tracer.addMapping(root.getDummyExit().bytecode);
      }

      buffer.appendIndent(indent).append('}').appendLineSeparator();
    }

    tracer.incrementCurrentSourceLine();
  }
  finally {
    DecompilerContext.setProperty(DecompilerContext.CURRENT_METHOD_WRAPPER, outerWrapper);
  }

  // save total lines
  // TODO: optimize
  //tracer.setCurrentSourceLine(buffer.countLines(start_index_method));

  return !hideMethod;
}
/**
 * Decides whether an empty constructor is the implicit default constructor and
 * may therefore be omitted from the output (when HIDE_DEFAULT_CONSTRUCTOR is set).
 */
private static boolean hideConstructor(ClassNode node, boolean init, boolean throwsExceptions, int paramCount, int methodAccessFlags) {
  // Only a no-arg, no-throws constructor qualifies, and only when hiding is enabled.
  if (!init || throwsExceptions || paramCount > 0 || !DecompilerContext.getOption(IFernflowerPreferences.HIDE_DEFAULT_CONSTRUCTOR)) {
    return false;
  }

  StructClass cl = node.getWrapper().getClassStruct();
  boolean isEnum = cl.hasModifier(CodeConstants.ACC_ENUM) && DecompilerContext.getOption(IFernflowerPreferences.DECOMPILE_ENUM);

  // default constructor requires same accessibility flags. Exception: enum constructor which is always private
  if (!isEnum) {
    int classAccessFlags = node.type == ClassNode.CLASS_ROOT ? cl.getAccessFlags() : node.access;
    if ((classAccessFlags & ACCESSIBILITY_FLAGS) != (methodAccessFlags & ACCESSIBILITY_FLAGS)) {
      return false;
    }
  }

  // Hide only when this is the sole constructor of the class.
  int constructorCount = 0;
  for (StructMethod method : cl.getMethods()) {
    if (CodeConstants.INIT_NAME.equals(method.getName())) {
      constructorCount++;
      if (constructorCount > 1) {
        return false;
      }
    }
  }
  return true;
}
/**
 * Resolves a field's type: the raw descriptor type paired with its parsed generic
 * signature, or {@code null} for the latter when no signature is present or
 * generic decompilation is disabled.
 */
private static Map.Entry<VarType, GenericFieldDescriptor> getFieldTypeData(StructField fd) {
  GenericFieldDescriptor generics = null;
  if (DecompilerContext.getOption(IFernflowerPreferences.DECOMPILE_GENERIC_SIGNATURES)) {
    StructGenericSignatureAttribute signature = fd.getAttribute(StructGeneralAttribute.ATTRIBUTE_SIGNATURE);
    if (signature != null) {
      generics = GenericMain.parseFieldSignature(signature.getSignature());
    }
  }
  return new AbstractMap.SimpleImmutableEntry<>(new VarType(fd.getDescriptor(), false), generics);
}
/** Emits a minimal Javadoc deprecation marker on its own line above a member. */
private static void appendDeprecation(TextBuffer buffer, int indent) {
  buffer.appendIndent(indent);
  buffer.append("/** @deprecated */");
  buffer.appendLineSeparator();
}
// Member kind used when formatting "$FF: renamed from" comments (see appendRenameComment).
private enum MType {CLASS, FIELD, METHOD}
/**
 * Emits a "// $FF: renamed from: ..." comment describing the member's original
 * name. For fields and methods {@code oldName} has the form
 * "class name descriptor" and the descriptor is rendered as readable types.
 * A null {@code oldName} means "not renamed" and nothing is written.
 */
private static void appendRenameComment(TextBuffer buffer, String oldName, MType type, int indent) {
  if (oldName == null) return;

  buffer.appendIndent(indent).append("// $FF: renamed from: ");

  if (type == MType.CLASS) {
    buffer.append(ExprProcessor.buildJavaClassName(oldName));
  }
  else if (type == MType.FIELD) {
    String[] parts = oldName.split(" ");
    FieldDescriptor fd = FieldDescriptor.parseDescriptor(parts[2]);
    buffer.append(parts[1]).append(' ').append(getTypePrintOut(fd.type));
  }
  else {
    String[] parts = oldName.split(" ");
    MethodDescriptor md = MethodDescriptor.parseDescriptor(parts[2]);
    buffer.append(parts[1]).append(" (");
    boolean first = true;
    for (VarType paramType : md.params) {
      if (!first) {
        buffer.append(", ");
      }
      first = false;
      buffer.append(getTypePrintOut(paramType));
    }
    buffer.append(") ").append(getTypePrintOut(md.ret));
  }

  buffer.appendLineSeparator();
}
/**
 * Formats a type for a rename comment, substituting Object for undefined types
 * when the corresponding option is enabled.
 */
private static String getTypePrintOut(VarType type) {
  String text = ExprProcessor.getCastTypeName(type, false, Collections.emptyList());
  boolean undefined = ExprProcessor.UNDEFINED_TYPE_STRING.equals(text);
  if (undefined && DecompilerContext.getOption(IFernflowerPreferences.UNDEFINED_PARAM_TYPE_OBJECT)) {
    return ExprProcessor.getCastTypeName(VarType.VARTYPE_OBJECT, false, Collections.emptyList());
  }
  return text;
}
/** Emits a single-line marker comment with the "$FF" (Fernflower) prefix. */
private static void appendComment(TextBuffer buffer, String comment, int indent) {
  buffer.appendIndent(indent);
  buffer.append("// $FF: ").append(comment);
  buffer.appendLineSeparator();
}
// Attribute keys for declaration annotations (visible first, then invisible) — see appendAnnotations.
private static final StructGeneralAttribute.Key<?>[] ANNOTATION_ATTRIBUTES = {
  StructGeneralAttribute.ATTRIBUTE_RUNTIME_VISIBLE_ANNOTATIONS, StructGeneralAttribute.ATTRIBUTE_RUNTIME_INVISIBLE_ANNOTATIONS};
// Attribute keys for per-parameter annotations — see appendParameterAnnotations.
private static final StructGeneralAttribute.Key<?>[] PARAMETER_ANNOTATION_ATTRIBUTES = {
  StructGeneralAttribute.ATTRIBUTE_RUNTIME_VISIBLE_PARAMETER_ANNOTATIONS, StructGeneralAttribute.ATTRIBUTE_RUNTIME_INVISIBLE_PARAMETER_ANNOTATIONS};
// Attribute keys for type-use annotations — see createTypeAnnWriteProgress.
private static final StructGeneralAttribute.Key<?>[] TYPE_ANNOTATION_ATTRIBUTES = {
  StructGeneralAttribute.ATTRIBUTE_RUNTIME_VISIBLE_TYPE_ANNOTATIONS, StructGeneralAttribute.ATTRIBUTE_RUNTIME_INVISIBLE_TYPE_ANNOTATIONS};
/**
 * Collects all type-use annotations of a member (visible and invisible) and wraps
 * each in a {@link TypePathWriteProgress} seeded with a mutable copy of its type path.
 */
private static List<TypePathWriteProgress> createTypeAnnWriteProgress(StructMember md) {
  return Arrays.stream(TYPE_ANNOTATION_ATTRIBUTES)
    .map(attrKey -> (StructTypeAnnotationAttribute)md.getAttribute(attrKey))
    .filter(Objects::nonNull)
    .flatMap(attribute -> attribute.getAnnotations().stream())
    .map(annotation -> new TypePathWriteProgress(annotation, new ArrayDeque<>(annotation.getPaths())))
    .collect(Collectors.toList());
}
/**
 * Emits the runtime-visible and runtime-invisible declaration annotations of a member.
 * A negative {@code indent} means "inline": annotations are separated by spaces
 * instead of each being placed on its own line.
 * NOTE(review): the {@code dims} parameter is unused in this body — presumably kept
 * for signature compatibility; confirm before removing.
 */
private static void appendAnnotations(TextBuffer buffer, int dims, int indent, StructMember mb, int targetType) {
  boolean inline = indent < 0;
  for (StructGeneralAttribute.Key<?> key : ANNOTATION_ATTRIBUTES) {
    StructAnnotationAttribute attribute = (StructAnnotationAttribute)mb.getAttribute(key);
    if (attribute == null) continue;
    for (AnnotationExprent annotation : attribute.getAnnotations()) {
      buffer.append(annotation.toJava(indent, BytecodeMappingTracer.DUMMY).toString());
      if (inline) {
        buffer.append(' ');
      }
      else {
        buffer.appendLineSeparator();
      }
    }
  }
}
/**
 * Emits the annotations attached to the formal parameter at index {@code param},
 * inline (space-separated), from both the visible and invisible parameter-annotation
 * attributes. Indices beyond the recorded parameter count are ignored.
 */
private static void appendParameterAnnotations(TextBuffer buffer, StructMethod mt, int param) {
  for (StructGeneralAttribute.Key<?> key : PARAMETER_ANNOTATION_ATTRIBUTES) {
    StructAnnotationParameterAttribute attribute = (StructAnnotationParameterAttribute)mt.getAttribute(key);
    if (attribute == null) continue;
    List<List<AnnotationExprent>> annotations = attribute.getParamAnnotations();
    if (param >= annotations.size()) continue;
    for (AnnotationExprent annotation : annotations.get(param)) {
      buffer.append(annotation.toJava(-1, BytecodeMappingTracer.DUMMY).toString()).append(' ');
    }
  }
}
// Maps access-flag bits to their keyword. A LinkedHashMap is required: its
// insertion order defines the order in which appendModifiers prints keywords,
// matching the JLS's customary modifier ordering.
private static final Map<Integer, String> MODIFIERS;
static {
  MODIFIERS = new LinkedHashMap<>();
  MODIFIERS.put(CodeConstants.ACC_PUBLIC, "public");
  MODIFIERS.put(CodeConstants.ACC_PROTECTED, "protected");
  MODIFIERS.put(CodeConstants.ACC_PRIVATE, "private");
  MODIFIERS.put(CodeConstants.ACC_ABSTRACT, "abstract");
  MODIFIERS.put(CodeConstants.ACC_STATIC, "static");
  MODIFIERS.put(CodeConstants.ACC_FINAL, "final");
  MODIFIERS.put(CodeConstants.ACC_STRICT, "strictfp");
  MODIFIERS.put(CodeConstants.ACC_TRANSIENT, "transient");
  MODIFIERS.put(CodeConstants.ACC_VOLATILE, "volatile");
  MODIFIERS.put(CodeConstants.ACC_SYNCHRONIZED, "synchronized");
  MODIFIERS.put(CodeConstants.ACC_NATIVE, "native");
}
// Modifier bits that may legally appear on each member kind (everything else is masked off).
private static final int CLASS_ALLOWED =
  CodeConstants.ACC_PUBLIC | CodeConstants.ACC_PROTECTED | CodeConstants.ACC_PRIVATE | CodeConstants.ACC_ABSTRACT |
  CodeConstants.ACC_STATIC | CodeConstants.ACC_FINAL | CodeConstants.ACC_STRICT;
private static final int FIELD_ALLOWED =
  CodeConstants.ACC_PUBLIC | CodeConstants.ACC_PROTECTED | CodeConstants.ACC_PRIVATE | CodeConstants.ACC_STATIC |
  CodeConstants.ACC_FINAL | CodeConstants.ACC_TRANSIENT | CodeConstants.ACC_VOLATILE;
private static final int METHOD_ALLOWED =
  CodeConstants.ACC_PUBLIC | CodeConstants.ACC_PROTECTED | CodeConstants.ACC_PRIVATE | CodeConstants.ACC_ABSTRACT |
  CodeConstants.ACC_STATIC | CodeConstants.ACC_FINAL | CodeConstants.ACC_SYNCHRONIZED | CodeConstants.ACC_NATIVE |
  CodeConstants.ACC_STRICT;

// Modifier bits suppressed inside interfaces, where they are implicit (JLS 9.1.1, 9.3, 9.4).
private static final int CLASS_EXCLUDED = CodeConstants.ACC_ABSTRACT | CodeConstants.ACC_STATIC;
private static final int FIELD_EXCLUDED = CodeConstants.ACC_PUBLIC | CodeConstants.ACC_STATIC | CodeConstants.ACC_FINAL;
private static final int METHOD_EXCLUDED = CodeConstants.ACC_PUBLIC | CodeConstants.ACC_ABSTRACT;

// The three mutually exclusive access-level bits, compared as a group by hideConstructor.
private static final int ACCESSIBILITY_FLAGS = CodeConstants.ACC_PUBLIC | CodeConstants.ACC_PROTECTED | CodeConstants.ACC_PRIVATE;
/**
 * Appends the textual modifiers for the given access flags to {@code buffer},
 * each followed by a single space, in the fixed order defined by {@code MODIFIERS}.
 *
 * @param buffer      output buffer the keywords are appended to
 * @param flags       raw access flags of the declaration
 * @param allowed     mask of modifiers that are legal for this kind of declaration
 * @param isInterface whether the declaration belongs to an interface
 * @param excluded    modifiers that are implicit on interface members; ignored
 *                    entirely when {@code isInterface} is false
 */
private static void appendModifiers(TextBuffer buffer, int flags, int allowed, boolean isInterface, int excluded) {
  int masked = flags & allowed;
  // Implicit-modifier suppression only applies inside interfaces.
  int suppressed = isInterface ? excluded : 0;
  for (Map.Entry<Integer, String> entry : MODIFIERS.entrySet()) {
    int bit = entry.getKey();
    if ((masked & bit) == bit && (bit & suppressed) == 0) {
      buffer.append(entry.getValue()).append(' ');
    }
  }
}
/**
 * Parses the generic signature of a class, when generic-signature decompilation
 * is enabled in the decompiler options.
 *
 * @param cl class whose {@code Signature} attribute should be parsed
 * @return the parsed descriptor, or {@code null} when the option is disabled
 *         or the class carries no signature attribute
 */
public static GenericClassDescriptor getGenericClassDescriptor(StructClass cl) {
  if (!DecompilerContext.getOption(IFernflowerPreferences.DECOMPILE_GENERIC_SIGNATURES)) {
    return null;
  }
  StructGenericSignatureAttribute attr = cl.getAttribute(StructGeneralAttribute.ATTRIBUTE_SIGNATURE);
  return attr == null ? null : GenericMain.parseClassSignature(attr.getSignature());
}
/**
 * Appends a {@code <T extends A & B, ...>} type-parameter list to the buffer.
 * A bound list consisting solely of {@code java/lang/Object} is treated as
 * unbounded and produces no {@code extends} clause.
 *
 * @param buffer     output buffer
 * @param parameters type-parameter names, in declaration order
 * @param bounds     per-parameter bound lists, parallel to {@code parameters}
 */
public static void appendTypeParameters(TextBuffer buffer, List<String> parameters, List<? extends List<GenericType>> bounds) {
  buffer.append('<');
  for (int i = 0; i < parameters.size(); i++) {
    if (i > 0) {
      buffer.append(", ");
    }
    buffer.append(parameters.get(i));
    List<GenericType> parameterBounds = bounds.get(i);
    // Only print bounds when they carry information beyond the implicit Object bound.
    if (parameterBounds.size() > 1 || !"java/lang/Object".equals(parameterBounds.get(0).value)) {
      buffer.append(" extends ");
      for (int j = 0; j < parameterBounds.size(); j++) {
        if (j > 0) {
          buffer.append(" & ");
        }
        buffer.append(GenericMain.getGenericCastTypeName(parameterBounds.get(j), Collections.emptyList()));
      }
    }
  }
  buffer.append('>');
}
/**
 * Appends the given fully qualified class names to the buffer, one per line at
 * indent level 2, separated by trailing commas (no comma after the last name).
 *
 * @param buffer output buffer
 * @param names  fully qualified class names to print
 */
private static void appendFQClassNames(TextBuffer buffer, List<String> names) {
  int last = names.size() - 1;
  for (int i = 0; i <= last; i++) {
    buffer.appendIndent(2).append(names.get(i));
    if (i != last) {
      buffer.append(',').appendLineSeparator();
    }
  }
}
}
| |
/***
Copyright (c) 2012 CommonsWare, LLC
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy
of the License at http://www.apache.org/licenses/LICENSE-2.0. Unless required
by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
OF ANY KIND, either express or implied. See the License for the specific
language governing permissions and limitations under the License.
Covered in detail in the book _The Busy Coder's Guide to Android Development_
https://commonsware.com/Android
*/
package com.commonsware.android.mapsv2.location;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.pm.PackageManager;
import android.location.Criteria;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.Toast;
import com.google.android.gms.maps.CameraUpdate;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.GoogleMap.OnInfoWindowClickListener;
import com.google.android.gms.maps.LocationSource;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
/**
 * Map screen that drops markers on a few Manhattan landmarks and, when the
 * fine-location permission is granted and "follow" is enabled, feeds
 * {@link LocationManager} fixes into the map as its {@link LocationSource}
 * so the camera tracks the device.
 *
 * BUG FIX: onStop() previously dereferenced {@code map} and {@code locMgr}
 * unconditionally. Both fields are only assigned in onMapReady(), which may
 * never run (permission request still in flight, or the async map callback
 * has not fired yet), so stopping the activity in that window crashed with a
 * NullPointerException. They are now null-guarded, mirroring follow().
 */
public class MainActivity extends AbstractMapActivity implements
  OnMapReadyCallback, OnInfoWindowClickListener, LocationSource,
  LocationListener {
  private static final String STATE_IN_PERMISSION="inPermission";
  private static final String STATE_AUTO_FOLLOW="autoFollow";
  private static final int REQUEST_PERMS=1337;
  private boolean isInPermission=false;             // true while a runtime-permission request is in flight
  private OnLocationChangedListener mapLocationListener=null; // handed to us by the map via activate()
  private LocationManager locMgr=null;              // null until onMapReady()
  private Criteria crit=new Criteria();
  private boolean needsInit=false;                  // true on a fresh (non-restored) launch
  private GoogleMap map=null;                       // null until onMapReady()
  private boolean autoFollow=true;                  // user toggle: camera follows location fixes

  @Override
  protected void onCreate(Bundle state) {
    super.onCreate(state);

    if (state==null) {
      needsInit=true;
    }
    else {
      // Restore in-flight permission state and the follow toggle across
      // configuration changes.
      isInPermission=state.getBoolean(STATE_IN_PERMISSION, false);
      autoFollow=state.getBoolean(STATE_AUTO_FOLLOW, true);
    }

    onCreateForRealz(canGetLocation());
  }

  @SuppressLint("MissingPermission")
  @Override
  public void onMapReady(final GoogleMap map) {
    this.map=map;

    if (needsInit) {
      // Center and zoom only on first launch, not after a restore.
      CameraUpdate center=
          CameraUpdateFactory.newLatLng(new LatLng(40.76793169992044,
              -73.98180484771729));
      CameraUpdate zoom=CameraUpdateFactory.zoomTo(15);

      map.moveCamera(center);
      map.animateCamera(zoom);
    }

    addMarker(map, 40.748963847316034, -73.96807193756104,
        R.string.un, R.string.united_nations);
    addMarker(map, 40.76866299974387, -73.98268461227417,
        R.string.lincoln_center,
        R.string.lincoln_center_snippet);
    addMarker(map, 40.765136435316755, -73.97989511489868,
        R.string.carnegie_hall, R.string.practice_x3);
    addMarker(map, 40.70686417491799, -74.01572942733765,
        R.string.downtown_club, R.string.heisman_trophy);

    map.setInfoWindowAdapter(new PopupAdapter(getLayoutInflater()));
    map.setOnInfoWindowClickListener(this);
    map.setMyLocationEnabled(true);

    locMgr=(LocationManager)getSystemService(LOCATION_SERVICE);
    crit.setAccuracy(Criteria.ACCURACY_FINE);

    follow();
  }

  @Override
  protected void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);

    outState.putBoolean(STATE_IN_PERMISSION, isInPermission);
    outState.putBoolean(STATE_AUTO_FOLLOW, autoFollow);
  }

  @Override
  public void onRequestPermissionsResult(int requestCode,
                                         String[] permissions,
                                         int[] grantResults) {
    isInPermission=false;

    if (requestCode==REQUEST_PERMS) {
      if (canGetLocation()) {
        onCreateForRealz(true);
      }
      else {
        finish(); // denied permission, so we're done
      }
    }
  }

  @Override
  public void onStart() {
    super.onStart();

    follow();
  }

  @Override
  public void onStop() {
    // map/locMgr are only assigned in onMapReady(); guard against the activity
    // being stopped before the map is ready (e.g., during the permission flow).
    if (map != null) {
      map.setLocationSource(null);
    }

    if (locMgr != null) {
      locMgr.removeUpdates(this);
    }

    super.onStop();
  }

  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.actions, menu);
    menu.findItem(R.id.follow).setChecked(autoFollow);

    return super.onCreateOptionsMenu(menu);
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    if (item.getItemId()==R.id.follow) {
      item.setChecked(!item.isChecked());
      autoFollow=item.isChecked();
      follow();

      return true;
    }

    return super.onOptionsItemSelected(item);
  }

  @Override
  public void onInfoWindowClick(Marker marker) {
    Toast.makeText(this, marker.getTitle(), Toast.LENGTH_LONG).show();
  }

  // LocationSource: the map registers/unregisters its listener with us.
  @Override
  public void activate(OnLocationChangedListener listener) {
    this.mapLocationListener=listener;
  }

  @Override
  public void deactivate() {
    this.mapLocationListener=null;
  }

  // LocationListener: forward fixes to the map and pan the camera to them.
  @Override
  public void onLocationChanged(Location location) {
    if (mapLocationListener != null) {
      mapLocationListener.onLocationChanged(location);

      LatLng latlng=
          new LatLng(location.getLatitude(), location.getLongitude());
      CameraUpdate cu=CameraUpdateFactory.newLatLng(latlng);

      map.animateCamera(cu);
    }
  }

  @Override
  public void onProviderDisabled(String provider) {
    // unused
  }

  @Override
  public void onProviderEnabled(String provider) {
    // unused
  }

  @Override
  public void onStatusChanged(String provider, int status, Bundle extras) {
    // unused
  }

  /** Adds a marker whose title and snippet come from string resources. */
  private void addMarker(GoogleMap map, double lat, double lon,
                         int title, int snippet) {
    map.addMarker(new MarkerOptions().position(new LatLng(lat, lon))
        .title(getString(title))
        .snippet(getString(snippet)));
  }

  /**
   * Second-stage initialization: loads the map when we hold the location
   * permission, otherwise kicks off the permission request (at most once).
   */
  private void onCreateForRealz(boolean canGetLocation) {
    if (canGetLocation) {
      if (readyToGo()) {
        setContentView(R.layout.activity_main);

        SupportMapFragment mapFrag=
            (SupportMapFragment)getSupportFragmentManager().findFragmentById(R.id.map);

        mapFrag.getMapAsync(this);
      }
    }
    else if (!isInPermission) {
      isInPermission=true;

      ActivityCompat.requestPermissions(this,
          new String[] {Manifest.permission.ACCESS_FINE_LOCATION},
          REQUEST_PERMS);
    }
  }

  /** @return true when ACCESS_FINE_LOCATION is currently granted */
  private boolean canGetLocation() {
    return(ContextCompat.checkSelfPermission(this,
        Manifest.permission.ACCESS_FINE_LOCATION)==
        PackageManager.PERMISSION_GRANTED);
  }

  /**
   * Applies the current auto-follow state: either wires this activity in as
   * the map's location source, or detaches it and restores the stock
   * my-location button. Safe to call at any point in the lifecycle.
   */
  @SuppressLint("MissingPermission")
  private void follow() {
    if (map!=null && locMgr!=null) {
      if (autoFollow) {
        locMgr.requestLocationUpdates(0L, 0.0f, crit, this, null);
        map.setLocationSource(this);
        map.getUiSettings().setMyLocationButtonEnabled(false);
      }
      else {
        map.getUiSettings().setMyLocationButtonEnabled(true);
        map.setLocationSource(null);
        locMgr.removeUpdates(this);
      }
    }
  }
}
| |
/**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/edu-services/tags/sakai-10.1/cm-service/cm-impl/hibernate-impl/impl/src/test/org/sakaiproject/coursemanagement/test/CourseManagementServiceTest.java $
* $Id: CourseManagementServiceTest.java 105077 2012-02-24 22:54:29Z ottenhoff@longsight.com $
***********************************************************************************
*
* Copyright (c) 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.coursemanagement.test;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import junit.framework.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.coursemanagement.api.AcademicSession;
import org.sakaiproject.coursemanagement.api.CourseManagementService;
import org.sakaiproject.coursemanagement.api.CourseOffering;
import org.sakaiproject.coursemanagement.api.CourseSet;
import org.sakaiproject.coursemanagement.api.Section;
import org.sakaiproject.coursemanagement.api.exception.IdNotFoundException;
import org.sakaiproject.coursemanagement.impl.CourseOfferingCmImpl;
import org.sakaiproject.coursemanagement.impl.DataLoader;
/**
 * Integration tests for the read-only {@link CourseManagementService} API,
 * run against the fixture data installed by {@link DataLoader} inside a
 * rolled-back transaction.
 *
 * BUG FIX: testGetEnrollments() previously called getEnrollmentSets("bad eid")
 * in its not-found branch (copy-paste from the neighboring test), so the
 * IdNotFoundException behavior of getEnrollments() itself was never exercised.
 */
public class CourseManagementServiceTest extends CourseManagementTestBase {
  private static final Log log = LogFactory.getLog(CourseManagementServiceTest.class);

  private CourseManagementService cm;
  private DataLoader loader;

  protected void onSetUpBeforeTransaction() throws Exception {
  }

  protected void onSetUpInTransaction() throws Exception {
    cm = (CourseManagementService)applicationContext.getBean(CourseManagementService.class.getName());
    loader = (DataLoader)applicationContext.getBean(DataLoader.class.getName());
    // Fixture data is reloaded for each test; the surrounding transaction rolls it back.
    loader.load();
  }

  public void testGetAcademicSessions() throws Exception {
    Assert.assertEquals(1, cm.getAcademicSessions().size());
  }

  public void testGetCurrentAcademicSessions() throws Exception {
    Assert.assertEquals(1, cm.getCurrentAcademicSessions().size());
  }

  public void testGetAcademicSessionById() throws Exception {
    AcademicSession term = cm.getAcademicSession("F2006");
    Assert.assertEquals("Fall 2006", term.getTitle());
    try {
      cm.getAcademicSession("bad eid");
      fail();
    } catch (IdNotFoundException ide) {}
  }

  public void testGetCourseSets() throws Exception {
    Assert.assertEquals(2, cm.getCourseSets().size());
  }

  public void testGetCourseSetsFromCourseOffering() throws Exception {
    CourseOffering co = cm.getCourseOffering("BIO101_F2006_01");
    CourseSet bio = cm.getCourseSet("BIO_DEPT");
    CourseSet bioChem = cm.getCourseSet("BIO_CHEM_GROUP");

    // Ensure that the CourseSet EIDs can be retrieved from the CourseOffering
    Assert.assertEquals(2, co.getCourseSetEids().size());

    // Ensure that the set of CourseSets contains the right objects
    Set courseSetsFromCo = ((CourseOfferingCmImpl)co).getCourseSets();
    Assert.assertTrue(courseSetsFromCo.contains(bio));
    Assert.assertTrue(courseSetsFromCo.contains(bioChem));
  }

  public void testGetChildCourseSets() throws Exception {
    CourseSet parent = cm.getCourseSet("BIO_DEPT");
    Assert.assertEquals(1, cm.getChildCourseSets(parent.getEid()).size());
    try {
      cm.getChildCourseSets("bad eid");
      fail();
    } catch (IdNotFoundException ide) {}
  }

  public void testGetCourseSetMembers() throws Exception {
    Set members = cm.getCourseSetMemberships("BIO_DEPT");
    Assert.assertEquals(1, members.size());
    try {
      cm.getCourseSetMemberships("bad eid");
      fail();
    } catch(IdNotFoundException ide) {}
  }

  public void testGetCanonicalCourse() throws Exception {
    Assert.assertEquals("Biology 101", cm.getCanonicalCourse("BIO101").getTitle());
    try {
      cm.getCanonicalCourse("bad eid");
      fail();
    } catch(IdNotFoundException ide) {}
  }

  public void testGetEquivalentCanonicalCourses() throws Exception {
    Set equivalents = cm.getEquivalentCanonicalCourses("BIO101");
    Assert.assertEquals(1, equivalents.size());
    // The course itself is not among its own equivalents.
    Assert.assertTrue(!equivalents.contains(cm.getCanonicalCourse("BIO101")));
    try {
      cm.getEquivalentCanonicalCourses("bad eid");
      fail();
    } catch(IdNotFoundException ide) {}
  }

  public void testGetCanonicalCoursesFromCourseSet() throws Exception {
    Assert.assertEquals(1, cm.getCanonicalCourses("BIO_DEPT").size());
    Assert.assertEquals(2, cm.getCanonicalCourses("BIO_CHEM_GROUP").size());
    try {
      cm.getCanonicalCourses("bad eid");
      fail();
    } catch (IdNotFoundException ide) {}
  }

  public void testGetCourseOfferingsFromCourseSet() throws Exception {
    Assert.assertEquals(1, cm.getCourseOfferingsInCourseSet("BIO_DEPT").size());
    Assert.assertEquals(2, cm.getCourseOfferingsInCourseSet("BIO_CHEM_GROUP").size());
    try {
      cm.getCourseOfferingsInCourseSet("bad eid");
      fail();
    } catch (IdNotFoundException ide) {}
  }

  public void testGetCourseOfferingsFromCanonicalCourse() throws Exception {
    Assert.assertEquals(1, cm.getCourseOfferingsInCanonicalCourse("BIO101").size());
    try {
      cm.getCourseOfferingsInCanonicalCourse("bad eid");
      fail();
    } catch (IdNotFoundException ide) {}
  }

  public void testGetCourseOffering() throws Exception {
    Assert.assertNotNull(cm.getCourseOffering("BIO101_F2006_01"));
    try {
      cm.getCourseOffering("bad eid");
      fail();
    } catch(IdNotFoundException ide) {}
  }

  public void testGetEquivalentCourseOfferings() throws Exception {
    Set equivalents = cm.getEquivalentCourseOfferings("BIO101_F2006_01");
    Assert.assertEquals(1, equivalents.size());
    try {
      cm.getEquivalentCourseOfferings("bad eid");
      fail();
    } catch(IdNotFoundException ide) {}
  }

  public void testGetSectionByEid() throws Exception {
    Assert.assertNotNull(cm.getSection("BIO101_F2006_01_SEC01"));
  }

  public void testGetSectionMembers() throws Exception {
    Assert.assertEquals(1, cm.getSectionMemberships("BIO101_F2006_01_SEC01").size());
    try {
      cm.getSectionMemberships("bad eid");
      fail();
    } catch(IdNotFoundException ide) {}
  }

  public void testGetSectionsFromCourseOffering() throws Exception {
    Assert.assertEquals(1, cm.getSections("BIO101_F2006_01").size());
    try {
      cm.getSections("bad eid");
      fail();
    } catch(IdNotFoundException ide) {}
  }

  public void testGetChildSections() throws Exception {
    Assert.assertEquals(1, cm.getChildSections("BIO101_F2006_01_SEC01").size());
    try {
      cm.getChildSections("bad eid");
      fail();
    } catch(IdNotFoundException ide) {}
  }

  public void testGetEnrollmentSet() throws Exception {
    Assert.assertNotNull(cm.getEnrollmentSet("BIO101_F2006_01_ES01"));
    try {
      cm.getEnrollmentSet("bad eid");
      fail();
    } catch(IdNotFoundException ide) {}
  }

  public void testGetEnrollmentSetFromCourseOffering() throws Exception {
    Assert.assertEquals(1, cm.getEnrollmentSets("BIO101_F2006_01").size());
    try {
      cm.getEnrollmentSets("bad eid");
      fail();
    } catch(IdNotFoundException ide) {}
  }

  public void testGetEnrollments() throws Exception {
    Assert.assertEquals(1, cm.getEnrollments("BIO101_F2006_01_ES01").size());
    try {
      // FIXED: was cm.getEnrollmentSets("bad eid"), which tested the wrong method.
      cm.getEnrollments("bad eid");
      fail();
    } catch(IdNotFoundException ide) {}
  }

  public void testFindEnrolledSections() throws Exception {
    // one of the two enrollment records is flagged as 'dropped'
    Assert.assertEquals(1, cm.findEnrolledSections("josh").size());
  }

  public void testGetEnrollment() throws Exception {
    Assert.assertNotNull(cm.findEnrollment("josh", "BIO101_F2006_01_ES01"));
    Assert.assertNotNull(cm.findEnrollment("josh", "CHEM101_F2006_01_ES01"));
    Assert.assertNull(cm.findEnrollment("josh", "bad eid"));
  }

  public void testGetOfficialGraders() throws Exception {
    Set graders = cm.getInstructorsOfRecordIds("BIO101_F2006_01_ES01");
    Assert.assertTrue(graders.contains("grader1"));
    Assert.assertTrue(graders.contains("grader2"));
    Assert.assertTrue( ! graders.contains("josh"));
    try {
      cm.getInstructorsOfRecordIds("bad eid");
      fail();
    } catch(IdNotFoundException ide) {}
  }

  public void testIsEnrolled() throws Exception {
    Set enrollmentSetEids = new HashSet();
    enrollmentSetEids.add("BIO101_F2006_01_ES01");

    // We don't care about bad EnrollmentSet eids here... we're just interested in Enrollments
    enrollmentSetEids.add("bad eid");

    Assert.assertTrue(cm.isEnrolled("josh", enrollmentSetEids));

    // Graders are not enrolled
    Assert.assertTrue( ! cm.isEnrolled("grader1", enrollmentSetEids));
    Assert.assertTrue( ! cm.isEnrolled("grader2", enrollmentSetEids));
  }

  public void testGetEnrolledEnrollmentSets() throws Exception {
    // User "josh" is enrolled in two EnrollmentSets. One is only current in 2036.
    // The other is always current.
    Set enrSets = cm.findCurrentlyEnrolledEnrollmentSets("josh");
    Assert.assertEquals(1, enrSets.size());
  }

  public void testGetGradableEnrollmentSets() throws Exception {
    Set gradableEnrollmentSets = cm.findCurrentlyInstructingEnrollmentSets("grader1");
    Assert.assertEquals(1, gradableEnrollmentSets.size());
  }

  public void testFindInstructingSections() throws Exception {
    Section section = (Section)cm.getSections("BIO101_F2006_01").iterator().next();
    log.debug(section.getTitle() + " contains these instructors: " + section.getEnrollmentSet().getOfficialInstructors());
    Set sections = cm.findInstructingSections("grader1");
    Assert.assertEquals(1, sections.size());
  }

  public void testFindInstructingSectionsByAcademicSession() throws Exception {
    Set sections = cm.findInstructingSections("grader1", "F2006");
    Assert.assertEquals(1, sections.size());
  }

  public void testGetCourseOfferingsByCourseSetAndAcademicSession() throws Exception {
    Assert.assertEquals(1, cm.findCourseOfferings("BIO_DEPT", "F2006").size());
  }

  public void testIsCourseSetEmpty() throws Exception {
    Assert.assertTrue(cm.isEmpty("EMPTY_COURSE_SET"));
    Assert.assertFalse(cm.isEmpty("BIO_DEPT"));
    Assert.assertFalse(cm.isEmpty("BIO_CHEM_GROUP"));
  }

  public void testFindCourseSetByCategory() throws Exception {
    List courseSets = cm.findCourseSets("DEPT");
    Assert.assertEquals(1, courseSets.size());
    Assert.assertEquals("BIO_DEPT", ((CourseSet)courseSets.get(0)).getEid());
  }

  public void testFindSectionRoles() throws Exception {
    Map joshMap = cm.findSectionRoles("josh");
    // This user is both enrolled and has a membership. This method only returns membership roles.
    Assert.assertEquals("student", joshMap.get("CHEM101_F2006_01_SEC01"));

    Map entMap = cm.findSectionRoles("AN_ENTERPRISE_USER");
    Assert.assertEquals("AN_ENTERPRISE_ROLE", entMap.get("BIO101_F2006_01_SEC01"));
  }

  public void testFindCourseOfferingRoles() throws Exception {
    Map coUserMap = cm.findCourseOfferingRoles("coUser");
    Assert.assertEquals("coRole1", coUserMap.get("BIO101_F2006_01"));
    Assert.assertEquals("coRole2", coUserMap.get("CHEM101_F2006_01"));
  }

  public void testFindCourseSetRoles() throws Exception {
    Map deptAdminMap = cm.findCourseSetRoles("user1");
    Assert.assertEquals("departmentAdmin", deptAdminMap.get("BIO_DEPT"));
  }

  public void testFindCategories() throws Exception {
    List categories = cm.getSectionCategories();
    Assert.assertEquals(3, categories.size());
  }

  public void testFindCategoryDescription() throws Exception {
    Assert.assertEquals("Lecture", cm.getSectionCategoryDescription("lct"));
  }

  public void testFindActiveCourseOfferings() {
    List<CourseOffering> coList = cm.findActiveCourseOfferingsInCanonicalCourse("ENG101");
    assertEquals(1, coList.size());

    //check we have the right one
    CourseOffering co = coList.get(0);
    assertEquals("ENG101_F2006_02", co.getEid());
  }
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.09.07 at 08:01:35 PM IST
//
package com.mozu.qbintegration.model.qbmodel.allgen;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{}VehicleRef"/>
* <element ref="{}CustomerRef" minOccurs="0"/>
* <element ref="{}ItemRef" minOccurs="0"/>
* <element ref="{}ClassRef" minOccurs="0"/>
* <element ref="{}TripStartDate" minOccurs="0"/>
* <element ref="{}TripEndDate" minOccurs="0"/>
* <choice>
* <group ref="{}OdometerReadingAdd"/>
* <element ref="{}TotalMiles"/>
* </choice>
* <element name="Notes" minOccurs="0">
* <simpleType>
* <restriction base="{}STRTYPE">
* <maxLength value="4095"/>
* </restriction>
* </simpleType>
* </element>
* <element ref="{}BillableStatus" minOccurs="0"/>
* </sequence>
* <attribute name="defMacro" type="{}MACROTYPE" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "vehicleRef",
    "customerRef",
    "itemRef",
    "classRef",
    "tripStartDate",
    "tripEndDate",
    "odometerStart",
    "odometerEnd",
    "totalMiles",
    "notes",
    "billableStatus"
})
@XmlRootElement(name = "VehicleMileageAdd")
public class VehicleMileageAdd {

    // NOTE: JAXB-generated class for the QuickBooks VehicleMileageAdd request.
    // Do not hand-edit logic here; regenerate from the schema instead.
    // Per the schema, exactly one of (odometerStart/odometerEnd) or totalMiles
    // is supplied; notes is capped at 4095 characters by the schema.
    @XmlElement(name = "VehicleRef", required = true)
    protected VehicleRef vehicleRef;
    @XmlElement(name = "CustomerRef")
    protected CustomerRef customerRef;
    @XmlElement(name = "ItemRef")
    protected ItemRef itemRef;
    @XmlElement(name = "ClassRef")
    protected ClassRef classRef;
    @XmlElement(name = "TripStartDate")
    protected String tripStartDate;
    @XmlElement(name = "TripEndDate")
    protected String tripEndDate;
    @XmlElement(name = "OdometerStart")
    protected String odometerStart;
    @XmlElement(name = "OdometerEnd")
    protected String odometerEnd;
    @XmlElement(name = "TotalMiles")
    protected String totalMiles;
    @XmlElement(name = "Notes")
    protected String notes;
    @XmlElement(name = "BillableStatus")
    protected String billableStatus;
    @XmlAttribute(name = "defMacro")
    protected String defMacro;

    /**
     * Gets the value of the vehicleRef property.
     *
     * @return
     *     possible object is
     *     {@link VehicleRef }
     *
     */
    public VehicleRef getVehicleRef() {
        return vehicleRef;
    }

    /**
     * Sets the value of the vehicleRef property.
     *
     * @param value
     *     allowed object is
     *     {@link VehicleRef }
     *
     */
    public void setVehicleRef(VehicleRef value) {
        this.vehicleRef = value;
    }

    /**
     * Gets the value of the customerRef property.
     *
     * @return
     *     possible object is
     *     {@link CustomerRef }
     *
     */
    public CustomerRef getCustomerRef() {
        return customerRef;
    }

    /**
     * Sets the value of the customerRef property.
     *
     * @param value
     *     allowed object is
     *     {@link CustomerRef }
     *
     */
    public void setCustomerRef(CustomerRef value) {
        this.customerRef = value;
    }

    /**
     * Gets the value of the itemRef property.
     *
     * @return
     *     possible object is
     *     {@link ItemRef }
     *
     */
    public ItemRef getItemRef() {
        return itemRef;
    }

    /**
     * Sets the value of the itemRef property.
     *
     * @param value
     *     allowed object is
     *     {@link ItemRef }
     *
     */
    public void setItemRef(ItemRef value) {
        this.itemRef = value;
    }

    /**
     * Gets the value of the classRef property.
     *
     * @return
     *     possible object is
     *     {@link ClassRef }
     *
     */
    public ClassRef getClassRef() {
        return classRef;
    }

    /**
     * Sets the value of the classRef property.
     *
     * @param value
     *     allowed object is
     *     {@link ClassRef }
     *
     */
    public void setClassRef(ClassRef value) {
        this.classRef = value;
    }

    /**
     * Gets the value of the tripStartDate property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getTripStartDate() {
        return tripStartDate;
    }

    /**
     * Sets the value of the tripStartDate property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setTripStartDate(String value) {
        this.tripStartDate = value;
    }

    /**
     * Gets the value of the tripEndDate property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getTripEndDate() {
        return tripEndDate;
    }

    /**
     * Sets the value of the tripEndDate property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setTripEndDate(String value) {
        this.tripEndDate = value;
    }

    /**
     * Gets the value of the odometerStart property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getOdometerStart() {
        return odometerStart;
    }

    /**
     * Sets the value of the odometerStart property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setOdometerStart(String value) {
        this.odometerStart = value;
    }

    /**
     * Gets the value of the odometerEnd property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getOdometerEnd() {
        return odometerEnd;
    }

    /**
     * Sets the value of the odometerEnd property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setOdometerEnd(String value) {
        this.odometerEnd = value;
    }

    /**
     * Gets the value of the totalMiles property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getTotalMiles() {
        return totalMiles;
    }

    /**
     * Sets the value of the totalMiles property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setTotalMiles(String value) {
        this.totalMiles = value;
    }

    /**
     * Gets the value of the notes property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getNotes() {
        return notes;
    }

    /**
     * Sets the value of the notes property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setNotes(String value) {
        this.notes = value;
    }

    /**
     * Gets the value of the billableStatus property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getBillableStatus() {
        return billableStatus;
    }

    /**
     * Sets the value of the billableStatus property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setBillableStatus(String value) {
        this.billableStatus = value;
    }

    /**
     * Gets the value of the defMacro property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getDefMacro() {
        return defMacro;
    }

    /**
     * Sets the value of the defMacro property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setDefMacro(String value) {
        this.defMacro = value;
    }

}
| |
package org.myrobotlab.service;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.myrobotlab.codec.ArduinoMsgCodec;
import org.myrobotlab.codec.Codec;
import org.myrobotlab.framework.Service;
import org.myrobotlab.logging.Level;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.logging.LoggingFactory;
import org.myrobotlab.service.Arduino.Sketch;
import org.slf4j.Logger;
/**
*
* @author GroG
*
*/
public class ArduinoTest {
public final static Logger log = LoggerFactory.getLogger(SerialTest.class);
static Arduino arduino = null;
static Serial serial = null;
static TestCatcher catcher = null;
static VirtualDevice virtual = null;
static Python logic = null;
static String vport = "vport";
static Serial uart = null;
int servoPin = 9;
static ArduinoMsgCodec codec = new ArduinoMsgCodec();
// FIXME - test for re-entrant !!!!
// FIXME - single switch for virtual versus "real" hardware
@BeforeClass
public static void setUpBeforeClass() throws Exception {
LoggingFactory.getInstance().configure();
LoggingFactory.getInstance().setLevel(Level.INFO);
log.info("setUpBeforeClass");
//Runtime.start("gui", "GUIService");
arduino = (Arduino) Runtime.start("arduino", "Arduino");
serial = arduino.getSerial();
catcher = (TestCatcher) Runtime.start("catcher", "TestCatcher");
virtual = (VirtualDevice) Runtime.start("virtual", "VirtualDevice");
virtual.createVirtualArduino(vport);
logic = virtual.getLogic();
catcher.subscribe(arduino, "publishError", "onError");
uart = virtual.getUART();
uart.setCodec("arduino");
Codec codec = uart.getRXCodec();
codec.setTimeout(1000);
uart.setTimeout(100); // don't want to hang when decoding results...
arduino.setBoard(Arduino.BOARD_TYPE_ATMEGA2560);
arduino.connect(vport);
Service.sleep(500);
// nice to be able to check messages
// uart.addByteListener(catcher);
log.info("here");
}
@AfterClass
public static void tearDownAfterClass() throws Exception {
}
@Before
public void setUp() throws Exception {
catcher.clear();
catcher.isLocal = true;
uart.clear();
uart.setTimeout(100);
serial.clear();
serial.setTimeout(100);
/*
* arduino.clearLastError(); arduino.hasError();
*/
}
@After
public void tearDown() throws Exception {
}
@Test
public final void testReleaseService() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testStartService() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testStopService() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testTest() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testGetPeers() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testArduino() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testAddCustomMsgListener() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testAnalogReadPollingStart() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testAnalogReadPollingStop() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testAnalogWrite() throws InterruptedException, IOException {
log.info("testConnect - begin");
serial.clear();
arduino.analogWrite(10, 0);
String decoded = uart.decode();
assertEquals("analogWrite/10/0\n", decoded);
arduino.analogWrite(10, 127);
decoded = uart.decode();
assertEquals("analogWrite/10/127\n", decoded);
arduino.analogWrite(10, 128);
decoded = uart.decode();
assertEquals("analogWrite/10/128\n", decoded);
arduino.analogWrite(10, 255);
decoded = uart.decode();
assertEquals("analogWrite/10/255\n", decoded);
arduino.error("test");
log.info(String.format("errors %b", catcher.hasError()));
// Runtime.clearErrors();
/*
* if (Runtime.hasErrors()){ ArrayList<Status> errors =
* Runtime.getErrors(); //throw new IOException("problem with errors");
* }
*/
/*
* uart.decode(); codec.decode(newByte)
*
* catcher.checkMsg("bla");
*/
}
@Test
public final void testConnect() {
log.info("testConnect - begin");
arduino.connect(vport);
assertTrue(arduino.isConnected());
assertEquals(ArduinoMsgCodec.MRLCOMM_VERSION, arduino.getVersion().intValue());
log.info("testConnect - end");
}
// NOTE(review): stubs — no assertions yet; they pass vacuously (see TODOs).
@Test
public final void testCreatePinList() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testDigitalReadPollingStart() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testDigitalReadPollingStop() {
// fail("Not yet implemented"); // TODO
}
/**
 * Drives pin 10 high then low and checks the encoded wire message for each
 * level ("digitalWrite/&lt;pin&gt;/&lt;level&gt;\n").
 */
@Test
public final void testDigitalWrite() {
    for (int level : new int[] { 1, 0 }) {
        arduino.digitalWrite(10, level);
        assertEquals("digitalWrite/10/" + level + "\n", uart.decode());
    }
    // NOTE(review): behaviour for out-of-range levels (e.g. 255) is untested.
}
// After disconnect, writes must not reach the uart; reconnecting restores traffic.
@Test
public final void testDisconnect() {
arduino.disconnect();
assertTrue(!arduino.isConnected());
// write while disconnected: nothing should arrive on the uart
arduino.digitalWrite(10, 1);
assertEquals(0, uart.available());
// reconnect and verify normal traffic resumes
arduino.connect(vport);
assertTrue(arduino.isConnected());
uart.clear();
arduino.digitalWrite(10, 1);
assertEquals("digitalWrite/10/1\n", uart.decode());
}
// NOTE(review): first two are stubs with no assertions (see TODOs).
@Test
public final void testGetBoardType() {
//arduino.setBoardMega();
// fail("Not yet implemented"); // TODO
}
@Test
public final void testGetPinList() {
// fail("Not yet implemented"); // TODO
}
// The serial sub-service must always be available on the arduino service.
@Test
public final void testGetSerial() {
assertNotNull(arduino.getSerial());
}
// The sketch should be pre-loaded and non-empty; the setter round-trips:
// null clears it, re-setting restores the same instance.
@Test
public final void testGetSketch() {
Sketch sketch = arduino.getSketch();
assertNotNull(sketch);
assertTrue(sketch.data.length() > 0);
arduino.setSketch(null);
assertNull(arduino.getSketch());
arduino.setSketch(sketch);
assertEquals(sketch, arduino.getSketch());
}
// The reported firmware version must match the codec's MRLComm version constant.
@Test
public final void testGetVersion() {
assertEquals(ArduinoMsgCodec.MRLCOMM_VERSION, arduino.getVersion().intValue());
}
// NOTE(review): stubs — no assertions yet; they pass vacuously (see TODOs).
@Test
public final void testIsConnected() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testMotorAttachStringIntegerInteger() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testMotorAttachStringStringIntegerInteger() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testMotorAttachStringStringIntegerIntegerInteger() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testMotorDetach() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testMotorMove() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testMotorMoveTo() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testOnByte() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testOnCustomMsg() {
// fail("Not yet implemented"); // TODO
}
/**
 * pinMode with a symbolic mode name; "OUTPUT" must encode to mode value 1.
 */
@Test
public final void testPinModeIntString() {
    arduino.pinMode(8, "OUTPUT");
    String decoded = uart.decode();
    assertEquals("pinMode/8/1\n", decoded);
}
/**
 * pinMode with the numeric Arduino.OUTPUT constant; same wire encoding.
 */
@Test
public final void testPinModeIntegerInteger() {
    arduino.pinMode(8, Arduino.OUTPUT);
    String decoded = uart.decode();
    assertEquals("pinMode/8/1\n", decoded);
}
// NOTE(review): stubs — no assertions yet; they pass vacuously (see TODOs).
// "testPublishSesorData" is presumably a typo for "testPublishSensorData";
// rename only together with any tooling that references it by name.
@Test
public final void testPublishCustomMsg() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testPublishLoadTimingEvent() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testPublishMRLCommError() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testPublishPin() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testPublishPulse() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testPublishServoEvent() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testPublishSesorData() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testPublishStepperEvent() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testPublishTrigger() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testPublishVersion() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testPulseInIntInt() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testPulseInIntIntIntInt() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testPulseInIntIntIntString() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testPulseInIntIntInteger() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSendMsg() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSensorAttachString() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSensorAttachUltrasonicSensor() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSensorPollingStart() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSensorPollingStop() {
// fail("Not yet implemented"); // TODO
}
// End-to-end servo lifecycle over the wire protocol: attach, move, detach
// (moves are then dropped), re-attach, move again, release.
// NOTE(review): the hard-coded "/7/" in the expected messages implies
// servoPin == 7 in this fixture — confirm against the field's initializer.
@Test
public final void testServoAttachServoInteger() {
Servo servo = (Servo) Runtime.start("servo", "Servo");
// NOT THE WAY TO ATTACH SERVOS !!
// isAttached will not get set
// dont know a good fix - asside from not using it !
// arduino.servoAttach(servo, servoPin);
// re-entrant test
// arduino.servoAttach(servo, servoPin);
// common way
servo.attach(arduino, servoPin);
// another way
// servo.setPin(servoPin);
// servo.setController(arduino);
assertTrue(servo.isAttached());
//re-entrant test
servo.attach(arduino, servoPin);
assertTrue(servo.isAttached());
assertEquals(servoPin, servo.getPin().intValue());
assertEquals(arduino.getName(), servo.getControllerName());
// trailing "/115/101/114/118/111" is the service name "servo" as byte values
assertEquals("servoAttach/7/9/5/115/101/114/118/111\n", uart.decode());
servo.moveTo(0);
assertEquals("servoWrite/7/0\n", uart.decode());
servo.moveTo(90);
assertEquals("servoWrite/7/90\n", uart.decode());
servo.moveTo(180);
assertEquals("servoWrite/7/180\n", uart.decode());
servo.moveTo(0);
assertEquals("servoWrite/7/0\n", uart.decode());
// detach
servo.detach();
assertEquals("servoDetach/7/0\n", uart.decode());
// while detached a move must not produce any wire traffic
servo.moveTo(10);
String shouldBeNull = uart.decode();
assertNull(shouldBeNull);
// re-attach
servo.attach();
assertEquals("servoAttach/7/9/5/115/101/114/118/111\n", uart.decode());
assertTrue(servo.isAttached());
assertEquals(servoPin, servo.getPin().intValue());
assertEquals(arduino.getName(), servo.getControllerName());
servo.moveTo(90);
assertEquals("servoWrite/7/90\n", uart.decode());
servo.releaseService();
}
// NOTE(review): stubs — no assertions yet; they pass vacuously (see TODOs).
@Test
public final void testServoAttachStringInteger() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testServoDetachServo() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testServoDetachString() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testServoSweepStart() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testServoSweepStop() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testServoWrite() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testServoWriteMicroseconds() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSetBoard() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSetDebounce() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSetDigitalTriggerOnly() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSetLoadTimingEnabled() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSetPWMFrequency() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSetSampleRate() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSetSerialRate() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSetServoEventsEnabled() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSetServoSpeed() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSetSketch() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSetStepperSpeed() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSetTriggerIntInt() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSetTriggerIntIntInt() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testSoftReset() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testStepperAttachStepperControl() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testStepperAttachString() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testStepperDetach() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testStepperMove() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testStepperReset() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testStepperStepStringInteger() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testStepperStepStringIntegerInteger() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testStepperStop() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testConnectVirtualUART() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testOnConnect() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testOnDisconnect() {
// fail("Not yet implemented"); // TODO
}
@Test
public final void testMain() {
// fail("Not yet implemented"); // TODO
}
}
| |
package common.parameter;
import java.util.Calendar;
import java.util.TimeZone;
import java.util.Timer;
import java.util.TimerTask;
import java.util.Vector;
import org.trianacode.taskgraph.Unit;
/**
* Trigger by crontab-like scheduling
*
* @author Rui Zhu
* @version $Revision: 2921 $
*/
public class TriggerCrontab extends Unit {

    // Crontab-style field strings, e.g. "0", "1-5", "*/2", "1,15" (set via parameters).
    private String minute;
    private String hour;
    private String dayOfMonth;
    private String month;
    private String dayOfWeek;
    private String zone;

    // Type tags passed to normalizeForm() so it can apply per-unit range checks.
    private final int MINUTE = 1;
    private final int HOUR = 2;
    private final int DAY_OF_MONTH = 3;
    private final int MONTH = 4;
    private final int DAY_OF_WEEK = 5;

    // FIX: the original initializer (365 * 24 * 3600 * 1000) was evaluated in
    // int arithmetic and overflowed to 1,471,228,928 ms (~17 days) before being
    // widened to long. 365L forces long arithmetic so the repeat period really
    // is ~one year.
    // NOTE(review): 365 days is not an exact calendar year (leap years drift
    // the yearly repeat by a day) — acceptable? confirm.
    private final long ONE_YEAR_IN_MILLIS = 365L * 24 * 3600 * 1000;

    private final int MAX_TARDINESS = 500; // 0.5s (currently unused)

    /**
     * Expands the crontab fields into concrete calendar times, schedules a
     * timer task for every matching time (repeating yearly), then sleeps until
     * the pipeline is reset (thread interrupt), at which point all scheduled
     * tasks are cancelled.
     *
     * @throws Exception declared by the Unit contract; format errors are
     *                   reported to stderr and cause an early return instead
     */
    public void process() throws Exception {
        Timer timer = new Timer();
        Vector<Integer> m, h, d, M, D;
        // Normalize each field into an explicit list of integers.
        if (!checkSkips()) {
            System.err.println("only one time unit can have skips");
            return;
        }
        System.err.println("checkSkips OK");
        m = normalizeForm(minute, MINUTE);
        if (m == null) {
            System.err.println("minute format error");
            return;
        }
        System.err.println("minute format OK");
        h = normalizeForm(hour, HOUR);
        if (h == null) {
            System.err.println("hour format error");
            return;
        }
        System.err.println("hour format OK");
        d = normalizeForm(dayOfMonth, DAY_OF_MONTH);
        if (d == null) {
            System.err.println("day of month format error");
            return;
        }
        System.err.println("date format OK");
        M = normalizeForm(month, MONTH);
        if (M == null) {
            System.err.println("month format error");
            return;
        }
        System.err.println("month format OK");
        D = normalizeForm(dayOfWeek, DAY_OF_WEEK);
        if (D == null) {
            System.err.println("day of week format error");
            return;
        }
        System.err.println("day format OK");

        Calendar cal;
        final Calendar curr;
        if (zone.equals("UTC")) {
            cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        } else {
            cal = Calendar.getInstance();
        }
        curr = (Calendar) cal.clone();

        // Cartesian product of month x day x hour x minute, intersected with
        // the allowed days of week.
        for (int ll = 0; ll < M.size(); ll++) {
            for (int kk = 0; kk < d.size(); kk++) {
                for (int jj = 0; jj < h.size(); jj++) {
                    for (int ii = 0; ii < m.size(); ii++) {
                        cal.setTime(curr.getTime());
                        // join: build the candidate fire time for this combination
                        cal.set(Calendar.MONTH, M.get(ll).intValue() - 1);
                        cal.set(Calendar.DAY_OF_MONTH, d.get(kk).intValue());
                        // FIX: Calendar.HOUR is the 12-hour-clock field; the hour
                        // parameter is 0-23 (see the GUI spec in init()), so
                        // HOUR_OF_DAY is the correct field to set.
                        cal.set(Calendar.HOUR_OF_DAY, h.get(jj).intValue());
                        cal.set(Calendar.MINUTE, m.get(ii).intValue());
                        // FIX: zero sub-minute fields so the task fires on the
                        // minute boundary instead of inheriting "now"'s seconds.
                        cal.set(Calendar.SECOND, 0);
                        cal.set(Calendar.MILLISECOND, 0);
                        // intersection with day-of-week: crontab uses 0-7 where
                        // both 0 and 7 are Sunday; Calendar.DAY_OF_WEEK has
                        // SUNDAY == 1, hence the (% 7 + 1) mapping.
                        for (int mm = 0, tmp = cal.get(Calendar.DAY_OF_WEEK); mm < D.size(); mm++) {
                            if (D.get(mm).intValue() % 7 + 1 == tmp) {
                                if (cal.before(curr)) {
                                    cal.add(Calendar.YEAR, 1);
                                }
                                // NOTE(review): after a yearly repeat the same date
                                // falls on a different weekday, so the day-of-week
                                // constraint only holds for the first firing.
                                timer.schedule(new MyTimerTask(), cal.getTime(), ONE_YEAR_IN_MILLIS);
                                System.out.println("setting: " + cal.getTime());
                            }
                        }
                    }
                }
            }
        }
        try {
            System.out.println("crontab now sleeping");
            Thread.sleep(Long.MAX_VALUE);
        } catch (InterruptedException e) {
            System.out.println("pipeline reset, crontab canceled");
        } finally {
            timer.cancel();
        }
    }

    /**
     * The '/' skip syntax is only supported on a single time unit at a time.
     *
     * @return true when at most one field contains a skip
     */
    private boolean checkSkips() {
        int cnt = 0;
        if (minute.indexOf('/') != -1) {
            cnt++;
        }
        if (hour.indexOf('/') != -1) {
            cnt++;
        }
        if (dayOfMonth.indexOf('/') != -1) {
            cnt++;
        }
        if (month.indexOf('/') != -1) {
            cnt++;
        }
        if (dayOfWeek.indexOf('/') != -1) {
            cnt++;
        }
        return cnt <= 1;
    }

    /**
     * Expands one crontab field into an explicit list of integers.
     * <p>
     * Terms are comma-separated; each term is a single number, a range
     * ("a-b"), or a wildcard ("*"), optionally followed by a skip ("/n").
     *
     * @param time the raw field string
     * @param type one of MINUTE/HOUR/DAY_OF_MONTH/MONTH/DAY_OF_WEEK,
     *             selecting the valid value range
     * @return the expanded values, or {@code null} on any format/range error
     */
    private Vector<Integer> normalizeForm(String time, int type) {
        Vector<Integer> res = new Vector<Integer>();
        int i, j;
        // Ensure a trailing ',' so the tokenizing loop below sees every term.
        if (!time.trim().endsWith(",")) {
            time += ",";
        }
        System.err.println(" time string is: " + time);
        for (i = 0, j = time.indexOf(','); j != -1; i = j + 1, j = time.indexOf(',', i)) {
            String s = time.substring(i, j).trim();
            int k = s.indexOf('/');
            int m = s.indexOf('-');
            int n = s.indexOf('*');
            int skip = 1;
            int begin = 0, end = -1; // end < begin => empty term emits nothing
            if (k != s.lastIndexOf('/')) {
                System.err.println("too many skips in: " + time);
                return null;
            }
            if (k != -1) { // term carries a skip: parse the divisor after '/'
                try {
                    skip = Integer.parseInt(s.substring(k + 1).trim());
                } catch (NumberFormatException e) {
                    System.err.println(e.toString());
                    return null;
                }
            } else {
                k = s.length(); // no skip: the range/number part is the whole term
            }
            if (n != -1 && s.substring(0, k).trim().length() != 1) {
                System.err.println("wildcard range ('*') can only used alone: " + time);
                return null;
            }
            if (m != s.lastIndexOf('-')) {
                System.err.println("too many ranges in: " + time);
                return null;
            }
            if (n != -1) { // '*' expands to the full valid range for this unit
                switch (type) {
                case MINUTE:
                    begin = 0;
                    end = 59;
                    break;
                case HOUR:
                    begin = 0;
                    end = 23;
                    break;
                case DAY_OF_MONTH:
                    begin = 1;
                    end = 31;
                    break;
                case MONTH:
                    begin = 1;
                    end = 12;
                    break;
                case DAY_OF_WEEK:
                    begin = 0;
                    end = 7;
                    break;
                default:
                    System.err.println("unknown type");
                    break;
                }
            } else if (m != -1) { // 'a-b' range
                try {
                    begin = Integer.parseInt(s.substring(0, m).trim());
                    end = Integer.parseInt(s.substring(m + 1, k).trim());
                } catch (NumberFormatException e) {
                    System.err.println(e.toString());
                    return null;
                }
            } else if (s.substring(0, k).trim().length() > 0) { // one number
                try {
                    begin = Integer.parseInt(s.substring(0, k).trim());
                    end = begin;
                } catch (NumberFormatException e) {
                    System.err.println(e.toString());
                    return null;
                }
            }
            for (int idx = begin; idx <= end; idx += skip) {
                // Range-check every emitted value against the unit's bounds.
                try {
                    switch (type) {
                    case MINUTE:
                        if (idx < 0 || idx > 59) {
                            throw new RuntimeException("Out of range: " + idx);
                        }
                        break;
                    case HOUR:
                        if (idx < 0 || idx > 23) {
                            throw new RuntimeException("Out of range: " + idx);
                        }
                        break;
                    case DAY_OF_MONTH:
                        if (idx < 1 || idx > 31) {
                            throw new RuntimeException("Out of range: " + idx);
                        }
                        break;
                    case MONTH:
                        if (idx < 1 || idx > 12) {
                            throw new RuntimeException("Out of range: " + idx);
                        }
                        break;
                    case DAY_OF_WEEK:
                        if (idx < 0 || idx > 7) {
                            throw new RuntimeException("Out of range: " + idx);
                        }
                        break;
                    default:
                        System.err.println("unknown calender unit");
                        break;
                    }
                } catch (RuntimeException e) {
                    System.err.println(e.toString());
                    return null;
                }
                res.add(Integer.valueOf(idx)); // valueOf: new Integer(..) is deprecated
            }
        }
        return res;
    }

    /**
     * Called when the unit is created. Initialises the unit's properties and parameters.
     */
    public void init() {
        super.init();
        // This unit has no inputs; it emits a Parameter on each trigger.
        setDefaultInputNodes(0);
        setMinimumInputNodes(0);
        setMaximumInputNodes(0);
        setDefaultOutputNodes(1);
        setMinimumOutputNodes(0);
        setMaximumOutputNodes(Integer.MAX_VALUE);
        // Initialise parameter update policy
        setParameterUpdatePolicy(PROCESS_UPDATE);
        // Initialise pop-up description and help file location
        setPopUpDescription("Trigger by crontab-like scheduling");
        setHelpFileLocation("TriggerCrontab.html");
        // Default schedule fields: minute 0, hour 0, Jan 1st, day-of-week 1.
        defineParameter("minute", "0", USER_ACCESSIBLE);
        defineParameter("hour", "0", USER_ACCESSIBLE);
        defineParameter("dayOfMonth", "1", USER_ACCESSIBLE);
        defineParameter("month", "1", USER_ACCESSIBLE);
        defineParameter("dayOfWeek", "1", USER_ACCESSIBLE);
        defineParameter("zone", "System Default", USER_ACCESSIBLE);
        // Initialise GUI builder interface
        String guilines = "";
        guilines += "Minute (0-59) $title minute TextField 0\n";
        guilines += "Hour (0-23) $title hour TextField 0\n";
        guilines += "Day of Month (1-31) $title dayOfMonth TextField 1\n";
        guilines += "Month (1-12) $title month TextField 1\n";
        guilines += "Day of Week (0-7) $title dayOfWeek TextField 1\n";
        guilines += "Zone $title zone Choice [System Default] [UTC] 0\n";
        setGUIBuilderV2Info(guilines);
    }

    /**
     * Called when the unit is reset. Restores the unit's variables to values
     * specified by the parameters.
     */
    public void reset() {
        minute = (String) getParameter("minute");
        hour = (String) getParameter("hour");
        dayOfMonth = (String) getParameter("dayOfMonth");
        month = (String) getParameter("month");
        dayOfWeek = (String) getParameter("dayOfWeek");
        zone = (String) getParameter("zone");
    }

    /**
     * Called when the unit is disposed of.
     */
    public void dispose() {
        // Nothing to clean up: the Timer is cancelled in process()'s finally block.
    }

    /**
     * Called when a parameter is updated (e.g. by the GUI).
     *
     * @param paramname the name of the updated parameter
     * @param value     its new value (a String for every parameter here)
     */
    public void parameterUpdate(String paramname, Object value) {
        if (paramname.equals("minute")) {
            minute = (String) value;
        }
        if (paramname.equals("hour")) {
            hour = (String) value;
        }
        if (paramname.equals("dayOfMonth")) {
            dayOfMonth = (String) value;
        }
        if (paramname.equals("month")) {
            month = (String) value;
        }
        if (paramname.equals("dayOfWeek")) {
            dayOfWeek = (String) value;
        }
        if (paramname.equals("zone")) {
            zone = (String) value;
        }
    }

    /**
     * @return an array of the input types for TriggerCrontab (none)
     */
    public String[] getInputTypes() {
        return new String[]{};
    }

    /**
     * @return an array of the output types for TriggerCrontab
     */
    public String[] getOutputTypes() {
        return new String[]{"Parameter"};
    }

    /** Emits the scheduled execution time as a Parameter when the timer fires. */
    private class MyTimerTask extends TimerTask {
        public void run() {
            output(new triana.types.Parameter(Long.valueOf(scheduledExecutionTime())));
            System.err.println("tardiness: " + (System.currentTimeMillis() - scheduledExecutionTime()));
        }
    }
}
| |
/*
* Copyright 2016 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.service;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.materials.*;
import com.thoughtworks.go.config.materials.git.GitMaterial;
import com.thoughtworks.go.config.materials.mercurial.HgMaterial;
import com.thoughtworks.go.config.materials.mercurial.HgMaterialConfig;
import com.thoughtworks.go.config.remote.ConfigRepoConfig;
import com.thoughtworks.go.config.remote.RepoConfigOrigin;
import com.thoughtworks.go.domain.MaterialRevisions;
import com.thoughtworks.go.domain.Pipeline;
import com.thoughtworks.go.domain.buildcause.BuildCause;
import com.thoughtworks.go.domain.materials.MaterialConfig;
import com.thoughtworks.go.domain.materials.Modification;
import com.thoughtworks.go.domain.materials.git.GitTestRepo;
import com.thoughtworks.go.helper.*;
import com.thoughtworks.go.server.cronjob.GoDiskSpaceMonitor;
import com.thoughtworks.go.server.dao.DatabaseAccessHelper;
import com.thoughtworks.go.server.dao.PipelineDao;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.materials.*;
import com.thoughtworks.go.server.perf.MDUPerformanceLogger;
import com.thoughtworks.go.server.persistence.MaterialRepository;
import com.thoughtworks.go.server.scheduling.BuildCauseProducerService;
import com.thoughtworks.go.server.scheduling.ScheduleHelper;
import com.thoughtworks.go.server.scheduling.ScheduleOptions;
import com.thoughtworks.go.server.service.result.ServerHealthStateOperationResult;
import com.thoughtworks.go.server.transaction.TransactionTemplate;
import com.thoughtworks.go.serverhealth.ServerHealthService;
import com.thoughtworks.go.util.ConfigElementImplementationRegistryMother;
import com.thoughtworks.go.util.GoConfigFileHelper;
import com.thoughtworks.go.util.SystemEnvironment;
import org.hamcrest.core.IsNot;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static junit.framework.Assert.fail;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.AdditionalMatchers.not;
import static org.mockito.Mockito.mock;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
"classpath:WEB-INF/applicationContext-global.xml",
"classpath:WEB-INF/applicationContext-dataLocalAccess.xml",
"classpath:WEB-INF/applicationContext-acegi-security.xml"
})
// Integration test: pipeline scheduling when the pipeline's config itself
// lives in a config repository (hg). Exercises the interplay between material
// updates, config-repo parsing, and the build-cause producer.
public class BuildCauseProducerServiceConfigRepoIntegrationTest {
// Spring-wired collaborators under test (see the applicationContext*.xml above).
@Autowired private GoConfigDao goConfigDao;
@Autowired private GoConfigService goConfigService;
@Autowired private PipelineDao pipelineDao;
@Autowired private ServerHealthService serverHealthService;
@Autowired PipelineService pipelineService;
@Autowired private ScheduleHelper scheduleHelper;
@Autowired private DatabaseAccessHelper dbHelper;
@Autowired private MaterialDatabaseUpdater materialDatabaseUpdater;
@Autowired private MaterialRepository materialRepository;
@Autowired private MaterialUpdateService materialUpdateService;
@Autowired private SubprocessExecutionContext subprocessExecutionContext;
@Autowired private ConfigMaterialUpdater materialUpdater;
@Autowired private GoRepoConfigDataSource goRepoConfigDataSource;
@Autowired private SystemEnvironment systemEnvironment;
@Autowired private MaterialConfigConverter materialConfigConverter;
@Autowired private ConfigCache configCache;
@Autowired private MergedGoConfig mergedGoConfig;
@Autowired private PipelineScheduleQueue pipelineScheduleQueue;
@Autowired private PipelineScheduler buildCauseProducer;
@Autowired private BuildCauseProducerService buildCauseProducerService;
@Autowired private MaterialChecker materialChecker;
@Autowired private MaterialExpansionService materialExpansionService;
@Autowired private MaterialUpdateCompletedTopic topic;
@Autowired private ConfigMaterialUpdateCompletedTopic configTopic;
@Autowired private TransactionTemplate transactionTemplate;
// Per-test fixtures created in setup(): the hg config repo, the pipeline it
// defines, and the material revisions recorded right after initial import.
private GoDiskSpaceMonitor goDiskSpaceMonitor;
private static GoConfigFileHelper configHelper = new GoConfigFileHelper();
private MagicalGoConfigXmlWriter xmlWriter;
private ConfigTestRepo configTestRepo;
private DiskSpaceSimulator diskSpaceSimulator;
private HgTestRepo hgRepo;
private HgMaterialConfig materialConfig;
private MDUPerformanceLogger logger;
private MaterialUpdateListener worker;
private HgMaterial material;
private Pipeline latestPipeline;
private PipelineConfig pipelineConfig;
MaterialRevisions firstRevisions;
private String PIPELINE_NAME;
String fileName = "pipe1.gocd.xml";
// Builds the whole fixture: an hg repo registered as a config repo, a pipeline
// pushed into it, an initial material update, and a snapshot of the first
// revisions so tests can compare against "before" state.
@Before
public void setup() throws Exception {
diskSpaceSimulator = new DiskSpaceSimulator();
hgRepo = new HgTestRepo("testHgRepo");
dbHelper.onSetUp();
configHelper.onSetUp();
configHelper.usingCruiseConfigDao(goConfigDao).initializeConfigFile();
// register the hg repo as a "gocd-xml" config repository
materialConfig = hgRepo.materialConfig();
configHelper.addConfigRepo(new ConfigRepoConfig(materialConfig,"gocd-xml"));
logger = mock(MDUPerformanceLogger.class);
TestingEmailSender emailSender = new TestingEmailSender();
SystemDiskSpaceChecker mockDiskSpaceChecker = Mockito.mock(SystemDiskSpaceChecker.class);
StageService stageService = mock(StageService.class);
ConfigDbStateRepository configDbStateRepository = mock(ConfigDbStateRepository.class);
goDiskSpaceMonitor = new GoDiskSpaceMonitor(goConfigService, systemEnvironment,
serverHealthService, emailSender, mockDiskSpaceChecker, mock(ArtifactsService.class),
stageService, configDbStateRepository);
goDiskSpaceMonitor.initialize();
// config-aware worker: material updates here also refresh the merged config
worker = new MaterialUpdateListener(configTopic,materialDatabaseUpdater,logger,goDiskSpaceMonitor);
xmlWriter = new MagicalGoConfigXmlWriter(configCache, ConfigElementImplementationRegistryMother.withNoPlugins());
configTestRepo = new ConfigTestRepo(hgRepo, xmlWriter);
this.material = (HgMaterial)materialConfigConverter.toMaterial(materialConfig);
// define a two-stage pipeline whose only material is the config repo itself
pipelineConfig = PipelineConfigMother.createPipelineConfigWithStages("pipe1", "build", "test");
pipelineConfig.materialConfigs().clear();
pipelineConfig.materialConfigs().add(materialConfig);
PIPELINE_NAME = CaseInsensitiveString.str(pipelineConfig.name());
configTestRepo.addPipelineToRepositoryAndPush(fileName, pipelineConfig);
materialUpdateService.updateMaterial(material);
// time for messages to pass through all services
waitForMaterialNotInProgress();
pipelineConfig = goConfigService.pipelineConfigNamed(pipelineConfig.name());
pipelineScheduleQueue.clear();
//check test setup
Materials materials = materialConfigConverter.toMaterials(pipelineConfig.materialConfigs());
MaterialRevisions peggedRevisions = new MaterialRevisions();
firstRevisions = materialChecker.findLatestRevisions(peggedRevisions, materials);
assertThat(firstRevisions.isMissingModifications(),is(false));
}
// Tears the fixture down in reverse-ish order of setup(); clears the schedule
// queue so a failed test cannot leak scheduled pipelines into the next one.
@After
public void teardown() throws Exception {
diskSpaceSimulator.onTearDown();
TestRepo.internalTearDown();
dbHelper.onTearDown();
pipelineScheduleQueue.clear();
configHelper.onTearDown();
}
/**
 * Polls until the material-update queue finishes processing {@code material},
 * failing the test if it is still in progress after roughly ten seconds.
 */
private void waitForMaterialNotInProgress() throws InterruptedException {
    int polls = 0;
    while (materialUpdateService.isInProgress(material)) {
        Thread.sleep(100); // 100ms per poll, ~100 polls max
        if (polls++ > 100) {
            fail("material is hung - more than 10 seconds in progress");
        }
    }
}
// A manual trigger must schedule the pipeline and record the forcing user in
// the build cause.
@Test
public void shouldSchedulePipelineWhenManuallyTriggered() throws Exception {
configTestRepo.addCodeToRepositoryAndPush("a.java", "added code file", "some java code");
materialUpdateService.updateMaterial(material);
waitForMaterialNotInProgress();
// no pinned revisions / env vars: plain manual trigger
final HashMap<String, String> revisions = new HashMap<String, String>();
final HashMap<String, String> environmentVariables = new HashMap<String, String>();
buildCauseProducer.manualProduceBuildCauseAndSave(PIPELINE_NAME, Username.ANONYMOUS,
new ScheduleOptions(revisions, environmentVariables, new HashMap<String, String>()), new ServerHealthStateOperationResult());
Map<String, BuildCause> afterLoad = scheduleHelper.waitForAnyScheduled(5);
assertThat(afterLoad.keySet(), hasItem(PIPELINE_NAME));
BuildCause cause = afterLoad.get(PIPELINE_NAME);
assertThat(cause.getBuildCauseMessage(), containsString("Forced by anonymous"));
}
// A new commit followed by a material update must let auto-scheduling pick the
// pipeline up.
@Test
public void shouldSchedulePipeline() throws Exception {
configTestRepo.addCodeToRepositoryAndPush("a.java", "added code file","some java code");
materialUpdateService.updateMaterial(material);
waitForMaterialNotInProgress();
buildCauseProducerService.autoSchedulePipeline(PIPELINE_NAME,new ServerHealthStateOperationResult(),123);
assertThat(scheduleHelper.waitForAnyScheduled(5).keySet(), hasItem(PIPELINE_NAME));
}
// Pushing a broken config file must make the latest parse fail, and
// auto-scheduling must then refuse to schedule the pipeline.
@Test
public void shouldNotSchedulePipelineWhenPartIsInvalid() throws Exception {
configTestRepo.addCodeToRepositoryAndPush(fileName, "added broken config file","bad bad config");
materialUpdateService.updateMaterial(material);
waitForMaterialNotInProgress();
assertThat(goRepoConfigDataSource.latestParseHasFailedForMaterial(material.config()),is(true));
buildCauseProducerService.autoSchedulePipeline(PIPELINE_NAME, new ServerHealthStateOperationResult(), 123);
scheduleHelper.waitForNotScheduled(5, PIPELINE_NAME);
}
// Even with a broken config push, a MANUAL trigger must still schedule: the
// config origin stays at the last valid commit while the build cause uses the
// newest (broken) commit's material revision.
@Test
public void shouldSchedulePipelineWhenPartIsInvalid_AndManuallyTriggered() throws Exception {
List<Modification> lastPush = configTestRepo.addCodeToRepositoryAndPush(fileName, "added broken config file", "bad bad config");
materialUpdateService.updateMaterial(material);
waitForMaterialNotInProgress();
assertThat(goRepoConfigDataSource.latestParseHasFailedForMaterial(material.config()),is(true));
final HashMap<String, String> revisions = new HashMap<String, String>();
final HashMap<String, String> environmentVariables = new HashMap<String, String>();
buildCauseProducer.manualProduceBuildCauseAndSave(PIPELINE_NAME, Username.ANONYMOUS,
new ScheduleOptions(revisions, environmentVariables, new HashMap<String, String>()), new ServerHealthStateOperationResult());
Map<String, BuildCause> afterLoad = scheduleHelper.waitForAnyScheduled(5);
assertThat(afterLoad.keySet(), hasItem(PIPELINE_NAME));
BuildCause cause = afterLoad.get(PIPELINE_NAME);
assertThat(cause.getBuildCauseMessage(), containsString("Forced by anonymous"));
// config origin must NOT have advanced past the last commit that parsed
PipelineConfig pipelineConfigAfterSchedule = goConfigService.pipelineConfigNamed(pipelineConfig.name());
RepoConfigOrigin configOriginAfterSchedule = (RepoConfigOrigin) pipelineConfigAfterSchedule.getOrigin();
String lastValidPushedRevision = this.firstRevisions.latestRevision();
assertThat("revisionOfPipelineConfigOriginShouldMatchLastValidPushedCommit",
configOriginAfterSchedule.getRevision(),is(lastValidPushedRevision));
assertThat("buildCauseRevisionShouldMatchLastPushedCommit",
cause.getMaterialRevisions().latestRevision(), is(lastPush.get(0).getRevision()));
}
/**
 * When the material database has a newer revision than the config origin
 * (forced here by updating the material through a non-config-aware listener),
 * auto-scheduling must refuse to schedule until they agree.
 */
@Test
public void shouldNotSchedulePipelineWhenConfigAndMaterialRevisionsMismatch() throws Exception {
    // we will use this worker to force material update without updating config
    MaterialUpdateListener byPassWorker = new MaterialUpdateListener(topic, materialDatabaseUpdater, logger, goDiskSpaceMonitor);
    // FIX: the returned modifications were stored in an unused local ('mod');
    // the value is not needed by this test.
    configTestRepo.addCodeToRepositoryAndPush("a.java", "added code file", "some java code");
    byPassWorker.onMessage(new MaterialUpdateMessage(material,123));
    // now db should have been updated, but config is still old
    RepoConfigOrigin configOrigin = (RepoConfigOrigin) goConfigService.pipelineConfigNamed(new CaseInsensitiveString(PIPELINE_NAME)).getOrigin();
    assertThat(configOrigin.getRevision(),is(firstRevisions.latestRevision()));
    buildCauseProducerService.autoSchedulePipeline(PIPELINE_NAME,new ServerHealthStateOperationResult(),123);
    scheduleHelper.waitForNotScheduled(5, PIPELINE_NAME);
}
@Test
// unfortunately there is no way to know why revisions would mismatch during manual trigger.
// We already let all manual triggers to bypass revision match check
public void shouldSchedulePipelineWhenConfigAndMaterialRevisionsMismatch_AndManuallyTriggered() throws Exception {
// we will use this worker to force material update without updating config
MaterialUpdateListener byPassWorker = new MaterialUpdateListener(topic, materialDatabaseUpdater, logger, goDiskSpaceMonitor);
List<Modification> lastPush = configTestRepo.addCodeToRepositoryAndPush("a.java", "added code file", "some java code");
byPassWorker.onMessage(new MaterialUpdateMessage(material,123));
//now db should have been updated, but config is still old
RepoConfigOrigin configOrigin = (RepoConfigOrigin) goConfigService.pipelineConfigNamed(new CaseInsensitiveString(PIPELINE_NAME)).getOrigin();
assertThat(configOrigin.getRevision(),is(firstRevisions.latestRevision()));
// manual trigger bypasses the revision-match check and must schedule anyway
final HashMap<String, String> revisions = new HashMap<String, String>();
final HashMap<String, String> environmentVariables = new HashMap<String, String>();
buildCauseProducer.manualProduceBuildCauseAndSave(PIPELINE_NAME, Username.ANONYMOUS,
new ScheduleOptions(revisions, environmentVariables, new HashMap<String, String>()), new ServerHealthStateOperationResult());
Map<String, BuildCause> afterLoad = scheduleHelper.waitForAnyScheduled(5);
assertThat(afterLoad.keySet(), hasItem(PIPELINE_NAME));
BuildCause cause = afterLoad.get(PIPELINE_NAME);
assertThat(cause.getBuildCauseMessage(), containsString("Forced by anonymous"));
assertThat("buildCauseRevisionShouldMatchLastPushedCommit",
cause.getMaterialRevisions().latestRevision(), is(lastPush.get(0).getRevision()));
}
// A manual trigger must first reload the pipeline definition from the config
// repo: both the config origin and the build-cause revision must point at the
// commit that added the new stage.
@Test
public void shouldReloadPipelineConfigurationWhenManuallyTriggered() throws Exception
{
// we change configuration of the pipeline by pushing new stage to config repo
pipelineConfig = PipelineConfigMother.createPipelineConfigWithStages("pipe1", "build", "test","newStage");
pipelineConfig.materialConfigs().clear();
pipelineConfig.materialConfigs().add(materialConfig);
List<Modification> mod = configTestRepo.addPipelineToRepositoryAndPush(fileName, pipelineConfig);
final HashMap<String, String> revisions = new HashMap<String, String>();
final HashMap<String, String> environmentVariables = new HashMap<String, String>();
buildCauseProducer.manualProduceBuildCauseAndSave(PIPELINE_NAME, Username.ANONYMOUS,
new ScheduleOptions(revisions, environmentVariables, new HashMap<String, String>()), new ServerHealthStateOperationResult());
Map<String, BuildCause> afterLoad = scheduleHelper.waitForAnyScheduled(5);
assertThat(afterLoad.keySet(), hasItem(PIPELINE_NAME));
BuildCause cause = afterLoad.get(PIPELINE_NAME);
assertThat(cause.getBuildCauseMessage(), containsString("Forced by anonymous"));
PipelineConfig pipelineConfigAfterSchedule = goConfigService.pipelineConfigNamed(pipelineConfig.name());
RepoConfigOrigin configOriginAfterSchedule = (RepoConfigOrigin) pipelineConfigAfterSchedule.getOrigin();
String lastPushedRevision = mod.get(0).getRevision();
assertThat("revisionOfPipelineConfigOriginShouldMatchLastPushedCommit",
configOriginAfterSchedule.getRevision(),is(lastPushedRevision));
assertThat("buildCauseRevisionShouldMatchLastPushedCommit",
cause.getMaterialRevisions().latestRevision(),is(lastPushedRevision));
}
@Test
// Verifies that when the pipeline definition is removed from the config repo,
// a manual trigger reloads the (now empty) config and does NOT schedule the pipeline.
public void shouldNotScheduleWhenPipelineRemovedFromConfigRepoWhenManuallyTriggered() throws Exception
{
// Push a config-repo commit whose cruise config no longer contains any pipelines.
configTestRepo.addCodeToRepositoryAndPush(fileName, "removed pipeline from configuration",
"<?xml version=\"1.0\" encoding=\"utf-8\"?>\n"
+ "<cruise xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:noNamespaceSchemaLocation=\"cruise-config.xsd\" schemaVersion=\"38\">\n"
+ "</cruise>");
final HashMap<String, String> revisions = new HashMap<String, String>();
final HashMap<String, String> environmentVariables = new HashMap<String, String>();
buildCauseProducer.manualProduceBuildCauseAndSave(PIPELINE_NAME, Username.ANONYMOUS,
new ScheduleOptions(revisions, environmentVariables, new HashMap<String, String>()), new ServerHealthStateOperationResult());
waitForMaterialNotInProgress();
// config is correct
mergedGoConfig.throwExceptionIfExists();
// Nothing queued for scheduling and the pipeline is gone from the merged config.
assertThat(pipelineScheduleQueue.toBeScheduled().keySet(), IsNot.not(hasItem(PIPELINE_NAME)));
assertThat(goConfigService.hasPipelineNamed(pipelineConfig.name()),is(false));
}
@Test
// Verifies that a manual trigger reloads config from the config repo AND updates a
// material (a second git repo) that was newly added to the pipeline by that config change.
public void shouldReloadPipelineConfigurationAndUpdateNewMaterialWhenManuallyTriggered() throws Exception
{
GitTestRepo otherGitRepo = new GitTestRepo();
pipelineConfig = PipelineConfigMother.createPipelineConfigWithStages("pipe1", "build", "test");
pipelineConfig.materialConfigs().clear();
materialConfig = hgRepo.createMaterialConfig("dest1");
materialConfig.setAutoUpdate(true);
pipelineConfig.materialConfigs().add(materialConfig);
// new material is added
GitMaterial gitMaterial = otherGitRepo.createMaterial("dest2");
gitMaterial.setAutoUpdate(true);
MaterialConfig otherMaterialConfig = gitMaterial.config();
otherMaterialConfig.setAutoUpdate(true);
pipelineConfig.materialConfigs().add(otherMaterialConfig);
List<Modification> mod = configTestRepo.addPipelineToRepositoryAndPush(fileName, pipelineConfig);
final HashMap<String, String> revisions = new HashMap<String, String>();
final HashMap<String, String> environmentVariables = new HashMap<String, String>();
buildCauseProducer.manualProduceBuildCauseAndSave(PIPELINE_NAME, Username.ANONYMOUS,
new ScheduleOptions(revisions, environmentVariables, new HashMap<String, String>()), new ServerHealthStateOperationResult());
mergedGoConfig.throwExceptionIfExists();
// Longer timeout than the other tests: two materials must be polled before scheduling.
Map<String, BuildCause> afterLoad = scheduleHelper.waitForAnyScheduled(20);
assertThat(afterLoad.keySet(), hasItem(PIPELINE_NAME));
BuildCause cause = afterLoad.get(PIPELINE_NAME);
assertThat(cause.getBuildCauseMessage(), containsString("Forced by anonymous"));
PipelineConfig pipelineConfigAfterSchedule = goConfigService.pipelineConfigNamed(pipelineConfig.name());
RepoConfigOrigin configOriginAfterSchedule = (RepoConfigOrigin) pipelineConfigAfterSchedule.getOrigin();
String lastPushedRevision = mod.get(0).getRevision();
assertThat("revisionOfPipelineConfigOriginShouldMatchLastPushedCommit",
configOriginAfterSchedule.getRevision(),is(lastPushedRevision));
assertThat(pipelineConfig.materialConfigs(), hasItem(otherMaterialConfig));
assertThat("buildCauseRevisionShouldMatchLastPushedCommit",
cause.getMaterialRevisions().latestRevision(),is(lastPushedRevision));
// update of committed material happened during manual trigger
MaterialRevisions modificationsInDb = materialRepository.findLatestModification(gitMaterial);
assertThat(modificationsInDb.latestRevision(),is(otherGitRepo.latestModification().getRevision()));
}
@Test
// Verifies that a manual trigger with an explicitly pinned (older) material revision
// schedules with that revision, while the config origin still advances to the newest commit.
public void shouldSchedulePipelineRerunWithSpecifiedRevisions() throws Exception
{
List<Modification> firstBuildModifications = configTestRepo.addCodeToRepositoryAndPush("a.java", "added first code file", "some java code");
materialUpdateService.updateMaterial(material);
waitForMaterialNotInProgress();
mergedGoConfig.throwExceptionIfExists();
final HashMap<String, String> revisions = new HashMap<String, String>();
final HashMap<String, String> environmentVariables = new HashMap<String, String>();
// First trigger: no explicit revisions, schedules at the latest commit.
buildCauseProducer.manualProduceBuildCauseAndSave(PIPELINE_NAME, Username.ANONYMOUS,
new ScheduleOptions(revisions, environmentVariables, new HashMap<String, String>()), new ServerHealthStateOperationResult());
mergedGoConfig.throwExceptionIfExists();
Map<String, BuildCause> afterLoad = scheduleHelper.waitForAnyScheduled(5);
assertThat(afterLoad.keySet(), hasItem(PIPELINE_NAME));
BuildCause cause = afterLoad.get(PIPELINE_NAME);
assertThat(cause.getBuildCauseMessage(), containsString("Forced by anonymous"));
// Push a second commit so the explicit revision below is one commit behind HEAD.
List<Modification> secondBuildModifications = configTestRepo.addCodeToRepositoryAndPush("a.java", "added second code file", "some java code");
materialUpdateService.updateMaterial(material);
waitForMaterialNotInProgress();
// Clear the queue so the next wait observes only the second, pinned-revision trigger.
pipelineScheduleQueue.clear();
// revision will be older by 1 commit -
// formally this is scm-config-consistency violation but we let this schedule because of manual trigger
String explicitRevision = firstBuildModifications.get(0).getRevision();
revisions.put(materialConfig.getPipelineUniqueFingerprint(), explicitRevision);
buildCauseProducer.manualProduceBuildCauseAndSave(PIPELINE_NAME, new Username(new CaseInsensitiveString("Admin")),
new ScheduleOptions(revisions, environmentVariables, new HashMap<String, String>()), new ServerHealthStateOperationResult());
mergedGoConfig.throwExceptionIfExists();
afterLoad = scheduleHelper.waitForAnyScheduled(5);
assertThat(afterLoad.keySet(), hasItem(PIPELINE_NAME));
cause = afterLoad.get(PIPELINE_NAME);
assertThat(cause.getBuildCauseMessage(), containsString("Forced by Admin"));
PipelineConfig pipelineConfigAfterSchedule = goConfigService.pipelineConfigNamed(pipelineConfig.name());
RepoConfigOrigin configOriginAfterSchedule = (RepoConfigOrigin) pipelineConfigAfterSchedule.getOrigin();
String lastPushedRevision = secondBuildModifications.get(0).getRevision();
// Config follows HEAD; the build cause honors the explicitly requested older revision.
assertThat("revisionOfPipelineConfigOriginShouldMatchLastPushedCommit",
configOriginAfterSchedule.getRevision(),is(lastPushedRevision));
assertThat("buildCauseRevisionShouldMatchSpecifiedRevision",
cause.getMaterialRevisions().latestRevision(),is(explicitRevision));
}
@Test
// Same as the pinned-revision rerun above, but the pipeline uses the SAME hg material
// checked out into two destinations; only one destination's revision is pinned.
public void shouldSchedulePipelineWithSameMaterialIn2DestinationsWhenManuallyTriggered_WithSpecifiedRevisions() throws Exception
{
pipelineConfig = PipelineConfigMother.createPipelineConfigWithStages("pipe1", "build", "test");
pipelineConfig.materialConfigs().clear();
materialConfig = hgRepo.createMaterialConfig("dest1");
materialConfig.setAutoUpdate(true);
// new material is added
MaterialConfig otherMaterialConfig = hgRepo.createMaterialConfig("dest2");
otherMaterialConfig.setAutoUpdate(true);
pipelineConfig.materialConfigs().add(materialConfig);
pipelineConfig.materialConfigs().add(otherMaterialConfig);
List<Modification> firstBuildModifications = configTestRepo.addPipelineToRepositoryAndPush(fileName, pipelineConfig);
materialUpdateService.updateMaterial(material);
waitForMaterialNotInProgress();
mergedGoConfig.throwExceptionIfExists();
final HashMap<String, String> revisions = new HashMap<String, String>();
final HashMap<String, String> environmentVariables = new HashMap<String, String>();
// First trigger at the latest commit.
buildCauseProducer.manualProduceBuildCauseAndSave(PIPELINE_NAME, Username.ANONYMOUS,
new ScheduleOptions(revisions, environmentVariables, new HashMap<String, String>()), new ServerHealthStateOperationResult());
mergedGoConfig.throwExceptionIfExists();
Map<String, BuildCause> afterLoad = scheduleHelper.waitForAnyScheduled(5);
assertThat(afterLoad.keySet(), hasItem(PIPELINE_NAME));
BuildCause cause = afterLoad.get(PIPELINE_NAME);
assertThat(cause.getBuildCauseMessage(), containsString("Forced by anonymous"));
List<Modification> secondBuildModifications = configTestRepo.addCodeToRepositoryAndPush("a.java", "added code file", "some java code");
materialUpdateService.updateMaterial(material);
waitForMaterialNotInProgress();
pipelineScheduleQueue.clear();
// revision in dest 1 will be older by 1 commit - this is kind of scm-config-consistency violation
String explicitRevision = firstBuildModifications.get(0).getRevision();
// Pin only dest1 (keyed by pipeline-unique fingerprint); dest2 stays at latest.
revisions.put(materialConfig.getPipelineUniqueFingerprint(), explicitRevision);
buildCauseProducer.manualProduceBuildCauseAndSave(PIPELINE_NAME, new Username(new CaseInsensitiveString("Admin")),
new ScheduleOptions(revisions, environmentVariables, new HashMap<String, String>()), new ServerHealthStateOperationResult());
mergedGoConfig.throwExceptionIfExists();
afterLoad = scheduleHelper.waitForAnyScheduled(5);
assertThat(afterLoad.keySet(), hasItem(PIPELINE_NAME));
cause = afterLoad.get(PIPELINE_NAME);
assertThat(cause.getBuildCauseMessage(), containsString("Forced by Admin"));
PipelineConfig pipelineConfigAfterSchedule = goConfigService.pipelineConfigNamed(pipelineConfig.name());
RepoConfigOrigin configOriginAfterSchedule = (RepoConfigOrigin) pipelineConfigAfterSchedule.getOrigin();
String lastPushedRevision = secondBuildModifications.get(0).getRevision();
assertThat("revisionOfPipelineConfigOriginShouldMatchLastPushedCommit",
configOriginAfterSchedule.getRevision(),is(lastPushedRevision));
assertThat(pipelineConfigAfterSchedule.materialConfigs(), hasItem(otherMaterialConfig));
assertThat("buildCauseRevisionShouldMatchSpecifiedRevision",
cause.getMaterialRevisions().latestRevision(),is(explicitRevision));
}
}
| |
/*
* Copyright (c) 2008 Mozilla Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package org.wicketstuff.htmlvalidator.xml.langattributes;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.ErrorHandler;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.helpers.AttributesImpl;
/**
 * A {@link ContentHandler} decorator that, for elements in the XHTML namespace,
 * removes any attribute literally named {@code xml:lang} (local name
 * {@code "xml:lang"} in no namespace) before forwarding the event to the
 * delegate. While filtering, it also checks that such an {@code xml:lang}
 * attribute is accompanied by a {@code lang} attribute in the XML namespace
 * with the same (ASCII-case-insensitive) value, reporting a recoverable error
 * via the supplied {@link ErrorHandler} when the values disagree.
 *
 * <p>Fix over the previous revision: attribute and namespace names are now
 * compared with {@code equals()} instead of reference identity ({@code ==}).
 * Identity comparison only works when the upstream parser interns these exact
 * strings, which is not guaranteed by the SAX contract.</p>
 *
 * <p>Not thread-safe: the document locator is held in a mutable field.</p>
 */
public class XmlLangAttributeDroppingContentHandlerWrapper implements ContentHandler {

    /** Namespace of elements whose attributes are filtered. */
    private static final String XHTML_NS = "http://www.w3.org/1999/xhtml";

    /** The XML namespace, home of the real {@code xml:lang} attribute. */
    private static final String XML_NS = "http://www.w3.org/XML/1998/namespace";

    /** Handler that receives all (possibly filtered) events. */
    private final ContentHandler delegate;

    /** Receives the mismatch error; may be {@code null} to disable reporting. */
    private final ErrorHandler errorHandler;

    /** Last locator supplied by the parser; used for error positions. */
    private Locator locator = null;

    /**
     * @param delegate     handler to forward all events to (required)
     * @param errorHandler handler for xml:lang/lang mismatch errors, or {@code null}
     */
    public XmlLangAttributeDroppingContentHandlerWrapper(ContentHandler delegate, ErrorHandler errorHandler) {
        this.delegate = delegate;
        this.errorHandler = errorHandler;
    }

    /** Forwards to the delegate unchanged. */
    public void characters(char[] arg0, int arg1, int arg2) throws SAXException {
        delegate.characters(arg0, arg1, arg2);
    }

    /** Forwards to the delegate unchanged. */
    public void endDocument() throws SAXException {
        delegate.endDocument();
    }

    /** Forwards to the delegate unchanged. */
    public void endElement(String arg0, String arg1, String arg2)
            throws SAXException {
        delegate.endElement(arg0, arg1, arg2);
    }

    /** Forwards to the delegate unchanged. */
    public void endPrefixMapping(String arg0) throws SAXException {
        delegate.endPrefixMapping(arg0);
    }

    /** Forwards to the delegate unchanged. */
    public void ignorableWhitespace(char[] arg0, int arg1, int arg2)
            throws SAXException {
        delegate.ignorableWhitespace(arg0, arg1, arg2);
    }

    /** Forwards to the delegate unchanged. */
    public void processingInstruction(String arg0, String arg1)
            throws SAXException {
        delegate.processingInstruction(arg0, arg1);
    }

    /** Remembers the locator for error reporting, then forwards it. */
    public void setDocumentLocator(Locator arg0) {
        locator = arg0;
        delegate.setDocumentLocator(arg0);
    }

    /** Forwards to the delegate unchanged. */
    public void skippedEntity(String arg0) throws SAXException {
        delegate.skippedEntity(arg0);
    }

    /** Forwards to the delegate unchanged. */
    public void startDocument() throws SAXException {
        delegate.startDocument();
    }

    /**
     * Forwards the element start, filtering attributes of XHTML elements.
     *
     * @param ns         element namespace URI
     * @param arg1       element local name
     * @param arg2       element qualified name
     * @param attributes element attributes (filtered only for XHTML elements)
     * @throws SAXException from the delegate or the error handler
     */
    public void startElement(String ns, String arg1, String arg2,
            Attributes attributes) throws SAXException {
        if (XHTML_NS.equals(ns)) {
            delegate.startElement(ns, arg1, arg2, filterAttributes(attributes));
        } else {
            delegate.startElement(ns, arg1, arg2, attributes);
        }
    }

    /**
     * ASCII-only case-insensitive comparison (folds only 'A'-'Z').
     * Deliberately narrower than {@link String#equalsIgnoreCase}, which also
     * folds non-ASCII case pairs. Null-safe: two nulls are equal, a single
     * null is unequal.
     */
    private static boolean equalsIgnoreAsciiCase(String one,
            String other) {
        if (one == null || other == null) {
            // true only when both are null (previously a null first argument NPE'd)
            return one == other;
        }
        if (one.length() != other.length()) {
            return false;
        }
        for (int i = 0; i < one.length(); i++) {
            char c0 = one.charAt(i);
            char c1 = other.charAt(i);
            if (c0 >= 'A' && c0 <= 'Z') {
                c0 += 0x20; // fold to lower case
            }
            if (c1 >= 'A' && c1 <= 'Z') {
                c1 += 0x20;
            }
            if (c0 != c1) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns {@code attributes} minus the first no-namespace attribute whose
     * local name is literally {@code "xml:lang"}; if no such attribute exists,
     * returns the original object untouched. When one is dropped, its value is
     * compared against the XML-namespace {@code lang} attribute (wherever it
     * appears in the list) and a mismatch is reported as a recoverable error.
     *
     * @throws SAXException if the error handler rethrows the reported error
     */
    private Attributes filterAttributes(Attributes attributes) throws SAXException {
        int len = attributes.getLength();
        String langValue = null;
        for (int i = 0; i < len; i++) {
            String local = attributes.getLocalName(i);
            String uri = attributes.getURI(i);
            if ("lang".equals(local) && XML_NS.equals(uri)) {
                // remember the real xml:lang value seen before the bogus attribute
                langValue = attributes.getValue(i);
            } else if ("xml:lang".equals(local) && "".equals(uri)) {
                String xmlLangValue = attributes.getValue(i);
                // copy everything except index i into a fresh attribute list
                AttributesImpl attributesImpl = new AttributesImpl();
                for (int j = 0; j < i; j++) {
                    attributesImpl.addAttribute(attributes.getURI(j), attributes.getLocalName(j), attributes.getQName(j), attributes.getType(j), attributes.getValue(j));
                }
                for (int k = i + 1; k < len; k++) {
                    uri = attributes.getURI(k);
                    local = attributes.getLocalName(k);
                    if ("lang".equals(local) && XML_NS.equals(uri)) {
                        // the real xml:lang may follow the dropped attribute
                        langValue = attributes.getValue(k);
                    }
                    attributesImpl.addAttribute(uri, local, attributes.getQName(k), attributes.getType(k), attributes.getValue(k));
                }
                if (errorHandler != null && !equalsIgnoreAsciiCase(xmlLangValue, langValue)) {
                    errorHandler.error(new SAXParseException("When the attribute \u201Cxml:lang\u201D in no namespace is specified, the element must also have the attribute \u201Clang\u201D present with the same value.", locator));
                }
                return attributesImpl;
            }
        }
        return attributes;
    }

    /** Forwards to the delegate unchanged. */
    public void startPrefixMapping(String arg0, String arg1)
            throws SAXException {
        delegate.startPrefixMapping(arg0, arg1);
    }
}
| |
/*
* Copyright 1&1 Internet AG, https://github.com/1and1/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.oneandone.troilus;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.Map.Entry;
import java.util.stream.Collectors;
import net.oneandone.troilus.Context;
import net.oneandone.troilus.DeleteQuery;
import net.oneandone.troilus.DeleteQueryDataImpl;
import net.oneandone.troilus.ListReadQuery;
import net.oneandone.troilus.ListReadQueryDataImpl;
import net.oneandone.troilus.ColumnName;
import net.oneandone.troilus.SingleReadQuery;
import net.oneandone.troilus.SingleReadQueryDataImpl;
import net.oneandone.troilus.UpdateQuery;
import net.oneandone.troilus.WriteQueryDataImpl;
import net.oneandone.troilus.interceptor.CascadeOnDeleteInterceptor;
import net.oneandone.troilus.interceptor.CascadeOnWriteInterceptor;
import net.oneandone.troilus.interceptor.DeleteQueryData;
import net.oneandone.troilus.interceptor.DeleteQueryRequestInterceptor;
import net.oneandone.troilus.interceptor.ListReadQueryData;
import net.oneandone.troilus.interceptor.ListReadQueryRequestInterceptor;
import net.oneandone.troilus.interceptor.ListReadQueryResponseInterceptor;
import net.oneandone.troilus.interceptor.QueryInterceptor;
import net.oneandone.troilus.interceptor.SingleReadQueryData;
import net.oneandone.troilus.interceptor.SingleReadQueryRequestInterceptor;
import net.oneandone.troilus.interceptor.SingleReadQueryResponseInterceptor;
import net.oneandone.troilus.interceptor.WriteQueryData;
import net.oneandone.troilus.interceptor.WriteQueryRequestInterceptor;
import net.oneandone.troilus.java7.Batchable;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import com.datastax.driver.core.ExecutionInfo;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.policies.RetryPolicy;
import com.datastax.driver.core.querybuilder.Clause;
import com.datastax.driver.core.ConsistencyLevel;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.ListenableFuture;
/**
* DaoImpl
*/
public class DaoImpl implements Dao {
// Immutable per-instance configuration (session, table, consistency, interceptors, ...).
// All with*() methods derive a new Context and wrap it in a new DaoImpl.
private final Context ctx;
/**
 * @param session the underlying session
 * @param tablename the tablename
 */
public DaoImpl(Session session, String tablename) {
this(new Context(session, tablename));
}
/**
 * Internal copy constructor used by the immutable with*() builder methods.
 *
 * @param ctx the pre-built context to wrap
 */
private DaoImpl(Context ctx) {
this.ctx = ctx;
}
/** Returns a new Dao that executes statements with the given consistency level. */
@Override
public Dao withConsistency(ConsistencyLevel consistencyLevel) {
return new DaoImpl(ctx.withConsistency(consistencyLevel));
}
/** Returns a new Dao that uses the given serial consistency level. */
@Override
public Dao withSerialConsistency(ConsistencyLevel consistencyLevel) {
return new DaoImpl(ctx.withSerialConsistency(consistencyLevel));
}
/** Returns a new Dao with query tracking enabled. */
@Override
public Dao withTracking() {
return new DaoImpl(ctx.withEnableTracking());
}
/** Returns a new Dao with query tracking disabled. */
@Override
public Dao withoutTracking() {
return new DaoImpl(ctx.withDisableTracking());
}
/** Returns a new Dao that applies the given retry policy. */
@Override
public Dao withRetryPolicy(RetryPolicy policy) {
return new DaoImpl(ctx.withRetryPolicy(policy));
}
/**
 * Returns a new Dao with the given interceptor registered.
 *
 * <p>The raw interceptor is always registered; in addition, for every
 * java8-flavored interceptor interface it implements, a java7 adapter is
 * registered as well so the java7 query implementations can invoke it.
 * An interceptor may implement several of these interfaces, so each check
 * is applied independently (no else-if chain).</p>
 *
 * @param queryInterceptor the interceptor to register (must not be null)
 * @return a new Dao instance with the interceptor(s) added
 */
@Override
public Dao withInterceptor(QueryInterceptor queryInterceptor) {
    Context context = ctx.withInterceptor(queryInterceptor);
    // instanceof replaces the equivalent but non-idiomatic
    // X.class.isAssignableFrom(queryInterceptor.getClass()) checks
    if (queryInterceptor instanceof ListReadQueryRequestInterceptor) {
        context = context.withInterceptor(new ListReadQueryRequestInterceptorAdapter((ListReadQueryRequestInterceptor) queryInterceptor));
    }
    if (queryInterceptor instanceof ListReadQueryResponseInterceptor) {
        context = context.withInterceptor(new ListReadQueryResponseInterceptorAdapter((ListReadQueryResponseInterceptor) queryInterceptor));
    }
    if (queryInterceptor instanceof SingleReadQueryRequestInterceptor) {
        context = context.withInterceptor(new SingleReadQueryRequestInterceptorAdapter((SingleReadQueryRequestInterceptor) queryInterceptor));
    }
    if (queryInterceptor instanceof SingleReadQueryResponseInterceptor) {
        context = context.withInterceptor(new SingleReadQueryResponseInterceptorAdapter((SingleReadQueryResponseInterceptor) queryInterceptor));
    }
    if (queryInterceptor instanceof WriteQueryRequestInterceptor) {
        context = context.withInterceptor(new WriteQueryRequestInterceptorAdapter((WriteQueryRequestInterceptor) queryInterceptor));
    }
    if (queryInterceptor instanceof DeleteQueryRequestInterceptor) {
        context = context.withInterceptor(new DeleteQueryRequestInterceptorAdapter((DeleteQueryRequestInterceptor) queryInterceptor));
    }
    if (queryInterceptor instanceof CascadeOnWriteInterceptor) {
        context = context.withInterceptor(new CascadeOnWriteInterceptorAdapter((CascadeOnWriteInterceptor) queryInterceptor));
    }
    if (queryInterceptor instanceof CascadeOnDeleteInterceptor) {
        context = context.withInterceptor(new CascadeOnDeleteInterceptorAdapter((CascadeOnDeleteInterceptor) queryInterceptor));
    }
    return new DaoImpl(context);
}
/**
 * Creates an insertion for the given bean: its mapped properties (limited to
 * the table's columns) become the values to mutate.
 */
@Override
public Insertion writeEntity(Object entity) {
ImmutableMap<String, com.google.common.base.Optional<Object>> values = ctx.getBeanMapper().toValues(entity, ctx.getDbSession().getColumnNames());
return new InsertQueryAdapter(ctx, new InsertQuery(ctx, new WriteQueryDataImpl().valuesToMutate(values)));
}
/** Creates an update restricted by the given where clauses. */
@Override
public UpdateWithUnitAndCounter writeWhere(Clause... clauses) {
return new UpdateQueryAdapter(ctx, new UpdateQuery(ctx, new WriteQueryDataImpl().whereConditions((ImmutableList.copyOf(clauses)))));
}
/** Creates a key-addressed write; the map holds all parts of the (composed) primary key. */
@Override
public WriteWithCounter writeWithKey(ImmutableMap<String, Object> composedKeyParts) {
return new UpdateQueryAdapter(ctx, new UpdateQuery(ctx, new WriteQueryDataImpl().keys(composedKeyParts)));
}
// The remaining overloads are convenience wrappers that assemble the key map
// (or unwrap typed ColumnName keys) and delegate to the map-based variant above.
@Override
public WriteWithCounter writeWithKey(String keyName, Object keyValue) {
return writeWithKey(ImmutableMap.of(keyName, keyValue));
}
@Override
public WriteWithCounter writeWithKey(String keyName1, Object keyValue1,
String keyName2, Object keyValue2) {
return writeWithKey(ImmutableMap.of(keyName1, keyValue1,
keyName2, keyValue2));
}
@Override
public WriteWithCounter writeWithKey(String keyName1, Object keyValue1,
String keyName2, Object keyValue2,
String keyName3, Object keyValue3) {
return writeWithKey(ImmutableMap.of(keyName1, keyValue1,
keyName2, keyValue2,
keyName3, keyValue3));
}
@Override
public <T> WriteWithCounter writeWithKey(ColumnName<T> keyName, T keyValue) {
return writeWithKey(keyName.getName(), (Object) keyValue);
}
@Override
public <T, E> WriteWithCounter writeWithKey(ColumnName<T> keyName1, T keyValue1,
ColumnName<E> keyName2, E keyValue2) {
return writeWithKey(keyName1.getName(), (Object) keyValue1,
keyName2.getName(), (Object) keyValue2);
}
@Override
public <T, E, F> WriteWithCounter writeWithKey(ColumnName<T> keyName1, T keyValue1,
ColumnName<E> keyName2, E keyValue2,
ColumnName<F> keyName3, F keyValue3) {
return writeWithKey(keyName1.getName(), (Object) keyValue1,
keyName2.getName(), (Object) keyValue2,
keyName3.getName(), (Object) keyValue3);
}
/**
 * Creates a deletion restricted by the given where clauses.
 *
 * @param whereConditions the where clauses to apply
 * @return the deletion query adapter
 */
@Override
public Deletion deleteWhere(Clause... whereConditions) {
    return new DeleteQueryAdapter(ctx, new DeleteQuery(ctx, new DeleteQueryDataImpl().whereConditions(ImmutableList.copyOf(whereConditions))));
}
// Convenience overloads: assemble the (composed) key map, or unwrap typed
// ColumnName keys, and delegate to the map-based deleteWithKey below.
@Override
public Deletion deleteWithKey(String keyName, Object keyValue) {
return deleteWithKey(ImmutableMap.of(keyName, keyValue));
}
@Override
public Deletion deleteWithKey(String keyName1, Object keyValue1,
String keyName2, Object keyValue2) {
return deleteWithKey(ImmutableMap.of(keyName1, keyValue1,
keyName2, keyValue2));
}
@Override
public Deletion deleteWithKey(String keyName1, Object keyValue1,
String keyName2, Object keyValue2,
String keyName3, Object keyValue3) {
return deleteWithKey(ImmutableMap.of(keyName1, keyValue1,
keyName2, keyValue2,
keyName3, keyValue3));
}
@Override
public <T> Deletion deleteWithKey(ColumnName<T> keyName, T keyValue) {
return deleteWithKey(keyName.getName(), (Object) keyValue);
}
@Override
public <T, E> Deletion deleteWithKey(ColumnName<T> keyName1, T keyValue1,
ColumnName<E> keyName2, E keyValue2) {
return deleteWithKey(keyName1.getName(), (Object) keyValue1,
keyName2.getName(), (Object) keyValue2);
}
@Override
public <T, E, F> Deletion deleteWithKey(ColumnName<T> keyName1, T keyValue1,
ColumnName<E> keyName2, E keyValue2,
ColumnName<F> keyName3, F keyValue3) {
return deleteWithKey(keyName1.getName(), (Object) keyValue1,
keyName2.getName(), (Object) keyValue2,
keyName3.getName(), (Object) keyValue3);
}
/**
 * Creates a key-addressed deletion; the map holds all parts of the (composed)
 * primary key. NOTE(review): not annotated @Override like its siblings —
 * presumably not part of the Dao interface; confirm before changing.
 */
public Deletion deleteWithKey(ImmutableMap<String, Object> keyNameValuePairs) {
return new DeleteQueryAdapter(ctx, new DeleteQuery(ctx, new DeleteQueryDataImpl().key(keyNameValuePairs)));
}
/** Creates a single-record read addressed by the full (composed) primary key. */
@Override
public SingleReadWithUnit<Optional<Record>> readWithKey(ImmutableMap<String, Object> composedkey) {
return new SingleReadQueryAdapter(ctx, new SingleReadQuery(ctx, new SingleReadQueryDataImpl().key(composedkey)));
}
// Convenience overloads: assemble the key map, or unwrap typed ColumnName keys,
// and delegate to the map-based variant above.
@Override
public SingleReadWithUnit<Optional<Record>> readWithKey(String keyName, Object keyValue) {
return readWithKey(ImmutableMap.of(keyName, keyValue));
}
@Override
public SingleReadWithUnit<Optional<Record>> readWithKey(String keyName1, Object keyValue1,
String keyName2, Object keyValue2) {
return readWithKey(ImmutableMap.of(keyName1, keyValue1,
keyName2, keyValue2));
}
@Override
public SingleReadWithUnit<Optional<Record>> readWithKey(String keyName1, Object keyValue1,
String keyName2, Object keyValue2,
String keyName3, Object keyValue3) {
return readWithKey(ImmutableMap.of(keyName1, keyValue1,
keyName2, keyValue2,
keyName3, keyValue3));
}
@Override
public <T> SingleReadWithUnit<Optional<Record>> readWithKey(ColumnName<T> keyName, T keyValue) {
return readWithKey(keyName.getName(), (Object) keyValue);
}
@Override
public <T, E> SingleReadWithUnit<Optional<Record>> readWithKey(ColumnName<T> keyName1, T keyValue1,
ColumnName<E> keyName2, E keyValue2) {
return readWithKey(keyName1.getName(), (Object) keyValue1,
keyName2.getName(), (Object) keyValue2);
}
@Override
public <T, E, F> SingleReadWithUnit<Optional<Record>> readWithKey(ColumnName<T> keyName1, T keyValue1,
ColumnName<E> keyName2, E keyValue2,
ColumnName<F> keyName3, F keyValue3) {
return readWithKey(keyName1.getName(), (Object) keyValue1,
keyName2.getName(), (Object) keyValue2,
keyName3.getName(), (Object) keyValue3);
}
/**
 * Creates a list read for all rows whose key column matches any of the given
 * values (IN-style). Composed-key overloads wrap the fixed leading key parts
 * in single-element lists so every part is uniformly a list of candidates.
 */
@Override
public ListReadWithUnit<RecordList> readListWithKeys(String name, ImmutableList<Object> values) {
return new ListReadQueryAdapter(ctx, new ListReadQuery(ctx, new ListReadQueryDataImpl().keys(ImmutableMap.of(name, values))));
}
@Override
public ListReadWithUnit<RecordList> readListWithKeys(String composedKeyNamePart1, Object composedKeyValuePart1,
String composedKeyNamePart2, ImmutableList<Object> composedKeyValuesPart2) {
return new ListReadQueryAdapter(ctx, new ListReadQuery(ctx, new ListReadQueryDataImpl().keys(ImmutableMap.of(composedKeyNamePart1, ImmutableList.of(composedKeyValuePart1),
composedKeyNamePart2, composedKeyValuesPart2))));
}
@Override
public ListReadWithUnit<RecordList> readListWithKeys(String composedKeyNamePart1, Object composedKeyValuePart1,
String composedKeyNamePart2, Object composedKeyValuePart2,
String composedKeyNamePart3, ImmutableList<Object> composedKeyValuesPart3) {
return new ListReadQueryAdapter(ctx, new ListReadQuery(ctx, new ListReadQueryDataImpl().keys(ImmutableMap.of(composedKeyNamePart1, ImmutableList.of(composedKeyValuePart1),
composedKeyNamePart2, ImmutableList.of(composedKeyValuePart2),
composedKeyNamePart3, composedKeyValuesPart3))));
}
/** Creates a list read for all rows matching a (partial) key; single value per key part. */
@Override
public ListReadWithUnit<RecordList> readListWithKey(String composedKeyNamePart1, Object composedKeyValuePart1) {
return new ListReadQueryAdapter(ctx, new ListReadQuery(ctx, new ListReadQueryDataImpl().keys(ImmutableMap.of(composedKeyNamePart1, ImmutableList.of(composedKeyValuePart1)))));
}
@Override
public ListReadWithUnit<RecordList> readListWithKey(String composedKeyNamePart1, Object composedKeyValuePart1,
String composedKeyNamePart2, Object composedKeyValuePart2) {
return new ListReadQueryAdapter(ctx, new ListReadQuery(ctx, new ListReadQueryDataImpl().keys(ImmutableMap.of(composedKeyNamePart1, ImmutableList.of(composedKeyValuePart1),
composedKeyNamePart2, ImmutableList.of(composedKeyValuePart2)))));
}
// Typed ColumnName overloads: erase the element type and delegate to the
// string-keyed variants above (casts are safe by construction of ColumnName).
@SuppressWarnings("unchecked")
@Override
public <T> ListReadWithUnit<RecordList> readListWithKeys(ColumnName<T> name, ImmutableList<T> values) {
return readListWithKeys(name.getName(), (ImmutableList<Object>) values);
}
@SuppressWarnings("unchecked")
@Override
public <T, E> ListReadWithUnit<RecordList> readListWithKeys(ColumnName<T> composedKeyNamePart1, T composedKeyValuePart1,
ColumnName<E> composedKeyNamePart2, ImmutableList<E> composedKeyValuesPart2) {
return readListWithKeys(composedKeyNamePart1.getName(), (Object) composedKeyValuePart1,
composedKeyNamePart2.getName(), (ImmutableList<Object>) composedKeyValuesPart2);
}
@SuppressWarnings("unchecked")
@Override
public <T, E, F> ListReadWithUnit<RecordList> readListWithKeys( ColumnName<T> composedKeyNamePart1, T composedKeyValuePart1,
ColumnName<E> composedKeyNamePart2, E composedKeyValuePart2,
ColumnName<F> composedKeyNamePart3, ImmutableList<F> composedKeyValuesPart3) {
return readListWithKeys(composedKeyNamePart1.getName(), (Object) composedKeyValuePart1,
composedKeyNamePart2.getName(), (Object) composedKeyValuePart2,
composedKeyNamePart3.getName(), (ImmutableList<Object>) composedKeyValuesPart3);
}
@Override
public <T> ListReadWithUnit<RecordList> readListWithKey(ColumnName<T> name, T value) {
return readListWithKey(name.getName(), (Object) value);
}
@Override
public <T, E> ListReadWithUnit<RecordList> readListWithKey(ColumnName<T> composedKeyNamePart1, T composedKeyValuePart1,
ColumnName<E> composedKeyNamePart2, E composedKeyValuePart2) {
return readListWithKey(composedKeyNamePart1.getName(), (Object) composedKeyValuePart1,
composedKeyNamePart2.getName(), (Object) composedKeyValuePart2);
}
/** Creates a list read restricted by arbitrary where clauses. */
@Override
public ListReadWithUnit<RecordList> readWhere(Clause... clauses) {
return new ListReadQueryAdapter(ctx, new ListReadQuery(ctx, new ListReadQueryDataImpl().whereConditions(ImmutableSet.copyOf(clauses))));
}
@Override
public ListReadWithUnit<RecordList> readAll() {
return new ListReadQueryAdapter(ctx, new ListReadQuery(ctx, new ListReadQueryDataImpl().columnsToFetch(ImmutableMap.of())));
}
@Override
public String toString() {
    // Diagnostic representation exposing only the underlying context.
    return MoreObjects.toStringHelper(this).add("ctx", ctx).toString();
}
/**
 * Java 8 adapter of a {@code ListReadQueryData}.
 *
 * Wraps the Java 7 query-data representation and bridges between
 * {@code java.util.Optional} (the Java 8 API) and nullable values
 * (the Java 7 API). Every mutator returns a new adapter over the
 * delegate's result, so instances behave immutably.
 */
static class ListReadQueryDataAdapter implements ListReadQueryData {
// Wrapped Java 7 query data; every call delegates to it.
private final net.oneandone.troilus.java7.interceptor.ListReadQueryData data;
// Creates an adapter over empty (default) query data.
ListReadQueryDataAdapter() {
this(new ListReadQueryDataImpl());
}
private ListReadQueryDataAdapter(net.oneandone.troilus.java7.interceptor.ListReadQueryData data) {
this.data = data;
}
// ---- mutators: each returns a new adapter over the delegate's result ----
@Override
public ListReadQueryDataAdapter keys(ImmutableMap<String, ImmutableList<Object>> keys) {
return new ListReadQueryDataAdapter(data.keys(keys));
}
@Override
public ListReadQueryDataAdapter whereConditions(ImmutableSet<Clause> whereConditions) {
return new ListReadQueryDataAdapter(data.whereConditions(whereConditions));
}
@Override
public ListReadQueryDataAdapter columnsToFetch(ImmutableMap<String, Boolean> columnsToFetch) {
return new ListReadQueryDataAdapter(data.columnsToFetch(columnsToFetch));
}
// Optional-valued settings are unwrapped to nullable for the Java 7 delegate.
@Override
public ListReadQueryDataAdapter limit(Optional<Integer> optionalLimit) {
return new ListReadQueryDataAdapter(data.limit(optionalLimit.orElse(null)));
}
@Override
public ListReadQueryDataAdapter allowFiltering(Optional<Boolean> optionalAllowFiltering) {
return new ListReadQueryDataAdapter(data.allowFiltering(optionalAllowFiltering.orElse(null)));
}
@Override
public ListReadQueryDataAdapter fetchSize(Optional<Integer> optionalFetchSize) {
return new ListReadQueryDataAdapter(data.fetchSize(optionalFetchSize.orElse(null)));
}
@Override
public ListReadQueryDataAdapter distinct(Optional<Boolean> optionalDistinct) {
return new ListReadQueryDataAdapter(data.distinct(optionalDistinct.orElse(null)));
}
// ---- accessors: nullable delegate values are re-wrapped as Optional ----
@Override
public ImmutableMap<String, ImmutableList<Object>> getKeys() {
return data.getKeys();
}
@Override
public ImmutableSet<Clause> getWhereConditions() {
return data.getWhereConditions();
}
@Override
public ImmutableMap<String, Boolean> getColumnsToFetch() {
return data.getColumnsToFetch();
}
@Override
public Optional<Integer> getLimit() {
return Optional.ofNullable(data.getLimit());
}
@Override
public Optional<Boolean> getAllowFiltering() {
return Optional.ofNullable(data.getAllowFiltering());
}
@Override
public Optional<Integer> getFetchSize() {
return Optional.ofNullable(data.getFetchSize());
}
@Override
public Optional<Boolean> getDistinct() {
return Optional.ofNullable(data.getDistinct());
}
// Converts any Java 8 ListReadQueryData back into the Java 7 representation,
// unwrapping each Optional to a nullable value.
static net.oneandone.troilus.java7.interceptor.ListReadQueryData convert(ListReadQueryData data) {
return new ListReadQueryDataImpl().keys(data.getKeys())
.whereConditions(data.getWhereConditions())
.columnsToFetch(data.getColumnsToFetch())
.limit(data.getLimit().orElse(null))
.allowFiltering(data.getAllowFiltering().orElse(null))
.fetchSize(data.getFetchSize().orElse(null))
.distinct(data.getDistinct().orElse(null));
}
}
/**
 * Java 8 adapter of a {@code RecordList}.
 *
 * Bridges between the Java 8 {@code RecordList} view and the underlying
 * {@code net.oneandone.troilus.java7.RecordList}, converting records and
 * subscribers lazily in both directions.
 */
static class RecordListAdapter implements RecordList {

    /** Wrapped Java 7 record list; all calls delegate to it. */
    private final net.oneandone.troilus.java7.RecordList recordList;

    private RecordListAdapter(net.oneandone.troilus.java7.RecordList recordList) {
        this.recordList = recordList;
    }

    /** Wraps a Java 7 record list as a Java 8 one. */
    static RecordList convertFromJava7(net.oneandone.troilus.java7.RecordList recordList) {
        return new RecordListAdapter(recordList);
    }

    @Override
    public ExecutionInfo getExecutionInfo() {
        return recordList.getExecutionInfo();
    }

    @Override
    public ImmutableList<ExecutionInfo> getAllExecutionInfo() {
        return recordList.getAllExecutionInfo();
    }

    @Override
    public boolean wasApplied() {
        return recordList.wasApplied();
    }

    @Override
    public Iterator<Record> iterator() {
        // Lazily converts each Java 7 record while iterating.
        return new Iterator<Record>() {
            private final Iterator<net.oneandone.troilus.java7.Record> iterator = recordList.iterator();

            @Override
            public boolean hasNext() {
                return iterator.hasNext();
            }

            @Override
            public Record next() {
                return RecordAdapter.convertFromJava7(iterator.next());
            }
        };
    }

    @Override
    public void subscribe(Subscriber<? super Record> subscriber) {
        recordList.subscribe(new RecordSubscriberAdapter(subscriber));
    }

    /** Forwards Java 7 record events to a Java 8 subscriber, converting each record. */
    private static final class RecordSubscriberAdapter implements Subscriber<net.oneandone.troilus.java7.Record> {

        private final Subscriber<? super Record> subscriber;

        public RecordSubscriberAdapter(Subscriber<? super Record> subscriber) {
            this.subscriber = subscriber;
        }

        @Override
        public void onSubscribe(Subscription s) {
            subscriber.onSubscribe(s);
        }

        @Override
        public void onNext(net.oneandone.troilus.java7.Record record) {
            subscriber.onNext(RecordAdapter.convertFromJava7(record));
        }

        @Override
        public void onError(Throwable t) {
            subscriber.onError(t);
        }

        @Override
        public void onComplete() {
            subscriber.onComplete();
        }
    }

    /** Wraps a Java 8 record list as a Java 7 one (the inverse of {@link #convertFromJava7}). */
    static net.oneandone.troilus.java7.RecordList convertToJava7(RecordList recordList) {
        return new net.oneandone.troilus.java7.RecordList() {

            @Override
            public boolean wasApplied() {
                return recordList.wasApplied();
            }

            @Override
            public ExecutionInfo getExecutionInfo() {
                return recordList.getExecutionInfo();
            }

            @Override
            public ImmutableList<ExecutionInfo> getAllExecutionInfo() {
                return recordList.getAllExecutionInfo();
            }

            // @Override added for consistency with the other members; these two
            // implement the java7 RecordList contract (subscribe/iterator) —
            // they were previously missing the annotation.
            @Override
            public void subscribe(Subscriber<? super net.oneandone.troilus.java7.Record> subscriber) {
                recordList.subscribe(new Java7RecordSubscriberAdapter(subscriber));
            }

            @Override
            public Iterator<net.oneandone.troilus.java7.Record> iterator() {
                return new Iterator<net.oneandone.troilus.java7.Record>() {
                    private final Iterator<Record> iterator = recordList.iterator();

                    @Override
                    public boolean hasNext() {
                        return iterator.hasNext();
                    }

                    @Override
                    public net.oneandone.troilus.java7.Record next() {
                        return RecordAdapter.convertToJava7(iterator.next());
                    }
                };
            }
        };
    }
}
// Forwards Java 8 record events to a Java 7 subscriber, converting each
// record on the way through (the inverse of RecordSubscriberAdapter).
static final class Java7RecordSubscriberAdapter implements Subscriber<Record> {
// Target Java 7 subscriber; immutable after construction.
private final Subscriber<? super net.oneandone.troilus.java7.Record> subscriber;
public Java7RecordSubscriberAdapter(Subscriber<? super net.oneandone.troilus.java7.Record> subscriber) {
this.subscriber = subscriber;
}
@Override
public void onSubscribe(Subscription s) {
subscriber.onSubscribe(s);
}
@Override
public void onNext(Record record) {
// Only onNext needs conversion; the other signals pass straight through.
subscriber.onNext(RecordAdapter.convertToJava7(record));
}
@Override
public void onError(Throwable t) {
subscriber.onError(t);
}
@Override
public void onComplete() {
subscriber.onComplete();
}
}
/**
 * Java 8 adapter of an {@code EntityList}.
 *
 * Delegates iteration and subscription to the wrapped
 * {@code net.oneandone.troilus.EntityList}.
 */
static class EntityListAdapter<F> extends ResultAdapter implements EntityList<F> {

    /** Wrapped entity list; all calls delegate to it. */
    private final net.oneandone.troilus.EntityList<F> entityList;

    EntityListAdapter(net.oneandone.troilus.EntityList<F> entityList) {
        super(entityList);
        this.entityList = entityList;
    }

    @Override
    public Iterator<F> iterator() {
        // Wrap rather than return entityList.iterator() directly: the wrapper
        // does not override remove(), so the default implementation throws
        // UnsupportedOperationException instead of exposing the delegate's
        // (possibly mutating) remove().
        return new Iterator<F>() {
            private final Iterator<F> recordIt = entityList.iterator();

            @Override
            public boolean hasNext() {
                return recordIt.hasNext();
            }

            @Override
            public F next() {
                return recordIt.next();
            }
        };
    }

    @Override
    public void subscribe(Subscriber<? super F> subscriber) {
        // SubscriberAdapter<F> is a Subscriber<F>, which the delegate accepts;
        // no raw types or unchecked suppression needed (previously this used a
        // raw SubscriberAdapter with @SuppressWarnings({"unchecked","rawtypes"})).
        entityList.subscribe(new SubscriberAdapter<F>(subscriber));
    }
}
// Narrows a Subscriber<? super F> to a Subscriber<F> by plain delegation;
// no element conversion takes place.
static final class SubscriberAdapter<F> implements Subscriber<F> {
// Target subscriber; immutable after construction.
private final Subscriber<? super F> subscriber;
public SubscriberAdapter(Subscriber<? super F> subscriber) {
this.subscriber = subscriber;
}
@Override
public void onSubscribe(Subscription s) {
subscriber.onSubscribe(s);
}
@Override
public void onNext(F t) {
subscriber.onNext(t);
}
@Override
public void onError(Throwable t) {
subscriber.onError(t);
}
@Override
public void onComplete() {
subscriber.onComplete();
}
}
/**
 * Java 8 adapter of a {@code WriteQueryData}.
 *
 * Wraps the Java 7 write-query data and bridges between
 * {@code java.util.Optional} (Java 8 API) and
 * {@code com.google.common.base.Optional} (Java 7 API). Mutators return a
 * new adapter over the delegate's result, so instances behave immutably.
 */
private static class WriteQueryDataAdapter implements WriteQueryData {
// Wrapped Java 7 write-query data; every call delegates to it.
private final net.oneandone.troilus.java7.interceptor.WriteQueryData data;
WriteQueryDataAdapter(net.oneandone.troilus.java7.interceptor.WriteQueryData data) {
this.data = data;
}
// ---- mutators: each returns a new adapter over the delegate's result ----
@Override
public WriteQueryDataAdapter keys(ImmutableMap<String, Object> keys) {
return new WriteQueryDataAdapter(data.keys(keys));
}
@Override
public WriteQueryDataAdapter whereConditions(ImmutableList<Clause> whereConditions) {
return new WriteQueryDataAdapter(data.whereConditions(whereConditions));
}
// Optional-valued maps are converted to Guava Optionals for the delegate.
@Override
public WriteQueryDataAdapter valuesToMutate(ImmutableMap<String, Optional<Object>> valuesToMutate) {
return new WriteQueryDataAdapter(data.valuesToMutate(toGuavaOptional(valuesToMutate)));
}
@Override
public WriteQueryDataAdapter setValuesToAdd(ImmutableMap<String, ImmutableSet<Object>> setValuesToAdd) {
return new WriteQueryDataAdapter(data.setValuesToAdd(setValuesToAdd));
}
@Override
public WriteQueryDataAdapter setValuesToRemove(ImmutableMap<String, ImmutableSet<Object>> setValuesToRemove) {
return new WriteQueryDataAdapter(data.setValuesToRemove(setValuesToRemove));
}
@Override
public WriteQueryDataAdapter listValuesToAppend(ImmutableMap<String, ImmutableList<Object>> listValuesToAppend) {
return new WriteQueryDataAdapter(data.listValuesToAppend(listValuesToAppend));
}
@Override
public WriteQueryDataAdapter listValuesToPrepend(ImmutableMap<String, ImmutableList<Object>> listValuesToPrepend) {
return new WriteQueryDataAdapter(data.listValuesToPrepend(listValuesToPrepend));
}
@Override
public WriteQueryDataAdapter listValuesToRemove(ImmutableMap<String, ImmutableList<Object>> listValuesToRemove) {
return new WriteQueryDataAdapter(data.listValuesToRemove(listValuesToRemove));
}
@Override
public WriteQueryDataAdapter mapValuesToMutate(ImmutableMap<String, ImmutableMap<Object, Optional<Object>>> mapValuesToMutate) {
return new WriteQueryDataAdapter(data.mapValuesToMutate(toGuavaOptional(mapValuesToMutate)));
}
@Override
public WriteQueryDataAdapter onlyIfConditions(ImmutableList<Clause> onlyIfConditions) {
return new WriteQueryDataAdapter(data.onlyIfConditions(onlyIfConditions));
}
@Override
public WriteQueryDataAdapter ifNotExists(Optional<Boolean> ifNotExists) {
return new WriteQueryDataAdapter(data.ifNotExists(ifNotExists.orElse(null)));
}
// ---- key accessors (plain delegation) ----
@Override
public ImmutableMap<String, Object> getKeys() {
return data.getKeys();
}
@Override
public <T> boolean hasKey(ColumnName<T> name) {
return data.hasKey(name);
}
@Override
public boolean hasKey(String name) {
return data.hasKey(name);
}
@Override
public <T> T getKey(ColumnName<T> name) {
return data.getKey(name);
}
@Override
public Object getKey(String name) {
return data.getKey(name);
}
@Override
public ImmutableList<Clause> getWhereConditions() {
return data.getWhereConditions();
}
// ---- value-mutation accessors (Guava Optional re-wrapped as java.util) ----
@Override
public ImmutableMap<String, Optional<Object>> getValuesToMutate() {
return fromGuavaOptional(data.getValuesToMutate());
}
@Override
public <T> boolean hasValueToMutate(ColumnName<T> name) {
return data.hasValueToMutate(name);
}
@Override
public boolean hasValueToMutate(String name) {
return data.hasValueToMutate(name);
}
@Override
public <T> T getValueToMutate(ColumnName<T> name) {
return data.getValueToMutate(name);
}
@Override
public Object getValueToMutate(String name) {
return data.getValueToMutate(name);
}
// ---- set-column accessors (plain delegation) ----
@Override
public ImmutableMap<String, ImmutableSet<Object>> getSetValuesToAdd() {
return data.getSetValuesToAdd();
}
@Override
public <T> boolean hasSetValuesToAdd(ColumnName<Set<T>> name) {
return data.hasSetValuesToAdd(name);
}
@Override
public boolean hasSetValuesToAdd(String name) {
return data.hasSetValuesToAdd(name);
}
@Override
public <T> ImmutableSet<T> getSetValuesToAdd(ColumnName<Set<T>> name) {
return data.getSetValuesToAdd(name);
}
@Override
public ImmutableSet<Object> getSetValuesToAdd(String name) {
return data.getSetValuesToAdd(name);
}
@Override
public <T> boolean hasSetValuesToAddOrSet(ColumnName<Set<T>> name) {
return data.hasSetValuesToAddOrSet(name);
}
@Override
public boolean hasSetValuesToAddOrSet(String name) {
return data.hasSetValuesToAddOrSet(name);
}
@Override
public <T> ImmutableSet<T> getSetValuesToAddOrSet( ColumnName<Set<T>> name) {
return data.getSetValuesToAddOrSet(name);
}
@Override
public ImmutableSet<Object> getSetValuesToAddOrSet(String name) {
return data.getSetValuesToAddOrSet(name);
}
@Override
public ImmutableMap<String, ImmutableSet<Object>> getSetValuesToRemove() {
return data.getSetValuesToRemove();
}
@Override
public <T> boolean hasSetValuesToRemove(ColumnName<Set<T>> name) {
return data.hasSetValuesToRemove(name);
}
@Override
public boolean hasSetValuesToRemove(String name) {
return data.hasSetValuesToRemove(name);
}
@Override
public <T> ImmutableSet<T> getSetValuesToRemove(ColumnName<Set<T>> name) {
return data.getSetValuesToRemove(name);
}
@Override
public ImmutableSet<Object> getSetValuesToRemove(String name) {
return data.getSetValuesToRemove(name);
}
// ---- list-column accessors (plain delegation) ----
@Override
public ImmutableMap<String, ImmutableList<Object>> getListValuesToAppend() {
return data.getListValuesToAppend();
}
@Override
public <T> boolean hasListValuesToAppend(ColumnName<List<T>> name) {
return data.hasListValuesToAppend(name);
}
@Override
public boolean hasListValuesToAppend(String name) {
return data.hasListValuesToAppend(name);
}
@Override
public <T> ImmutableList<T> getListValuesToAppend(ColumnName<List<T>> name) {
return data.getListValuesToAppend(name);
}
@Override
public ImmutableList<Object> getListValuesToAppend(String name) {
return data.getListValuesToAppend(name);
}
@Override
public <T> boolean hasListValuesToPrepend(ColumnName<List<T>> name) {
return data.hasListValuesToPrepend(name);
}
@Override
public boolean hasListValuesToPrepend(String name) {
return data.hasListValuesToPrepend(name);
}
@Override
public <T> ImmutableList<T> getListValuesToPrepend(ColumnName<List<T>> name) {
return data.getListValuesToPrepend(name);
}
@Override
public ImmutableList<Object> getListValuesToPrepend(String name) {
return data.getListValuesToPrepend(name);
}
@Override
public ImmutableMap<String, ImmutableList<Object>> getListValuesToPrepend() {
return data.getListValuesToPrepend();
}
@Override
public <T> boolean hasListValuesToAddOrSet(ColumnName<List<T>> name) {
return data.hasListValuesToAddOrSet(name);
}
@Override
public boolean hasListValuesToAddOrSet(String name) {
return data.hasListValuesToAddOrSet(name);
}
@Override
public <T> ImmutableList<T> getListValuesToAddOrSet( ColumnName<List<T>> name) {
return data.getListValuesToAddOrSet(name);
}
@Override
public ImmutableList<Object> getListValuesToAddOrSet(String name) {
return data.getListValuesToAddOrSet(name);
}
@Override
public ImmutableMap<String, ImmutableList<Object>> getListValuesToRemove() {
return data.getListValuesToRemove();
}
@Override
public <T> boolean hasListValuesToRemove(ColumnName<List<T>> name) {
return data.hasListValuesToRemove(name);
}
@Override
public boolean hasListValuesToRemove(String name) {
return data.hasListValuesToRemove(name);
}
@Override
public ImmutableList<Object> getListValuesToRemove(String name) {
return data.getListValuesToRemove(name);
}
@Override
public <T> ImmutableList<T> getListValuesToRemove(ColumnName<List<T>> name) {
return data.getListValuesToRemove(name);
}
// ---- map-column accessors (inner Guava Optionals converted per entry) ----
@Override
public ImmutableMap<String, ImmutableMap<Object, Optional<Object>>> getMapValuesToMutate() {
// Rebuilds the nested map, re-wrapping every Guava Optional as java.util.Optional.
Map<String, ImmutableMap<Object, Optional<Object>>> result = Maps.newHashMap();
for (Entry<String, ImmutableMap<Object, com.google.common.base.Optional<Object>>> entry : data.getMapValuesToMutate().entrySet()) {
Map<Object, Optional<Object>> iresult = Maps.newHashMap();
for (Entry<Object, com.google.common.base.Optional<Object>> entry2 : entry.getValue().entrySet()) {
iresult.put(entry2.getKey(), Optional.ofNullable(entry2.getValue().orNull()));
}
result.put(entry.getKey(), ImmutableMap.copyOf(iresult));
}
return ImmutableMap.copyOf(result);
}
@Override
public <T, V> boolean hasMapValuesToMutate(ColumnName<Map<T, V>> name) {
return data.hasMapValuesToMutate(name);
}
@Override
public boolean hasMapValuesToMutate(String name) {
return data.hasMapValuesToMutate(name);
}
@Override
public <T, V> ImmutableMap<T, Optional<V>> getMapValuesToMutate( ColumnName<Map<T, V>> name) {
return fromGuavaOptional(data.getMapValuesToMutate(name));
}
@Override
public ImmutableMap<Object, Optional<Object>> getMapValuesToMutate(String name) {
return fromGuavaOptional(data.getMapValuesToMutate(name));
}
@Override
public ImmutableList<Clause> getOnlyIfConditions() {
return data.getOnlyIfConditions();
}
// NOTE: "Exits" (sic) is the name declared by the WriteQueryData interface;
// it cannot be renamed here without changing the interface.
@Override
public Optional<Boolean> getIfNotExits() {
return Optional.ofNullable(data.getIfNotExits());
}
// Converts a map of Guava Optionals into a map of java.util Optionals.
private static <T, V> ImmutableMap<T, Optional<V>> fromGuavaOptional(ImmutableMap<T, com.google.common.base.Optional<V>> map) {
Map<T, Optional<V>> result = Maps.newHashMap();
for (Entry<T, com.google.common.base.Optional<V>> entry : map.entrySet()) {
result.put(entry.getKey(), Optional.ofNullable(entry.getValue().orNull()));
}
return ImmutableMap.copyOf(result);
}
// Converts a map of java.util Optionals into a map of Guava Optionals.
private static <T,V> ImmutableMap<T, com.google.common.base.Optional<V>> toGuavaOptional(ImmutableMap<T, Optional<V>> map) {
Map<T, com.google.common.base.Optional<V>> result = Maps.newHashMap();
for (Entry<T, Optional<V>> entry : map.entrySet()) {
result.put(entry.getKey(), com.google.common.base.Optional.fromNullable(entry.getValue().orElse(null)));
}
return ImmutableMap.copyOf(result);
}
// Nested-map variant of toGuavaOptional for the map-column mutation structure.
private static ImmutableMap<String, ImmutableMap<Object, com.google.common.base.Optional<Object>>> toGuavaOptional(Map<String, ImmutableMap<Object, Optional<Object>>> map) {
Map<String, ImmutableMap<Object, com.google.common.base.Optional<Object>>> result = Maps.newHashMap();
for (Entry<String, ImmutableMap<Object, Optional<Object>>> entry : map.entrySet()) {
Map<Object, com.google.common.base.Optional<Object>> iresult = Maps.newHashMap();
for (Entry<Object, Optional<Object>> entry2 : entry.getValue().entrySet()) {
iresult.put(entry2.getKey(), com.google.common.base.Optional.fromNullable(entry2.getValue().orElse(null)));
}
result.put(entry.getKey(), ImmutableMap.copyOf(iresult));
}
return ImmutableMap.copyOf(result);
}
// Converts any Java 8 WriteQueryData back into the Java 7 representation,
// unwrapping Optionals on the way through.
static net.oneandone.troilus.java7.interceptor.WriteQueryData convert(WriteQueryData data) {
return new WriteQueryDataImpl().keys(data.getKeys())
.whereConditions(data.getWhereConditions())
.valuesToMutate(toGuavaOptional(data.getValuesToMutate()))
.setValuesToAdd(data.getSetValuesToAdd())
.setValuesToRemove(data.getSetValuesToRemove())
.listValuesToAppend(data.getListValuesToAppend())
.listValuesToPrepend(data.getListValuesToPrepend())
.listValuesToRemove(data.getListValuesToRemove())
.mapValuesToMutate(toGuavaOptional(data.getMapValuesToMutate()))
.onlyIfConditions(data.getOnlyIfConditions())
.ifNotExists(data.getIfNotExits().orElse(null));
}
}
/**
 * Bridges a Java 8 {@code ListReadQueryRequestInterceptor} onto the Java 7
 * interceptor SPI: wraps the incoming query data, invokes the Java 8
 * interceptor, and converts the resulting CompletableFuture back into a
 * ListenableFuture.
 */
private static final class ListReadQueryRequestInterceptorAdapter implements net.oneandone.troilus.java7.interceptor.ListReadQueryRequestInterceptor {

    // Declared final: never reassigned after construction.
    private final ListReadQueryRequestInterceptor interceptor;

    public ListReadQueryRequestInterceptorAdapter(ListReadQueryRequestInterceptor interceptor) {
        this.interceptor = interceptor;
    }

    @Override
    public ListenableFuture<net.oneandone.troilus.java7.interceptor.ListReadQueryData> onListReadRequestAsync(net.oneandone.troilus.java7.interceptor.ListReadQueryData data) {
        return CompletableFutures.toListenableFuture(interceptor.onListReadRequestAsync(new ListReadQueryDataAdapter(data))
                                                                .thenApply(ListReadQueryDataAdapter::convert));
    }

    @Override
    public String toString() {
        return "ListReadQueryPreInterceptor (with " + interceptor + ")";
    }
}
/**
 * Bridges a Java 8 {@code ListReadQueryResponseInterceptor} onto the Java 7
 * interceptor SPI: wraps the query data and record list, invokes the Java 8
 * interceptor, and converts the resulting record list back to Java 7.
 */
private static final class ListReadQueryResponseInterceptorAdapter implements net.oneandone.troilus.java7.interceptor.ListReadQueryResponseInterceptor {

    // Declared final: never reassigned after construction.
    private final ListReadQueryResponseInterceptor interceptor;

    public ListReadQueryResponseInterceptorAdapter(ListReadQueryResponseInterceptor interceptor) {
        this.interceptor = interceptor;
    }

    @Override
    public ListenableFuture<net.oneandone.troilus.java7.RecordList> onListReadResponseAsync(net.oneandone.troilus.java7.interceptor.ListReadQueryData data, net.oneandone.troilus.java7.RecordList recordList) {
        return CompletableFutures.toListenableFuture(interceptor.onListReadResponseAsync(new ListReadQueryDataAdapter(data), RecordListAdapter.convertFromJava7(recordList))
                                                                .thenApply(RecordListAdapter::convertToJava7));
    }

    @Override
    public String toString() {
        return "ListReadQueryPostInterceptor (with " + interceptor + ")";
    }
}
/**
 * Bridges a Java 8 {@code SingleReadQueryRequestInterceptor} onto the Java 7
 * interceptor SPI. The query data type is shared between both APIs, so only
 * the future type needs converting.
 */
private static final class SingleReadQueryRequestInterceptorAdapter implements net.oneandone.troilus.java7.interceptor.SingleReadQueryRequestInterceptor {

    // Declared final: never reassigned after construction.
    private final SingleReadQueryRequestInterceptor interceptor;

    public SingleReadQueryRequestInterceptorAdapter(SingleReadQueryRequestInterceptor interceptor) {
        this.interceptor = interceptor;
    }

    @Override
    public ListenableFuture<SingleReadQueryData> onSingleReadRequestAsync(SingleReadQueryData data) {
        return CompletableFutures.toListenableFuture(interceptor.onSingleReadRequestAsync(data));
    }

    @Override
    public String toString() {
        // Fixed copy-paste bug: previously reported "ListReadQueryPreInterceptor".
        return "SingleReadQueryPreInterceptorAdapter (with " + interceptor + ")";
    }
}
/**
 * Bridges a Java 8 {@code SingleReadQueryResponseInterceptor} onto the Java 7
 * interceptor SPI: a nullable Java 7 record is presented to the Java 8
 * interceptor as an Optional, and the Optional result is unwrapped back to a
 * nullable Java 7 record.
 */
private static final class SingleReadQueryResponseInterceptorAdapter implements net.oneandone.troilus.java7.interceptor.SingleReadQueryResponseInterceptor {

    // Declared final: never reassigned after construction.
    private final SingleReadQueryResponseInterceptor interceptor;

    public SingleReadQueryResponseInterceptorAdapter(SingleReadQueryResponseInterceptor interceptor) {
        this.interceptor = interceptor;
    }

    @Override
    public ListenableFuture<net.oneandone.troilus.java7.Record> onSingleReadResponseAsync(SingleReadQueryData data, net.oneandone.troilus.java7.Record record) {
        // NOTE(review): when the interceptor returns an empty Optional, null is
        // passed to RecordAdapter.convertToJava7 — presumably it tolerates null;
        // confirm against RecordAdapter (not visible here).
        return CompletableFutures.toListenableFuture(interceptor.onSingleReadResponseAsync(data, (record == null) ? Optional.empty() : Optional.of(RecordAdapter.convertFromJava7(record)))
                                                                .thenApply(optionalRecord -> RecordAdapter.convertToJava7(optionalRecord.orElse(null))));
    }

    @Override
    public String toString() {
        return "SingleReadQueryPostInterceptorAdapter (with " + interceptor + ")";
    }
}
/**
 * Bridges a Java 8 {@code WriteQueryRequestInterceptor} onto the Java 7
 * interceptor SPI, converting the query data and the future type.
 */
private static final class WriteQueryRequestInterceptorAdapter implements net.oneandone.troilus.java7.interceptor.WriteQueryRequestInterceptor {

    // Declared final: never reassigned after construction.
    private final WriteQueryRequestInterceptor interceptor;

    public WriteQueryRequestInterceptorAdapter(WriteQueryRequestInterceptor interceptor) {
        this.interceptor = interceptor;
    }

    @Override
    public ListenableFuture<net.oneandone.troilus.java7.interceptor.WriteQueryData> onWriteRequestAsync(net.oneandone.troilus.java7.interceptor.WriteQueryData data) {
        return CompletableFutures.toListenableFuture(interceptor.onWriteRequestAsync(new WriteQueryDataAdapter(data))
                                                                .thenApply(WriteQueryDataAdapter::convert));
    }

    @Override
    public String toString() {
        return "WriteQueryPreInterceptorAdapter (with " + interceptor + ")";
    }
}
/**
 * Bridges a Java 8 {@code DeleteQueryRequestInterceptor} onto the Java 7
 * interceptor SPI. The query data type is shared between both APIs, so only
 * the future type needs converting.
 */
private static final class DeleteQueryRequestInterceptorAdapter implements net.oneandone.troilus.java7.interceptor.DeleteQueryRequestInterceptor {

    // Declared final: never reassigned after construction.
    private final DeleteQueryRequestInterceptor interceptor;

    public DeleteQueryRequestInterceptorAdapter(DeleteQueryRequestInterceptor interceptor) {
        this.interceptor = interceptor;
    }

    @Override
    public ListenableFuture<DeleteQueryData> onDeleteRequestAsync(DeleteQueryData queryData) {
        return CompletableFutures.toListenableFuture(interceptor.onDeleteRequestAsync(queryData));
    }

    @Override
    public String toString() {
        // Fixed copy-paste bug: previously reported "WriteQueryPreInterceptorAdapter".
        return "DeleteQueryPreInterceptorAdapter (with " + interceptor + ")";
    }
}
/**
 * Bridges a Java 8 {@code CascadeOnWriteInterceptor} onto the Java 7
 * interceptor SPI. Inner (non-static) class: it needs the enclosing {@code ctx}
 * to convert the cascaded batchables to their Java 7 form.
 */
private final class CascadeOnWriteInterceptorAdapter implements net.oneandone.troilus.java7.interceptor.CascadeOnWriteInterceptor {

    // Declared final: never reassigned after construction.
    private final CascadeOnWriteInterceptor interceptor;

    public CascadeOnWriteInterceptorAdapter(CascadeOnWriteInterceptor interceptor) {
        this.interceptor = interceptor;
    }

    @Override
    public ListenableFuture<ImmutableSet<? extends Batchable>> onWriteAsync(net.oneandone.troilus.java7.interceptor.WriteQueryData queryData) {
        // Wrap the query data, collect the interceptor's cascaded batchables,
        // convert each to Java 7, and snapshot the set immutably.
        return CompletableFutures.toListenableFuture(interceptor.onWrite(new WriteQueryDataAdapter(queryData))
                .thenApply(batchables -> batchables.stream().map(batchable -> Batchables.toJava7Batchable(ctx, batchable)).collect(Collectors.<net.oneandone.troilus.java7.Batchable>toSet()))
                .thenApply(batchables -> ImmutableSet.copyOf(batchables)));
    }

    @Override
    public String toString() {
        return "CascadeOnWriteInterceptorAdapter (with " + interceptor + ")";
    }
}
/**
 * Bridges a Java 8 {@code CascadeOnDeleteInterceptor} onto the Java 7
 * interceptor SPI. Inner (non-static) class: it needs the enclosing {@code ctx}
 * to convert the cascaded batchables to their Java 7 form.
 */
private final class CascadeOnDeleteInterceptorAdapter implements net.oneandone.troilus.java7.interceptor.CascadeOnDeleteInterceptor {

    // Declared final: never reassigned after construction.
    private final CascadeOnDeleteInterceptor interceptor;

    public CascadeOnDeleteInterceptorAdapter(CascadeOnDeleteInterceptor interceptor) {
        this.interceptor = interceptor;
    }

    @Override
    public ListenableFuture<ImmutableSet<? extends Batchable>> onDeleteAsync(DeleteQueryData queryData) {
        // Collect the interceptor's cascaded batchables, convert each to
        // Java 7, and snapshot the set immutably.
        return CompletableFutures.toListenableFuture(interceptor.onDelete(queryData)
                .thenApply(batchables -> batchables.stream().map(batchable -> Batchables.toJava7Batchable(ctx, batchable)).collect(Collectors.<net.oneandone.troilus.java7.Batchable>toSet()))
                .thenApply(batchables -> ImmutableSet.copyOf(batchables)));
    }

    @Override
    public String toString() {
        return "CascadeOnDeleteInterceptorAdapter (with " + interceptor + ")";
    }
}
}
| |
package org.tourgune.emocionometro;
import org.tourgune.emocionometro.preferences.PreferencesUtil;
import android.app.Activity;
import android.os.Bundle;
import android.view.Gravity;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
public class Configure extends Activity implements OnClickListener,
OnCheckedChangeListener {
// Preferences Form elements.
// Each widget field is paired with the R.id constant used to resolve it in
// initVariables(); the *Title TextViews are toggled visible/gone alongside
// their input fields.
// --- user identity ---
private EditText newUserText;
private static final int newUserTextId = R.id.configUserText;
// --- seek-bar behaviour ---
private EditText seekBarMinmovementText;
private static final int seekBarMinmovementTextId = R.id.configSeekBarMinMovementText;
private CheckBox useLongClickChkbx;
private static final int useLongClickChkbxId = R.id.configUseLongClick;
private Button seekbarPrecisenessButton;
private static final int seekbarPrecisenessButtonId = R.id.configSeekbarPreciseness;
private CheckBox doesSeekbarStartInZeroChkbx;
private static final int doesSeekbarStartInZeroChkbxId = R.id.configDoesSeekbarStartInZero;
private CheckBox hasSeekbarMemoryChkbx;
private static final int hasSeekbarMemoryChkbxId = R.id.configHasSeekbarMemoryChkbx;
// --- survey scheduling ---
private CheckBox isSurveyRandomChkbx;
private static final int isSurveyRandomChkbxId = R.id.configIsSurveyRandomChkbx;
private TextView surveyRandomLapseTitle;
private static final int surveyRandomLapseTitleId = R.id.configSurveyRandomLapseTitle;
private EditText surveyRandomLapseText;
private static final int surveyRandomLapseTextId = R.id.configSurveyRandomLapseText;
private EditText surveyPeriodText;
private static final int surveyPeriodTextId = R.id.configSurveyPeriodText;
// --- survey time window (shown only when limited-in-time is checked) ---
private CheckBox isSurveyLimitedInTimeChkbx;
private static final int isSurveyLimitedInTimeChkbxId = R.id.configIsSurveyLimitedInTime;
private TextView surveyStartingTimeTitle;
private static final int surveyStartingTimeTitleId = R.id.configSurveyStartTimeTitle;
private EditText surveyStartingTimeText;
private static final int surveyStartingTimeTextId = R.id.configSurveyStartTime;
private TextView surveyEndinTimeTitle;
private static final int surveyEndinTimeTitleId = R.id.configSurveyEndTimeTitle;
private EditText surveyEndingTimeText;
private static final int surveyEndingTimeTextId = R.id.configSurveyEndTime;
// Number-of-days input is deprecated (initVariables() fills it with "DEPRECATED").
private TextView surveyNumberOfDaysTitle;
private static final int surveyNumberOfDaysTitleId = R.id.configDayNumberTitle;
private EditText surveyNumberOfDaysText;
private static final int surveyNumberOfDaysTextId = R.id.configDayNumber;
// --- commit ---
private Button commitButton;
private static final int commitButtonId = R.id.configEdituserButton;
@Override
public void onCreate(Bundle savedInstanceState) {
// Activity bootstrap: inflate the preferences layout, bind and populate the
// widgets, then wire their listeners (listeners() relies on the fields that
// initVariables() assigns, so the order matters).
super.onCreate(savedInstanceState);
setContentView(R.layout.config);
initVariables();
listeners();
}
public void initVariables() {
    // Bind every form widget and populate it from the stored preferences.
    newUserText = (EditText) findViewById(newUserTextId);
    newUserText.setText(Integer.toString(PreferencesUtil.getUserId(this)));

    seekBarMinmovementText = (EditText) findViewById(seekBarMinmovementTextId);
    seekBarMinmovementText.setText(Integer.toString(PreferencesUtil.getSeekBarMinimumMovement(this)));

    useLongClickChkbx = (CheckBox) findViewById(useLongClickChkbxId);
    useLongClickChkbx.setChecked(PreferencesUtil.getUseLongClick(this));

    seekbarPrecisenessButton = (Button) findViewById(seekbarPrecisenessButtonId);
    seekbarPrecisenessButton.setText(PreferencesUtil.getSeekBarPreciseness(this));

    doesSeekbarStartInZeroChkbx = (CheckBox) findViewById(doesSeekbarStartInZeroChkbxId);
    doesSeekbarStartInZeroChkbx.setChecked(PreferencesUtil.getDoesSeekbarStartInZero(this));

    hasSeekbarMemoryChkbx = (CheckBox) findViewById(hasSeekbarMemoryChkbxId);
    hasSeekbarMemoryChkbx.setChecked(PreferencesUtil.getHasSeekbarMemory(this));

    isSurveyRandomChkbx = (CheckBox) findViewById(isSurveyRandomChkbxId);
    isSurveyRandomChkbx.setChecked(PreferencesUtil.getIsSurveyRandom(this));

    surveyRandomLapseTitle = (TextView) findViewById(surveyRandomLapseTitleId);
    surveyRandomLapseText = (EditText) findViewById(surveyRandomLapseTextId);
    // The lapse is stored in milliseconds but edited in minutes.
    surveyRandomLapseText.setText(Float.toString(fromMsToMin(PreferencesUtil.getSurveyRandomLapse(this))));
    // The random-lapse row is only meaningful when random surveying is on.
    setRowVisibility(isSurveyRandomChkbx.isChecked(), surveyRandomLapseTitle, surveyRandomLapseText);

    surveyPeriodText = (EditText) findViewById(surveyPeriodTextId);
    surveyPeriodText.setText(Float.toString(fromMsToMin(PreferencesUtil.getSurveyPeriod(this))));

    isSurveyLimitedInTimeChkbx = (CheckBox) findViewById(isSurveyLimitedInTimeChkbxId);
    isSurveyLimitedInTimeChkbx.setChecked(PreferencesUtil.getIsSurveyLimitedInTime(this));

    surveyStartingTimeTitle = (TextView) findViewById(surveyStartingTimeTitleId);
    surveyStartingTimeText = (EditText) findViewById(surveyStartingTimeTextId);
    surveyStartingTimeText.setText(PreferencesUtil.getSurveyStartingTime(this));
    surveyEndinTimeTitle = (TextView) findViewById(surveyEndinTimeTitleId);
    surveyEndingTimeText = (EditText) findViewById(surveyEndingTimeTextId);
    surveyEndingTimeText.setText(PreferencesUtil.getSurveyEndingTime(this));
    surveyNumberOfDaysTitle = (TextView) findViewById(surveyNumberOfDaysTitleId);
    surveyNumberOfDaysText = (EditText) findViewById(surveyNumberOfDaysTextId);
    surveyNumberOfDaysText.setText("DEPRECATED");
    // Time-window rows are only meaningful when the survey is time-limited.
    setRowVisibility(isSurveyLimitedInTimeChkbx.isChecked(),
            surveyStartingTimeTitle, surveyEndinTimeTitle, surveyNumberOfDaysTitle,
            surveyStartingTimeText, surveyEndingTimeText, surveyNumberOfDaysText);

    commitButton = (Button) findViewById(commitButtonId);
}

/** Shows the given views when {@code visible} is true, hides them ({@code GONE}) otherwise. */
private void setRowVisibility(boolean visible, View... views) {
    int visibility = visible ? View.VISIBLE : View.GONE;
    for (View view : views) {
        view.setVisibility(visibility);
    }
}
public void listeners() {
    // This activity implements both listener interfaces, so it registers
    // itself on every interactive widget.
    for (Button clickable : new Button[] { commitButton, seekbarPrecisenessButton }) {
        clickable.setOnClickListener(this);
    }
    for (CheckBox toggle : new CheckBox[] { isSurveyRandomChkbx, isSurveyLimitedInTimeChkbx }) {
        toggle.setOnCheckedChangeListener(this);
    }
}
/**
 * Validates and persists every form element. On the first invalid numeric
 * field the field is painted red, a toast is shown and {@code false} is
 * returned without committing the remaining fields; returns {@code true}
 * when everything was committed.
 */
public boolean validateAndCommitForm() {
    try {
        int userId = Integer.parseInt(newUserText.getText().toString());
        PreferencesUtil.setUserId(this, userId);
        newUserText.setTextColor(android.graphics.Color.BLACK);
    } catch (NumberFormatException e) {  // narrowed from Exception: parseInt only throws this
        e.printStackTrace();
        newUserText.setTextColor(android.graphics.Color.RED);
        showToast("it must be a number", 0);
        return false;
    }
    try {
        // Renamed from the copy-pasted "userId": this field is the seek-bar minimum movement.
        int minMovement = Integer.parseInt(seekBarMinmovementText.getText().toString());
        PreferencesUtil.setSeekBarMinimumMovement(this, minMovement);
        seekBarMinmovementText.setTextColor(android.graphics.Color.BLACK);
    } catch (NumberFormatException e) {
        e.printStackTrace();
        seekBarMinmovementText.setTextColor(android.graphics.Color.RED);
        showToast("it must be a number", 0);
        return false;
    }
    // Boolean/string widgets cannot fail validation; commit them directly.
    PreferencesUtil.setUseLongClick(this, useLongClickChkbx.isChecked());
    PreferencesUtil.setSeekBarPreciseness(this, seekbarPrecisenessButton.getText().toString());
    PreferencesUtil.setDoesSeekbarStartInZero(this, doesSeekbarStartInZeroChkbx.isChecked());
    PreferencesUtil.setHasSeekbarMemory(this, hasSeekbarMemoryChkbx.isChecked());
    PreferencesUtil.setIsSurveyRandom(this, isSurveyRandomChkbx.isChecked());
    if (isSurveyRandomChkbx.isChecked()) {
        try {
            // Edited in minutes, stored in milliseconds.
            float rndLapse = Float.parseFloat(surveyRandomLapseText.getText().toString());
            PreferencesUtil.setSurveyRandomLapse(this, fromMinToMs(rndLapse));
            surveyRandomLapseText.setTextColor(android.graphics.Color.BLACK);
        } catch (NumberFormatException e) {
            e.printStackTrace();
            surveyRandomLapseText.setTextColor(android.graphics.Color.RED);
            showToast("it must be a number", 0);
            return false;
        }
    }
    try {
        float surveyPeriod = Float.parseFloat(surveyPeriodText.getText().toString());
        PreferencesUtil.setSurveyPeriod(this, fromMinToMs(surveyPeriod));
        surveyPeriodText.setTextColor(android.graphics.Color.BLACK);
    } catch (NumberFormatException e) {
        e.printStackTrace();
        surveyPeriodText.setTextColor(android.graphics.Color.RED);
        showToast("it must be a number", 0);
        return false;
    }
    PreferencesUtil.setIsSurveyLimitedInTime(this, isSurveyLimitedInTimeChkbx.isChecked());
    if (isSurveyLimitedInTimeChkbx.isChecked()) {
        // The time setters validate their own input and report failure via false.
        if (!PreferencesUtil.setSurveyStartingTime(this, surveyStartingTimeText.getText().toString())) {
            return false;
        }
        if (!PreferencesUtil.setSurveyEndingTime(this, surveyEndingTimeText.getText().toString())) {
            return false;
        }
    }
    return true;
}
/**
 * Converts milliseconds to minutes.
 *
 * @param ms duration in milliseconds
 * @return the same duration expressed in minutes
 */
public float fromMsToMin(int ms) {
    // Divide in the same two steps as before to keep float rounding identical.
    final float minutes = ((float) ms / 60f) / 1000f;
    return minutes;
}
/**
 * Converts minutes to milliseconds.
 *
 * @param min duration in minutes
 * @return the same duration in milliseconds, truncated toward zero
 */
public int fromMinToMs(float min) {
    final float milliseconds = min * 60f * 1000f;
    // Truncating cast, as in the original conversion.
    return (int) milliseconds;
}
@Override
public void onClick(View v) {
    final int viewId = v.getId();
    if (viewId == commitButtonId) {
        // Close the screen only when every field committed successfully.
        if (validateAndCommitForm()) {
            finish();
        }
    } else if (viewId == seekbarPrecisenessButtonId) {
        // Cycle the preciseness label: 1/10 -> 1/2 -> 1 -> 1/10.
        final String preciseness = seekbarPrecisenessButton.getText().toString();
        if ("1/10".equals(preciseness)) {
            seekbarPrecisenessButton.setText("1/2");
        } else if ("1/2".equals(preciseness)) {
            seekbarPrecisenessButton.setText("1");
        } else if ("1".equals(preciseness)) {
            seekbarPrecisenessButton.setText("1/10");
        }
    }
}
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
    // Both checkboxes only toggle the visibility of their dependent widgets,
    // so compute the target visibility once instead of duplicating branches.
    final int visibility = isChecked ? android.view.View.VISIBLE : android.view.View.GONE;
    final int sourceId = buttonView.getId();
    if (sourceId == isSurveyRandomChkbxId) {
        surveyRandomLapseTitle.setVisibility(visibility);
        surveyRandomLapseText.setVisibility(visibility);
    } else if (sourceId == isSurveyLimitedInTimeChkbxId) {
        surveyStartingTimeTitle.setVisibility(visibility);
        surveyEndinTimeTitle.setVisibility(visibility);
        surveyNumberOfDaysTitle.setVisibility(visibility);
        surveyStartingTimeText.setVisibility(visibility);
        surveyEndingTimeText.setVisibility(visibility);
        surveyNumberOfDaysText.setVisibility(visibility);
    }
}
// public boolean onKeyDown (int keyCode, KeyEvent event){
//
// return true;
// }
/**
 * Shows a centered toast with the given text.
 *
 * @param text     the message to display
 * @param duration toast duration flag (e.g. {@code Toast.LENGTH_SHORT})
 */
public void showToast(String text, int duration) {
    // Fix: the previous "new Toast(this)" was a dead allocation — it was
    // immediately overwritten by Toast.makeText(...) and never used.
    Toast subscribedToast = Toast.makeText(this, text, duration);
    subscribedToast.setGravity(Gravity.CENTER, 0, 0);
    subscribedToast.show();
}
}
| |
/*
* generated by Xtext
*/
package de.uni_hildesheim.sse.vil.templatelang.ui.contentassist.antlr;
import java.util.Collection;
import java.util.Map;
import java.util.HashMap;
import org.antlr.runtime.RecognitionException;
import org.eclipse.xtext.AbstractElement;
import org.eclipse.xtext.ui.editor.contentassist.antlr.AbstractContentAssistParser;
import org.eclipse.xtext.ui.editor.contentassist.antlr.FollowElement;
import org.eclipse.xtext.ui.editor.contentassist.antlr.internal.AbstractInternalContentAssistParser;
import com.google.inject.Inject;
import de.uni_hildesheim.sse.vil.templatelang.services.TemplateLangGrammarAccess;
public class TemplateLangParser extends AbstractContentAssistParser {
@Inject
private TemplateLangGrammarAccess grammarAccess;
private Map<AbstractElement, String> nameMappings;
@Override
protected de.uni_hildesheim.sse.vil.templatelang.ui.contentassist.antlr.internal.InternalTemplateLangParser createParser() {
    // Instantiate the internal content-assist parser (no token stream yet)
    // and wire in the injected grammar access before handing it out.
    final de.uni_hildesheim.sse.vil.templatelang.ui.contentassist.antlr.internal.InternalTemplateLangParser parser =
            new de.uni_hildesheim.sse.vil.templatelang.ui.contentassist.antlr.internal.InternalTemplateLangParser(null);
    parser.setGrammarAccess(grammarAccess);
    return parser;
}
@Override
protected String getRuleName(AbstractElement element) {
if (nameMappings == null) {
nameMappings = new HashMap<AbstractElement, String>() {
private static final long serialVersionUID = 1L;
{
put(grammarAccess.getLanguageUnitAccess().getAlternatives_13(), "rule__LanguageUnit__Alternatives_13");
put(grammarAccess.getHintedExpressionAccess().getHintAlternatives_1_1_0(), "rule__HintedExpression__HintAlternatives_1_1_0");
put(grammarAccess.getStmtAccess().getAlternatives(), "rule__Stmt__Alternatives");
put(grammarAccess.getAlternativeAccess().getAlternatives_4(), "rule__Alternative__Alternatives_4");
put(grammarAccess.getAlternativeAccess().getAlternatives_5_1(), "rule__Alternative__Alternatives_5_1");
put(grammarAccess.getLoopAccess().getAlternatives_8(), "rule__Loop__Alternatives_8");
put(grammarAccess.getWhileAccess().getAlternatives_4(), "rule__While__Alternatives_4");
put(grammarAccess.getMultiselectAccess().getAlternatives(), "rule__Multiselect__Alternatives");
put(grammarAccess.getExpressionAccess().getAlternatives(), "rule__Expression__Alternatives");
put(grammarAccess.getLogicalOperatorAccess().getAlternatives(), "rule__LogicalOperator__Alternatives");
put(grammarAccess.getEqualityOperatorAccess().getAlternatives(), "rule__EqualityOperator__Alternatives");
put(grammarAccess.getRelationalOperatorAccess().getAlternatives(), "rule__RelationalOperator__Alternatives");
put(grammarAccess.getAdditiveOperatorAccess().getAlternatives(), "rule__AdditiveOperator__Alternatives");
put(grammarAccess.getMultiplicativeOperatorAccess().getAlternatives(), "rule__MultiplicativeOperator__Alternatives");
put(grammarAccess.getUnaryOperatorAccess().getAlternatives(), "rule__UnaryOperator__Alternatives");
put(grammarAccess.getPrimaryExpressionAccess().getAlternatives(), "rule__PrimaryExpression__Alternatives");
put(grammarAccess.getExpressionOrQualifiedExecutionAccess().getAlternatives_0(), "rule__ExpressionOrQualifiedExecution__Alternatives_0");
put(grammarAccess.getSubCallAccess().getAlternatives(), "rule__SubCall__Alternatives");
put(grammarAccess.getSubCallAccess().getTypeAlternatives_0_0_0(), "rule__SubCall__TypeAlternatives_0_0_0");
put(grammarAccess.getConstantAccess().getAlternatives(), "rule__Constant__Alternatives");
put(grammarAccess.getConstantAccess().getBValueAlternatives_3_0(), "rule__Constant__BValueAlternatives_3_0");
put(grammarAccess.getIdentifierAccess().getAlternatives(), "rule__Identifier__Alternatives");
put(grammarAccess.getTypeAccess().getAlternatives(), "rule__Type__Alternatives");
put(grammarAccess.getContainerInitializerExpressionAccess().getAlternatives(), "rule__ContainerInitializerExpression__Alternatives");
put(grammarAccess.getLanguageUnitAccess().getGroup(), "rule__LanguageUnit__Group__0");
put(grammarAccess.getLanguageUnitAccess().getGroup_10(), "rule__LanguageUnit__Group_10__0");
put(grammarAccess.getHintedExpressionAccess().getGroup(), "rule__HintedExpression__Group__0");
put(grammarAccess.getHintedExpressionAccess().getGroup_1(), "rule__HintedExpression__Group_1__0");
put(grammarAccess.getIndentationHintAccess().getGroup(), "rule__IndentationHint__Group__0");
put(grammarAccess.getIndentationHintAccess().getGroup_3(), "rule__IndentationHint__Group_3__0");
put(grammarAccess.getIndentationHintPartAccess().getGroup(), "rule__IndentationHintPart__Group__0");
put(grammarAccess.getFormattingHintAccess().getGroup(), "rule__FormattingHint__Group__0");
put(grammarAccess.getFormattingHintAccess().getGroup_3(), "rule__FormattingHint__Group_3__0");
put(grammarAccess.getFormattingHintPartAccess().getGroup(), "rule__FormattingHintPart__Group__0");
put(grammarAccess.getVilDefAccess().getGroup(), "rule__VilDef__Group__0");
put(grammarAccess.getStmtBlockAccess().getGroup(), "rule__StmtBlock__Group__0");
put(grammarAccess.getAlternativeAccess().getGroup(), "rule__Alternative__Group__0");
put(grammarAccess.getAlternativeAccess().getGroup_5(), "rule__Alternative__Group_5__0");
put(grammarAccess.getContentAccess().getGroup(), "rule__Content__Group__0");
put(grammarAccess.getContentAccess().getGroup_1(), "rule__Content__Group_1__0");
put(grammarAccess.getContentAccess().getGroup_1_0(), "rule__Content__Group_1_0__0");
put(grammarAccess.getContentAccess().getGroup_1_1(), "rule__Content__Group_1_1__0");
put(grammarAccess.getSwitchAccess().getGroup(), "rule__Switch__Group__0");
put(grammarAccess.getSwitchAccess().getGroup_5(), "rule__Switch__Group_5__0");
put(grammarAccess.getSwitchAccess().getGroup_5_1(), "rule__Switch__Group_5_1__0");
put(grammarAccess.getSwitchAccess().getGroup_5_2(), "rule__Switch__Group_5_2__0");
put(grammarAccess.getSwitchPartAccess().getGroup(), "rule__SwitchPart__Group__0");
put(grammarAccess.getLoopAccess().getGroup(), "rule__Loop__Group__0");
put(grammarAccess.getLoopAccess().getGroup_6(), "rule__Loop__Group_6__0");
put(grammarAccess.getLoopAccess().getGroup_6_2(), "rule__Loop__Group_6_2__0");
put(grammarAccess.getWhileAccess().getGroup(), "rule__While__Group__0");
put(grammarAccess.getFlushAccess().getGroup(), "rule__Flush__Group__0");
put(grammarAccess.getGenericMultiselectAccess().getGroup(), "rule__GenericMultiselect__Group__0");
put(grammarAccess.getMultiSelectPartAccess().getGroup(), "rule__MultiSelectPart__Group__0");
put(grammarAccess.getUserMultiselectAccess().getGroup(), "rule__UserMultiselect__Group__0");
put(grammarAccess.getExtensionAccess().getGroup(), "rule__Extension__Group__0");
put(grammarAccess.getJavaQualifiedNameAccess().getGroup(), "rule__JavaQualifiedName__Group__0");
put(grammarAccess.getJavaQualifiedNameAccess().getGroup_1(), "rule__JavaQualifiedName__Group_1__0");
put(grammarAccess.getVariableDeclarationAccess().getGroup(), "rule__VariableDeclaration__Group__0");
put(grammarAccess.getVariableDeclarationAccess().getGroup_3(), "rule__VariableDeclaration__Group_3__0");
put(grammarAccess.getCompoundAccess().getGroup(), "rule__Compound__Group__0");
put(grammarAccess.getCompoundAccess().getGroup_3(), "rule__Compound__Group_3__0");
put(grammarAccess.getTypeDefAccess().getGroup(), "rule__TypeDef__Group__0");
put(grammarAccess.getAdviceAccess().getGroup(), "rule__Advice__Group__0");
put(grammarAccess.getVersionSpecAccess().getGroup(), "rule__VersionSpec__Group__0");
put(grammarAccess.getParameterListAccess().getGroup(), "rule__ParameterList__Group__0");
put(grammarAccess.getParameterListAccess().getGroup_1(), "rule__ParameterList__Group_1__0");
put(grammarAccess.getParameterAccess().getGroup(), "rule__Parameter__Group__0");
put(grammarAccess.getParameterAccess().getGroup_2(), "rule__Parameter__Group_2__0");
put(grammarAccess.getVersionStmtAccess().getGroup(), "rule__VersionStmt__Group__0");
put(grammarAccess.getImportAccess().getGroup(), "rule__Import__Group__0");
put(grammarAccess.getExpressionStatementAccess().getGroup(), "rule__ExpressionStatement__Group__0");
put(grammarAccess.getExpressionStatementAccess().getGroup_0(), "rule__ExpressionStatement__Group_0__0");
put(grammarAccess.getExpressionStatementAccess().getGroup_0_1(), "rule__ExpressionStatement__Group_0_1__0");
put(grammarAccess.getLogicalExpressionAccess().getGroup(), "rule__LogicalExpression__Group__0");
put(grammarAccess.getLogicalExpressionPartAccess().getGroup(), "rule__LogicalExpressionPart__Group__0");
put(grammarAccess.getEqualityExpressionAccess().getGroup(), "rule__EqualityExpression__Group__0");
put(grammarAccess.getEqualityExpressionPartAccess().getGroup(), "rule__EqualityExpressionPart__Group__0");
put(grammarAccess.getRelationalExpressionAccess().getGroup(), "rule__RelationalExpression__Group__0");
put(grammarAccess.getRelationalExpressionAccess().getGroup_1(), "rule__RelationalExpression__Group_1__0");
put(grammarAccess.getRelationalExpressionPartAccess().getGroup(), "rule__RelationalExpressionPart__Group__0");
put(grammarAccess.getAdditiveExpressionAccess().getGroup(), "rule__AdditiveExpression__Group__0");
put(grammarAccess.getAdditiveExpressionPartAccess().getGroup(), "rule__AdditiveExpressionPart__Group__0");
put(grammarAccess.getMultiplicativeExpressionAccess().getGroup(), "rule__MultiplicativeExpression__Group__0");
put(grammarAccess.getMultiplicativeExpressionPartAccess().getGroup(), "rule__MultiplicativeExpressionPart__Group__0");
put(grammarAccess.getUnaryExpressionAccess().getGroup(), "rule__UnaryExpression__Group__0");
put(grammarAccess.getExpressionOrQualifiedExecutionAccess().getGroup(), "rule__ExpressionOrQualifiedExecution__Group__0");
put(grammarAccess.getExpressionOrQualifiedExecutionAccess().getGroup_0_1(), "rule__ExpressionOrQualifiedExecution__Group_0_1__0");
put(grammarAccess.getUnqualifiedExecutionAccess().getGroup(), "rule__UnqualifiedExecution__Group__0");
put(grammarAccess.getSuperExecutionAccess().getGroup(), "rule__SuperExecution__Group__0");
put(grammarAccess.getConstructorExecutionAccess().getGroup(), "rule__ConstructorExecution__Group__0");
put(grammarAccess.getSubCallAccess().getGroup_0(), "rule__SubCall__Group_0__0");
put(grammarAccess.getSubCallAccess().getGroup_1(), "rule__SubCall__Group_1__0");
put(grammarAccess.getDeclaratorAccess().getGroup(), "rule__Declarator__Group__0");
put(grammarAccess.getDeclaratorAccess().getGroup_1(), "rule__Declarator__Group_1__0");
put(grammarAccess.getDeclarationAccess().getGroup(), "rule__Declaration__Group__0");
put(grammarAccess.getDeclarationAccess().getGroup_2(), "rule__Declaration__Group_2__0");
put(grammarAccess.getDeclarationUnitAccess().getGroup(), "rule__DeclarationUnit__Group__0");
put(grammarAccess.getDeclarationUnitAccess().getGroup_1(), "rule__DeclarationUnit__Group_1__0");
put(grammarAccess.getCallAccess().getGroup(), "rule__Call__Group__0");
put(grammarAccess.getArgumentListAccess().getGroup(), "rule__ArgumentList__Group__0");
put(grammarAccess.getArgumentListAccess().getGroup_1(), "rule__ArgumentList__Group_1__0");
put(grammarAccess.getNamedArgumentAccess().getGroup(), "rule__NamedArgument__Group__0");
put(grammarAccess.getNamedArgumentAccess().getGroup_0(), "rule__NamedArgument__Group_0__0");
put(grammarAccess.getQualifiedPrefixAccess().getGroup(), "rule__QualifiedPrefix__Group__0");
put(grammarAccess.getQualifiedPrefixAccess().getGroup_1(), "rule__QualifiedPrefix__Group_1__0");
put(grammarAccess.getQualifiedNameAccess().getGroup(), "rule__QualifiedName__Group__0");
put(grammarAccess.getQualifiedNameAccess().getGroup_1(), "rule__QualifiedName__Group_1__0");
put(grammarAccess.getTypeAccess().getGroup_1(), "rule__Type__Group_1__0");
put(grammarAccess.getTypeAccess().getGroup_2(), "rule__Type__Group_2__0");
put(grammarAccess.getTypeAccess().getGroup_3(), "rule__Type__Group_3__0");
put(grammarAccess.getTypeAccess().getGroup_4(), "rule__Type__Group_4__0");
put(grammarAccess.getTypeParametersAccess().getGroup(), "rule__TypeParameters__Group__0");
put(grammarAccess.getTypeParametersAccess().getGroup_2(), "rule__TypeParameters__Group_2__0");
put(grammarAccess.getContainerInitializerAccess().getGroup(), "rule__ContainerInitializer__Group__0");
put(grammarAccess.getContainerInitializerAccess().getGroup_2(), "rule__ContainerInitializer__Group_2__0");
put(grammarAccess.getContainerInitializerAccess().getGroup_2_1(), "rule__ContainerInitializer__Group_2_1__0");
put(grammarAccess.getLanguageUnitAccess().getImportsAssignment_0(), "rule__LanguageUnit__ImportsAssignment_0");
put(grammarAccess.getLanguageUnitAccess().getJavaExtsAssignment_1(), "rule__LanguageUnit__JavaExtsAssignment_1");
put(grammarAccess.getLanguageUnitAccess().getAdvicesAssignment_2(), "rule__LanguageUnit__AdvicesAssignment_2");
put(grammarAccess.getLanguageUnitAccess().getIndentAssignment_3(), "rule__LanguageUnit__IndentAssignment_3");
put(grammarAccess.getLanguageUnitAccess().getFormattingAssignment_4(), "rule__LanguageUnit__FormattingAssignment_4");
put(grammarAccess.getLanguageUnitAccess().getNameAssignment_6(), "rule__LanguageUnit__NameAssignment_6");
put(grammarAccess.getLanguageUnitAccess().getParamAssignment_8(), "rule__LanguageUnit__ParamAssignment_8");
put(grammarAccess.getLanguageUnitAccess().getExtAssignment_10_1(), "rule__LanguageUnit__ExtAssignment_10_1");
put(grammarAccess.getLanguageUnitAccess().getVersionAssignment_12(), "rule__LanguageUnit__VersionAssignment_12");
put(grammarAccess.getLanguageUnitAccess().getElementsAssignment_13_0(), "rule__LanguageUnit__ElementsAssignment_13_0");
put(grammarAccess.getLanguageUnitAccess().getElementsAssignment_13_1(), "rule__LanguageUnit__ElementsAssignment_13_1");
put(grammarAccess.getLanguageUnitAccess().getElementsAssignment_13_2(), "rule__LanguageUnit__ElementsAssignment_13_2");
put(grammarAccess.getLanguageUnitAccess().getElementsAssignment_13_3(), "rule__LanguageUnit__ElementsAssignment_13_3");
put(grammarAccess.getTopAccess().getExAssignment(), "rule__Top__ExAssignment");
put(grammarAccess.getHintedExpressionAccess().getExAssignment_0(), "rule__HintedExpression__ExAssignment_0");
put(grammarAccess.getHintedExpressionAccess().getHintAssignment_1_1(), "rule__HintedExpression__HintAssignment_1_1");
put(grammarAccess.getIndentationHintAccess().getPartsAssignment_2(), "rule__IndentationHint__PartsAssignment_2");
put(grammarAccess.getIndentationHintAccess().getPartsAssignment_3_1(), "rule__IndentationHint__PartsAssignment_3_1");
put(grammarAccess.getIndentationHintPartAccess().getNameAssignment_0(), "rule__IndentationHintPart__NameAssignment_0");
put(grammarAccess.getIndentationHintPartAccess().getValueAssignment_2(), "rule__IndentationHintPart__ValueAssignment_2");
put(grammarAccess.getFormattingHintAccess().getPartsAssignment_2(), "rule__FormattingHint__PartsAssignment_2");
put(grammarAccess.getFormattingHintAccess().getPartsAssignment_3_1(), "rule__FormattingHint__PartsAssignment_3_1");
put(grammarAccess.getFormattingHintPartAccess().getNameAssignment_0(), "rule__FormattingHintPart__NameAssignment_0");
put(grammarAccess.getFormattingHintPartAccess().getValueAssignment_2(), "rule__FormattingHintPart__ValueAssignment_2");
put(grammarAccess.getVilDefAccess().getProtectedAssignment_0(), "rule__VilDef__ProtectedAssignment_0");
put(grammarAccess.getVilDefAccess().getTypeAssignment_2(), "rule__VilDef__TypeAssignment_2");
put(grammarAccess.getVilDefAccess().getIdAssignment_3(), "rule__VilDef__IdAssignment_3");
put(grammarAccess.getVilDefAccess().getParamAssignment_5(), "rule__VilDef__ParamAssignment_5");
put(grammarAccess.getVilDefAccess().getStmtsAssignment_7(), "rule__VilDef__StmtsAssignment_7");
put(grammarAccess.getStmtBlockAccess().getStmtsAssignment_2(), "rule__StmtBlock__StmtsAssignment_2");
put(grammarAccess.getStmtAccess().getVarAssignment_0(), "rule__Stmt__VarAssignment_0");
put(grammarAccess.getStmtAccess().getAltAssignment_1(), "rule__Stmt__AltAssignment_1");
put(grammarAccess.getStmtAccess().getSwitchAssignment_2(), "rule__Stmt__SwitchAssignment_2");
put(grammarAccess.getStmtAccess().getMultiAssignment_3(), "rule__Stmt__MultiAssignment_3");
put(grammarAccess.getStmtAccess().getLoopAssignment_4(), "rule__Stmt__LoopAssignment_4");
put(grammarAccess.getStmtAccess().getWhileAssignment_5(), "rule__Stmt__WhileAssignment_5");
put(grammarAccess.getStmtAccess().getExprStmtAssignment_6(), "rule__Stmt__ExprStmtAssignment_6");
put(grammarAccess.getStmtAccess().getCtnAssignment_7(), "rule__Stmt__CtnAssignment_7");
put(grammarAccess.getStmtAccess().getFlushAssignment_8(), "rule__Stmt__FlushAssignment_8");
put(grammarAccess.getAlternativeAccess().getExprAssignment_2(), "rule__Alternative__ExprAssignment_2");
put(grammarAccess.getAlternativeAccess().getIfAssignment_4_0(), "rule__Alternative__IfAssignment_4_0");
put(grammarAccess.getAlternativeAccess().getIfBlockAssignment_4_1(), "rule__Alternative__IfBlockAssignment_4_1");
put(grammarAccess.getAlternativeAccess().getElseAssignment_5_1_0(), "rule__Alternative__ElseAssignment_5_1_0");
put(grammarAccess.getAlternativeAccess().getElseBlockAssignment_5_1_1(), "rule__Alternative__ElseBlockAssignment_5_1_1");
put(grammarAccess.getContentAccess().getCtnAssignment_0(), "rule__Content__CtnAssignment_0");
put(grammarAccess.getContentAccess().getNoCRAssignment_1_0_0(), "rule__Content__NoCRAssignment_1_0_0");
put(grammarAccess.getContentAccess().getCRAssignment_1_0_1(), "rule__Content__CRAssignment_1_0_1");
put(grammarAccess.getContentAccess().getIndentAssignment_1_1_1(), "rule__Content__IndentAssignment_1_1_1");
put(grammarAccess.getSwitchAccess().getExprAssignment_2(), "rule__Switch__ExprAssignment_2");
put(grammarAccess.getSwitchAccess().getPartsAssignment_5_0(), "rule__Switch__PartsAssignment_5_0");
put(grammarAccess.getSwitchAccess().getPartsAssignment_5_1_1(), "rule__Switch__PartsAssignment_5_1_1");
put(grammarAccess.getSwitchAccess().getDfltAssignment_5_2_3(), "rule__Switch__DfltAssignment_5_2_3");
put(grammarAccess.getSwitchPartAccess().getLeftAssignment_0(), "rule__SwitchPart__LeftAssignment_0");
put(grammarAccess.getSwitchPartAccess().getRightAssignment_2(), "rule__SwitchPart__RightAssignment_2");
put(grammarAccess.getLoopAccess().getTypeAssignment_2(), "rule__Loop__TypeAssignment_2");
put(grammarAccess.getLoopAccess().getIdAssignment_3(), "rule__Loop__IdAssignment_3");
put(grammarAccess.getLoopAccess().getExprAssignment_5(), "rule__Loop__ExprAssignment_5");
put(grammarAccess.getLoopAccess().getSeparatorAssignment_6_1(), "rule__Loop__SeparatorAssignment_6_1");
put(grammarAccess.getLoopAccess().getFinalSeparatorAssignment_6_2_1(), "rule__Loop__FinalSeparatorAssignment_6_2_1");
put(grammarAccess.getLoopAccess().getStmtAssignment_8_0(), "rule__Loop__StmtAssignment_8_0");
put(grammarAccess.getLoopAccess().getBlockAssignment_8_1(), "rule__Loop__BlockAssignment_8_1");
put(grammarAccess.getWhileAccess().getExprAssignment_2(), "rule__While__ExprAssignment_2");
put(grammarAccess.getWhileAccess().getStmtAssignment_4_0(), "rule__While__StmtAssignment_4_0");
put(grammarAccess.getWhileAccess().getBlockAssignment_4_1(), "rule__While__BlockAssignment_4_1");
put(grammarAccess.getMultiselectAccess().getGenAssignment_0(), "rule__Multiselect__GenAssignment_0");
put(grammarAccess.getMultiselectAccess().getUserAssignment_1(), "rule__Multiselect__UserAssignment_1");
put(grammarAccess.getGenericMultiselectAccess().getPreambleAssignment_1(), "rule__GenericMultiselect__PreambleAssignment_1");
put(grammarAccess.getGenericMultiselectAccess().getSelectorAssignment_2(), "rule__GenericMultiselect__SelectorAssignment_2");
put(grammarAccess.getGenericMultiselectAccess().getPartAssignment_3(), "rule__GenericMultiselect__PartAssignment_3");
put(grammarAccess.getGenericMultiselectAccess().getTrailerAssignment_4(), "rule__GenericMultiselect__TrailerAssignment_4");
put(grammarAccess.getMultiSelectPartAccess().getCaseAssignment_1(), "rule__MultiSelectPart__CaseAssignment_1");
put(grammarAccess.getMultiSelectPartAccess().getCodeAssignment_2(), "rule__MultiSelectPart__CodeAssignment_2");
put(grammarAccess.getMultiSelectPartAccess().getSelectorAssignment_3(), "rule__MultiSelectPart__SelectorAssignment_3");
put(grammarAccess.getUserMultiselectAccess().getIdAssignment_0(), "rule__UserMultiselect__IdAssignment_0");
put(grammarAccess.getUserMultiselectAccess().getStmtAssignment_3(), "rule__UserMultiselect__StmtAssignment_3");
put(grammarAccess.getExtensionAccess().getNameAssignment_1(), "rule__Extension__NameAssignment_1");
put(grammarAccess.getJavaQualifiedNameAccess().getQnameAssignment_0(), "rule__JavaQualifiedName__QnameAssignment_0");
put(grammarAccess.getJavaQualifiedNameAccess().getQnameAssignment_1_0(), "rule__JavaQualifiedName__QnameAssignment_1_0");
put(grammarAccess.getJavaQualifiedNameAccess().getQnameAssignment_1_1(), "rule__JavaQualifiedName__QnameAssignment_1_1");
put(grammarAccess.getVariableDeclarationAccess().getConstAssignment_0(), "rule__VariableDeclaration__ConstAssignment_0");
put(grammarAccess.getVariableDeclarationAccess().getTypeAssignment_1(), "rule__VariableDeclaration__TypeAssignment_1");
put(grammarAccess.getVariableDeclarationAccess().getNameAssignment_2(), "rule__VariableDeclaration__NameAssignment_2");
put(grammarAccess.getVariableDeclarationAccess().getExpressionAssignment_3_1(), "rule__VariableDeclaration__ExpressionAssignment_3_1");
put(grammarAccess.getCompoundAccess().getAbstractAssignment_0(), "rule__Compound__AbstractAssignment_0");
put(grammarAccess.getCompoundAccess().getNameAssignment_2(), "rule__Compound__NameAssignment_2");
put(grammarAccess.getCompoundAccess().getSuperAssignment_3_1(), "rule__Compound__SuperAssignment_3_1");
put(grammarAccess.getCompoundAccess().getVarsAssignment_5(), "rule__Compound__VarsAssignment_5");
put(grammarAccess.getTypeDefAccess().getNameAssignment_1(), "rule__TypeDef__NameAssignment_1");
put(grammarAccess.getTypeDefAccess().getTypeAssignment_2(), "rule__TypeDef__TypeAssignment_2");
put(grammarAccess.getAdviceAccess().getNameAssignment_2(), "rule__Advice__NameAssignment_2");
put(grammarAccess.getAdviceAccess().getVersionSpecAssignment_4(), "rule__Advice__VersionSpecAssignment_4");
put(grammarAccess.getVersionSpecAccess().getRestrictionAssignment_1(), "rule__VersionSpec__RestrictionAssignment_1");
put(grammarAccess.getParameterListAccess().getParamAssignment_0(), "rule__ParameterList__ParamAssignment_0");
put(grammarAccess.getParameterListAccess().getParamAssignment_1_1(), "rule__ParameterList__ParamAssignment_1_1");
put(grammarAccess.getParameterAccess().getTypeAssignment_0(), "rule__Parameter__TypeAssignment_0");
put(grammarAccess.getParameterAccess().getNameAssignment_1(), "rule__Parameter__NameAssignment_1");
put(grammarAccess.getParameterAccess().getDfltAssignment_2_1(), "rule__Parameter__DfltAssignment_2_1");
put(grammarAccess.getVersionStmtAccess().getVersionAssignment_1(), "rule__VersionStmt__VersionAssignment_1");
put(grammarAccess.getImportAccess().getNameAssignment_1(), "rule__Import__NameAssignment_1");
put(grammarAccess.getImportAccess().getVersionSpecAssignment_2(), "rule__Import__VersionSpecAssignment_2");
put(grammarAccess.getExpressionStatementAccess().getVarAssignment_0_0(), "rule__ExpressionStatement__VarAssignment_0_0");
put(grammarAccess.getExpressionStatementAccess().getFieldAssignment_0_1_1(), "rule__ExpressionStatement__FieldAssignment_0_1_1");
put(grammarAccess.getExpressionStatementAccess().getExprAssignment_1(), "rule__ExpressionStatement__ExprAssignment_1");
put(grammarAccess.getExpressionAccess().getExprAssignment_0(), "rule__Expression__ExprAssignment_0");
put(grammarAccess.getExpressionAccess().getInitAssignment_1(), "rule__Expression__InitAssignment_1");
put(grammarAccess.getLogicalExpressionAccess().getLeftAssignment_0(), "rule__LogicalExpression__LeftAssignment_0");
put(grammarAccess.getLogicalExpressionAccess().getRightAssignment_1(), "rule__LogicalExpression__RightAssignment_1");
put(grammarAccess.getLogicalExpressionPartAccess().getOpAssignment_0(), "rule__LogicalExpressionPart__OpAssignment_0");
put(grammarAccess.getLogicalExpressionPartAccess().getExAssignment_1(), "rule__LogicalExpressionPart__ExAssignment_1");
put(grammarAccess.getEqualityExpressionAccess().getLeftAssignment_0(), "rule__EqualityExpression__LeftAssignment_0");
put(grammarAccess.getEqualityExpressionAccess().getRightAssignment_1(), "rule__EqualityExpression__RightAssignment_1");
put(grammarAccess.getEqualityExpressionPartAccess().getOpAssignment_0(), "rule__EqualityExpressionPart__OpAssignment_0");
put(grammarAccess.getEqualityExpressionPartAccess().getExAssignment_1(), "rule__EqualityExpressionPart__ExAssignment_1");
put(grammarAccess.getRelationalExpressionAccess().getLeftAssignment_0(), "rule__RelationalExpression__LeftAssignment_0");
put(grammarAccess.getRelationalExpressionAccess().getRightAssignment_1_0(), "rule__RelationalExpression__RightAssignment_1_0");
put(grammarAccess.getRelationalExpressionAccess().getRight2Assignment_1_1(), "rule__RelationalExpression__Right2Assignment_1_1");
put(grammarAccess.getRelationalExpressionPartAccess().getOpAssignment_0(), "rule__RelationalExpressionPart__OpAssignment_0");
put(grammarAccess.getRelationalExpressionPartAccess().getExAssignment_1(), "rule__RelationalExpressionPart__ExAssignment_1");
put(grammarAccess.getAdditiveExpressionAccess().getLeftAssignment_0(), "rule__AdditiveExpression__LeftAssignment_0");
put(grammarAccess.getAdditiveExpressionAccess().getRightAssignment_1(), "rule__AdditiveExpression__RightAssignment_1");
put(grammarAccess.getAdditiveExpressionPartAccess().getOpAssignment_0(), "rule__AdditiveExpressionPart__OpAssignment_0");
put(grammarAccess.getAdditiveExpressionPartAccess().getExAssignment_1(), "rule__AdditiveExpressionPart__ExAssignment_1");
put(grammarAccess.getMultiplicativeExpressionAccess().getLeftAssignment_0(), "rule__MultiplicativeExpression__LeftAssignment_0");
put(grammarAccess.getMultiplicativeExpressionAccess().getRightAssignment_1(), "rule__MultiplicativeExpression__RightAssignment_1");
put(grammarAccess.getMultiplicativeExpressionPartAccess().getOpAssignment_0(), "rule__MultiplicativeExpressionPart__OpAssignment_0");
put(grammarAccess.getMultiplicativeExpressionPartAccess().getExprAssignment_1(), "rule__MultiplicativeExpressionPart__ExprAssignment_1");
put(grammarAccess.getUnaryExpressionAccess().getOpAssignment_0(), "rule__UnaryExpression__OpAssignment_0");
put(grammarAccess.getUnaryExpressionAccess().getExprAssignment_1(), "rule__UnaryExpression__ExprAssignment_1");
put(grammarAccess.getPostfixExpressionAccess().getLeftAssignment(), "rule__PostfixExpression__LeftAssignment");
put(grammarAccess.getPrimaryExpressionAccess().getOtherExAssignment_0(), "rule__PrimaryExpression__OtherExAssignment_0");
put(grammarAccess.getPrimaryExpressionAccess().getUnqExAssignment_1(), "rule__PrimaryExpression__UnqExAssignment_1");
put(grammarAccess.getPrimaryExpressionAccess().getSuperExAssignment_2(), "rule__PrimaryExpression__SuperExAssignment_2");
put(grammarAccess.getPrimaryExpressionAccess().getNewExAssignment_3(), "rule__PrimaryExpression__NewExAssignment_3");
put(grammarAccess.getExpressionOrQualifiedExecutionAccess().getValAssignment_0_0(), "rule__ExpressionOrQualifiedExecution__ValAssignment_0_0");
put(grammarAccess.getExpressionOrQualifiedExecutionAccess().getParenthesisAssignment_0_1_1(), "rule__ExpressionOrQualifiedExecution__ParenthesisAssignment_0_1_1");
put(grammarAccess.getExpressionOrQualifiedExecutionAccess().getCallsAssignment_1(), "rule__ExpressionOrQualifiedExecution__CallsAssignment_1");
put(grammarAccess.getUnqualifiedExecutionAccess().getCallAssignment_0(), "rule__UnqualifiedExecution__CallAssignment_0");
put(grammarAccess.getUnqualifiedExecutionAccess().getCallsAssignment_1(), "rule__UnqualifiedExecution__CallsAssignment_1");
put(grammarAccess.getSuperExecutionAccess().getCallAssignment_2(), "rule__SuperExecution__CallAssignment_2");
put(grammarAccess.getSuperExecutionAccess().getCallsAssignment_3(), "rule__SuperExecution__CallsAssignment_3");
put(grammarAccess.getConstructorExecutionAccess().getTypeAssignment_1(), "rule__ConstructorExecution__TypeAssignment_1");
put(grammarAccess.getConstructorExecutionAccess().getParamAssignment_3(), "rule__ConstructorExecution__ParamAssignment_3");
put(grammarAccess.getConstructorExecutionAccess().getCallsAssignment_5(), "rule__ConstructorExecution__CallsAssignment_5");
put(grammarAccess.getSubCallAccess().getTypeAssignment_0_0(), "rule__SubCall__TypeAssignment_0_0");
put(grammarAccess.getSubCallAccess().getCallAssignment_0_1(), "rule__SubCall__CallAssignment_0_1");
put(grammarAccess.getSubCallAccess().getArrayExAssignment_1_1(), "rule__SubCall__ArrayExAssignment_1_1");
put(grammarAccess.getDeclaratorAccess().getDeclAssignment_0(), "rule__Declarator__DeclAssignment_0");
put(grammarAccess.getDeclaratorAccess().getDeclAssignment_1_1(), "rule__Declarator__DeclAssignment_1_1");
put(grammarAccess.getDeclarationAccess().getTypeAssignment_0(), "rule__Declaration__TypeAssignment_0");
put(grammarAccess.getDeclarationAccess().getUnitsAssignment_1(), "rule__Declaration__UnitsAssignment_1");
put(grammarAccess.getDeclarationAccess().getUnitsAssignment_2_1(), "rule__Declaration__UnitsAssignment_2_1");
put(grammarAccess.getDeclarationUnitAccess().getIdAssignment_0(), "rule__DeclarationUnit__IdAssignment_0");
put(grammarAccess.getDeclarationUnitAccess().getDefltAssignment_1_1(), "rule__DeclarationUnit__DefltAssignment_1_1");
put(grammarAccess.getCallAccess().getNameAssignment_0(), "rule__Call__NameAssignment_0");
put(grammarAccess.getCallAccess().getDeclAssignment_2(), "rule__Call__DeclAssignment_2");
put(grammarAccess.getCallAccess().getParamAssignment_3(), "rule__Call__ParamAssignment_3");
put(grammarAccess.getArgumentListAccess().getParamAssignment_0(), "rule__ArgumentList__ParamAssignment_0");
put(grammarAccess.getArgumentListAccess().getParamAssignment_1_1(), "rule__ArgumentList__ParamAssignment_1_1");
put(grammarAccess.getNamedArgumentAccess().getNameAssignment_0_0(), "rule__NamedArgument__NameAssignment_0_0");
put(grammarAccess.getNamedArgumentAccess().getExAssignment_1(), "rule__NamedArgument__ExAssignment_1");
put(grammarAccess.getQualifiedPrefixAccess().getQnameAssignment_0(), "rule__QualifiedPrefix__QnameAssignment_0");
put(grammarAccess.getQualifiedPrefixAccess().getQnameAssignment_1_0(), "rule__QualifiedPrefix__QnameAssignment_1_0");
put(grammarAccess.getQualifiedPrefixAccess().getQnameAssignment_1_1(), "rule__QualifiedPrefix__QnameAssignment_1_1");
put(grammarAccess.getQualifiedNameAccess().getPrefixAssignment_0(), "rule__QualifiedName__PrefixAssignment_0");
put(grammarAccess.getQualifiedNameAccess().getQnameAssignment_1_0(), "rule__QualifiedName__QnameAssignment_1_0");
put(grammarAccess.getQualifiedNameAccess().getQnameAssignment_1_1(), "rule__QualifiedName__QnameAssignment_1_1");
put(grammarAccess.getConstantAccess().getNValueAssignment_0(), "rule__Constant__NValueAssignment_0");
put(grammarAccess.getConstantAccess().getSValueAssignment_1(), "rule__Constant__SValueAssignment_1");
put(grammarAccess.getConstantAccess().getQValueAssignment_2(), "rule__Constant__QValueAssignment_2");
put(grammarAccess.getConstantAccess().getBValueAssignment_3(), "rule__Constant__BValueAssignment_3");
put(grammarAccess.getConstantAccess().getNullAssignment_4(), "rule__Constant__NullAssignment_4");
put(grammarAccess.getConstantAccess().getVersionAssignment_5(), "rule__Constant__VersionAssignment_5");
put(grammarAccess.getNumValueAccess().getValAssignment(), "rule__NumValue__ValAssignment");
put(grammarAccess.getTypeAccess().getNameAssignment_0(), "rule__Type__NameAssignment_0");
put(grammarAccess.getTypeAccess().getSetAssignment_1_0(), "rule__Type__SetAssignment_1_0");
put(grammarAccess.getTypeAccess().getParamAssignment_1_1(), "rule__Type__ParamAssignment_1_1");
put(grammarAccess.getTypeAccess().getSeqAssignment_2_0(), "rule__Type__SeqAssignment_2_0");
put(grammarAccess.getTypeAccess().getParamAssignment_2_1(), "rule__Type__ParamAssignment_2_1");
put(grammarAccess.getTypeAccess().getMapAssignment_3_0(), "rule__Type__MapAssignment_3_0");
put(grammarAccess.getTypeAccess().getParamAssignment_3_1(), "rule__Type__ParamAssignment_3_1");
put(grammarAccess.getTypeAccess().getCallAssignment_4_0(), "rule__Type__CallAssignment_4_0");
put(grammarAccess.getTypeAccess().getReturnAssignment_4_1(), "rule__Type__ReturnAssignment_4_1");
put(grammarAccess.getTypeAccess().getParamAssignment_4_2(), "rule__Type__ParamAssignment_4_2");
put(grammarAccess.getTypeParametersAccess().getParamAssignment_1(), "rule__TypeParameters__ParamAssignment_1");
put(grammarAccess.getTypeParametersAccess().getParamAssignment_2_1(), "rule__TypeParameters__ParamAssignment_2_1");
put(grammarAccess.getContainerInitializerAccess().getExprsAssignment_2_0(), "rule__ContainerInitializer__ExprsAssignment_2_0");
put(grammarAccess.getContainerInitializerAccess().getExprsAssignment_2_1_1(), "rule__ContainerInitializer__ExprsAssignment_2_1_1");
put(grammarAccess.getContainerInitializerExpressionAccess().getLogicalAssignment_0(), "rule__ContainerInitializerExpression__LogicalAssignment_0");
put(grammarAccess.getContainerInitializerExpressionAccess().getContainerAssignment_1(), "rule__ContainerInitializerExpression__ContainerAssignment_1");
}
};
}
return nameMappings.get(element);
}
@Override
protected Collection<FollowElement> getFollowElements(AbstractInternalContentAssistParser parser) {
    // Drive the language-specific parser from its entry rule and collect the
    // follow elements computed during that parse for content assist.
    try {
        de.uni_hildesheim.sse.vil.templatelang.ui.contentassist.antlr.internal.InternalTemplateLangParser languageParser =
            (de.uni_hildesheim.sse.vil.templatelang.ui.contentassist.antlr.internal.InternalTemplateLangParser) parser;
        languageParser.entryRuleLanguageUnit();
        return languageParser.getFollowElements();
    }
    catch (RecognitionException ex) {
        // Recognition problems are unexpected here; surface them as runtime failures.
        throw new RuntimeException(ex);
    }
}
@Override
protected String[] getInitialHiddenTokens() {
    // Whitespace and both comment token types are hidden by default while parsing for content assist.
    String[] hiddenTokens = { "RULE_WS", "RULE_ML_COMMENT", "RULE_SL_COMMENT" };
    return hiddenTokens;
}
/** Returns the injected grammar access used to identify grammar elements. */
public TemplateLangGrammarAccess getGrammarAccess() {
return this.grammarAccess;
}
/** Injection setter for the grammar access; called by the DI container. */
public void setGrammarAccess(TemplateLangGrammarAccess grammarAccess) {
this.grammarAccess = grammarAccess;
}
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.hint;
import com.intellij.codeInsight.AutoPopupController;
import com.intellij.codeInsight.CodeInsightSettings;
import com.intellij.codeInsight.daemon.impl.ParameterHintsPresentationManager;
import com.intellij.codeInsight.lookup.LookupEvent;
import com.intellij.codeInsight.lookup.LookupListener;
import com.intellij.codeInsight.lookup.LookupManager;
import com.intellij.codeInsight.lookup.LookupManagerListener;
import com.intellij.codeInsight.lookup.impl.LookupImpl;
import com.intellij.codeWithMe.ClientId;
import com.intellij.ide.IdeTooltip;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.lang.parameterInfo.ParameterInfoHandler;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.Inlay;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.editor.VisualPosition;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.Balloon.Position;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.util.PsiUtilBase;
import com.intellij.ui.HintHint;
import com.intellij.ui.LightweightHint;
import com.intellij.ui.ScreenUtil;
import com.intellij.util.indexing.DumbModeAccessType;
import com.intellij.util.text.CharArrayUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.awt.*;
import java.util.List;
import static com.intellij.codeInsight.hint.ParameterInfoTaskRunnerUtil.runTask;
public class ParameterInfoController extends ParameterInfoControllerBase {
private LightweightHint myHint;
private final ParameterInfoComponent myComponent;
private boolean myKeepOnHintHidden;
private final MyBestLocationPointProvider myProvider;
@Override
protected boolean canBeDisposed() {
    // An injected editor whose window became invalid always allows disposal.
    if (myEditor instanceof EditorWindow && !((EditorWindow)myEditor).isValid()) {
        return true;
    }
    // Otherwise dispose only when the hint is gone, we were not asked to keep
    // state after hiding, and we are not running headless.
    boolean headless = ApplicationManager.getApplication().isHeadlessEnvironment();
    return !myHint.isVisible() && !myKeepOnHintHidden && !headless;
}
@Override
public boolean isHintShown(boolean anyType) {
    // A hidden hint never counts as shown.
    if (!myHint.isVisible()) {
        return false;
    }
    // A visible hint counts unless we are in single-parameter mode and the
    // caller did not ask about "any type" of hint.
    return anyType || !mySingleParameterInfo;
}
/**
 * Creates the controller for a single parameter-info hint.
 *
 * @param project        current project
 * @param editor         editor (possibly injected) the hint belongs to
 * @param lbraceOffset   offset of the opening brace of the argument list
 * @param descriptors    candidate overloads/signatures to display
 * @param highlighted    the descriptor to highlight, if any
 * @param parameterOwner PSI element owning the parameter list
 * @param handler        language-specific parameter info handler
 * @param showHint       whether to show the hint immediately
 * @param requestFocus   whether the hint should request focus when shown
 */
public ParameterInfoController(@NotNull Project project,
@NotNull Editor editor,
int lbraceOffset,
Object[] descriptors,
Object highlighted,
PsiElement parameterOwner,
@NotNull ParameterInfoHandler handler,
boolean showHint,
boolean requestFocus) {
super(project, editor, lbraceOffset, descriptors, highlighted, parameterOwner, handler, showHint);
myProvider = new MyBestLocationPointProvider(editor);
myComponent = new ParameterInfoComponent(myParameterInfoControllerData, editor, requestFocus, true);
myHint = createHint();
// When the hint is not shown immediately we keep the controller alive after hiding.
myKeepOnHintHidden = !showHint;
myHint.setSelectingHint(true);
myParameterInfoControllerData.setParameterOwner(parameterOwner);
myParameterInfoControllerData.setHighlighted(highlighted);
registerSelf();
setupListeners();
// Reposition the hint when the active completion lookup repaints, merging
// bursts of UI refreshes through a 200 ms queue to avoid churn.
LookupListener lookupListener = new LookupListener() {
LookupImpl activeLookup = null;
final MergingUpdateQueue queue = new MergingUpdateQueue("Update parameter info position", 200, true, myComponent);
@Override
public void lookupShown(@NotNull LookupEvent event) {
activeLookup = (LookupImpl)event.getLookup();
}
@Override
public void uiRefreshed() {
queue.queue(new Update("PI update") {
@Override
public void run() {
if (activeLookup != null) {
updateComponent();
}
}
});
}
};
// Attach the listener to every newly created local lookup for this project.
LookupManagerListener lookupManagerListener = (oldLookup, newLookup) -> {
if (newLookup != null && ClientId.isCurrentlyUnderLocalId()) {
newLookup.addLookupListener(lookupListener);
}
};
project.getMessageBus().connect(this).subscribe(LookupManagerListener.TOPIC, lookupManagerListener);
if (showHint) {
showHint(requestFocus, mySingleParameterInfo);
}
else {
updateComponent();
}
}
/**
 * Stores new overload descriptors and notifies the UI component so it can
 * rebuild its per-descriptor state.
 */
@Override
public void setDescriptors(Object[] descriptors) {
super.setDescriptors(descriptors);
myComponent.fireDescriptorsWereSet();
}
/**
 * Creates controller data whose per-descriptor enabled flags delegate to the
 * UI component, keeping model and presentation in sync.
 */
@Override
protected @NotNull ParameterInfoControllerData createParameterInfoControllerData(@NotNull ParameterInfoHandler<PsiElement, Object> handler) {
return new ParameterInfoControllerData(handler) {
@Override
public boolean isDescriptorEnabled(int descriptorIndex) {
return myComponent.isEnabled(descriptorIndex);
}
@Override
public void setDescriptorEnabled(int descriptorIndex, boolean enabled) {
myComponent.setEnabled(descriptorIndex, enabled);
}
};
}
/** Wraps the shared component in a {@code WrapperPanel} so the popup can be styled from it. */
private LightweightHint createHint() {
    JPanel panel = new WrapperPanel();
    panel.add(myComponent);
    return new LightweightHint(panel);
}
/**
 * Shows (or re-shows) the parameter info hint at the best position relative
 * to the caret and any active completion lookup.
 */
@Override
public void showHint(boolean requestFocus, boolean singleParameterInfo) {
// Re-create the hint if it is already visible so stale UI state is discarded.
if (myHint.isVisible()) {
JComponent myHintComponent = myHint.getComponent();
myHintComponent.removeAll();
hideHint();
myHint = createHint();
}
mySingleParameterInfo = singleParameterInfo && myKeepOnHintHidden;
int caretOffset = myEditor.getCaretModel().getOffset();
Pair<Point, Short> pos = myProvider.getBestPointPosition(myHint, myParameterInfoControllerData.getParameterOwner(), caretOffset,
null, HintManager.ABOVE);
@SuppressWarnings("MagicConstant")
HintHint hintHint = HintManagerImpl.createHintHint(myEditor, pos.getFirst(), myHint, pos.getSecond());
hintHint.setExplicitClose(true);
hintHint.setRequestFocus(requestFocus);
hintHint.setShowImmediately(true);
hintHint.setBorderColor(ParameterInfoComponent.BORDER_COLOR);
hintHint.setBorderInsets(JBUI.insets(4, 1, 4, 1));
hintHint.setComponentBorder(JBUI.Borders.empty());
int flags = HintManager.HIDE_BY_ESCAPE | HintManager.UPDATE_BY_SCROLLING;
// Keep-alive hints additionally auto-hide when the document text changes.
if (!singleParameterInfo && myKeepOnHintHidden) flags |= HintManager.HIDE_BY_TEXT_CHANGE;
Editor editorToShow = myEditor instanceof EditorWindow ? ((EditorWindow)myEditor).getDelegate() : myEditor;
//update presentation of descriptors synchronously
myComponent.update(mySingleParameterInfo);
// in case of injection we need to calculate position for EditorWindow
// also we need to show the hint in the main editor because of intention bulb
HintManagerImpl.getInstanceImpl().showEditorHint(myHint, editorToShow, pos.getFirst(), flags, 0, false, hintHint);
updateComponent();
}
/**
 * Recomputes the parameter info for the current caret position, notifies
 * listeners, and repositions (or hides) the hint accordingly.
 */
@Override
public void updateComponent() {
if (canBeDisposed()) {
Disposer.dispose(this);
return;
}
PsiFile file = PsiUtilBase.getPsiFileInEditor(myEditor, myProject);
int caretOffset = myEditor.getCaretModel().getOffset();
int offset = getCurrentOffset();
UpdateParameterInfoContextBase context = new UpdateParameterInfoContextBase(offset, file);
executeFindElementForUpdatingParameterInfo(context, elementForUpdating -> {
myParameterInfoControllerData.getHandler().processFoundElementForUpdatingParameterInfo(elementForUpdating, context);
if (elementForUpdating != null) {
executeUpdateParameterInfo(elementForUpdating, context, () -> {
// "Known parameter" = a single candidate (or an explicit highlight) plus a resolved parameter index.
boolean knownParameter = (myParameterInfoControllerData.getDescriptors().length == 1 ||
myParameterInfoControllerData.getHighlighted() != null) &&
myParameterInfoControllerData.getCurrentParameterIndex() != -1;
if (mySingleParameterInfo && !knownParameter && myHint.isVisible()) {
hideHint();
}
if (myKeepOnHintHidden && knownParameter && !myHint.isVisible()) {
AutoPopupController.getInstance(myProject).autoPopupParameterInfo(myEditor, null);
}
// Refresh the visible hint (or always, in headless/unit-test environments).
if (!myDisposed && (myHint.isVisible() && !myEditor.isDisposed() &&
(myEditor.getComponent().getRootPane() != null || ApplicationManager.getApplication().isUnitTestMode()) ||
ApplicationManager.getApplication().isHeadlessEnvironment())) {
Model result = myComponent.update(mySingleParameterInfo);
result.project = myProject;
result.range = myParameterInfoControllerData.getParameterOwner().getTextRange();
result.editor = myEditor;
for (ParameterInfoListener listener : ParameterInfoListener.EP_NAME.getExtensionList()) {
listener.hintUpdated(result);
}
if (ApplicationManager.getApplication().isHeadlessEnvironment()) return;
// Prefer the side the tooltip is already shown on to avoid visual jumps.
IdeTooltip tooltip = myHint.getCurrentIdeTooltip();
short position = tooltip != null
? toShort(tooltip.getPreferredPosition())
: HintManager.ABOVE;
Pair<Point, Short> pos = myProvider.getBestPointPosition(
myHint, elementForUpdating,
caretOffset, myEditor.getCaretModel().getVisualPosition(), position);
//noinspection MagicConstant
HintManagerImpl.adjustEditorHintPosition(myHint, myEditor, pos.getFirst(), pos.getSecond());
}
});
}
else {
// No element to update for: hide and, unless kept alive, dispose.
hideHint();
if (!myKeepOnHintHidden) {
Disposer.dispose(this);
}
}
});
}
/**
 * Runs the handler's {@code updateParameterInfo} in a non-blocking read
 * action and then applies UI changes on completion.
 *
 * @param elementForUpdating element the parameter info is computed for
 * @param context            update context carrying offset/owner state
 * @param continuation       optional follow-up to run after UI changes are applied
 */
private void executeUpdateParameterInfo(PsiElement elementForUpdating,
UpdateParameterInfoContextBase context,
Runnable continuation) {
// If the owner changed since the context was built, the hint is stale — remove it.
PsiElement parameterOwner = context.getParameterOwner();
if (parameterOwner != null && !parameterOwner.equals(elementForUpdating)) {
context.removeHint();
return;
}
runTask(myProject,
ReadAction.nonBlocking(() -> {
DumbModeAccessType.RELIABLE_DATA_ONLY.ignoreDumbMode(() -> myParameterInfoControllerData.getHandler().updateParameterInfo(elementForUpdating, context));
return elementForUpdating;
})
.withDocumentsCommitted(myProject)
// Cancel when the hint disappeared, the caret moved, or the element became invalid.
.expireWhen(
() -> !myKeepOnHintHidden && !myHint.isVisible() && !ApplicationManager.getApplication().isHeadlessEnvironment() ||
getCurrentOffset() != context.getOffset() ||
!elementForUpdating.isValid())
.expireWith(this),
element -> {
if (element != null && continuation != null) {
context.applyUIChanges();
continuation.run();
}
},
null,
myEditor);
}
/** Maps a balloon position onto the corresponding {@code HintManager} position flag; anything unmapped means "under". */
@HintManager.PositionFlags
private static short toShort(Position position) {
    short flag;
    switch (position) {
        case above:
            flag = HintManager.ABOVE;
            break;
        case atLeft:
            flag = HintManager.LEFT;
            break;
        case atRight:
            flag = HintManager.RIGHT;
            break;
        default:
            flag = HintManager.UNDER;
            break;
    }
    return flag;
}
/**
 * Moves the caret to the parameter at {@code offset} (skipping over inlay
 * hints) and refreshes the parameter info for the argument list there.
 */
@Override
protected void moveToParameterAtOffset(int offset) {
PsiFile file = PsiDocumentManager.getInstance(myProject).getPsiFile(myEditor.getDocument());
PsiElement argsList = findArgumentList(file, offset, -1);
if (argsList == null && !CodeInsightSettings.getInstance().SHOW_PARAMETER_NAME_HINTS_ON_COMPLETION) return;
if (!myHint.isVisible()) AutoPopupController.getInstance(myProject).autoPopupParameterInfo(myEditor, null);
// Land the caret right after a parameter-name inlay if one sits at the target.
offset = adjustOffsetToInlay(offset);
myEditor.getCaretModel().moveToOffset(offset);
myEditor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
myEditor.getSelectionModel().removeSelection();
if (argsList != null) {
executeUpdateParameterInfo(argsList, new UpdateParameterInfoContextBase(offset, file), null);
}
}
/**
 * If a parameter-name inlay hint sits within the whitespace around
 * {@code offset}, returns the inlay's offset (mapped through injection when
 * applicable); otherwise returns {@code offset} unchanged.
 */
private int adjustOffsetToInlay(int offset) {
CharSequence text = myEditor.getDocument().getImmutableCharSequence();
// WHITESPACE presumably comes from the superclass — expand over the surrounding whitespace run.
int hostWhitespaceStart = CharArrayUtil.shiftBackward(text, offset, WHITESPACE) + 1;
int hostWhitespaceEnd = CharArrayUtil.shiftForward(text, offset, WHITESPACE);
Editor hostEditor = myEditor;
// For injected editors, inlays live in the host document: translate the range.
if (myEditor instanceof EditorWindow) {
hostEditor = ((EditorWindow)myEditor).getDelegate();
hostWhitespaceStart = ((EditorWindow)myEditor).getDocument().injectedToHost(hostWhitespaceStart);
hostWhitespaceEnd = ((EditorWindow)myEditor).getDocument().injectedToHost(hostWhitespaceEnd);
}
List<Inlay> inlays = ParameterHintsPresentationManager.getInstance().getParameterHintsInRange(hostEditor,
hostWhitespaceStart, hostWhitespaceEnd);
// Use the first inlay that maps back into the injected fragment (if any).
for (Inlay inlay : inlays) {
int inlayOffset = inlay.getOffset();
if (myEditor instanceof EditorWindow) {
if (((EditorWindow)myEditor).getDocument().getHostRange(inlayOffset) == null) continue;
inlayOffset = ((EditorWindow)myEditor).getDocument().hostToInjected(inlayOffset);
}
return inlayOffset;
}
return offset;
}
/** Controls whether this controller survives after its hint is hidden. */
@Override
public void setPreservedOnHintHidden(boolean value) {
myKeepOnHintHidden = value;
}
/** Whether this controller survives after its hint is hidden. */
@Override
public boolean isPreservedOnHintHidden() {
return myKeepOnHintHidden;
}
/**
 * Chooses where to place the parameter info hint: under or above the caret,
 * avoiding overlap with the active completion lookup when one is shown.
 * <p>
 * Returned Point is in layered pane coordinate system.
 * Second value is a {@link HintManager.PositionFlags position flag}.
 *
 * @param editor            target editor
 * @param pos               visual position to anchor at
 * @param hint              the hint being placed
 * @param activeLookup      active completion lookup, or {@code null}
 * @param preferredPosition preferred {@code HintManager} position flag
 * @param showLookupHint    whether the hint is positioned relative to the lookup
 */
static Pair<Point, Short> chooseBestHintPosition(Editor editor,
VisualPosition pos,
LightweightHint hint,
LookupImpl activeLookup,
short preferredPosition,
boolean showLookupHint) {
// Headless/test mode: position is irrelevant.
if (ApplicationManager.getApplication().isUnitTestMode() ||
ApplicationManager.getApplication().isHeadlessEnvironment()) {
return Pair.pair(new Point(), HintManager.DEFAULT);
}
HintManagerImpl hintManager = HintManagerImpl.getInstanceImpl();
Dimension hintSize = hint.getComponent().getPreferredSize();
JComponent editorComponent = editor.getComponent();
JLayeredPane layeredPane = editorComponent.getRootPane().getLayeredPane();
// p1 = candidate point below the anchor, p2 = candidate point above it.
Point p1;
Point p2;
if (showLookupHint) {
p1 = hintManager.getHintPosition(hint, editor, HintManager.UNDER);
p2 = hintManager.getHintPosition(hint, editor, HintManager.ABOVE);
}
else {
p1 = HintManagerImpl.getHintPosition(hint, editor, pos, HintManager.UNDER);
p2 = HintManagerImpl.getHintPosition(hint, editor, pos, HintManager.ABOVE);
}
boolean isRealPopup = hint.isRealPopup();
boolean p1Ok, p2Ok;
if (!showLookupHint && activeLookup != null && activeLookup.isShown()) {
// A lookup is visible: a candidate is only "ok" if it fits vertically
// (with margin) and does not intersect the lookup's bounds.
Rectangle lookupBounds = activeLookup.getBounds();
p1Ok = p1.y + hintSize.height + 50 < layeredPane.getHeight() && !isHintIntersectWithLookup(p1, hintSize, lookupBounds, isRealPopup, HintManager.UNDER);
p2Ok = p2.y - hintSize.height - 70 >= 0 && !isHintIntersectWithLookup(p2, hintSize, lookupBounds, isRealPopup, HintManager.ABOVE);
if (activeLookup.isPositionedAboveCaret()) {
// Lookup sits above the caret: if "under" does not fit, force a popup
// placed directly above the lookup instead.
if (!p1Ok) {
var abovePoint = new Point(lookupBounds.x, lookupBounds.y - hintSize.height - 10);
SwingUtilities.convertPointToScreen(abovePoint, layeredPane);
abovePoint.move(lookupBounds.x, lookupBounds.y - hintSize.height - 10);
hint.setForceShowAsPopup(true);
return new Pair<>(abovePoint, HintManager.DEFAULT);
}
}
else {
// Lookup sits below the caret: if "above" does not fit, try a popup
// under the lookup; if even that leaves the screen, fall back to above.
if (!p2Ok) {
var underPoint = new Point(lookupBounds.x, lookupBounds.y + lookupBounds.height + 10);
SwingUtilities.convertPointToScreen(underPoint, layeredPane);
var screenRectangle = new Rectangle(underPoint, hintSize);
if (isFitTheScreen(screenRectangle)) {
// calculate if hint can be shown under lookup
underPoint.move(lookupBounds.x, lookupBounds.y + lookupBounds.height + 10);
hint.setForceShowAsPopup(true);
return new Pair<>(underPoint, HintManager.DEFAULT);
}
else {
hint.setForceShowAsPopup(true);
var abovePoint = new Point(p2.x - hintSize.width / 2, p2.y - hintSize.height);
return new Pair<>(abovePoint, HintManager.ABOVE);
}
}
}
}
else {
// No lookup to avoid: a candidate is ok if it stays within the layered pane.
p1Ok = p1.y + hintSize.height < layeredPane.getHeight();
p2Ok = p2.y >= 0;
}
if (isRealPopup) {
hint.setForceShowAsPopup(false);
}
// Honor the caller's preferred side when it fits.
if (!showLookupHint) {
if (preferredPosition != HintManager.DEFAULT) {
if (preferredPosition == HintManager.ABOVE) {
if (p2Ok) return new Pair<>(p2, HintManager.ABOVE);
}
else if (preferredPosition == HintManager.UNDER) {
if (p1Ok) return new Pair<>(p1, HintManager.UNDER);
}
}
}
if (p1Ok) return new Pair<>(p1, HintManager.UNDER);
if (p2Ok) return new Pair<>(p2, HintManager.ABOVE);
// Neither side fits: pick whichever has more vertical room.
// NOTE(review): the flags here look swapped relative to the points (the
// top-anchored point is paired with UNDER, p1 with ABOVE) — confirm intent.
int underSpace = layeredPane.getHeight() - p1.y;
int aboveSpace = p2.y;
return aboveSpace > underSpace ? new Pair<>(new Point(p2.x, 0), HintManager.UNDER) : new Pair<>(p1,
HintManager.ABOVE);
}
/** Returns {@code true} when the screen containing the rectangle's center fully contains the rectangle. */
private static boolean isFitTheScreen(Rectangle aRectangle) {
    int centerX = aRectangle.x + aRectangle.width / 2;
    int centerY = aRectangle.y + aRectangle.height / 2;
    Rectangle screenBounds = ScreenUtil.getScreenRectangle(centerX, centerY);
    return screenBounds.contains(aRectangle);
}
/**
 * Tests whether a hint placed at {@code hintPoint} would overlap the lookup.
 * Real popups are anchored at their top-left corner; lightweight hints are
 * centered horizontally and, when shown above, anchored at their bottom edge.
 */
private static boolean isHintIntersectWithLookup(Point hintPoint,
                                                 Dimension hintSize,
                                                 Rectangle lookupBounds,
                                                 boolean isRealPopup,
                                                 short hintPosition) {
    Point topLeft;
    if (isRealPopup) {
        topLeft = hintPoint;
    }
    else if (hintPosition == HintManager.ABOVE) {
        topLeft = new Point(hintPoint.x - hintSize.width / 2, hintPoint.y - hintSize.height);
    }
    else {
        topLeft = new Point(hintPoint.x - hintSize.width / 2, hintPoint.y);
    }
    return new Rectangle(topLeft, hintSize).intersects(lookupBounds);
}
@Override
protected void hideHint() {
    myHint.hide();
    // Tell every registered listener that the parameter info hint went away.
    for (ParameterInfoListener eachListener : ParameterInfoListener.EP_NAME.getExtensionList()) {
        eachListener.hintHidden(myProject);
    }
}
/**
 * Computes the best on-screen point for the hint, caching the last result so
 * repeated queries at the same offset (with unchanged lookup bounds and hint
 * size) are answered without recomputation.
 */
private static class MyBestLocationPointProvider {
private final Editor myEditor;
// Cache of the last computation; previousOffset == -1 means "no cached value yet".
private int previousOffset = -1;
private Rectangle previousLookupBounds;
private Dimension previousHintSize;
private Point previousBestPoint;
private Short previousBestPosition;
MyBestLocationPointProvider(Editor editor) {
myEditor = editor;
}
/**
 * Returns the point (layered-pane coordinates) and position flag for the hint.
 *
 * @param hint              hint being positioned
 * @param list              argument-list element, or {@code null}
 * @param offset            caret offset to anchor at
 * @param pos               precomputed visual position, or {@code null} to derive it
 * @param preferredPosition preferred {@code HintManager} position flag
 */
@NotNull
private Pair<Point, Short> getBestPointPosition(LightweightHint hint,
PsiElement list,
int offset,
VisualPosition pos,
short preferredPosition) {
// Clamp the offset into the argument list interior (excluding the parens).
if (list != null) {
TextRange range = list.getTextRange();
TextRange rangeWithoutParens = TextRange.from(range.getStartOffset() + 1, Math.max(range.getLength() - 2, 0));
if (!rangeWithoutParens.contains(offset)) {
offset = offset < rangeWithoutParens.getStartOffset() ? rangeWithoutParens.getStartOffset() : rangeWithoutParens.getEndOffset();
pos = null;
}
}
LookupImpl activeLookup = (LookupImpl)LookupManager.getActiveLookup(myEditor);
Rectangle lookupBounds = !ApplicationManager.getApplication().isUnitTestMode()
&& activeLookup != null
&& activeLookup.isShown()
? activeLookup.getBounds()
: null;
Dimension hintSize = hint.getSize();
// Reuse the cached answer when nothing relevant changed since last time.
boolean lookupPositionChanged = lookupBounds != null && !lookupBounds.equals(previousLookupBounds);
boolean hintSizeChanged = !hintSize.equals(previousHintSize);
if (previousOffset == offset && !lookupPositionChanged && !hintSizeChanged) {
return Pair.create(previousBestPoint, previousBestPosition);
}
// Multiline argument lists are always shown above; single-line ones get full placement logic.
boolean isMultiline = list != null && StringUtil.containsAnyChar(list.getText(), "\n\r");
if (pos == null) pos = EditorUtil.inlayAwareOffsetToVisualPosition(myEditor, offset);
Pair<Point, Short> position;
if (!isMultiline) {
position = chooseBestHintPosition(myEditor, pos, hint, activeLookup, preferredPosition, false);
}
else {
Point p = HintManagerImpl.getHintPosition(hint, myEditor, pos, HintManager.ABOVE);
position = new Pair<>(p, HintManager.ABOVE);
}
// Remember the inputs and result for the next query.
previousBestPoint = position.getFirst();
previousBestPosition = position.getSecond();
previousOffset = offset;
previousLookupBounds = lookupBounds;
previousHintSize = hintSize;
return position;
}
}
/**
 * Thin wrapper around the parameter info component that forwards the
 * styling-relevant properties of its single child.
 */
private static class WrapperPanel extends JPanel {
    WrapperPanel() {
        super(new BorderLayout());
        setBorder(JBUI.Borders.empty());
    }

    /** The wrapped component, or {@code null} while the panel is empty. */
    private Component wrapped() {
        return getComponentCount() == 0 ? null : getComponent(0);
    }

    // foreground/background/font are used to style the popup (HintManagerImpl.createHintHint)
    @Override
    public Color getForeground() {
        Component child = wrapped();
        return child == null ? super.getForeground() : child.getForeground();
    }

    @Override
    public Color getBackground() {
        Component child = wrapped();
        return child == null ? super.getBackground() : child.getBackground();
    }

    @Override
    public Font getFont() {
        Component child = wrapped();
        return child == null ? super.getFont() : child.getFont();
    }

    // for test purposes
    @Override
    public String toString() {
        Component child = wrapped();
        return child == null ? "<empty>" : child.toString();
    }
}
}
| |
/*
* Licensed to GraphHopper and Peter Karich under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.routing.util;
import static com.graphhopper.routing.util.PriorityCode.*;
import com.graphhopper.reader.Relation;
import com.graphhopper.reader.Way;
import com.graphhopper.util.Helper;
import com.graphhopper.util.InstructionAnnotation;
import com.graphhopper.util.Translation;
import java.util.*;
/**
* Defines bit layout of bicycles (not motorcycles) for speed, access and relations (network).
* <p/>
* @author Peter Karich
* @author Nop
* @author ratrun
*/
public class BikeCommonFlagEncoder extends AbstractFlagEncoder
{
/**
* Reports whether this edge is unpaved.
*/
public static final int K_UNPAVED = 100;
protected static final int PUSHING_SECTION_SPEED = 4;
private long unpavedBit = 0;
// Pushing section highways are parts where you need to get off your bike and push it (German: Schiebestrecke)
protected final HashSet<String> pushingSections = new HashSet<String>();
protected final HashSet<String> oppositeLanes = new HashSet<String>();
protected final Set<String> preferHighwayTags = new HashSet<String>();
protected final Set<String> avoidHighwayTags = new HashSet<String>();
protected final Set<String> unpavedSurfaceTags = new HashSet<String>();
private final Map<String, Integer> trackTypeSpeed = new HashMap<String, Integer>();
private final Map<String, Integer> surfaceSpeed = new HashMap<String, Integer>();
private final Set<String> roadValues = new HashSet<String>();
private final Map<String, Integer> highwaySpeed = new HashMap<String, Integer>();
// convert network tag of bicycle routes into a way route code
private final Map<String, Integer> bikeNetworkToCode = new HashMap<String, Integer>();
protected EncodedValue relationCodeEncoder;
private EncodedValue wayTypeEncoder;
private EncodedValue preferWayEncoder;
/**
 * Builds the common bike encoder configuration: access restrictions, barrier
 * handling, and the speed/preference tables shared by all bike profiles.
 *
 * @param speedBits    number of bits used to store the speed
 * @param speedFactor  factor mapping stored values to km/h
 * @param maxTurnCosts maximum turn cost value supported
 */
protected BikeCommonFlagEncoder( int speedBits, double speedFactor, int maxTurnCosts )
{
super(speedBits, speedFactor, maxTurnCosts);
// strict set, usually vehicle and agricultural/forestry are ignored by cyclists
restrictions.addAll(Arrays.asList("bicycle", "access"));
restrictedValues.add("private");
restrictedValues.add("no");
restrictedValues.add("restricted");
restrictedValues.add("military");
// OSM access values that explicitly allow bicycles
intendedValues.add("yes");
intendedValues.add("designated");
intendedValues.add("official");
intendedValues.add("permissive");
// contra-flow cycling markers on oneway streets
oppositeLanes.add("opposite");
oppositeLanes.add("opposite_lane");
oppositeLanes.add("opposite_track");
setBlockByDefault(false);
// barriers a cyclist can usually pass (potential) vs. never pass (absolute)
potentialBarriers.add("gate");
// potentialBarriers.add("lift_gate");
potentialBarriers.add("swing_gate");
absoluteBarriers.add("kissing_gate");
absoluteBarriers.add("stile");
absoluteBarriers.add("turnstile");
// make intermodal connections possible but mark as pushing section
acceptedRailways.add("platform");
// surface values treated as unpaved (affect wayType/unpaved flag)
unpavedSurfaceTags.add("unpaved");
unpavedSurfaceTags.add("gravel");
unpavedSurfaceTags.add("ground");
unpavedSurfaceTags.add("dirt");
unpavedSurfaceTags.add("grass");
unpavedSurfaceTags.add("compacted");
unpavedSurfaceTags.add("earth");
unpavedSurfaceTags.add("fine_gravel");
unpavedSurfaceTags.add("grass_paver");
unpavedSurfaceTags.add("ice");
unpavedSurfaceTags.add("mud");
unpavedSurfaceTags.add("salt");
unpavedSurfaceTags.add("sand");
unpavedSurfaceTags.add("wood");
// highway values classified as ordinary roads (for way-type classification)
roadValues.add("living_street");
roadValues.add("road");
roadValues.add("service");
roadValues.add("unclassified");
roadValues.add("residential");
roadValues.add("trunk");
roadValues.add("trunk_link");
roadValues.add("primary");
roadValues.add("primary_link");
roadValues.add("secondary");
roadValues.add("secondary_link");
roadValues.add("tertiary");
roadValues.add("tertiary_link");
maxPossibleSpeed = 30;
// speeds (km/h) by tracktype quality grade
setTrackTypeSpeed("grade1", 18); // paved
setTrackTypeSpeed("grade2", 12); // now unpaved ...
setTrackTypeSpeed("grade3", 8);
setTrackTypeSpeed("grade4", 6);
setTrackTypeSpeed("grade5", 4); // like sand/grass
// speeds (km/h) by surface value
setSurfaceSpeed("paved", 18);
setSurfaceSpeed("asphalt", 18);
setSurfaceSpeed("cobblestone", 8);
setSurfaceSpeed("cobblestone:flattened", 10);
setSurfaceSpeed("sett", 10);
setSurfaceSpeed("concrete", 18);
setSurfaceSpeed("concrete:lanes", 16);
setSurfaceSpeed("concrete:plates", 16);
setSurfaceSpeed("paving_stones", 12);
setSurfaceSpeed("paving_stones:30", 12);
setSurfaceSpeed("unpaved", 14);
setSurfaceSpeed("compacted", 16);
setSurfaceSpeed("dirt", 10);
setSurfaceSpeed("earth", 12);
setSurfaceSpeed("fine_gravel", 18);
setSurfaceSpeed("grass", 8);
setSurfaceSpeed("grass_paver", 8);
setSurfaceSpeed("gravel", 12);
setSurfaceSpeed("ground", 12);
setSurfaceSpeed("ice", PUSHING_SECTION_SPEED / 2);
setSurfaceSpeed("metal", 10);
setSurfaceSpeed("mud", 10);
setSurfaceSpeed("pebblestone", 16);
setSurfaceSpeed("salt", 6);
setSurfaceSpeed("sand", 6);
setSurfaceSpeed("wood", 6);
// speeds (km/h) by highway type
setHighwaySpeed("living_street", 6);
setHighwaySpeed("steps", PUSHING_SECTION_SPEED / 2);
setHighwaySpeed("cycleway", 18);
setHighwaySpeed("path", 18);
setHighwaySpeed("footway", 6);
setHighwaySpeed("pedestrian", 6);
setHighwaySpeed("track", 12);
setHighwaySpeed("service", 14);
setHighwaySpeed("residential", 18);
// no other highway applies:
setHighwaySpeed("unclassified", 16);
// unknown road:
setHighwaySpeed("road", 12);
setHighwaySpeed("trunk", 18);
setHighwaySpeed("trunk_link", 18);
setHighwaySpeed("primary", 18);
setHighwaySpeed("primary_link", 18);
setHighwaySpeed("secondary", 18);
setHighwaySpeed("secondary_link", 18);
setHighwaySpeed("tertiary", 18);
setHighwaySpeed("tertiary_link", 18);
setHighwaySpeed("nohighway",6);
// special case see tests and #191
setHighwaySpeed("motorway", 18);
setHighwaySpeed("motorway_link", 18);
avoidHighwayTags.add("motorway");
avoidHighwayTags.add("motorway_link");
// route-network priorities: international/national > regional > local networks
setCyclingNetworkPreference("icn", PriorityCode.BEST.getValue());
setCyclingNetworkPreference("ncn", PriorityCode.BEST.getValue());
setCyclingNetworkPreference("rcn", PriorityCode.VERY_NICE.getValue());
setCyclingNetworkPreference("lcn", PriorityCode.PREFER.getValue());
setCyclingNetworkPreference("mtb", PriorityCode.UNCHANGED.getValue());
setCyclingNetworkPreference("deprecated", PriorityCode.AVOID_AT_ALL_COSTS.getValue());
}
@Override
public int defineWayBits( int index, int shift )
{
// Registers the per-way encoded values and returns the next free bit position.
// Bit layout added here (in order): speed, unpaved flag (1 bit),
// way type (2 bits), priority/preferWay (3 bits).
// first two bits are reserved for route handling in superclass
shift = super.defineWayBits(index, shift);
// default speed is the registered "cycleway" speed; maxPossibleSpeed caps the range
speedEncoder = new EncodedDoubleValue("Speed", shift, speedBits, speedFactor, highwaySpeed.get("cycleway"),
maxPossibleSpeed);
shift += speedEncoder.getBits();
unpavedBit = 1L << shift++;
// 2 bits
wayTypeEncoder = new EncodedValue("WayType", shift, 2, 1, 0, 3, true);
shift += wayTypeEncoder.getBits();
// 3 bits (values 0..7) read back via PriorityWeighting.KEY in get/setLong
preferWayEncoder = new EncodedValue("PreferWay", shift, 3, 1, 0, 7);
shift += preferWayEncoder.getBits();
return shift;
}
@Override
public int defineRelationBits( int index, int shift )
{
    // Reserve 3 bits for the cycling-network relation code (values 0..7).
    final EncodedValue codeEncoder = new EncodedValue("RelationCode", shift, 3, 1, 0, 7);
    relationCodeEncoder = codeEncoder;
    return shift + codeEncoder.getBits();
}
@Override
public long acceptWay( Way way )
{
// Decides whether this way is usable for cycling at all; returns 0 to reject,
// otherwise the accept bit (plus the ferry bit for ferry routes).
String highwayValue = way.getTag("highway");
if (highwayValue == null)
{
if (way.hasTag("route", ferries))
{
// if bike is NOT explicitly tagged allow bike but only if foot is not specified
String bikeTag = way.getTag("bicycle");
if (bikeTag == null && !way.hasTag("foot") || "yes".equals(bikeTag))
return acceptBit | ferryBit;
}
// special case not for all acceptedRailways, only platform
if (way.hasTag("railway", "platform"))
return acceptBit;
return 0;
}
// only highway values with a registered speed are usable at all
if (!highwaySpeed.containsKey(highwayValue))
return 0;
// use the way if it is tagged for bikes
if (way.hasTag("bicycle", intendedValues))
return acceptBit;
// motorways are accepted only if explicitly tagged for bike usage (handled above)
if ("motorway".equals(highwayValue) || "motorway_link".equals(highwayValue))
return 0;
if (way.hasTag("motorroad", "yes"))
return 0;
// do not use fords with normal bikes, flagged fords are included above
if (isBlockFords() && (way.hasTag("highway", "ford") || way.hasTag("ford")))
return 0;
// check access restrictions
if (way.hasTag(restrictions, restrictedValues))
return 0;
// do not accept railways (sometimes incorrectly mapped!)
if (way.hasTag("railway") && !way.hasTag("railway", acceptedRailways))
return 0;
String sacScale = way.getTag("sac_scale");
if (sacScale != null)
{
// hiking-grade cycleways are fine; harder SAC scales are rejected below
if ((way.hasTag("highway", "cycleway"))
&& (way.hasTag("sac_scale", "hiking")))
return acceptBit;
if (!allowedSacScale(sacScale))
return 0;
}
return acceptBit;
}
/**
 * Whether a way with the given sac_scale value is still rideable with an
 * ordinary bike. Other scales are nearly impossible by an ordinary bike,
 * see http://wiki.openstreetmap.org/wiki/Key:sac_scale
 */
boolean allowedSacScale( String sacScale )
{
    if (sacScale == null)
        return false;
    return sacScale.equals("hiking");
}
@Override
public long handleRelationTags( Relation relation, long oldRelationFlags )
{
// Maps a bicycle route relation's network (icn/ncn/rcn/lcn/...) to a priority
// code; ferry routes get a slight penalty. The strongest code seen wins.
int code = 0;
if (relation.hasTag("route", "bicycle"))
{
Integer val = bikeNetworkToCode.get(relation.getTag("network"));
if (val != null)
code = val;
} else if (relation.hasTag("route", "ferry"))
{
code = PriorityCode.AVOID_IF_POSSIBLE.getValue();
}
int oldCode = (int) relationCodeEncoder.getValue(oldRelationFlags);
// NOTE(review): on upgrade the flags are rebuilt from 0, dropping any other
// relation bits — appears intentional since only the code is stored here.
if (oldCode < code)
return relationCodeEncoder.setValue(0, code);
return oldRelationFlags;
}
@Override
public long handleWayTags( Way way, long allowed, long relationFlags )
{
// Encodes speed, priority, bike-specific info and direction bits for an
// accepted way; ferries get their speed from the ferry tags instead.
if (!isAccept(allowed))
return 0;
long encoded = 0;
if (!isFerry(allowed))
{
double speed = getSpeed(way);
int priorityFromRelation = 0;
if (relationFlags != 0)
priorityFromRelation = (int) relationCodeEncoder.getValue(relationFlags);
encoded = setLong(encoded, PriorityWeighting.KEY, handlePriority(way, priorityFromRelation));
// bike maxspeed handling is different from car as we don't increase speed
speed = applyMaxSpeed(way, speed, false);
encoded = handleSpeed(way, speed, encoded);
encoded = handleBikeRelated(way, encoded, relationFlags > UNCHANGED.getValue());
boolean isRoundabout = way.hasTag("junction", "roundabout");
if (isRoundabout)
{
encoded = setBool(encoded, K_ROUNDABOUT, true);
}
} else
{
// ferry speed is estimated from the living_street/track/primary defaults
encoded = handleFerryTags(way,
highwaySpeed.get("living_street"),
highwaySpeed.get("track"),
highwaySpeed.get("primary"));
encoded |= directionBitMask;
}
return encoded;
}
int getSpeed( Way way )
{
// Determines the speed in km/h from (in order of precedence) the surface,
// tracktype and highway tags, defaulting to the pushing-section speed.
int speed = PUSHING_SECTION_SPEED;
String s = way.getTag("surface");
if (!Helper.isEmpty(s))
{
Integer sInt = surfaceSpeed.get(s);
if (sInt != null)
speed = sInt;
} else
{
String tt = way.getTag("tracktype");
if (!Helper.isEmpty(tt))
{
Integer tInt = trackTypeSpeed.get(tt);
if (tInt != null)
speed = tInt;
} else
{
String highway = way.getTag("highway");
if (!Helper.isEmpty(highway))
{
Integer hwInt = highwaySpeed.get(highway);
if (hwInt != null)
{
// service ways are slowed down to the living_street speed
if (way.getTag("service") == null)
speed = hwInt;
else
speed = highwaySpeed.get("living_street");
}
}
}
}
// Until now we assumed that the way is no pushing section
// Now we check, but only in case that our speed is bigger compared to the PUSHING_SECTION_SPEED
if ((speed > PUSHING_SECTION_SPEED)
&& (!way.hasTag("bicycle", intendedValues) && way.hasTag("highway", pushingSections)))
{
if (way.hasTag("highway", "steps"))
speed = PUSHING_SECTION_SPEED / 2;
else
speed = PUSHING_SECTION_SPEED;
}
return speed;
}
@Override
public InstructionAnnotation getAnnotation( long flags, Translation tr )
{
    // Pavement type: 0 = paved, 1 = unpaved.
    final int pavement = isBool(flags, K_UNPAVED) ? 1 : 0;
    final int wayType = (int) wayTypeEncoder.getValue(flags);
    return new InstructionAnnotation(0, getWayName(pavement, wayType, tr));
}
/**
 * Builds the localized annotation text for a way from its pavement type
 * (0 = paved, 1 = unpaved) and its way-type code (0..3).
 */
String getWayName( int pavementType, int wayType, Translation tr )
{
    // Translated label for the way type; empty for unknown codes.
    String typeLabel = "";
    if (wayType == 0)
        typeLabel = tr.tr("road");
    else if (wayType == 1)
        typeLabel = tr.tr("off_bike");
    else if (wayType == 2)
        typeLabel = tr.tr("cycleway");
    else if (wayType == 3)
        typeLabel = tr.tr("way");
    // Optional translated "unpaved" qualifier.
    final String pavementLabel = (pavementType == 1) ? tr.tr("unpaved") : "";
    if (pavementLabel.isEmpty())
    {
        // Plain roads and generic ways carry no annotation at all.
        return (wayType == 0 || wayType == 3) ? "" : typeLabel;
    }
    return typeLabel.isEmpty() ? pavementLabel : typeLabel + ", " + pavementLabel;
}
/**
 * In this method we prefer cycleways or roads with designated bike access and
 * avoid big roads or roads with trams or pedestrian.
 * <p>
 * @return new priority based on priorityFromRelation and on the tags in OSMWay.
 */
protected int handlePriority( Way way, int priorityFromRelation )
{
    TreeMap<Double, Integer> weightToPrioMap = new TreeMap<Double, Integer>();
    if (priorityFromRelation != 0)
        weightToPrioMap.put(110d, priorityFromRelation);
    else
        weightToPrioMap.put(0d, UNCHANGED.getValue());
    collect(way, weightToPrioMap);
    // the entry with the highest weight wins
    return weightToPrioMap.lastEntry().getValue();
}
/**
 * @param weightToPrioMap associate a weight with every priority. This sorted map allows
 * subclasses to 'insert' more important priorities as well as overwrite determined priorities.
 */
void collect( Way way, TreeMap<Double, Integer> weightToPrioMap )
{
String service = way.getTag("service");
String highway = way.getTag("highway");
// designated bike access and cycleways are strongly preferred (weight 100)
if (way.hasTag("bicycle", "designated"))
weightToPrioMap.put(100d, PREFER.getValue());
if ("cycleway".equals(highway))
weightToPrioMap.put(100d, VERY_NICE.getValue());
double maxSpeed = getMaxSpeed(way);
// quiet/slow roads are preferred, but not their tunnels
if (preferHighwayTags.contains(highway) || maxSpeed > 0 && maxSpeed <= 30)
{
weightToPrioMap.put(40d, PREFER.getValue());
if (way.hasTag("tunnel", intendedValues))
weightToPrioMap.put(40d, UNCHANGED.getValue());
}
// ways where the bike usually must be pushed are avoided if possible
if (pushingSections.contains(highway)
|| way.hasTag("bicycle", "use_sidepath")
|| "parking_aisle".equals(service))
{
weightToPrioMap.put(50d, AVOID_IF_POSSIBLE.getValue());
}
// big/fast roads are only used to reach the destination; their tunnels even less
if (avoidHighwayTags.contains(highway) || maxSpeed > 80)
{
weightToPrioMap.put(50d, REACH_DEST.getValue());
if (way.hasTag("tunnel", intendedValues))
weightToPrioMap.put(50d, AVOID_AT_ALL_COSTS.getValue());
}
// tram rails are dangerous for bike tires
if (way.hasTag("railway", "tram"))
weightToPrioMap.put(50d, AVOID_AT_ALL_COSTS.getValue());
}
/**
 * Handle surface and wayType encoding: sets the unpaved flag and stores the
 * way-type code (road / cycleway / pushing section / other small way) that
 * is later used for instruction annotations.
 */
long handleBikeRelated( Way way, long encoded, boolean partOfCycleRelation )
{
    final String surface = way.getTag("surface");
    final String highway = way.getTag("highway");
    final String trackType = way.getTag("tracktype");
    final boolean pushing = isPushingSection(way);
    // Unpaved: a non-grade1 track, a path without surface info, or any way
    // whose surface tag is in the unpaved set.
    final boolean unpaved = ("track".equals(highway) && !"grade1".equals(trackType))
            || ("path".equals(highway) && surface == null)
            || unpavedSurfaceTags.contains(surface);
    if (unpaved)
        encoded = setBool(encoded, K_UNPAVED, true);
    // Determine the way type; later, more specific rules override earlier ones.
    WayType type = WayType.OTHER_SMALL_WAY;
    if ((pushing && !partOfCycleRelation) || "steps".equals(highway))
        type = WayType.PUSHING_SECTION;
    if (way.hasTag("bicycle", intendedValues))
    {
        type = (pushing && !way.hasTag("bicycle", "designated"))
                ? WayType.OTHER_SMALL_WAY : WayType.CYCLEWAY;
    } else if ("cycleway".equals(highway))
        type = WayType.CYCLEWAY;
    else if (roadValues.contains(highway))
        type = WayType.ROAD;
    return wayTypeEncoder.setValue(encoded, type.getValue());
}
@Override
public long setBool( long flags, int key, boolean value )
{
    // Only the unpaved flag is handled here; everything else is delegated.
    if (key == K_UNPAVED)
        return value ? (flags | unpavedBit) : (flags & ~unpavedBit);
    return super.setBool(flags, key, value);
}
@Override
public boolean isBool( long flags, int key )
{
    // Only the unpaved flag is handled here; everything else is delegated.
    if (key == K_UNPAVED)
        return (flags & unpavedBit) != 0;
    return super.isBool(flags, key);
}
@Override
public double getDouble( long flags, int key )
{
    if (key != PriorityWeighting.KEY)
        return super.getDouble(flags, key);
    // Normalize the stored priority against BEST; a stored 0 means "not set"
    // and maps to the UNCHANGED default.
    final double prio = preferWayEncoder.getValue(flags);
    if (prio == 0)
        return UNCHANGED.getValue() / (double) BEST.getValue();
    return prio / BEST.getValue();
}
@Override
public long getLong( long flags, int key )
{
    // The raw priority value; other keys are delegated to the superclass.
    if (key == PriorityWeighting.KEY)
        return preferWayEncoder.getValue(flags);
    return super.getLong(flags, key);
}
@Override
public long setLong( long flags, int key, long value )
{
    // Stores the raw priority value; other keys are delegated to the superclass.
    if (key == PriorityWeighting.KEY)
        return preferWayEncoder.setValue(flags, value);
    return super.setLong(flags, key, value);
}
/** A pushing section is a way where cyclists typically have to dismount. */
boolean isPushingSection( Way way )
{
    if (way.hasTag("highway", pushingSections))
        return true;
    return way.hasTag("railway", "platform");
}
protected long handleSpeed( Way way, double speed, long encoded )
{
// Stores the speed and the allowed travel direction(s) into the flags.
encoded = setSpeed(encoded, speed);
// handle oneways
boolean isOneway = way.hasTag("oneway", oneways)
|| way.hasTag("vehicle:backward")
|| way.hasTag("vehicle:forward");
// a oneway (or roundabout) applies to bikes unless explicitly lifted via
// oneway:bicycle=no, bicycle:backward, or an opposite-lane cycleway
if ((isOneway || way.hasTag("junction", "roundabout"))
&& !way.hasTag("oneway:bicycle", "no")
&& !way.hasTag("bicycle:backward")
&& !way.hasTag("cycleway", oppositeLanes))
{
boolean isBackward = way.hasTag("oneway", "-1")
|| way.hasTag("vehicle:forward", "no");
if (isBackward)
encoded |= backwardBit;
else
encoded |= forwardBit;
} else
{
// both directions allowed
encoded |= directionBitMask;
}
return encoded;
}
/**
 * Way categories stored in the 2-bit way-type encoder and used when building
 * instruction annotations.
 */
private enum WayType
{
    ROAD(0),
    PUSHING_SECTION(1),
    CYCLEWAY(2),
    OTHER_SMALL_WAY(3);

    private final int value;

    WayType( int value )
    {
        this.value = value;
    }

    /** @return the numeric code stored in the way flags */
    public int getValue()
    {
        return value;
    }
}
/** Registers the default speed (km/h) for the given highway tag value. */
protected void setHighwaySpeed( String highway, int speed )
{
    highwaySpeed.put(highway, Integer.valueOf(speed));
}
/**
 * Looks up the default speed (km/h) for a highway tag value.
 * NOTE(review): auto-unboxing throws a NullPointerException for unknown keys —
 * callers must only pass keys previously registered via setHighwaySpeed.
 */
protected int getHighwaySpeed( String key )
{
return highwaySpeed.get(key);
}
/** Registers the default speed (km/h) for the given tracktype tag value. */
void setTrackTypeSpeed( String tracktype, int speed )
{
    trackTypeSpeed.put(tracktype, Integer.valueOf(speed));
}
/** Registers the default speed (km/h) for the given surface tag value. */
void setSurfaceSpeed( String surface, int speed )
{
    surfaceSpeed.put(surface, Integer.valueOf(speed));
}
/** Maps a cycling network tag (icn/ncn/rcn/lcn/...) to its priority code. */
void setCyclingNetworkPreference( String network, int code )
{
    bikeNetworkToCode.put(network, Integer.valueOf(code));
}
/** Marks a highway tag value as a pushing section (cyclists must dismount). */
void addPushingSection( String highway )
{
    pushingSections.add(highway);
}
@Override
public boolean supports( Class<?> feature )
{
    // Everything the superclass supports is supported here as well.
    if (super.supports(feature))
        return true;
    // Additionally support plain priority weighting, but not the
    // avoidance-aware variant.
    final boolean priority = PriorityWeighting.class.isAssignableFrom(feature);
    final boolean avoidances = PriorityWithAvoidancesWeighting.class.isAssignableFrom(feature);
    return priority && !avoidances;
}
}
| |
/**
* Copyright 2012 The PlayN Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package playn.tests.core;
import pythagoras.f.FloatMath;
import playn.core.*;
import playn.scene.*;
/**
 * Exercises the various {@link Canvas} drawing features (gradients, patterns,
 * paths, text, transforms, repeated textures, per-frame canvas updates) by
 * rendering each case into a small labeled tile laid out in a grid.
 */
public class CanvasTest extends Test {
// gap between tiles in the layout grid
private final static float GAP = 10;
// layout cursor: next tile position and tallest tile in the current row
private float nextX, nextY, maxY;
// canvas layer redrawn once per second to demonstrate runtime canvas updates
private CanvasLayer time;
private int lastSecs;
public CanvasTest (TestsGame game) {
super(game, "Canvas", "Tests various Canvas rendering features.");
}
@Override public void init() {
nextX = nextY = GAP;
lastSecs = -1;
addTestCanvas("radial fill gradient", 100, 100, canvas -> {
Gradient.Config cfg = new Gradient.Radial(
0, 0, 50, new int[] { 0xFFFF0000, 0xFF00FF00 }, new float[] { 0, 1 });
canvas.setFillGradient(canvas.createGradient(cfg));
canvas.fillRect(0, 0, 100, 100);
});
addTestCanvas("linear fill gradient", 100, 100, canvas -> {
Gradient.Config cfg = new Gradient.Linear(
0, 0, 100, 100, new int[] { 0xFF0000FF, 0xFF00FF00 }, new float[] { 0, 1 });
canvas.setFillGradient(canvas.createGradient(cfg));
canvas.fillRect(0, 0, 100, 100);
});
addTestCanvas("image fill pattern", 100, 100, "images/tile.png", (canvas, tile) -> {
canvas.setFillPattern(tile.createPattern(true, true));
canvas.fillRect(0, 0, 100, 100);
});
addTestCanvas("lines and circles", 100, 100, canvas -> {
canvas.setFillColor(0xFF99CCFF);
canvas.fillRect(0, 0, 100, 100);
// draw a point and some lines
canvas.setStrokeColor(0xFFFF0000);
canvas.drawPoint(50, 50);
canvas.drawLine(0, 25, 100, 25);
canvas.drawLine(0, 75, 100, 75);
canvas.drawLine(25, 0, 25, 100);
canvas.drawLine(75, 0, 75, 100);
// stroke and fill a circle
canvas.strokeCircle(25, 75, 10);
canvas.setFillColor(0xFF0000FF);
canvas.fillCircle(75, 75, 10);
});
addTestCanvas("image, subimage", 100, 100, "images/orange.png", (canvas, orange) -> {
canvas.setFillColor(0xFF99CCFF);
canvas.fillRect(0, 0, 100, 100);
// draw an image normally, scaled, cropped, cropped and scaled, etc.
float half = 37/2f;
canvas.draw(orange, 10, 10);
canvas.draw(orange, 55, 10, 37, 37, half, half, half, half);
canvas.draw(orange, 10, 55, 37, 37, half, 0, half, half);
canvas.draw(orange, 55, 55, 37, 37, half, half/2, half, half);
});
// a small canvas turned into a repeating texture for the ImageLayer test
Canvas repcan = createCanvas(30, 30, canvas -> {
canvas.setFillColor(0xFF99CCFF).fillCircle(15, 15, 15);
canvas.setStrokeColor(0xFF000000).strokeRect(0, 0, 30, 30);
});
Texture.Config repeat = Texture.Config.DEFAULT.repeat(true, true);
Texture reptex = repcan.toTexture(repeat);
addTestLayer("ImageLayer repeat x/y", 100, 100, new ImageLayer(reptex).setSize(100, 100));
// this layer is repainted once per second by the paint connection below
time = new CanvasLayer(game.graphics, 100, 100);
addTestLayer("updated canvas", 100, 100, time);
Gradient linear = repcan.createGradient(new Gradient.Linear(
0, 0, 100, 100, new int[] { 0xFF0000FF, 0xFF00FF00 }, new float[] { 0, 1 }));
float dotRadius = 40;
Gradient radial = repcan.createGradient(new Gradient.Radial(
100/3f, 100/2.5f, dotRadius, new int[] { 0xFFFFFFFF, 0xFFCC66FF }, new float[] { 0, 1 }));
addTestCanvas("filled bezier path", 100, 100, canvas -> {
// draw a rounded rect with bezier curves
Path path = canvas.createPath();
path.moveTo(10, 0);
path.lineTo(90, 0);
path.bezierTo(95, 0, 100, 5, 100, 10);
path.lineTo(100, 90);
path.bezierTo(100, 95, 95, 100, 90, 100);
path.lineTo(10, 100);
path.bezierTo(5, 100, 0, 95, 0, 90);
path.lineTo(0, 10);
path.bezierTo(0, 5, 5, 0, 10, 0);
path.close();
canvas.setFillGradient(linear).fillPath(path);
});
addTestCanvas("gradient round rect", 100, 100, canvas -> {
// draw a rounded rect directly
canvas.setFillGradient(linear).fillRoundRect(0, 0, 100, 100, 10);
});
addTestCanvas("gradient filled text", 100, 100, canvas -> {
// draw a rounded rect directly
canvas.setFillGradient(linear);
TextLayout capF = game.graphics.layoutText("F", new TextFormat(F_FONT.derive(96)));
canvas.fillText(capF, 15, 5);
});
addTestCanvas("nested round rect", 100, 100, canvas -> {
// demonstrates a bug (now worked around) in Android round-rect drawing
canvas.setFillColor(0xFFFFCC99).fillRoundRect(0, 0, 98.32f, 29.5f, 12f);
canvas.setFillColor(0xFF99CCFF).fillRoundRect(3, 3, 92.32f, 23.5f, 9.5f);
});
addTestCanvas("android fill/stroke bug", 100, 100, canvas -> {
canvas.save();
canvas.setFillGradient(radial).fillCircle(50, 50, dotRadius);
canvas.restore();
canvas.setStrokeColor(0xFF000000).setStrokeWidth(1.5f).strokeCircle(50, 50, dotRadius);
});
addTestCanvas("transform test", 100, 100, canvas -> {
canvas.setFillColor(0xFFCCCCCC).fillRect(0, 0, 50, 50);
canvas.setFillColor(0xFFCCCCCC).fillRect(50, 50, 50, 50);
TextLayout capF = game.graphics.layoutText("F", new TextFormat(F_FONT));
// rotate 45 degrees counter-clockwise around (50, 50)
float theta = -FloatMath.PI/4, tsin = FloatMath.sin(theta), tcos = FloatMath.cos(theta);
canvas.setFillColor(0xFF000000).fillText(capF, 0, 0);
canvas.transform(tcos, -tsin, tsin, tcos, 50, 50);
canvas.setFillColor(0xFF000000).fillText(capF, 0, 0);
});
addTestCanvas("round rect precision", 100, 100, new Drawer() {
final float bwid = 4;
void outer (Canvas canvas, float y) {
canvas.setFillColor(0xFF000000);
canvas.fillRect(2, y, 94, 30);
}
void inner (Canvas canvas, float y) {
canvas.setFillColor(0xFF555555);
canvas.fillRect(2 + bwid, y + bwid, 94 - bwid * 2, 30 - bwid * 2);
}
void stroke (Canvas canvas, float y) {
canvas.setStrokeColor(0xFF99CCFF);
canvas.setStrokeWidth(bwid);
canvas.strokeRoundRect(2 + bwid / 2, y + bwid / 2, 94 - bwid, 30 - bwid, 10);
}
// draws the same outer/inner/stroke trio in three different orders to
// expose precision differences between backends
public void draw(Canvas canvas) {
float y = 1;
outer(canvas, y);
inner(canvas, y);
stroke(canvas, y);
y += 34;
outer(canvas, y);
stroke(canvas, y);
inner(canvas, y);
y += 34;
stroke(canvas, y);
outer(canvas, y);
inner(canvas, y);
}
});
ImageLayer tileLayer = new ImageLayer(
game.assets.getImage("images/tile.png").setConfig(repeat));
addTestLayer("img layer anim setWidth", 100, 100, tileLayer.setSize(0, 100));
// per-frame work: redraw the clock canvas once per second and animate the
// tile layer's forced width
conns.add(game.paint.connect(clock -> {
int curSecs = clock.tick/1000;
if (curSecs != lastSecs) {
Canvas tcanvas = time.begin();
tcanvas.clear();
tcanvas.setStrokeColor(0xFF000000).strokeRect(0, 0, 99, 99);
tcanvas.drawText(""+curSecs, 40, 55);
lastSecs = curSecs;
time.end();
}
// round the width so that it goes to zero sometimes (which should be fine)
if (tileLayer != null) tileLayer.forceWidth = Math.round(
Math.abs(FloatMath.sin(clock.tick/2000f)) * 100);
}));
Canvas cancan = createCanvas(50, 50, canvas -> {
canvas.setFillGradient(radial).fillRect(0, 0, canvas.width, canvas.height);
});
addTestCanvas("canvas drawn on canvas", 100, 100, canvas -> {
canvas.translate(50, 25);
canvas.rotate(FloatMath.PI/4);
canvas.draw(cancan.image, 0, 0);
});
addTestCanvas("drawArc", 100, 100, canvas -> {
// one quarter-circle arc per quadrant start angle, in both directions
float[] quads = { 0, 0.5f, 1, 1.5f};
float PI = FloatMath.PI;
for (float quad : quads) {
drawCircleArc(canvas, 20 + quad*40, 30, PI * quad, PI/2);
drawCircleArc(canvas, 20 + quad*40, 70, PI * quad, -PI/2);
}
});
}
// strokes a faint reference circle and then the arc on top of it
private void drawCircleArc(Canvas canvas, float x, float y, float start, float span) {
canvas.setStrokeColor(0xFFDDDDDD).strokeCircle(x, y, 10).
setStrokeColor(0xFF000000).drawArc(x, y, 10, start, span);
}
/** Callback that paints a freshly-created canvas. */
private interface Drawer {
void draw(Canvas canvas);
}
// renders the drawer into a canvas and adds the result as a labeled test tile
private void addTestCanvas(String descrip, int width, int height, Drawer drawer) {
Canvas canvas = createCanvas(width, height, drawer);
addTestLayer(descrip, width, height, new ImageLayer(canvas.toTexture()));
}
private Canvas createCanvas(int width, int height, Drawer drawer) {
Canvas canvas = game.graphics.createCanvas(width, height);
drawer.draw(canvas);
return canvas;
}
// places a layer (plus its centered description label) at the next grid slot
private void addTestLayer(String descrip, int width, int height, Layer layer) {
// if this layer won't fit in this row, wrap down to the next
if (nextX + width > game.graphics.viewSize.width()) {
nextY += (maxY + GAP);
nextX = GAP;
maxY = 0;
}
// add the layer and its description below
game.rootLayer.addAt(layer, nextX, nextY);
ImageLayer dlayer = createDescripLayer(descrip, width);
game.rootLayer.addAt(dlayer, nextX + Math.round((width-dlayer.width())/2),
nextY + height + 2);
// update our positioning info
nextX += (width + GAP);
maxY = Math.max(maxY, height+dlayer.height()+2);
}
/** Callback that paints a canvas once the given image has loaded. */
private interface ImageDrawer {
void draw(Canvas canvas, Image image);
}
// like addTestCanvas, but defers drawing until the named image is loaded
private void addTestCanvas(String descrip, int width, int height, String imagePath,
ImageDrawer drawer) {
Canvas target = game.graphics.createCanvas(width, height);
ImageLayer layer = new ImageLayer().setSize(width, height);
game.assets.getImage(imagePath).state.onSuccess(image -> {
drawer.draw(target, image);
layer.setTile(target.toTexture());
});
addTestLayer(descrip, width, height, layer);
}
private Font F_FONT = new Font("Helvetica", Font.Style.BOLD, 48);
}
| |
package liquibase.sdk.verifytest;
import liquibase.exception.UnexpectedLiquibaseException;
import liquibase.util.StringUtils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class VerifiedTestReader {
private enum Section {
GROUP_DEFINITION,
DEFINITION,
NOTES,
DATA
}
public VerifiedTest read(Reader... readers) throws IOException {
VerifiedTest results = null;
Pattern permutationStartPattern = Pattern.compile("## Permutation: (.*) ##");
Pattern permutationGroupStartPattern = Pattern.compile("## Permutation Group for (.*?): (.*) ##");
Pattern internalKeyValuePattern = Pattern.compile("\\- _(.+):_ (.+)");
Pattern keyValuePattern = Pattern.compile("\\- \\*\\*(.+):\\*\\* (.*)");
Pattern multiLineKeyValuePattern = Pattern.compile("\\- \\*\\*(.+) =>\\*\\*");
Pattern dataDetailsMatcher = Pattern.compile("\\*\\*(.*?)\\*\\*: (.*)");
Pattern notesDetailsMatcher = Pattern.compile("__(.*?)__: (.*)");
for (Reader reader : readers) {
BufferedReader bufferedReader = new BufferedReader(reader);
TestPermutation currentPermutation = null;
Map<String, String> currentPermutationGroup = null;
List<String> permutationColumns = null;
String permutationDefinitionKey = null;
String line;
int lineNumber = 0;
Section section = null;
String multiLineKey = null;
String multiLineValue = null;
while ((line = bufferedReader.readLine()) != null) {
lineNumber++;
if (lineNumber == 1) {
Matcher groupMatcher = Pattern.compile("# Test: (\\S*) \"(.*)\" Group \"(.*)\" #").matcher(line);
Matcher nonGroupMatcher = Pattern.compile("# Test: (\\S*) \"(.*)\" #").matcher(line);
if (groupMatcher.matches()) {
if (results == null) {
String testClass = groupMatcher.group(1);
String testName = groupMatcher.group(2);
results = new VerifiedTest(testClass, testName);
}
} else if (nonGroupMatcher.matches()) {
if (results == null) {
String testClass = nonGroupMatcher.group(1);
String testName = nonGroupMatcher.group(2);
results = new VerifiedTest(testClass, testName);
}
} else {
throw new IOException("Invalid header: " + line);
}
continue;
}
if (multiLineKey != null) {
if (line.equals("") || line.startsWith(" ")) {
multiLineValue += line.replaceFirst(" ", "") + "\n";
continue;
} else {
multiLineValue = multiLineValue.trim();
if (section.equals(Section.DEFINITION)) {
currentPermutation.describe(multiLineKey, multiLineValue, OutputFormat.FromFile);
} else if (section.equals(Section.GROUP_DEFINITION)) {
currentPermutationGroup.put(multiLineKey, multiLineValue);
} else if (section.equals(Section.NOTES)) {
currentPermutation.note(multiLineKey, multiLineValue, OutputFormat.FromFile);
} else if (section.equals(Section.DATA)) {
currentPermutation.data(multiLineKey, multiLineValue, OutputFormat.FromFile);
} else {
throw new UnexpectedLiquibaseException("Unknown multiline section on line " + lineNumber + ": " + section);
}
multiLineKey = null;
multiLineValue = null;
}
}
if (StringUtils.trimToEmpty(line).equals("")) {
continue;
}
if (line.equals("#### Notes ####")) {
section = Section.NOTES;
continue;
} else if (line.equals("#### Data ####")) {
section = Section.DATA;
continue;
}
Matcher permutationStartMatcher = permutationStartPattern.matcher(line);
if (permutationStartMatcher.matches()) {
currentPermutation = new TestPermutation(results);
section = Section.DEFINITION;
continue;
}
Matcher permutationGroupStartMatcher = permutationGroupStartPattern.matcher(line);
if (permutationGroupStartMatcher.matches()) {
currentPermutation = null;
currentPermutationGroup = new HashMap<String, String>();
permutationDefinitionKey = permutationGroupStartMatcher.group(1);
permutationColumns = new ArrayList<String>();
section = Section.GROUP_DEFINITION;
continue;
}
Matcher internalKeyValueMatcher = internalKeyValuePattern.matcher(line);
if (internalKeyValueMatcher.matches()) {
String key = internalKeyValueMatcher.group(1);
String value = internalKeyValueMatcher.group(2);
if (key.equals("VERIFIED")) {
setVerifiedFromString(currentPermutation, value);
} else {
throw new UnexpectedLiquibaseException("Unknown internal parameter " + key);
}
continue;
}
Matcher keyValueMatcher = keyValuePattern.matcher(line);
if (keyValueMatcher.matches()) {
String key = keyValueMatcher.group(1);
String value = keyValueMatcher.group(2);
if (section.equals(Section.DEFINITION)) {
currentPermutation.describe(key, value, OutputFormat.FromFile);
} else if (section.equals(Section.GROUP_DEFINITION)) {
currentPermutationGroup.put(key, value);
} else if (section.equals(Section.NOTES)) {
currentPermutation.note(key, value, OutputFormat.FromFile);
} else if (section.equals(Section.DATA)) {
currentPermutation.data(key, value, OutputFormat.FromFile);
} else {
throw new UnexpectedLiquibaseException("Unknown section " + section);
}
continue;
}
if (line.startsWith("|")) {
String unlikelyStringForSplit = "OIPUGAKJNGAOIUWDEGKJASDG";
String lineToSplit = line.replaceFirst("\\|", unlikelyStringForSplit).replaceAll("([^\\\\])\\|", "$1" + unlikelyStringForSplit);
String[] values = lineToSplit.split("\\s*" + unlikelyStringForSplit + "\\s*");
if (line.startsWith("| Permutation ")) {
for (int i = 3; i < values.length - 1; i++) { //ignoring first value that is an empty string and last value that is DETAILS
permutationColumns.add(values[i]);
}
} else {
if (values[1].equals("")) {
; //continuing row
} else {
currentPermutation = new TestPermutation(results);
for (Map.Entry<String, String> entry : currentPermutationGroup.entrySet()) {
currentPermutation.describe(entry.getKey(), entry.getValue(), OutputFormat.FromFile);
}
setVerifiedFromString(currentPermutation, values[2]);
int columnNum = 0;
Map<String, TestPermutation.Value> valueDescription = new HashMap<String, TestPermutation.Value>();
try {
for (int i = 3; i < values.length - 1; i++) {
if (!values[i].equals("")) {
valueDescription.put(permutationColumns.get(columnNum), new TestPermutation.Value(decode(values[i]), OutputFormat.FromFile));
}
columnNum++;
}
} catch (Throwable e) {
throw new UnexpectedLiquibaseException("Error parsing line " + line);
}
currentPermutation.describeAsTable(permutationDefinitionKey, valueDescription);
}
String details = values[values.length - 1];
Matcher dataMatcher = dataDetailsMatcher.matcher(details);
Matcher notesMatcher = notesDetailsMatcher.matcher(details);
if (dataMatcher.matches()) {
currentPermutation.data(dataMatcher.group(1), decode(dataMatcher.group(2)), OutputFormat.FromFile);
} else if (notesMatcher.matches()) {
currentPermutation.note(notesMatcher.group(1), decode(notesMatcher.group(2)), OutputFormat.FromFile);
} else {
throw new RuntimeException("Unknown details column format: " + details);
}
}
continue;
}
Matcher multiLineKeyValueMatcher = multiLineKeyValuePattern.matcher(line);
if (multiLineKeyValueMatcher.matches()) {
multiLineKey = multiLineKeyValueMatcher.group(1);
multiLineValue = "";
continue;
}
if (currentPermutation == null) {
//in the header section describing what the file is for
} else {
throw new UnexpectedLiquibaseException("Could not parse line " + lineNumber + ": " + line);
}
}
}
return results;
}
private String decode(String string) {
return string.replace("<br>", "\n").replace("\\|", "|");
}
protected void setVerifiedFromString(TestPermutation currentPermutation, String value) {
String[] splitValue = value.split("\\s+", 2);
currentPermutation.setVerified(Boolean.valueOf(splitValue[0]));
if (splitValue.length > 1) {
currentPermutation.setNotRanMessage(splitValue[1]);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.cli.commands.tools;
import javax.xml.XMLConstants;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.transform.stax.StAXSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.nio.ByteBuffer;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import io.airlift.airline.Command;
import io.airlift.airline.Option;
import org.apache.activemq.artemis.api.core.ICoreMessage;
import org.apache.activemq.artemis.api.core.Message;
import org.apache.activemq.artemis.api.core.RoutingType;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.api.core.client.ActiveMQClient;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.ClientProducer;
import org.apache.activemq.artemis.api.core.client.ClientRequestor;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.api.core.client.ClientSessionFactory;
import org.apache.activemq.artemis.api.core.client.ServerLocator;
import org.apache.activemq.artemis.api.core.management.ManagementHelper;
import org.apache.activemq.artemis.api.core.management.ResourceNames;
import org.apache.activemq.artemis.cli.commands.ActionAbstract;
import org.apache.activemq.artemis.cli.commands.ActionContext;
import org.apache.activemq.artemis.core.remoting.impl.netty.NettyConnectorFactory;
import org.apache.activemq.artemis.core.remoting.impl.netty.TransportConstants;
import org.apache.activemq.artemis.core.server.ActiveMQServerLogger;
import org.apache.activemq.artemis.utils.Base64;
import org.apache.activemq.artemis.utils.ClassloadingUtil;
import org.apache.activemq.artemis.utils.ListUtil;
import org.apache.activemq.artemis.utils.UUIDGenerator;
import org.jboss.logging.Logger;
/**
* Read XML output from <code>org.apache.activemq.artemis.core.persistence.impl.journal.XmlDataExporter</code>, create a core session, and
* send the messages to a running instance of ActiveMQ Artemis. It uses the StAX <code>javax.xml.stream.XMLStreamReader</code>
* for speed and simplicity.
*/
@Command(name = "imp", description = "Import all message-data using an XML that could be interpreted by any system.")
public final class XmlDataImporter extends ActionAbstract {
// Constants -----------------------------------------------------
private static final Logger logger = Logger.getLogger(XmlDataImporter.class);
// Attributes ----------------------------------------------------
private XMLStreamReader reader;
// this session is really only needed if the "session" variable does not auto-commit sends
ClientSession managementSession;
boolean localSession = false;
// address/queue lookup tables built up while processing the XML dump
final Map<String, String> addressMap = new HashMap<>();
final Map<String, Long> queueIDs = new HashMap<>();
String tempFileName = "";
private ClientSession session;
@Option(name = "--host", description = "The host used to import the data (default localhost)")
public String host = "localhost";
@Option(name = "--port", description = "The port used to import the data (default 61616)")
public int port = 61616;
@Option(name = "--transaction", description = "If this is set to true you will need a whole transaction to commit at the end. (default false)")
public boolean transactional;
@Option(name = "--user", description = "User name used to import the data. (default null)")
public String user = null;
// fixed copy-paste error: this help text previously read "User name used to import the data."
@Option(name = "--password", description = "Password used to import the data. (default null)")
public String password = null;
@Option(name = "--input", description = "The input file name (default=exp.dmp)", required = true)
public String input = "exp.dmp";
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
@Override
public Object execute(ActionContext context) throws Exception {
process(input, host, port, transactional);
return null;
}
public void process(String inputFile, String host, int port, boolean transactional) throws Exception {
this.process(new FileInputStream(inputFile), host, port, transactional);
}
/**
* This is the normal constructor for programmatic access to the
* <code>org.apache.activemq.artemis.core.persistence.impl.journal.XmlDataImporter</code> if the session passed
* in uses auto-commit for sends.
* <br>
* If the session needs to be transactional then use the constructor which takes 2 sessions.
*
* @param inputStream the stream from which to read the XML for import
* @param session used for sending messages, must use auto-commit for sends
* @throws Exception
*/
public void process(InputStream inputStream, ClientSession session) throws Exception {
this.process(inputStream, session, null);
}
/**
* This is the constructor to use if you wish to import all messages transactionally.
* <br>
* Pass in a session which doesn't use auto-commit for sends, and one that does (for management
* operations necessary during import).
*
* @param inputStream the stream from which to read the XML for import
* @param session used for sending messages, doesn't need to auto-commit sends
* @param managementSession used for management queries, must use auto-commit for sends
*/
public void process(InputStream inputStream,
ClientSession session,
ClientSession managementSession) throws Exception {
reader = XMLInputFactory.newInstance().createXMLStreamReader(inputStream);
this.session = session;
if (managementSession != null) {
this.managementSession = managementSession;
} else {
this.managementSession = session;
}
processXml();
}
public void process(InputStream inputStream, String host, int port, boolean transactional) throws Exception {
HashMap<String, Object> connectionParams = new HashMap<>();
connectionParams.put(TransportConstants.HOST_PROP_NAME, host);
connectionParams.put(TransportConstants.PORT_PROP_NAME, Integer.toString(port));
ServerLocator serverLocator = ActiveMQClient.createServerLocatorWithoutHA(new TransportConfiguration(NettyConnectorFactory.class.getName(), connectionParams));
ClientSessionFactory sf = serverLocator.createSessionFactory();
ClientSession session;
ClientSession managementSession;
if (user != null || password != null) {
session = sf.createSession(user, password, false, !transactional, true, false, 0);
managementSession = sf.createSession(user, password, false, true, true, false, 0);
} else {
session = sf.createSession(false, !transactional, true);
managementSession = sf.createSession(false, true, true);
}
localSession = true;
process(inputStream, session, managementSession);
}
public void validate(String file) throws Exception {
validate(new FileInputStream(file));
}
public void validate(InputStream inputStream) throws Exception {
XMLStreamReader reader = XMLInputFactory.newInstance().createXMLStreamReader(inputStream);
SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
Schema schema = factory.newSchema(XmlDataImporter.findResource("schema/artemis-import-export.xsd"));
Validator validator = schema.newValidator();
validator.validate(new StAXSource(reader));
reader.close();
}
private static URL findResource(final String resourceName) {
return AccessController.doPrivileged(new PrivilegedAction<URL>() {
@Override
public URL run() {
return ClassloadingUtil.findResource(resourceName);
}
});
}
private void processXml() throws Exception {
try {
while (reader.hasNext()) {
if (logger.isDebugEnabled()) {
logger.debug("EVENT:[" + reader.getLocation().getLineNumber() + "][" + reader.getLocation().getColumnNumber() + "] ");
}
if (reader.getEventType() == XMLStreamConstants.START_ELEMENT) {
if (XmlDataConstants.QUEUE_BINDINGS_CHILD.equals(reader.getLocalName())) {
bindQueue();
} else if (XmlDataConstants.ADDRESS_BINDINGS_CHILD.equals(reader.getLocalName())) {
bindAddress();
} else if (XmlDataConstants.MESSAGES_CHILD.equals(reader.getLocalName())) {
processMessage();
}
}
reader.next();
}
if (!session.isAutoCommitSends()) {
session.commit();
}
} finally {
// if the session was created in our constructor then close it (otherwise the caller will close it)
if (localSession) {
session.close();
managementSession.close();
}
}
}
private void processMessage() throws Exception {
Byte type = 0;
Byte priority = 0;
Long expiration = 0L;
Long timestamp = 0L;
org.apache.activemq.artemis.utils.UUID userId = null;
ArrayList<String> queues = new ArrayList<>();
// get message's attributes
for (int i = 0; i < reader.getAttributeCount(); i++) {
String attributeName = reader.getAttributeLocalName(i);
switch (attributeName) {
case XmlDataConstants.MESSAGE_TYPE:
type = getMessageType(reader.getAttributeValue(i));
break;
case XmlDataConstants.MESSAGE_PRIORITY:
priority = Byte.parseByte(reader.getAttributeValue(i));
break;
case XmlDataConstants.MESSAGE_EXPIRATION:
expiration = Long.parseLong(reader.getAttributeValue(i));
break;
case XmlDataConstants.MESSAGE_TIMESTAMP:
timestamp = Long.parseLong(reader.getAttributeValue(i));
break;
case XmlDataConstants.MESSAGE_USER_ID:
userId = UUIDGenerator.getInstance().generateUUID();
break;
}
}
Message message = session.createMessage(type, true, expiration, timestamp, priority);
message.setUserID(userId);
boolean endLoop = false;
// loop through the XML and gather up all the message's data (i.e. body, properties, queues, etc.)
while (reader.hasNext()) {
int eventType = reader.getEventType();
switch (eventType) {
case XMLStreamConstants.START_ELEMENT:
if (XmlDataConstants.MESSAGE_BODY.equals(reader.getLocalName())) {
processMessageBody(message.toCore());
} else if (XmlDataConstants.PROPERTIES_CHILD.equals(reader.getLocalName())) {
processMessageProperties(message);
} else if (XmlDataConstants.QUEUES_CHILD.equals(reader.getLocalName())) {
processMessageQueues(queues);
}
break;
case XMLStreamConstants.END_ELEMENT:
if (XmlDataConstants.MESSAGES_CHILD.equals(reader.getLocalName())) {
endLoop = true;
}
break;
}
if (endLoop) {
break;
}
reader.next();
}
sendMessage(queues, message);
}
private Byte getMessageType(String value) {
Byte type = Message.DEFAULT_TYPE;
switch (value) {
case XmlDataConstants.DEFAULT_TYPE_PRETTY:
type = Message.DEFAULT_TYPE;
break;
case XmlDataConstants.BYTES_TYPE_PRETTY:
type = Message.BYTES_TYPE;
break;
case XmlDataConstants.MAP_TYPE_PRETTY:
type = Message.MAP_TYPE;
break;
case XmlDataConstants.OBJECT_TYPE_PRETTY:
type = Message.OBJECT_TYPE;
break;
case XmlDataConstants.STREAM_TYPE_PRETTY:
type = Message.STREAM_TYPE;
break;
case XmlDataConstants.TEXT_TYPE_PRETTY:
type = Message.TEXT_TYPE;
break;
}
return type;
}
private void sendMessage(ArrayList<String> queues, Message message) throws Exception {
StringBuilder logMessage = new StringBuilder();
String destination = addressMap.get(queues.get(0));
logMessage.append("Sending ").append(message).append(" to address: ").append(destination).append("; routed to queues: ");
ByteBuffer buffer = ByteBuffer.allocate(queues.size() * 8);
for (String queue : queues) {
long queueID;
if (queueIDs.containsKey(queue)) {
queueID = queueIDs.get(queue);
} else {
// Get the ID of the queues involved so the message can be routed properly. This is done because we cannot
// send directly to a queue, we have to send to an address instead but not all the queues related to the
// address may need the message
try (ClientRequestor requestor = new ClientRequestor(managementSession, "activemq.management")) {
ClientMessage managementMessage = managementSession.createMessage(false);
ManagementHelper.putAttribute(managementMessage, ResourceNames.QUEUE + queue, "ID");
managementSession.start();
if (logger.isDebugEnabled()) {
logger.debug("Requesting ID for: " + queue);
}
ClientMessage reply = requestor.request(managementMessage);
Number idObject = (Number) ManagementHelper.getResult(reply);
queueID = idObject.longValue();
}
if (logger.isDebugEnabled()) {
logger.debug("ID for " + queue + " is: " + queueID);
}
queueIDs.put(queue, queueID); // store it so we don't have to look it up every time
}
logMessage.append(queue).append(", ");
buffer.putLong(queueID);
}
logMessage.delete(logMessage.length() - 2, logMessage.length()); // take off the trailing comma
if (logger.isDebugEnabled()) {
logger.debug(logMessage);
}
message.putBytesProperty(Message.HDR_ROUTE_TO_IDS, buffer.array());
try (ClientProducer producer = session.createProducer(destination)) {
producer.send(message);
}
if (tempFileName.length() > 0) {
File tempFile = new File(tempFileName);
if (!tempFile.delete()) {
ActiveMQServerLogger.LOGGER.couldNotDeleteTempFile(tempFileName);
}
tempFileName = "";
}
}
private void processMessageQueues(ArrayList<String> queues) {
for (int i = 0; i < reader.getAttributeCount(); i++) {
if (XmlDataConstants.QUEUE_NAME.equals(reader.getAttributeLocalName(i))) {
queues.add(reader.getAttributeValue(i));
}
}
}
private void processMessageProperties(Message message) {
String key = "";
String value = "";
String propertyType = "";
String realStringValue = null;
SimpleString realSimpleStringValue = null;
for (int i = 0; i < reader.getAttributeCount(); i++) {
String attributeName = reader.getAttributeLocalName(i);
switch (attributeName) {
case XmlDataConstants.PROPERTY_NAME:
key = reader.getAttributeValue(i);
break;
case XmlDataConstants.PROPERTY_VALUE:
value = reader.getAttributeValue(i);
break;
case XmlDataConstants.PROPERTY_TYPE:
propertyType = reader.getAttributeValue(i);
break;
}
}
if (value.equals(XmlDataConstants.NULL)) {
value = null;
}
switch (propertyType) {
case XmlDataConstants.PROPERTY_TYPE_SHORT:
message.putShortProperty(key, Short.parseShort(value));
break;
case XmlDataConstants.PROPERTY_TYPE_BOOLEAN:
message.putBooleanProperty(key, Boolean.parseBoolean(value));
break;
case XmlDataConstants.PROPERTY_TYPE_BYTE:
message.putByteProperty(key, Byte.parseByte(value));
break;
case XmlDataConstants.PROPERTY_TYPE_BYTES:
message.putBytesProperty(key, value == null ? null : decode(value));
break;
case XmlDataConstants.PROPERTY_TYPE_DOUBLE:
message.putDoubleProperty(key, Double.parseDouble(value));
break;
case XmlDataConstants.PROPERTY_TYPE_FLOAT:
message.putFloatProperty(key, Float.parseFloat(value));
break;
case XmlDataConstants.PROPERTY_TYPE_INTEGER:
message.putIntProperty(key, Integer.parseInt(value));
break;
case XmlDataConstants.PROPERTY_TYPE_LONG:
message.putLongProperty(key, Long.parseLong(value));
break;
case XmlDataConstants.PROPERTY_TYPE_SIMPLE_STRING:
message.putStringProperty(new SimpleString(key), value == null ? null : SimpleString.toSimpleString(value));
break;
case XmlDataConstants.PROPERTY_TYPE_STRING:
message.putStringProperty(key, value);
break;
}
}
private void processMessageBody(final ICoreMessage message) throws XMLStreamException, IOException {
boolean isLarge = false;
for (int i = 0; i < reader.getAttributeCount(); i++) {
String attributeName = reader.getAttributeLocalName(i);
if (XmlDataConstants.MESSAGE_IS_LARGE.equals(attributeName)) {
isLarge = Boolean.parseBoolean(reader.getAttributeValue(i));
}
}
reader.next();
if (logger.isDebugEnabled()) {
logger.debug("XMLStreamReader impl: " + reader);
}
if (isLarge) {
tempFileName = UUID.randomUUID().toString() + ".tmp";
if (logger.isDebugEnabled()) {
logger.debug("Creating temp file " + tempFileName + " for large message.");
}
try (OutputStream out = new FileOutputStream(tempFileName)) {
getMessageBodyBytes(new MessageBodyBytesProcessor() {
@Override
public void processBodyBytes(byte[] bytes) throws IOException {
out.write(bytes);
}
});
}
FileInputStream fileInputStream = new FileInputStream(tempFileName);
BufferedInputStream bufferedInput = new BufferedInputStream(fileInputStream);
((ClientMessage) message).setBodyInputStream(bufferedInput);
} else {
getMessageBodyBytes(new MessageBodyBytesProcessor() {
@Override
public void processBodyBytes(byte[] bytes) throws IOException {
message.getBodyBuffer().writeBytes(bytes);
}
});
}
}
/**
* Message bodies are written to XML as one or more Base64 encoded CDATA elements. Some parser implementations won't
* read an entire CDATA element at once (e.g. Woodstox) so it's possible that multiple CDATA/CHARACTERS events need
* to be combined to reconstruct the Base64 encoded string. You can't decode bits and pieces of each CDATA. Each
* CDATA has to be decoded in its entirety.
*
* @param processor used to deal with the decoded CDATA elements
* @throws IOException
* @throws XMLStreamException
*/
private void getMessageBodyBytes(MessageBodyBytesProcessor processor) throws IOException, XMLStreamException {
int currentEventType;
StringBuilder cdata = new StringBuilder();
while (reader.hasNext()) {
currentEventType = reader.getEventType();
if (currentEventType == XMLStreamConstants.END_ELEMENT) {
break;
} else if (currentEventType == XMLStreamConstants.CHARACTERS && reader.isWhiteSpace() && cdata.length() > 0) {
/* when we hit a whitespace CHARACTERS event we know that the entire CDATA is complete so decode, pass back to
* the processor, and reset the cdata for the next event(s)
*/
processor.processBodyBytes(decode(cdata.toString()));
cdata.setLength(0);
} else {
cdata.append(new String(reader.getTextCharacters(), reader.getTextStart(), reader.getTextLength()).trim());
}
reader.next();
}
}
private void bindQueue() throws Exception {
String queueName = "";
String address = "";
String filter = "";
String routingType = "";
for (int i = 0; i < reader.getAttributeCount(); i++) {
String attributeName = reader.getAttributeLocalName(i);
switch (attributeName) {
case XmlDataConstants.QUEUE_BINDING_ADDRESS:
address = reader.getAttributeValue(i);
break;
case XmlDataConstants.QUEUE_BINDING_NAME:
queueName = reader.getAttributeValue(i);
break;
case XmlDataConstants.QUEUE_BINDING_FILTER_STRING:
filter = reader.getAttributeValue(i);
break;
case XmlDataConstants.QUEUE_BINDING_ROUTING_TYPE:
routingType = reader.getAttributeValue(i);
break;
}
}
ClientSession.QueueQuery queueQuery = session.queueQuery(new SimpleString(queueName));
if (!queueQuery.isExists()) {
session.createQueue(address, RoutingType.valueOf(routingType), queueName, filter, true);
if (logger.isDebugEnabled()) {
logger.debug("Binding queue(name=" + queueName + ", address=" + address + ", filter=" + filter + ")");
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Binding " + queueName + " already exists so won't re-bind.");
}
}
addressMap.put(queueName, address);
}
private void bindAddress() throws Exception {
String addressName = "";
String routingTypes = "";
for (int i = 0; i < reader.getAttributeCount(); i++) {
String attributeName = reader.getAttributeLocalName(i);
switch (attributeName) {
case XmlDataConstants.ADDRESS_BINDING_NAME:
addressName = reader.getAttributeValue(i);
break;
case XmlDataConstants.ADDRESS_BINDING_ROUTING_TYPE:
routingTypes = reader.getAttributeValue(i);
break;
}
}
ClientSession.AddressQuery addressQuery = session.addressQuery(new SimpleString(addressName));
if (!addressQuery.isExists()) {
Set<RoutingType> set = new HashSet<>();
for (String routingType : ListUtil.toList(routingTypes)) {
set.add(RoutingType.valueOf(routingType));
}
session.createAddress(SimpleString.toSimpleString(addressName), set, false);
if (logger.isDebugEnabled()) {
logger.debug("Binding address(name=" + addressName + ", routingTypes=" + routingTypes + ")");
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Binding " + addressName + " already exists so won't re-bind.");
}
}
}
private String getEntries() throws Exception {
StringBuilder entry = new StringBuilder();
boolean endLoop = false;
while (reader.hasNext()) {
int eventType = reader.getEventType();
switch (eventType) {
case XMLStreamConstants.START_ELEMENT:
if (XmlDataConstants.JMS_JNDI_ENTRY.equals(reader.getLocalName())) {
String elementText = reader.getElementText();
entry.append(elementText).append(", ");
if (logger.isDebugEnabled()) {
logger.debug("JMS admin object JNDI entry: " + entry.toString());
}
}
break;
case XMLStreamConstants.END_ELEMENT:
if (XmlDataConstants.JMS_JNDI_ENTRIES.equals(reader.getLocalName())) {
endLoop = true;
}
break;
}
if (endLoop) {
break;
}
reader.next();
}
return entry.delete(entry.length() - 2, entry.length()).toString();
}
private String getConnectors() throws Exception {
StringBuilder entry = new StringBuilder();
boolean endLoop = false;
while (reader.hasNext()) {
int eventType = reader.getEventType();
switch (eventType) {
case XMLStreamConstants.START_ELEMENT:
if (XmlDataConstants.JMS_CONNECTION_FACTORY_CONNECTOR.equals(reader.getLocalName())) {
entry.append(reader.getElementText()).append(", ");
}
break;
case XMLStreamConstants.END_ELEMENT:
if (XmlDataConstants.JMS_CONNECTION_FACTORY_CONNECTORS.equals(reader.getLocalName())) {
endLoop = true;
}
break;
}
if (endLoop) {
break;
}
reader.next();
}
return entry.delete(entry.length() - 2, entry.length()).toString();
}
// Package protected ---------------------------------------------
// Protected -----------------------------------------------------
// Private -------------------------------------------------------
private static byte[] decode(String data) {
return Base64.decode(data, Base64.DONT_BREAK_LINES | Base64.URL_SAFE);
}
private interface MessageBodyBytesProcessor {
void processBodyBytes(byte[] bytes) throws IOException;
}
// Inner classes -------------------------------------------------
}
| |
/*
* Copyright (c) 2014 by Ernesto Carrella
* Licensed under MIT license. Basically do what you want with it but cite me and don't sue me. Which is just politeness, really.
* See the file "LICENSE" for more information
*/
package financial.market;
import agents.EconomicAgent;
import agents.firm.Department;
import com.google.common.base.Preconditions;
import financial.BidListener;
import financial.utilities.ActionsAllowed;
import financial.utilities.HistogramDecoratedPriorityBook;
import financial.utilities.Quote;
import goods.Good;
import goods.GoodType;
import model.MacroII;
import model.utilities.ActionOrder;
import sim.portrayal.Inspector;
import sim.portrayal.inspector.TabbedInspector;
import sim.util.media.chart.HistogramGenerator;
import java.awt.*;
import java.util.*;
import java.util.Queue;
/**
 * A simple order book market in which every participant interacts only by placing quotes.
 * Whenever a new quote arrives, the order book checks for crossing quotes and makes them trade.
 * User: carrknight
 * Date: 7/15/12
 * Time: 5:11 PM
 */
public class OrderBookMarket extends Market {

    // sell-side book: head of the queue is the best (lowest-priced) ask
    private Queue<Quote> asks;
    // buy-side book: head of the queue is the best (highest-priced) bid
    private Queue<Quote> bids;

    /**
     * This is the histogram that will be used by the inspector if the GUI is on
     */
    private HistogramGenerator histogramGenerator = null;

    /**
     * this is the object that tries to match bids and asks and allows for trades.
     */
    private OrderHandler orderHandler;

    /**
     * Creates the order book for the given good type, initializing the two priority books
     * and (when the GUI is on) decorating them with a histogram inspector.
     *
     * @param t the type of good traded in this market
     */
    public OrderBookMarket(GoodType t) {
        super(t);
        //create the two books as priority queues, we might decorate them if there is GUI
        //asks put the first element of the queue as the one with lowest price (best ask)
        this.asks = new PriorityQueue<>(10, new Comparator<Quote>() {
            @Override
            public int compare(Quote o1, Quote o2) {
                return Long.compare(o1.getPriceQuoted(), o2.getPriceQuoted());
            }
        });
        //bids put the first element of the queue as the one with the highest offer (best bid)
        this.bids = new PriorityQueue<>(10, new Comparator<Quote>() {
            @Override
            public int compare(Quote o1, Quote o2) {
                return -Long.compare(o1.getPriceQuoted(), o2.getPriceQuoted());
            }
        });
        //if the gui is on
        if (MacroII.hasGUI()) {
            buildInspector();
        } else {
            assert histogramGenerator == null;
            //do not decorate the bids and asks
        }
        orderHandler = new EndOfPhaseOrderHandler(); //let everybody place quotes before you start crossing them. With this priorities don't really matter
    }

    /**
     * tells the market that the model is starting up, and it would be a good idea to start the data collector
     *
     * @param model the MacroII model running the show
     */
    @Override
    public void start(MacroII model) {
        super.start(model);
        orderHandler.start(model, asks, bids, this);
    }

    /**
     * Order book with limit orders only!
     */
    @Override
    public ActionsAllowed getBuyerRole() {
        return ActionsAllowed.QUOTE;
    }

    /**
     * Order book with limit orders only!
     */
    @Override
    public ActionsAllowed getSellerRole() {
        return ActionsAllowed.QUOTE;
    }

    /**
     * Submit a sell quote on a specific good
     *
     * @param seller     the agent making the sale
     * @param price      the price at/above which the agent is willing to sell
     * @param good       the good to sell
     * @param department the department originating the quote, may be null
     * @return the quote made; returns a null quote if the quote is immediately filled.
     */
    @Override
    public Quote submitSellQuote(EconomicAgent seller, int price, Good good, Department department) {
        assert getSellers().contains(seller); //you should be registered if you are here
        if (MacroII.SAFE_MODE) //double check we are in the trade phase
            Preconditions.checkState(seller.getModel().getCurrentPhase().equals(ActionOrder.TRADE));
        Preconditions.checkArgument(price >= 0);
        if (MacroII.SAFE_MODE) //double check the good isn't already on sale
            for (Quote x : asks) {
                assert x.getGood() != good; //make sure it wasn't put in already
            }
        Quote q = Quote.newSellerQuote(seller, price, good);
        if (department != null)
            q.setOriginator(department);
        asks.add(q); //addSalesDepartmentListener it to the asks
        //tell the log
        //todo logtodo
        //let the handler try to cross this ask against the standing bids
        orderHandler.reactToNewQuote(asks, bids, this);
        if (asks.contains(q)) //if it's still in
            return q; //return the quote to the seller
        else //it crossed and was immediately removed!
            return Quote.emptySellQuote(good); //if it was crossed, return a null quote
    }

    /**
     * Submit a sell quote on a specific good
     *
     * @param seller the agent making the sale
     * @param price  the price at/above which the agent is willing to sell
     * @param good   the good to sell
     * @return the quote made
     */
    @Override
    public Quote submitSellQuote(EconomicAgent seller, int price, Good good) {
        return submitSellQuote(seller, price, good, null);
    }

    /**
     * If the seller changes its mind and wants to remove its sell quote, call this
     *
     * @param q quote to cancel
     */
    @Override
    public void removeSellQuote(Quote q) {
        boolean removedSuccessfully = asks.remove(q); //remove it from the asks!
        if (!removedSuccessfully)
            throw new IllegalArgumentException("Removed a quote we didn't have. Error");
        //tell the logs
        //todo logtodo
    }

    /**
     * Submit a buy quote
     *
     * @param buyer      the agent trying to buy
     * @param price      the price at/below which the agent is willing to buy
     * @param department the department originating the quote, may be null
     * @return quote made
     */
    @Override
    public Quote submitBuyQuote(EconomicAgent buyer, int price, Department department) {
        assert getBuyers().contains(buyer) : buyer + " ---- " + getBuyers() + " ---- " + this.getGoodType(); //you should be registered if you are here
        if (MacroII.SAFE_MODE) //double check we are in the trade phase
            Preconditions.checkState(buyer.getModel().getCurrentPhase().equals(ActionOrder.TRADE));
        Quote q = Quote.newBuyerQuote(buyer, price, goodType);
        if (department != null)
            q.setOriginator(department);
        bids.add(q); //addSalesDepartmentListener it to the asks
        //notify the listeners (if the order book is visible)
        Quote bestAsk = asks.peek();
        if (isBestBuyPriceVisible())
            for (BidListener listener : bidListeners)
                listener.newBidEvent(buyer, price, bestAsk);
        //tell the GUI
        //todo logtodo
        //let the handler try to cross this bid against the standing asks
        orderHandler.reactToNewQuote(asks, bids, this);
        if (bids.contains(q)) //if it's still in
            return q; //return the quote to the seller
        else //it crossed and was immediately removed!
            return Quote.emptyBidQuote(goodType); //if it was crossed, return a null quote
    }

    /**
     * Submit a buy quote
     *
     * @param buyer the agent trying to buy
     * @param price the price at/below which the agent is willing to buy
     * @return quote made
     */
    @Override
    public Quote submitBuyQuote(EconomicAgent buyer, int price) {
        return submitBuyQuote(buyer, price, null);
    }

    /**
     * If the buyer changes its mind and wants to remove its purchase quote, call this
     *
     * @param q quote to cancel
     */
    @Override
    public void removeBuyQuote(Quote q) {
        boolean removedSuccessfully = bids.remove(q); //remove it from the bids!
        if (!removedSuccessfully)
            throw new IllegalArgumentException("Removed a quote we didn't have. Error");
        notifyListenersAndGUIQuoteHasBeenRemoved(q);
    }

    /**
     * Cancel a list of buy quotes
     *
     * @param quotes quotes to cancel
     */
    @Override
    public void removeBuyQuotes(Collection<Quote> quotes) {
        Preconditions.checkArgument(!quotes.isEmpty());
        bids.removeAll(quotes);
        for (Quote q : quotes)
            notifyListenersAndGUIQuoteHasBeenRemoved(q);
    }

    /**
     * Cancel a list of sell quotes
     *
     * @param quotes quotes to cancel
     */
    @Override
    public void removeSellQuotes(Collection<Quote> quotes) {
        Preconditions.checkArgument(!quotes.isEmpty());
        asks.removeAll(quotes);
        for (Quote q : quotes)
            notifyListenersAndGUIQuoteHasBeenRemoved(q);
    }

    // Broadcasts the removal of a quote to the bid listeners, but only when the buy side of
    // the book is publicly visible.
    private void notifyListenersAndGUIQuoteHasBeenRemoved(Quote q) {
        //notify the listeners (if the order book is visible)
        if (isBestBuyPriceVisible())
            for (BidListener listener : bidListeners)
                listener.removedBidEvent(q.getAgent(), q);
        //tell the GUI
        //todo logtodo
    }

    /**
     * Remove all these quotes by the buyer
     *
     * @param buyer the buyer whose quotes we want to clear
     * @return the set of quotes removed
     */
    @Override
    public Collection<Quote> removeAllBuyQuoteByBuyer(EconomicAgent buyer) {
        //create the set of buy quotes to remove
        Set<Quote> buyQuotesToRemove = new HashSet<>();
        for (Quote q : bids) {
            if (q.getAgent().equals(buyer))
                buyQuotesToRemove.add(q);
        }
        if (buyQuotesToRemove.isEmpty()) //nothing to remove!
            return buyQuotesToRemove;
        //non empty!
        boolean b = bids.removeAll(buyQuotesToRemove);
        assert b;
        //now tell the listeners
        for (Quote q : buyQuotesToRemove)
            notifyListenersAndGUIQuoteHasBeenRemoved(q);
        return buyQuotesToRemove;
    }

    /**
     * Remove all these quotes by the seller
     *
     * @param seller the seller whose quotes we want to clear
     * @return the set of quotes removed
     */
    @Override
    public Collection<Quote> removeAllSellQuoteBySeller(EconomicAgent seller) {
        //create the set of sell quotes to remove
        Set<Quote> askQuotes = new HashSet<>();
        for (Quote q : asks) {
            if (q.getAgent().equals(seller))
                askQuotes.add(q);
        }
        if (askQuotes.isEmpty()) //nothing to remove!
            return askQuotes;
        //non empty!
        boolean b = asks.removeAll(askQuotes);
        assert b;
        //now tell the listeners
        for (Quote q : askQuotes)
            notifyListenersAndGUIQuoteHasBeenRemoved(q);
        return askQuotes;
    }

    /**
     * Best bid and asks are visible.
     */
    @Override
    public boolean isBestSalePriceVisible() {
        return true;
    }

    /**
     * Asks the market to return the best (lowest) price for a good on sale at the market
     *
     * @return the best price or -1 if there are none
     * @throws IllegalAccessException thrown by markets that do not allow such information.
     */
    @Override
    public int getBestSellPrice() throws IllegalAccessException {
        if (!isBestSalePriceVisible())
            throw new IllegalAccessException();
        if (asks.isEmpty()) //if the ask book is empty returns -1
            return -1;
        else
            return asks.peek().getPriceQuoted(); //returns best ask
    }

    /**
     * Asks the market to return the owner of the best ask price in the market
     *
     * @return the best seller or NULL if there is none
     * @throws IllegalAccessException thrown by markets that do not allow such information.
     */
    @Override
    public EconomicAgent getBestSeller() throws IllegalAccessException {
        if (!isBestSalePriceVisible()) //don't show it if it's illegal
            throw new IllegalAccessException();
        if (asks.isEmpty()) //if the ask book is empty returns null
            return null;
        else
            return asks.peek().getAgent(); //returns best ask
    }

    /**
     * Best bid and asks are visible.
     */
    @Override
    public boolean isBestBuyPriceVisible() {
        return true;
    }

    /**
     * Asks the market to return the best (highest) offer for buying a good at the market
     *
     * @return the best price or -1 if there are none
     * @throws IllegalAccessException thrown by markets that do not allow such information.
     */
    @Override
    public int getBestBuyPrice() throws IllegalAccessException {
        //honor the visibility contract promised by the javadoc, mirroring getBestSellPrice()
        if (!isBestBuyPriceVisible())
            throw new IllegalAccessException();
        if (bids.isEmpty()) //if the bid book is empty returns -1
            return -1;
        else
            return bids.peek().getPriceQuoted(); //returns best bid
    }

    /**
     * Asks the market to return the owner of the best offer in the market
     *
     * @return the best buyer or NULL if there is none
     * @throws IllegalAccessException thrown by markets that do not allow such information.
     */
    @Override
    public EconomicAgent getBestBuyer() throws IllegalAccessException {
        if (!isBestBuyPriceVisible()) //don't show it if it's illegal
            throw new IllegalAccessException();
        if (bids.isEmpty()) //if the bid book is empty returns null
            return null;
        else
            return bids.peek().getAgent(); //returns best bid
    }

    /**
     * Can I get an iterator to cycle through all the quotes?
     *
     * @return true if it's possible
     */
    @Override
    public boolean areAllQuotesVisibile() {
        return true;
    }

    /**
     * Get an iterator to cycle through all the bids
     *
     * @return the iterator
     */
    @Override
    public Iterator<Quote> getIteratorForBids() throws IllegalAccessException {
        return bids.iterator();
    }

    /**
     * Get an iterator to cycle through all the asks
     *
     * @return the iterator
     */
    @Override
    public Iterator<Quote> getIteratorForAsks() throws IllegalAccessException {
        return asks.iterator();
    }

    /**
     * The order book adds the histogram viewer to the market inspector.
     */
    @Override
    protected TabbedInspector buildInspector() {
        assert MacroII.hasGUI();
        TabbedInspector inspector = super.buildInspector();
        histogramGenerator = new HistogramGenerator();
        //add the 2 series
        histogramGenerator.addSeries(null, 50, "bids", null);
        histogramGenerator.addSeries(null, 50, "asks", null);
        //decorate the asks and bids so every book change feeds the histogram
        this.bids = new HistogramDecoratedPriorityBook((PriorityQueue) bids, histogramGenerator, 0, "Bids");
        this.asks = new HistogramDecoratedPriorityBook((PriorityQueue) asks, histogramGenerator, 1, "Asks");
        Inspector orderBookViewer = new Inspector() {
            @Override
            public void updateInspector() {
                histogramGenerator.update();
                // this.repaint();
            }
        };
        orderBookViewer.setLayout(new BorderLayout());
        orderBookViewer.add(histogramGenerator.getChartPanel());
        inspector.addInspector(orderBookViewer, "Order Book View");
        return inspector;
    }

    /**
     * Checks whether the given buyer currently has at least one bid in the book.
     */
    public boolean containsQuotesFromThisBuyer(EconomicAgent buyer) {
        for (Quote q : bids)
            if (q.getAgent().equals(buyer))
                return true;
        return false;
    }

    /**
     * Returns how many asks are currently in the market
     */
    public int numberOfAsks() {
        return asks.size();
    }

    /**
     * Returns how many bids are currently in the market
     */
    public int numberOfBids() {
        return bids.size();
    }

    @Override
    public void turnOff() {
        super.turnOff();
        orderHandler.turnOff();
    }

    public OrderHandler getOrderHandler() {
        return orderHandler;
    }

    /**
     * this setter requires a link to the model to start the new handler
     *
     * @param orderHandler the replacement handler (the old one is turned off)
     * @param model        the model, needed to start the new handler if the run has already begun
     */
    public void setOrderHandler(OrderHandler orderHandler, MacroII model) {
        assert this.orderHandler != null;
        this.orderHandler.turnOff(); //turn off the old one!
        //set the new order handler
        this.orderHandler = orderHandler;
        //start it!
        if (model.hasStarted())
            this.orderHandler.start(model, asks, bids, this);
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.action;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.StatusToXContentObject;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshotField;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import java.io.IOException;
import java.util.Objects;
/**
 * Action that updates the mutable metadata of an ML model snapshot:
 * its free-text description and its "retain" flag.
 */
public class UpdateModelSnapshotAction extends ActionType<UpdateModelSnapshotAction.Response> {

    public static final UpdateModelSnapshotAction INSTANCE = new UpdateModelSnapshotAction();
    public static final String NAME = "cluster:admin/xpack/ml/job/model_snapshots/update";

    private UpdateModelSnapshotAction() {
        super(NAME, Response::new);
    }

    public static class Request extends ActionRequest implements ToXContentObject {

        // Parses the request body; job/snapshot ids may also arrive via the URL path
        // and are applied in parseRequest below.
        private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);

        static {
            PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
            PARSER.declareString((request, snapshotId) -> request.snapshotId = snapshotId, ModelSnapshotField.SNAPSHOT_ID);
            PARSER.declareString(Request::setDescription, ModelSnapshot.DESCRIPTION);
            PARSER.declareBoolean(Request::setRetain, ModelSnapshot.RETAIN);
        }

        /**
         * Builds a request from an XContent body; non-null path parameters
         * override any ids present in the body.
         */
        public static Request parseRequest(String jobId, String snapshotId, XContentParser parser) {
            Request request = PARSER.apply(parser, null);
            if (jobId != null) {
                request.jobId = jobId;
            }
            if (snapshotId != null) {
                request.snapshotId = snapshotId;
            }
            return request;
        }

        private String jobId;
        private String snapshotId;
        // Optional updates: a null field means "leave this property unchanged".
        private String description;
        private Boolean retain;

        public Request() {
        }

        public Request(StreamInput in) throws IOException {
            super(in);
            jobId = in.readString();
            snapshotId = in.readString();
            description = in.readOptionalString();
            retain = in.readOptionalBoolean();
        }

        public Request(String jobId, String snapshotId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
            this.snapshotId = ExceptionsHelper.requireNonNull(snapshotId, ModelSnapshotField.SNAPSHOT_ID.getPreferredName());
        }

        public String getJobId() {
            return jobId;
        }

        public String getSnapshotId() {
            return snapshotId;
        }

        public String getDescription() {
            return description;
        }

        public void setDescription(String description) {
            this.description = description;
        }

        public Boolean getRetain() {
            return retain;
        }

        public void setRetain(Boolean retain) {
            this.retain = retain;
        }

        @Override
        public ActionRequestValidationException validate() {
            // Ids are enforced at construction/parse time; nothing further to validate.
            return null;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            // NOTE: field order must stay in sync with the StreamInput constructor above.
            super.writeTo(out);
            out.writeString(jobId);
            out.writeString(snapshotId);
            out.writeOptionalString(description);
            out.writeOptionalBoolean(retain);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(Job.ID.getPreferredName(), jobId);
            builder.field(ModelSnapshotField.SNAPSHOT_ID.getPreferredName(), snapshotId);
            if (description != null) {
                builder.field(ModelSnapshot.DESCRIPTION.getPreferredName(), description);
            }
            if (retain != null) {
                builder.field(ModelSnapshot.RETAIN.getPreferredName(), retain);
            }
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId, snapshotId, description, retain);
        }

        @Override
        public boolean equals(Object obj) {
            // Reflexive fast-path (standard equals idiom; was missing).
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(jobId, other.jobId)
                    && Objects.equals(snapshotId, other.snapshotId)
                    && Objects.equals(description, other.description)
                    && Objects.equals(retain, other.retain);
        }
    }

    public static class Response extends ActionResponse implements StatusToXContentObject {

        private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged");
        private static final ParseField MODEL = new ParseField("model");

        // The snapshot as it exists after the update was applied.
        private final ModelSnapshot model;

        public Response(StreamInput in) throws IOException {
            super(in);
            model = new ModelSnapshot(in);
        }

        public Response(ModelSnapshot modelSnapshot) {
            model = modelSnapshot;
        }

        public ModelSnapshot getModel() {
            return model;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            model.writeTo(out);
        }

        @Override
        public RestStatus status() {
            return RestStatus.OK;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(ACKNOWLEDGED.getPreferredName(), true);
            builder.field(MODEL.getPreferredName());
            builder = model.toXContent(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(model);
        }

        @Override
        public boolean equals(Object obj) {
            // Reflexive fast-path (standard equals idiom; was missing).
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Response other = (Response) obj;
            return Objects.equals(model, other.model);
        }

        @Override
        public final String toString() {
            return Strings.toString(this);
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.