gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package org.apache.velocity.runtime.parser.node; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import org.apache.velocity.app.event.EventHandlerUtil; import org.apache.velocity.context.InternalContextAdapter; import org.apache.velocity.exception.MethodInvocationException; import org.apache.velocity.exception.TemplateInitException; import org.apache.velocity.runtime.RuntimeConstants; import org.apache.velocity.runtime.RuntimeConstants.SpaceGobbling; import org.apache.velocity.runtime.parser.LogContext; import org.apache.velocity.runtime.parser.Parser; import org.apache.velocity.runtime.parser.Token; import org.apache.velocity.util.introspection.Info; import java.io.IOException; import java.io.Writer; /** * Node for the #set directive * * @author <a href="mailto:jvanzyl@apache.org">Jason van Zyl</a> * @author <a href="mailto:geirm@optonline.net">Geir Magnusson Jr.</a> * @version $Id$ */ public class ASTSetDirective extends SimpleNode { private String leftReference = ""; private Node right = null; private ASTReference left = null; private boolean isInitialized; private String prefix = ""; private String postfix = ""; /* * '#' and '$' prefix characters eaten by javacc MORE mode */ private String morePrefix = ""; /** * This is really immutable 
after the init, so keep one for this node */ protected Info uberInfo; /** * Indicates if we are running in strict reference mode. */ protected boolean strictRef = false; /** * @param id */ public ASTSetDirective(int id) { super(id); } /** * @param p * @param id */ public ASTSetDirective(Parser p, int id) { super(p, id); } /** * @see org.apache.velocity.runtime.parser.node.SimpleNode#jjtAccept(org.apache.velocity.runtime.parser.node.ParserVisitor, java.lang.Object) */ public Object jjtAccept(ParserVisitor visitor, Object data) { return visitor.visit(this, data); } /** * simple init. We can get the RHS and LHS as the the tree structure is static * @param context * @param data * @return Init result. * @throws TemplateInitException */ public synchronized Object init(InternalContextAdapter context, Object data) throws TemplateInitException { /** This method is synchronized to prevent double initialization or initialization while rendering **/ if (!isInitialized) { /* * init the tree correctly */ super.init( context, data ); /* * handle '$' and '#' chars prefix */ Token t = getFirstToken(); int pos = -1; while (t != null && (pos = t.image.lastIndexOf(rsvc.getParserConfiguration().getHashChar())) == -1) { t = t.next; } if (t != null && pos > 0) { morePrefix = t.image.substring(0, pos); } uberInfo = new Info(getTemplateName(), getLine(), getColumn()); right = getRightHandSide(); left = getLeftHandSide(); strictRef = rsvc.getBoolean(RuntimeConstants.RUNTIME_REFERENCES_STRICT, false); /* * grab this now. 
No need to redo each time */ leftReference = left.firstImage.substring(1); /* handle backward compatible space gobbling if asked so */ if (rsvc.getSpaceGobbling() == SpaceGobbling.BC) { Node previousNode = null; for (int brother = 0; brother < parent.jjtGetNumChildren(); ++brother) { Node node = parent.jjtGetChild(brother); if (node == this) break; previousNode = node; } if (previousNode == null) prefix = ""; else if (previousNode instanceof ASTText) { ASTText text = (ASTText)previousNode; if (text.getCtext().matches("[ \t]*")) { text.setCtext(""); } } else prefix = ""; } isInitialized = true; cleanupParserAndTokens(); } return data; } /** * set indentation prefix * @param prefix */ public void setPrefix(String prefix) { this.prefix = prefix; } /** * get indentation prefix * @return indentation prefix */ public String getPrefix() { return prefix; } /** * set indentation postfix * @param postfix */ public void setPostfix(String postfix) { this.postfix = postfix; } /** * get indentation postfix * @return indentation prefix */ public String getPostfix() { return postfix; } /** * puts the value of the RHS into the context under the key of the LHS * @param context * @param writer * @return True if rendering was sucessful. * @throws IOException * @throws MethodInvocationException */ public boolean render( InternalContextAdapter context, Writer writer) throws IOException, MethodInvocationException { try { rsvc.getLogContext().pushLogContext(this, uberInfo); SpaceGobbling spaceGobbling = rsvc.getSpaceGobbling(); /* Velocity 1.x space gobbling for #set is rather wacky: prefix is eaten *only* if previous token is not a text node. We handle this by appropriately emptying the prefix in BC mode. 
*/ if (morePrefix.length() > 0 || spaceGobbling.compareTo(SpaceGobbling.LINES) < 0) { writer.write(prefix); } writer.write(morePrefix); /* * get the RHS node, and its value */ Object value = right.value(context); if ( value == null && !strictRef) { String rightReference = null; if (right instanceof ASTExpression) { rightReference = ((ASTExpression) right).lastImage; } EventHandlerUtil.invalidSetMethod(rsvc, context, leftReference, rightReference, uberInfo); } if (morePrefix.length() > 0 || spaceGobbling == SpaceGobbling.NONE) { writer.write(postfix); } return left.setValue(context, value); } finally { rsvc.getLogContext().popLogContext(); StringBuilder builder; } } /** * Returns the string "#set($<i>reference</i> = ...)". RHS is not rendered. This method is only * used for displaying the VTL stacktrace when a rendering error is encountered when runtime.log.track_location is true. * @return */ @Override public String literal() { if (literal != null) { return literal; } StringBuilder builder = new StringBuilder(); builder.append("#set(").append(left.literal()).append(" = ...)"); return literal = builder.toString(); } /** * returns the ASTReference that is the LHS of the set statement * * @return left hand side of #set statement */ private ASTReference getLeftHandSide() { return (ASTReference) jjtGetChild(0); } /** * returns the RHS Node of the set statement * * @return right hand side of #set statement */ private Node getRightHandSide() { return jjtGetChild(1); } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.iotwireless.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * LoRaWAN object for create functions.
 * </p>
 * <p>
 * NOTE(review): this class is produced by the AWS Java SDK code generator (see the {@code @Generated}
 * annotation below) — do not hand-edit its logic; regenerate from the service model instead.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iotwireless-2020-11-22/LoRaWANDevice" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class LoRaWANDevice implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * The DevEUI value.
     * </p>
     */
    private String devEui;
    /**
     * <p>
     * The ID of the device profile for the new wireless device.
     * </p>
     */
    private String deviceProfileId;
    /**
     * <p>
     * The ID of the service profile.
     * </p>
     */
    private String serviceProfileId;
    /**
     * <p>
     * OTAA device object for v1.1 for create APIs
     * </p>
     */
    private OtaaV11 otaaV1_1;
    /**
     * <p>
     * OTAA device object for create APIs for v1.0.x
     * </p>
     */
    private OtaaV10X otaaV1_0_x;
    /**
     * <p>
     * ABP device object for create APIs for v1.1
     * </p>
     */
    private AbpV11 abpV1_1;
    /**
     * <p>
     * LoRaWAN object for create APIs
     * </p>
     */
    private AbpV10X abpV1_0_x;

    // FPort configuration for the device; the service model provides no description for this member.
    private FPorts fPorts;

    /**
     * <p>
     * The DevEUI value.
     * </p>
     *
     * @param devEui
     *        The DevEUI value.
     */
    public void setDevEui(String devEui) {
        this.devEui = devEui;
    }

    /**
     * <p>
     * The DevEUI value.
     * </p>
     *
     * @return The DevEUI value.
     */
    public String getDevEui() {
        return this.devEui;
    }

    /**
     * <p>
     * The DevEUI value.
     * </p>
     *
     * @param devEui
     *        The DevEUI value.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public LoRaWANDevice withDevEui(String devEui) {
        setDevEui(devEui);
        return this;
    }

    /**
     * <p>
     * The ID of the device profile for the new wireless device.
     * </p>
     *
     * @param deviceProfileId
     *        The ID of the device profile for the new wireless device.
     */
    public void setDeviceProfileId(String deviceProfileId) {
        this.deviceProfileId = deviceProfileId;
    }

    /**
     * <p>
     * The ID of the device profile for the new wireless device.
     * </p>
     *
     * @return The ID of the device profile for the new wireless device.
     */
    public String getDeviceProfileId() {
        return this.deviceProfileId;
    }

    /**
     * <p>
     * The ID of the device profile for the new wireless device.
     * </p>
     *
     * @param deviceProfileId
     *        The ID of the device profile for the new wireless device.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public LoRaWANDevice withDeviceProfileId(String deviceProfileId) {
        setDeviceProfileId(deviceProfileId);
        return this;
    }

    /**
     * <p>
     * The ID of the service profile.
     * </p>
     *
     * @param serviceProfileId
     *        The ID of the service profile.
     */
    public void setServiceProfileId(String serviceProfileId) {
        this.serviceProfileId = serviceProfileId;
    }

    /**
     * <p>
     * The ID of the service profile.
     * </p>
     *
     * @return The ID of the service profile.
     */
    public String getServiceProfileId() {
        return this.serviceProfileId;
    }

    /**
     * <p>
     * The ID of the service profile.
     * </p>
     *
     * @param serviceProfileId
     *        The ID of the service profile.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public LoRaWANDevice withServiceProfileId(String serviceProfileId) {
        setServiceProfileId(serviceProfileId);
        return this;
    }

    /**
     * <p>
     * OTAA device object for v1.1 for create APIs
     * </p>
     *
     * @param otaaV1_1
     *        OTAA device object for v1.1 for create APIs
     */
    public void setOtaaV1_1(OtaaV11 otaaV1_1) {
        this.otaaV1_1 = otaaV1_1;
    }

    /**
     * <p>
     * OTAA device object for v1.1 for create APIs
     * </p>
     *
     * @return OTAA device object for v1.1 for create APIs
     */
    public OtaaV11 getOtaaV1_1() {
        return this.otaaV1_1;
    }

    /**
     * <p>
     * OTAA device object for v1.1 for create APIs
     * </p>
     *
     * @param otaaV1_1
     *        OTAA device object for v1.1 for create APIs
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public LoRaWANDevice withOtaaV1_1(OtaaV11 otaaV1_1) {
        setOtaaV1_1(otaaV1_1);
        return this;
    }

    /**
     * <p>
     * OTAA device object for create APIs for v1.0.x
     * </p>
     *
     * @param otaaV1_0_x
     *        OTAA device object for create APIs for v1.0.x
     */
    public void setOtaaV1_0_x(OtaaV10X otaaV1_0_x) {
        this.otaaV1_0_x = otaaV1_0_x;
    }

    /**
     * <p>
     * OTAA device object for create APIs for v1.0.x
     * </p>
     *
     * @return OTAA device object for create APIs for v1.0.x
     */
    public OtaaV10X getOtaaV1_0_x() {
        return this.otaaV1_0_x;
    }

    /**
     * <p>
     * OTAA device object for create APIs for v1.0.x
     * </p>
     *
     * @param otaaV1_0_x
     *        OTAA device object for create APIs for v1.0.x
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public LoRaWANDevice withOtaaV1_0_x(OtaaV10X otaaV1_0_x) {
        setOtaaV1_0_x(otaaV1_0_x);
        return this;
    }

    /**
     * <p>
     * ABP device object for create APIs for v1.1
     * </p>
     *
     * @param abpV1_1
     *        ABP device object for create APIs for v1.1
     */
    public void setAbpV1_1(AbpV11 abpV1_1) {
        this.abpV1_1 = abpV1_1;
    }

    /**
     * <p>
     * ABP device object for create APIs for v1.1
     * </p>
     *
     * @return ABP device object for create APIs for v1.1
     */
    public AbpV11 getAbpV1_1() {
        return this.abpV1_1;
    }

    /**
     * <p>
     * ABP device object for create APIs for v1.1
     * </p>
     *
     * @param abpV1_1
     *        ABP device object for create APIs for v1.1
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public LoRaWANDevice withAbpV1_1(AbpV11 abpV1_1) {
        setAbpV1_1(abpV1_1);
        return this;
    }

    /**
     * <p>
     * LoRaWAN object for create APIs
     * </p>
     *
     * @param abpV1_0_x
     *        LoRaWAN object for create APIs
     */
    public void setAbpV1_0_x(AbpV10X abpV1_0_x) {
        this.abpV1_0_x = abpV1_0_x;
    }

    /**
     * <p>
     * LoRaWAN object for create APIs
     * </p>
     *
     * @return LoRaWAN object for create APIs
     */
    public AbpV10X getAbpV1_0_x() {
        return this.abpV1_0_x;
    }

    /**
     * <p>
     * LoRaWAN object for create APIs
     * </p>
     *
     * @param abpV1_0_x
     *        LoRaWAN object for create APIs
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public LoRaWANDevice withAbpV1_0_x(AbpV10X abpV1_0_x) {
        setAbpV1_0_x(abpV1_0_x);
        return this;
    }

    /**
     * Sets the FPort configuration for this device.
     * (The generated service model omits a description for this member — see the AWS API reference.)
     *
     * @param fPorts
     *        the FPorts configuration
     */
    public void setFPorts(FPorts fPorts) {
        this.fPorts = fPorts;
    }

    /**
     * Returns the FPort configuration for this device, or {@code null} if unset.
     *
     * @return the FPorts configuration
     */
    public FPorts getFPorts() {
        return this.fPorts;
    }

    /**
     * Fluent setter for the FPort configuration.
     *
     * @param fPorts
     *        the FPorts configuration
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public LoRaWANDevice withFPorts(FPorts fPorts) {
        setFPorts(fPorts);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getDevEui() != null)
            sb.append("DevEui: ").append(getDevEui()).append(",");
        if (getDeviceProfileId() != null)
            sb.append("DeviceProfileId: ").append(getDeviceProfileId()).append(",");
        if (getServiceProfileId() != null)
            sb.append("ServiceProfileId: ").append(getServiceProfileId()).append(",");
        if (getOtaaV1_1() != null)
            sb.append("OtaaV1_1: ").append(getOtaaV1_1()).append(",");
        if (getOtaaV1_0_x() != null)
            sb.append("OtaaV1_0_x: ").append(getOtaaV1_0_x()).append(",");
        if (getAbpV1_1() != null)
            sb.append("AbpV1_1: ").append(getAbpV1_1()).append(",");
        if (getAbpV1_0_x() != null)
            sb.append("AbpV1_0_x: ").append(getAbpV1_0_x()).append(",");
        if (getFPorts() != null)
            sb.append("FPorts: ").append(getFPorts());
        sb.append("}");
        return sb.toString();
    }

    // Generated null-safe, member-wise equality; XOR detects "exactly one side null".
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof LoRaWANDevice == false)
            return false;
        LoRaWANDevice other = (LoRaWANDevice) obj;
        if (other.getDevEui() == null ^ this.getDevEui() == null)
            return false;
        if (other.getDevEui() != null && other.getDevEui().equals(this.getDevEui()) == false)
            return false;
        if (other.getDeviceProfileId() == null ^ this.getDeviceProfileId() == null)
            return false;
        if (other.getDeviceProfileId() != null && other.getDeviceProfileId().equals(this.getDeviceProfileId()) == false)
            return false;
        if (other.getServiceProfileId() == null ^ this.getServiceProfileId() == null)
            return false;
        if (other.getServiceProfileId() != null && other.getServiceProfileId().equals(this.getServiceProfileId()) == false)
            return false;
        if (other.getOtaaV1_1() == null ^ this.getOtaaV1_1() == null)
            return false;
        if (other.getOtaaV1_1() != null && other.getOtaaV1_1().equals(this.getOtaaV1_1()) == false)
            return false;
        if (other.getOtaaV1_0_x() == null ^ this.getOtaaV1_0_x() == null)
            return false;
        if (other.getOtaaV1_0_x() != null && other.getOtaaV1_0_x().equals(this.getOtaaV1_0_x()) == false)
            return false;
        if (other.getAbpV1_1() == null ^ this.getAbpV1_1() == null)
            return false;
        if (other.getAbpV1_1() != null && other.getAbpV1_1().equals(this.getAbpV1_1()) == false)
            return false;
        if (other.getAbpV1_0_x() == null ^ this.getAbpV1_0_x() == null)
            return false;
        if (other.getAbpV1_0_x() != null && other.getAbpV1_0_x().equals(this.getAbpV1_0_x()) == false)
            return false;
        if (other.getFPorts() == null ^ this.getFPorts() == null)
            return false;
        if (other.getFPorts() != null && other.getFPorts().equals(this.getFPorts()) == false)
            return false;
        return true;
    }

    // Hash over the same members used by equals(), keeping the equals/hashCode contract.
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getDevEui() == null) ? 0 : getDevEui().hashCode());
        hashCode = prime * hashCode + ((getDeviceProfileId() == null) ? 0 : getDeviceProfileId().hashCode());
        hashCode = prime * hashCode + ((getServiceProfileId() == null) ? 0 : getServiceProfileId().hashCode());
        hashCode = prime * hashCode + ((getOtaaV1_1() == null) ? 0 : getOtaaV1_1().hashCode());
        hashCode = prime * hashCode + ((getOtaaV1_0_x() == null) ? 0 : getOtaaV1_0_x().hashCode());
        hashCode = prime * hashCode + ((getAbpV1_1() == null) ? 0 : getAbpV1_1().hashCode());
        hashCode = prime * hashCode + ((getAbpV1_0_x() == null) ? 0 : getAbpV1_0_x().hashCode());
        hashCode = prime * hashCode + ((getFPorts() == null) ? 0 : getFPorts().hashCode());
        return hashCode;
    }

    // Shallow clone; member objects are shared with the original.
    @Override
    public LoRaWANDevice clone() {
        try {
            return (LoRaWANDevice) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    // Delegates wire-format marshalling to the generated transform class.
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.iotwireless.model.transform.LoRaWANDeviceMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.samza.system.eventhub.producer; import com.microsoft.azure.eventhubs.EventHubClient; import com.microsoft.azure.eventhubs.EventHubPartitionRuntimeInformation; import com.microsoft.azure.eventhubs.EventHubRuntimeInformation; import com.microsoft.azure.eventhubs.PartitionReceiver; import com.microsoft.azure.eventhubs.PartitionSender; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import java.util.stream.Collectors; import org.apache.samza.config.MapConfig; import org.apache.samza.system.OutgoingMessageEnvelope; import org.apache.samza.system.SystemStream; import org.apache.samza.system.eventhub.EventHubConfig; import org.apache.samza.system.eventhub.Interceptor; import org.apache.samza.system.eventhub.MockEventHubClientManagerFactory; import org.apache.samza.system.eventhub.TestMetricsRegistry; import org.apache.samza.system.eventhub.admin.PassThroughInterceptor; import org.apache.samza.system.eventhub.producer.EventHubSystemProducer.PartitioningMethod; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.powermock.core.classloader.annotations.PrepareForTest; 
import org.powermock.modules.junit4.PowerMockRunner; import static org.apache.samza.system.eventhub.MockEventHubConfigFactory.*; @RunWith(PowerMockRunner.class) @PrepareForTest({EventHubRuntimeInformation.class, EventHubPartitionRuntimeInformation.class, EventHubClient.class, PartitionReceiver.class, PartitionSender.class}) public class TestEventHubSystemProducer { private static final String SOURCE = "TestEventHubSystemProducer"; private static List<String> generateMessages(int numMsg) { Random rand = new Random(System.currentTimeMillis()); List<String> messages = new ArrayList<>(); for (int i = 0; i < numMsg; i++) { messages.add("message payload: " + rand.nextInt()); } return messages; } @Test public void testSendingToSpecificPartitions() throws Exception { String systemName = "eventhubs"; String streamName = "testStream"; int numEvents = 10; int partitionId0 = 0; int partitionId1 = 1; TestMetricsRegistry testMetrics = new TestMetricsRegistry(); Map<String, Interceptor> interceptor = new HashMap<>(); interceptor.put(streamName, new PassThroughInterceptor()); List<String> outgoingMessagesP0 = generateMessages(numEvents); List<String> outgoingMessagesP1 = generateMessages(numEvents); // Set configs Map<String, String> configMap = new HashMap<>(); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_LIST, systemName), streamName); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_NAMESPACE, streamName), EVENTHUB_NAMESPACE); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_KEY_NAME, streamName), EVENTHUB_KEY_NAME); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_TOKEN, streamName), EVENTHUB_KEY); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_ENTITYPATH, streamName), EVENTHUB_ENTITY1); configMap.put(String.format(EventHubConfig.CONFIG_PRODUCER_PARTITION_METHOD, systemName), PartitioningMethod.PARTITION_KEY_AS_PARTITION.toString()); MapConfig config = new MapConfig(configMap); MockEventHubClientManagerFactory factory = new 
MockEventHubClientManagerFactory(); EventHubSystemProducer producer = new EventHubSystemProducer(new EventHubConfig(config), systemName, factory, interceptor, testMetrics); SystemStream systemStream = new SystemStream(systemName, streamName); producer.register(SOURCE); producer.start(); outgoingMessagesP0.forEach(message -> producer.send(SOURCE, new OutgoingMessageEnvelope(systemStream, partitionId0, null, message.getBytes()))); outgoingMessagesP1.forEach(message -> producer.send(SOURCE, new OutgoingMessageEnvelope(systemStream, partitionId1, null, message.getBytes()))); // Retrieve sent data List<String> receivedData0 = factory.getSentData(systemName, streamName, partitionId0) .stream() .map(eventData -> new String(eventData.getBytes())) .collect(Collectors.toList()); List<String> receivedData1 = factory.getSentData(systemName, streamName, partitionId1) .stream() .map(eventData -> new String(eventData.getBytes())) .collect(Collectors.toList()); Assert.assertTrue(outgoingMessagesP0.equals(receivedData0)); Assert.assertTrue(outgoingMessagesP1.equals(receivedData1)); } @Test public void testSkipLargeMessageCheck() throws Exception { String systemName = "eventhubs"; String streamName = "testLMStream"; int numEvents = 10; int partitionId0 = 0; TestMetricsRegistry testMetrics = new TestMetricsRegistry(); Map<String, Interceptor> interceptor = new HashMap<>(); interceptor.put(streamName, new PassThroughInterceptor()); List<String> outgoingMessagesP0 = generateMessages(numEvents / 2); outgoingMessagesP0.add("1234567890123456789012345678901234567890"); outgoingMessagesP0.addAll(generateMessages(numEvents / 2)); // Set configs Map<String, String> configMap = new HashMap<>(); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_LIST, systemName), streamName); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_NAMESPACE, streamName), EVENTHUB_NAMESPACE); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_KEY_NAME, streamName), EVENTHUB_KEY_NAME); 
configMap.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_TOKEN, streamName), EVENTHUB_KEY); configMap.put(String.format(EventHubConfig.CONFIG_SKIP_MESSAGES_LARGER_THAN, systemName), "-1"); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_ENTITYPATH, streamName), EVENTHUB_ENTITY1); configMap.put(String.format(EventHubConfig.CONFIG_PRODUCER_PARTITION_METHOD, systemName), PartitioningMethod.PARTITION_KEY_AS_PARTITION.toString()); MapConfig config = new MapConfig(configMap); MockEventHubClientManagerFactory factory = new MockEventHubClientManagerFactory(); EventHubSystemProducer producer = new EventHubSystemProducer(new EventHubConfig(config), systemName, factory, interceptor, testMetrics); SystemStream systemStream = new SystemStream(systemName, streamName); producer.register(SOURCE); producer.start(); outgoingMessagesP0.forEach(message -> producer.send(SOURCE, new OutgoingMessageEnvelope(systemStream, partitionId0, null, message.getBytes()))); // Retrieve sent data List<String> receivedData0 = factory.getSentData(systemName, streamName, partitionId0) .stream() .map(eventData -> new String(eventData.getBytes())) .collect(Collectors.toList()); Assert.assertEquals(outgoingMessagesP0.size(), receivedData0.size()); } @Test public void testSendingLargeMessage() throws Exception { String systemName = "eventhubs"; String streamName = "testLMStream"; int numEvents = 10; int partitionId0 = 0; TestMetricsRegistry testMetrics = new TestMetricsRegistry(); Map<String, Interceptor> interceptor = new HashMap<>(); interceptor.put(streamName, new PassThroughInterceptor()); List<String> outgoingMessagesP0 = generateMessages(numEvents / 2); outgoingMessagesP0.add("1234567890123456789012345678901234567890"); outgoingMessagesP0.addAll(generateMessages(numEvents / 2)); // Set configs Map<String, String> configMap = new HashMap<>(); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_LIST, systemName), streamName); 
configMap.put(String.format(EventHubConfig.CONFIG_STREAM_NAMESPACE, streamName), EVENTHUB_NAMESPACE); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_KEY_NAME, streamName), EVENTHUB_KEY_NAME); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_TOKEN, streamName), EVENTHUB_KEY); configMap.put(String.format(EventHubConfig.CONFIG_SKIP_MESSAGES_LARGER_THAN, systemName), "30"); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_ENTITYPATH, streamName), EVENTHUB_ENTITY1); configMap.put(String.format(EventHubConfig.CONFIG_PRODUCER_PARTITION_METHOD, systemName), PartitioningMethod.PARTITION_KEY_AS_PARTITION.toString()); MapConfig config = new MapConfig(configMap); MockEventHubClientManagerFactory factory = new MockEventHubClientManagerFactory(); EventHubSystemProducer producer = new EventHubSystemProducer(new EventHubConfig(config), systemName, factory, interceptor, testMetrics); SystemStream systemStream = new SystemStream(systemName, streamName); producer.register(SOURCE); producer.start(); outgoingMessagesP0.forEach(message -> producer.send(SOURCE, new OutgoingMessageEnvelope(systemStream, partitionId0, null, message.getBytes()))); // Retrieve sent data List<String> receivedData0 = factory.getSentData(systemName, streamName, partitionId0) .stream() .map(eventData -> new String(eventData.getBytes())) .collect(Collectors.toList()); Assert.assertEquals(outgoingMessagesP0.size(), receivedData0.size() + 1); } @Test public void testSendingToSpecificPartitionsWithInterceptor() throws Exception { String systemName = "eventhubs"; String streamName = "testStream"; int numEvents = 10; int partitionId0 = 0; int partitionId1 = 1; Interceptor interceptor = new SwapFirstLastByteInterceptor(); TestMetricsRegistry testMetrics = new TestMetricsRegistry(); Map<String, Interceptor> interceptors = new HashMap<>(); interceptors.put(streamName, interceptor); List<String> outgoingMessagesP0 = generateMessages(numEvents); List<String> outgoingMessagesP1 = 
generateMessages(numEvents); // Set configs Map<String, String> configMap = new HashMap<>(); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_LIST, systemName), streamName); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_NAMESPACE, streamName), EVENTHUB_NAMESPACE); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_KEY_NAME, streamName), EVENTHUB_KEY_NAME); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_TOKEN, streamName), EVENTHUB_KEY); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_ENTITYPATH, streamName), EVENTHUB_ENTITY1); configMap.put(String.format(EventHubConfig.CONFIG_PRODUCER_PARTITION_METHOD, systemName), PartitioningMethod.PARTITION_KEY_AS_PARTITION.toString()); MapConfig config = new MapConfig(configMap); MockEventHubClientManagerFactory factory = new MockEventHubClientManagerFactory(); EventHubSystemProducer producer = new EventHubSystemProducer(new EventHubConfig(config), systemName, factory, interceptors, testMetrics); SystemStream systemStream = new SystemStream(systemName, streamName); producer.register(SOURCE); producer.start(); outgoingMessagesP0.forEach(message -> producer.send(SOURCE, new OutgoingMessageEnvelope(systemStream, partitionId0, null, message.getBytes()))); outgoingMessagesP1.forEach(message -> producer.send(SOURCE, new OutgoingMessageEnvelope(systemStream, partitionId1, null, message.getBytes()))); // Retrieve sent data List<String> receivedData0 = factory.getSentData(systemName, streamName, partitionId0) .stream() .map(eventData -> new String(eventData.getBytes())) .collect(Collectors.toList()); List<String> receivedData1 = factory.getSentData(systemName, streamName, partitionId1) .stream() .map(eventData -> new String(eventData.getBytes())) .collect(Collectors.toList()); List<String> expectedP0 = outgoingMessagesP0.stream() .map(message -> new String(interceptor.intercept(message.getBytes()))) .collect(Collectors.toList()); List<String> expectedP1 = outgoingMessagesP1.stream() 
.map(message -> new String(interceptor.intercept(message.getBytes()))) .collect(Collectors.toList()); Assert.assertTrue(expectedP0.equals(receivedData0)); Assert.assertTrue(expectedP1.equals(receivedData1)); } @Test public void testSendingToEventHubHashing() throws Exception { String systemName = "eventhubs"; String streamName = "testStream"; int numEvents = 10; String partitionId0 = "124"; String partitionId1 = "235"; TestMetricsRegistry testMetrics = new TestMetricsRegistry(); Map<String, Interceptor> interceptor = new HashMap<>(); interceptor.put(streamName, new PassThroughInterceptor()); List<String> outgoingMessagesP0 = generateMessages(numEvents); List<String> outgoingMessagesP1 = generateMessages(numEvents); // Set configs Map<String, String> configMap = new HashMap<>(); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_LIST, systemName), streamName); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_NAMESPACE, streamName), EVENTHUB_NAMESPACE); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_KEY_NAME, streamName), EVENTHUB_KEY_NAME); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_TOKEN, streamName), EVENTHUB_KEY); configMap.put(String.format(EventHubConfig.CONFIG_STREAM_ENTITYPATH, streamName), EVENTHUB_ENTITY1); // mod 2 on the partitionid to simulate consistent hashing configMap.put(String.format(EventHubConfig.CONFIG_PRODUCER_PARTITION_METHOD, systemName), PartitioningMethod.EVENT_HUB_HASHING.toString()); MapConfig config = new MapConfig(configMap); MockEventHubClientManagerFactory factory = new MockEventHubClientManagerFactory(); EventHubSystemProducer producer = new EventHubSystemProducer(new EventHubConfig(config), systemName, factory, interceptor, testMetrics); SystemStream systemStream = new SystemStream(systemName, streamName); producer.register(SOURCE); producer.start(); outgoingMessagesP0.forEach(message -> producer.send(SOURCE, new OutgoingMessageEnvelope(systemStream, partitionId0, null, 
message.getBytes()))); outgoingMessagesP1.forEach(message -> producer.send(SOURCE, new OutgoingMessageEnvelope(systemStream, partitionId1, null, message.getBytes()))); // Retrieve sent data List<String> receivedData0 = factory.getSentData(systemName, streamName, 0) .stream() .map(eventData -> new String(eventData.getBytes())) .collect(Collectors.toList()); List<String> receivedData1 = factory.getSentData(systemName, streamName, 1) .stream() .map(eventData -> new String(eventData.getBytes())) .collect(Collectors.toList()); Assert.assertTrue(outgoingMessagesP0.equals(receivedData0)); Assert.assertTrue(outgoingMessagesP1.equals(receivedData1)); } }
import static org.junit.Assert.*; import org.junit.Test; import org.junit.Ignore; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @RunWith(JUnit4.class) public class Hw2pr2Test { /* * Testing notString */ @Test public void testNotString0() { assertTrue("notString failed! expected 'not candy', got "+Hw2pr2.notString("candy"),Hw2pr2.notString("candy").equals("not candy")); } @Test public void testNotString1() { assertTrue("notString failed! expected 'not x', got "+Hw2pr2.notString("x"),Hw2pr2.notString("x").equals("not x")); } @Test public void testNotString2() { assertTrue("notString failed! expected 'not bad', got "+Hw2pr2.notString("not bad"),Hw2pr2.notString("not bad").equals("not bad")); } @Test public void testNotString3() { assertTrue("notString failed! expected 'not bad', got "+Hw2pr2.notString("bad"),Hw2pr2.notString("bad").equals("not bad")); } @Test public void testNotString4() { assertTrue("notString failed! expected 'not', got "+Hw2pr2.notString("not"),Hw2pr2.notString("not").equals("not")); } @Test public void testNotString5() { assertTrue("notString failed! expected 'not is not', got "+Hw2pr2.notString("is not"),Hw2pr2.notString("is not").equals("not is not")); } @Test public void testNotString6() { assertTrue("notString failed! 
expected 'not no', got "+Hw2pr2.notString("no"),Hw2pr2.notString("no").equals("not no")); } /* * Testing missingChar */ @Test public void testMissingChar0() { assertTrue(Hw2pr2.missingChar("kitten", 1).equals("ktten")); } @Test public void testMissingChar1() { assertTrue(Hw2pr2.missingChar("kitten", 0).equals("itten")); } @Test public void testMissingChar2() { assertTrue(Hw2pr2.missingChar("kitten", 4).equals("kittn")); } @Test public void testMissingChar3() { assertTrue(Hw2pr2.missingChar("Hi", 0).equals("i")); } @Test public void testMissingChar4() { assertTrue(Hw2pr2.missingChar("Hi", 1).equals("H")); } @Test public void testMissingChar5() { assertTrue(Hw2pr2.missingChar("code", 0).equals("ode")); } @Test public void testMissingChar6() { assertTrue(Hw2pr2.missingChar("code", 1).equals("cde")); } @Test public void testMissingChar7() { assertTrue(Hw2pr2.missingChar("code", 2).equals("coe")); } @Test public void testMissingChar8() { assertTrue(Hw2pr2.missingChar("code", 3).equals("cod")); } @Test public void testMissingChar9() { assertTrue(Hw2pr2.missingChar("chocolate", 8).equals("chocolat")); } /* * Testing FrontBack */ @Test public void testfrontBack0() { assertTrue(Hw2pr2.frontBack("code").equals("eodc")); } @Test public void testfrontBack1() { assertTrue(Hw2pr2.frontBack("a").equals("a")); } @Test public void testfrontBack2() { assertTrue(Hw2pr2.frontBack("ab").equals("ba")); } @Test public void testfrontBack3() { assertTrue(Hw2pr2.frontBack("abc").equals("cba")); } @Test public void testfrontBack4() { assertTrue(Hw2pr2.frontBack("").equals("")); } @Test public void testfrontBack5() { assertTrue(Hw2pr2.frontBack("Chocolate").equals("ehocolatC")); } @Test public void testfrontBack6() { assertTrue(Hw2pr2.frontBack("aavJ").equals("Java")); } @Test public void testfrontBack7() { assertTrue(Hw2pr2.frontBack("hello").equals("oellh")); } /* * Testing front3 */ @Test public void testfront3_0() { assertTrue(Hw2pr2.front3("Java").equals("JavJavJav")); } @Test public 
void testfront3_1() { assertTrue(Hw2pr2.front3("Chocolate").equals("ChoChoCho")); } @Test public void testfront3_2() { assertTrue(Hw2pr2.front3("abc").equals("abcabcabc")); } @Test public void testfront3_3() { assertTrue(Hw2pr2.front3("abcXYZ").equals("abcabcabc")); } @Test public void testfront3_4() { assertTrue(Hw2pr2.front3("ab").equals("ababab")); } @Test public void testfront3_5() { assertTrue(Hw2pr2.front3("a").equals("aaa")); } @Test public void testfront3_6() { assertTrue(Hw2pr2.front3("").equals("")); } /* * Testing backAround */ @Test public void testBackAround0() { assertTrue(Hw2pr2.backAround("cat").equals("tcatt")); } @Test public void testBackAround1() { assertTrue(Hw2pr2.backAround("Hello").equals("oHelloo")); } @Test public void testBackAround2() { assertTrue(Hw2pr2.backAround("a").equals("aaa")); } @Test public void testBackAround3() { assertTrue(Hw2pr2.backAround("abc").equals("cabcc")); } @Test public void testBackAround4() { assertTrue(Hw2pr2.backAround("read").equals("dreadd")); } @Test public void testBackAround5() { assertTrue(Hw2pr2.backAround("boo").equals("obooo")); } /* * Testing front22 */ @Test public void testfront22_0() { assertTrue(Hw2pr2.front22("kitten").equals("kikittenki")); } @Test public void testfront22_1() { assertTrue(Hw2pr2.front22("Ha").equals("HaHaHa")); } @Test public void testfront22_2() { assertTrue(Hw2pr2.front22("abc").equals("ababcab")); } @Test public void testfront22_3() { assertTrue(Hw2pr2.front22("ab").equals("ababab")); } @Test public void testfront22_4() { assertTrue(Hw2pr2.front22("a").equals("aaa")); } @Test public void testfront22_5() { assertTrue(Hw2pr2.front22("").equals("")); } @Test public void testfront22_6() { assertTrue(Hw2pr2.front22("Logic").equals("LoLogicLo")); } /* * Testing startHi */ @Test public void testStartHi0() { assertTrue(Hw2pr2.startHi("hi there")); } @Test public void testStartHi1() { assertTrue(Hw2pr2.startHi("hi")); } @Test public void testStartHi2() { assertTrue(Hw2pr2.startHi("hi 
ho")); } @Test public void testStartHi3() { assertFalse(Hw2pr2.startHi("hello hi")); } @Test public void testStartHi4() { assertFalse(Hw2pr2.startHi("he")); } @Test public void testStartHi5() { assertFalse(Hw2pr2.startHi("h")); } @Test public void testStartHi6() { assertFalse(Hw2pr2.startHi("")); } @Test public void testStartHi7() { assertFalse(Hw2pr2.startHi("ho hi")); } /* * Testing startOz */ @Test public void testStartOz0() { assertTrue(Hw2pr2.startOz("ozymandias").equals("oz")); } @Test public void testStartOz1() { assertTrue(Hw2pr2.startOz("bzoo").equals("z")); } @Test public void testStartOz2() { assertTrue(Hw2pr2.startOz("oxx").equals("o")); } @Test public void testStartOz3() { assertTrue(Hw2pr2.startOz("oz").equals("oz")); } @Test public void testStartOz4() { assertTrue(Hw2pr2.startOz("ounce").equals("o")); } @Test public void testStartOz5() { assertTrue(Hw2pr2.startOz("o").equals("o")); } @Test public void testStartOz6() { assertTrue(Hw2pr2.startOz("abc").equals("")); } @Test public void testStartOz7() { assertTrue(Hw2pr2.startOz("").equals("")); } @Test public void testStartOz8() { assertTrue(Hw2pr2.startOz("zoo").equals("")); } @Test public void testStartOz9() { assertTrue(Hw2pr2.startOz("aztec").equals("z")); } @Test public void testStartOz10() { assertTrue(Hw2pr2.startOz("zzzz").equals("z")); } @Test public void testStartOz11() { assertTrue(Hw2pr2.startOz("oznic").equals("oz")); } }
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.iotanalytics;

import javax.annotation.Generated;

import com.amazonaws.services.iotanalytics.model.*;
import com.amazonaws.*;

/**
 * Abstract implementation of {@code AWSIoTAnalytics}. Convenient method forms pass through to the corresponding
 * overload that takes a request object, which throws an {@code UnsupportedOperationException}.
 */
// NOTE(review): this class is produced by the AWS SDK code generator (@Generated below) — do not
// hand-edit; regenerate instead. Every operation deliberately throws so that subclasses only need
// to override the operations they actually use.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbstractAWSIoTAnalytics implements AWSIoTAnalytics {

    protected AbstractAWSIoTAnalytics() {
    }

    @Override
    public BatchPutMessageResult batchPutMessage(BatchPutMessageRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CancelPipelineReprocessingResult cancelPipelineReprocessing(CancelPipelineReprocessingRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateChannelResult createChannel(CreateChannelRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateDatasetResult createDataset(CreateDatasetRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateDatasetContentResult createDatasetContent(CreateDatasetContentRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateDatastoreResult createDatastore(CreateDatastoreRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreatePipelineResult createPipeline(CreatePipelineRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeleteChannelResult deleteChannel(DeleteChannelRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeleteDatasetResult deleteDataset(DeleteDatasetRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeleteDatasetContentResult deleteDatasetContent(DeleteDatasetContentRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeleteDatastoreResult deleteDatastore(DeleteDatastoreRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DeletePipelineResult deletePipeline(DeletePipelineRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DescribeChannelResult describeChannel(DescribeChannelRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DescribeDatasetResult describeDataset(DescribeDatasetRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DescribeDatastoreResult describeDatastore(DescribeDatastoreRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DescribeLoggingOptionsResult describeLoggingOptions(DescribeLoggingOptionsRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public DescribePipelineResult describePipeline(DescribePipelineRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public GetDatasetContentResult getDatasetContent(GetDatasetContentRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public ListChannelsResult listChannels(ListChannelsRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public ListDatasetContentsResult listDatasetContents(ListDatasetContentsRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public ListDatasetsResult listDatasets(ListDatasetsRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public ListDatastoresResult listDatastores(ListDatastoresRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public ListPipelinesResult listPipelines(ListPipelinesRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public ListTagsForResourceResult listTagsForResource(ListTagsForResourceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public PutLoggingOptionsResult putLoggingOptions(PutLoggingOptionsRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public RunPipelineActivityResult runPipelineActivity(RunPipelineActivityRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public SampleChannelDataResult sampleChannelData(SampleChannelDataRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public StartPipelineReprocessingResult startPipelineReprocessing(StartPipelineReprocessingRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public TagResourceResult tagResource(TagResourceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public UntagResourceResult untagResource(UntagResourceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public UpdateChannelResult updateChannel(UpdateChannelRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public UpdateDatasetResult updateDataset(UpdateDatasetRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public UpdateDatastoreResult updateDatastore(UpdateDatastoreRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public UpdatePipelineResult updatePipeline(UpdatePipelineRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public void shutdown() {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public com.amazonaws.ResponseMetadata getCachedResponseMetadata(com.amazonaws.AmazonWebServiceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

}
package com.orientechnologies.orient.core.index.hashindex.local.cache;

import com.orientechnologies.common.directmemory.ODirectMemoryPointer;
import com.orientechnologies.common.serialization.types.OIntegerSerializer;
import com.orientechnologies.common.serialization.types.OLongSerializer;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.config.OStorageSegmentConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.exception.OAllCacheEntriesAreUsedException;
import com.orientechnologies.orient.core.storage.fs.OFileClassic;
import com.orientechnologies.orient.core.storage.impl.local.paginated.OLocalPaginatedStorage;
import com.orientechnologies.orient.core.storage.impl.local.paginated.base.ODurablePage;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.*;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import java.util.zip.CRC32;

/**
 * Tests for the 2Q read-write disk cache (queues Am / A1in / A1out): page load/release, queue
 * placement, flushing, file lifecycle and on-disk page verification.
 */
@Test
public class ReadWriteDiskCacheTest {
  // Per-page system header preceding user data: one int plus three longs.
  // NOTE(review): exact field layout (magic number / checksum / LSN) inferred from offset math in
  // the verification tests — confirm against ODurablePage.
  private int systemOffset = OIntegerSerializer.INT_SIZE + 3 * OLongSerializer.LONG_SIZE;

  private OReadWriteDiskCache buffer;
  private OLocalPaginatedStorage storageLocal;
  private String fileName;
  // Random byte mixed into every page payload, regenerated per test method.
  private byte seed;
  private ODiskWriteAheadLog writeAheadLog;

  // One-time setup: create (and immediately close) the backing plocal storage shared by all tests.
  @BeforeClass
  public void beforeClass() throws IOException {
    OGlobalConfiguration.FILE_LOCK.setValue(Boolean.FALSE);

    String buildDirectory = System.getProperty("buildDirectory");
    if (buildDirectory == null)
      buildDirectory = ".";

    storageLocal = (OLocalPaginatedStorage) Orient.instance().loadStorage("plocal:" + buildDirectory + "/ReadWriteDiskCacheTest");
    storageLocal.create(null);
    storageLocal.close(true, false);

    fileName = "readWriteDiskCacheTest.tst";

    OWALRecordsFactory.INSTANCE.registerNewRecord((byte) 128, WriteAheadLogTest.TestRecord.class);
  }

  // Fresh cache, clean disk state and a new random payload seed before every test.
  @BeforeMethod
  public void beforeMethod() throws IOException {
    closeBufferAndDeleteFile();

    initBuffer();

    Random random = new Random();
    seed = (byte) (random.nextInt() & 0xFF);
  }

  // Closes cache and WAL (if open) and deletes the test data file and the name-id map file.
  private void closeBufferAndDeleteFile() throws IOException {
    if (buffer != null) {
      buffer.close();
      buffer = null;
    }

    if (writeAheadLog != null) {
      writeAheadLog.delete();
      writeAheadLog = null;
    }

    File testFile = new File(storageLocal.getConfiguration().getDirectory() + File.separator + "readWriteDiskCacheTest.tst");
    if (testFile.exists()) {
      Assert.assertTrue(testFile.delete());
    }

    File idMapFile = new File(storageLocal.getConfiguration().getDirectory() + File.separator + "name_id_map.cm");
    if (idMapFile.exists()) {
      Assert.assertTrue(idMapFile.delete());
    }
  }

  // Final cleanup: drop the cache, WAL, storage and any leftover files.
  @AfterClass
  public void afterClass() throws IOException {
    if (buffer != null) {
      buffer.delete();
      buffer = null;
    }

    if (writeAheadLog != null) {
      writeAheadLog.delete();
      writeAheadLog = null;
    }

    storageLocal.delete();

    File file = new File(storageLocal.getConfiguration().getDirectory() + "/readWriteDiskCacheTest.tst");
    if (file.exists()) {
      Assert.assertTrue(file.delete());
      file.getParentFile().delete();
    }
  }

  // Builds the cache under test. NOTE(review): first two args appear to be read-cache and
  // write-cache sizes in bytes (4 and 15000 pages respectively) — confirm against
  // OReadWriteDiskCache's constructor.
  private void initBuffer() throws IOException {
    buffer = new OReadWriteDiskCache(4 * (8 + systemOffset + 2 * OWOWCache.PAGE_PADDING),
        15000 * (8 + systemOffset + 2 * OWOWCache.PAGE_PADDING), 8 + systemOffset, 10000, -1, storageLocal, writeAheadLog, true,
        false);
  }

  // Writes four dirty pages, expects all of them in A1in (Am and A1out empty), then flushes and
  // verifies the bytes on disk.
  public void testAddFourItems() throws IOException {
    long fileId = buffer.openFile(fileName);

    OCacheEntry[] entries = new OCacheEntry[4];

    for (int i = 0; i < 4; i++) {
      entries[i] = buffer.load(fileId, i, false);
      entries[i].getCachePointer().acquireExclusiveLock();
      entries[i].markDirty();

      entries[i].getCachePointer().getDataPointer()
          .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] {
              (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, 0, 8);

      entries[i].getCachePointer().releaseExclusiveLock();
      buffer.release(entries[i]);
    }

    LRUList am = buffer.getAm();
    LRUList a1in = buffer.getA1in();
    LRUList a1out = buffer.getA1out();

    Assert.assertEquals(am.size(), 0);
    Assert.assertEquals(a1out.size(), 0);

    for (int i = 0; i < 4; i++) {
      OCacheEntry entry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false, new OLogSequenceNumber(0, 0));
      Assert.assertEquals(a1in.get(entry.fileId, entry.pageIndex), entry);
    }

    Assert.assertEquals(buffer.getFilledUpTo(fileId), 4);
    buffer.flushBuffer();

    for (int i = 0; i < 4; i++) {
      assertFile(i, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, new OLogSequenceNumber(0, 0));
    }
  }

  // Pages that are accessed again after being evicted to A1out must be promoted into Am.
  public void testFrequentlyReadItemsAreMovedInAm() throws Exception {
    long fileId = buffer.openFile(fileName);

    OCacheEntry[] entries = new OCacheEntry[10];

    // Write 10 pages, each stamped with LSN (1, i).
    for (int i = 0; i < 10; i++) {
      entries[i] = buffer.load(fileId, i, false);
      entries[i].getCachePointer().acquireExclusiveLock();
      entries[i].markDirty();

      entries[i].getCachePointer().getDataPointer()
          .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, 0, 8);
      setLsn(entries[i].getCachePointer().getDataPointer(), new OLogSequenceNumber(1, i));

      entries[i].getCachePointer().releaseExclusiveLock();
      buffer.release(entries[i]);
    }

    // Drop all in-memory state; everything must already be on disk.
    buffer.clear();

    for (int i = 0; i < 10; i++)
      assertFile(i, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, new OLogSequenceNumber(1, i));

    for (int i = 0; i < 8; i++) {
      entries[i] = buffer.load(fileId, i, false);
      buffer.release(entries[i]);
    }

    // Second access to pages 2..3 (now sitting in A1out) promotes them to Am.
    for (int i = 2; i < 4; i++) {
      entries[i] = buffer.load(fileId, i, false);
      buffer.release(entries[i]);
    }

    LRUList am = buffer.getAm();
    LRUList a1in = buffer.getA1in();
    LRUList a1out = buffer.getA1out();

    Assert.assertEquals(am.size(), 2);
    Assert.assertEquals(a1in.size(), 2);
    Assert.assertEquals(a1out.size(), 2);

    for (int i = 2; i < 4; i++) {
      OCacheEntry lruEntry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false,
          new OLogSequenceNumber(1, i));
      Assert.assertEquals(am.get(fileId, i), lruEntry);
    }

    for (int i = 4; i < 6; i++) {
      OCacheEntry lruEntry = generateRemovedEntry(fileId, i);
      Assert.assertEquals(a1out.get(fileId, i), lruEntry);
    }

    for (int i = 6; i < 8; i++) {
      OCacheEntry lruEntry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false,
          new OLogSequenceNumber(1, i));
      Assert.assertEquals(a1in.get(fileId, i), lruEntry);
    }
  }

  // Opening a file that does not exist yet must create it on disk.
  public void testCacheShouldCreateFileIfItIsNotExisted() throws Exception {
    buffer.openFile(fileName);

    File file = new File(storageLocal.getConfiguration().getDirectory() + "/readWriteDiskCacheTest.tst");

    Assert.assertTrue(file.exists());
    Assert.assertTrue(file.isFile());
  }

  // Writing 10 pages overflows A1in; re-loading the A1out residents moves them into Am.
  public void testFrequentlyAddItemsAreMovedInAm() throws Exception {
    long fileId = buffer.openFile(fileName);

    OCacheEntry[] entries = new OCacheEntry[10];

    for (int i = 0; i < 10; i++) {
      entries[i] = buffer.load(fileId, i, false);
      entries[i].getCachePointer().acquireExclusiveLock();
      entries[i].markDirty();

      entries[i].getCachePointer().getDataPointer()
          .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, 0, 8);
      setLsn(entries[i].getCachePointer().getDataPointer(), new OLogSequenceNumber(1, i));

      entries[i].getCachePointer().releaseExclusiveLock();
      buffer.release(entries[i]);
    }

    LRUList am = buffer.getAm();
    LRUList a1in = buffer.getA1in();
    LRUList a1out = buffer.getA1out();

    Assert.assertEquals(a1in.size(), 4);
    Assert.assertEquals(a1out.size(), 2);
    Assert.assertEquals(am.size(), 0);

    for (int i = 6; i < 10; i++) {
      OCacheEntry lruEntry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false,
          new OLogSequenceNumber(0, 0));
      Assert.assertEquals(a1in.get(fileId, i), lruEntry);
    }

    for (int i = 4; i < 6; i++) {
      OCacheEntry lruEntry = generateRemovedEntry(fileId, i);
      Assert.assertEquals(a1out.get(fileId, i), lruEntry);
    }

    // Touch the A1out residents again — they should move into Am.
    for (int i = 4; i < 6; i++) {
      entries[i] = buffer.load(fileId, i, false);
      buffer.release(entries[i]);
    }

    Assert.assertEquals(am.size(), 2);
    Assert.assertEquals(a1in.size(), 2);
    Assert.assertEquals(a1out.size(), 2);

    for (int i = 4; i < 6; i++) {
      OCacheEntry lruEntry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false,
          new OLogSequenceNumber(1, i));
      Assert.assertEquals(am.get(fileId, i), lruEntry);
    }

    for (int i = 6; i < 8; i++) {
      OCacheEntry lruEntry = generateRemovedEntry(fileId, i);
      Assert.assertEquals(a1out.get(fileId, i), lruEntry);
    }

    for (int i = 8; i < 10; i++) {
      OCacheEntry lruEntry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false,
          new OLogSequenceNumber(0, 0));
      Assert.assertEquals(a1in.get(fileId, i), lruEntry);
    }

    buffer.flushBuffer();

    for (int i = 0; i < 10; i++)
      assertFile(i, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, new OLogSequenceNumber(1, i));
  }

  // Round-trip: write, clear the cache, verify on disk, then re-load and check A1in placement.
  public void testReadFourItems() throws IOException {
    long fileId = buffer.openFile(fileName);

    OCacheEntry[] entries = new OCacheEntry[4];

    for (int i = 0; i < 4; i++) {
      entries[i] = buffer.load(fileId, i, false);
      entries[i].getCachePointer().acquireExclusiveLock();
      entries[i].markDirty();

      entries[i].getCachePointer().getDataPointer()
          .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, 0, 8);
      setLsn(entries[i].getCachePointer().getDataPointer(), new OLogSequenceNumber(1, i));

      entries[i].getCachePointer().releaseExclusiveLock();
      buffer.release(entries[i]);
    }

    buffer.clear();

    for (int i = 0; i < 4; i++) {
      assertFile(i, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, new OLogSequenceNumber(1, i));
    }

    for (int i = 0; i < 4; i++) {
      entries[i] = buffer.load(fileId, i, false);
      buffer.release(entries[i]);
    }

    LRUList am = buffer.getAm();
    LRUList a1in = buffer.getA1in();
    LRUList a1out = buffer.getA1out();

    Assert.assertEquals(am.size(), 0);
    Assert.assertEquals(a1out.size(), 0);

    for (int i = 0; i < 4; i++) {
      OCacheEntry entry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false,
          new OLogSequenceNumber(1, i));
      Assert.assertEquals(a1in.get(entry.fileId, entry.pageIndex), entry);
    }

    Assert.assertEquals(buffer.getFilledUpTo(fileId), 4);
  }

  // A plain load/release must leave exactly one entry, in A1in.
  public void testLoadAndLockForReadShouldHitCache() throws Exception {
    long fileId = buffer.openFile(fileName);

    OCacheEntry cacheEntry = buffer.load(fileId, 0, false);
    buffer.release(cacheEntry);

    LRUList am = buffer.getAm();
    LRUList a1in = buffer.getA1in();
    LRUList a1out = buffer.getA1out();

    Assert.assertEquals(am.size(), 0);
    Assert.assertEquals(a1out.size(), 0);

    OCacheEntry entry = generateEntry(fileId, 0, cacheEntry.getCachePointer().getDataPointer(), false,
        new OLogSequenceNumber(0, 0));

    Assert.assertEquals(a1in.size(), 1);
    Assert.assertEquals(a1in.get(entry.fileId, entry.pageIndex), entry);
  }

  // closeFile() must flush dirty pages to disk.
  public void testCloseFileShouldFlushData() throws Exception {
    long fileId = buffer.openFile(fileName);

    OCacheEntry[] entries = new OCacheEntry[4];

    for (int i = 0; i < 4; i++) {
      entries[i] = buffer.load(fileId, i, false);
      entries[i].getCachePointer().acquireExclusiveLock();
      entries[i].markDirty();

      entries[i].getCachePointer().getDataPointer()
          .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, 0, 8);

      entries[i].getCachePointer().releaseExclusiveLock();
      buffer.release(entries[i]);
    }

    LRUList am = buffer.getAm();
    LRUList a1in = buffer.getA1in();
    LRUList a1out = buffer.getA1out();

    Assert.assertEquals(am.size(), 0);
    Assert.assertEquals(a1out.size(), 0);

    for (int i = 0; i < 4; i++) {
      OCacheEntry entry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false,
          new OLogSequenceNumber(0, 0));
      Assert.assertEquals(a1in.get(entry.fileId, entry.pageIndex), entry);
    }

    Assert.assertEquals(buffer.getFilledUpTo(fileId), 4);
    buffer.closeFile(fileId);

    for (int i = 0; i < 4; i++) {
      assertFile(i, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, new
          OLogSequenceNumber(0, 0));
    }
  }

  // closeFile() must also evict every page of that file from all three cache queues.
  public void testCloseFileShouldRemoveFilePagesFromBuffer() throws Exception {
    long fileId = buffer.openFile(fileName);

    OCacheEntry[] entries = new OCacheEntry[4];

    for (int i = 0; i < 4; i++) {
      entries[i] = buffer.load(fileId, i, false);
      entries[i].getCachePointer().acquireExclusiveLock();
      entries[i].markDirty();

      entries[i].getCachePointer().getDataPointer()
          .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, (byte) i }, 0, 8);

      entries[i].getCachePointer().releaseExclusiveLock();
      buffer.release(entries[i]);
    }

    LRUList am = buffer.getAm();
    LRUList a1in = buffer.getA1in();
    LRUList a1out = buffer.getA1out();

    Assert.assertEquals(am.size(), 0);
    Assert.assertEquals(a1out.size(), 0);

    for (int i = 0; i < 4; i++) {
      OCacheEntry entry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false,
          new OLogSequenceNumber(0, 0));
      Assert.assertEquals(a1in.get(entry.fileId, entry.pageIndex), entry);
    }

    Assert.assertEquals(buffer.getFilledUpTo(fileId), 4);
    buffer.closeFile(fileId);

    Assert.assertEquals(buffer.getA1out().size(), 0);
    Assert.assertEquals(buffer.getA1in().size(), 0);
    Assert.assertEquals(buffer.getAm().size(), 0);
  }

  // deleteFile() must remove the backing file from disk.
  public void testDeleteFileShouldDeleteFileFromHardDrive() throws Exception {
    long fileId = buffer.openFile(fileName);

    OCacheEntry[] entries = new OCacheEntry[4];
    byte[][] content = new byte[4][];

    for (int i = 0; i < 4; i++) {
      entries[i] = buffer.load(fileId, i, false);
      entries[i].getCachePointer().acquireExclusiveLock();

      content[i] = entries[i].getCachePointer().getDataPointer().get(systemOffset + OWOWCache.PAGE_PADDING, 8);

      entries[i].getCachePointer().releaseExclusiveLock();
      buffer.release(entries[i]);
    }

    buffer.deleteFile(fileId);
    buffer.flushBuffer();

    for (int i = 0; i < 4; i++) {
      File file = new File(storageLocal.getConfiguration().getDirectory() + "/readWriteDiskCacheTest.tst");
      Assert.assertFalse(file.exists());
    }
  }

  // Each page is rewritten four times (j = 0..3); flushFile() must persist the LAST write (j == 3).
  public void testFlushData() throws Exception {
    long fileId = buffer.openFile(fileName);

    OCacheEntry[] entries = new OCacheEntry[4];

    for (int i = 0; i < 4; i++) {
      for (int j = 0; j < 4; ++j) {
        entries[i] = buffer.load(fileId, i, false);
        entries[i].getCachePointer().acquireExclusiveLock();
        entries[i].markDirty();

        entries[i].getCachePointer().getDataPointer()
            .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, (byte) j, (byte) i }, 0, 8);

        entries[i].getCachePointer().releaseExclusiveLock();
        buffer.release(entries[i]);
      }
    }

    LRUList am = buffer.getAm();
    LRUList a1in = buffer.getA1in();
    LRUList a1out = buffer.getA1out();

    Assert.assertEquals(am.size(), 0);
    Assert.assertEquals(a1out.size(), 0);

    for (int i = 0; i < 4; i++) {
      OCacheEntry entry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false,
          new OLogSequenceNumber(0, 0));
      Assert.assertEquals(a1in.get(entry.fileId, entry.pageIndex), entry);
    }

    Assert.assertEquals(buffer.getFilledUpTo(fileId), 4);
    buffer.flushFile(fileId);

    for (int i = 0; i < 4; i++) {
      assertFile(i, new byte[] { (byte) i, 1, 2, seed, 4, 5, 3, (byte) i }, new OLogSequenceNumber(0, 0));
    }
  }

  // Writing 6 pages into a 4-page A1in pushes the 2 oldest (pages 0..1) into A1out.
  public void testIfNotEnoughSpaceOldPagesShouldBeMovedToA1Out() throws Exception {
    long fileId = buffer.openFile(fileName);

    OCacheEntry[] entries = new OCacheEntry[6];

    for (int i = 0; i < 6; i++) {
      entries[i] = buffer.load(fileId, i, false);
      entries[i].getCachePointer().acquireExclusiveLock();
      entries[i].markDirty();

      entries[i].getCachePointer().getDataPointer()
          .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, 7 }, 0, 8);

      entries[i].getCachePointer().releaseExclusiveLock();
      buffer.release(entries[i]);
    }

    LRUList am = buffer.getAm();
    LRUList a1in = buffer.getA1in();
    LRUList a1out = buffer.getA1out();

    Assert.assertEquals(am.size(), 0);

    for (int i = 0; i < 2; i++) {
      OCacheEntry entry = generateRemovedEntry(fileId, i);
      Assert.assertEquals(a1out.get(entry.fileId, entry.pageIndex), entry);
    }

    for (int i = 2; i < 6; i++) {
      OCacheEntry entry = generateEntry(fileId, i, entries[i].getCachePointer().getDataPointer(), false,
          new OLogSequenceNumber(0, 0));
      Assert.assertEquals(a1in.get(entry.fileId, entry.pageIndex), entry);
    }

    Assert.assertEquals(buffer.getFilledUpTo(fileId), 6);
    buffer.flushBuffer();

    for (int i = 0; i < 6; i++) {
      assertFile(i, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, 7 }, new OLogSequenceNumber(0, 0));
    }
  }

  // With on-demand growth enabled, pinning a 5th page in a 4-page cache must grow maxSize to 5.
  public void testIfAllPagesAreUsedInA1InCacheSizeShouldBeIncreased() throws Exception {
    boolean oldIncreaseOnDemand = OGlobalConfiguration.SERVER_CACHE_INCREASE_ON_DEMAND.getValueAsBoolean();
    OGlobalConfiguration.SERVER_CACHE_INCREASE_ON_DEMAND.setValue(true);

    long fileId = buffer.openFile(fileName);

    OCacheEntry[] entries = new OCacheEntry[5];

    for (int i = 0; i < 5; i++) {
      entries[i] = buffer.load(fileId, i, false);
      entries[i].getCachePointer().acquireExclusiveLock();
      entries[i].markDirty();

      entries[i].getCachePointer().getDataPointer()
          .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, 7 }, 0, 8);

      if (i - 4 >= 0) {
        buffer.load(fileId, i - 4, false);

        entries[i - 4].getCachePointer().getDataPointer()
            .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) (i - 4), 1, 2, seed, 4, 5, 6, 7 }, 0, 8);
      }
    }

    for (int i = 0; i < 5; i++) {
      entries[i].getCachePointer().releaseExclusiveLock();
      buffer.release(entries[i]);

      if (i - 4 >= 0) {
        buffer.release(entries[i - 4]);
      }
    }

    int maxSize = buffer.getMaxSize();
    Assert.assertEquals(maxSize, 5);

    // Restore the global flag so later tests are unaffected.
    OGlobalConfiguration.SERVER_CACHE_INCREASE_ON_DEMAND.setValue(oldIncreaseOnDemand);
  }

  // Same growth check, but with the pinned pages resident in Am rather than A1in.
  public void testIfAllPagesAreUsedInAmCacheSizeShouldBeIncreased() throws Exception {
    boolean oldIncreaseOnDemand = OGlobalConfiguration.SERVER_CACHE_INCREASE_ON_DEMAND.getValueAsBoolean();
    OGlobalConfiguration.SERVER_CACHE_INCREASE_ON_DEMAND.setValue(true);

    long fileId = buffer.openFile(fileName);

    OCacheEntry[] entries = new OCacheEntry[20];

    for (int i = 0; i < 6; i++) {
      entries[i] = buffer.load(fileId, i, false);
      entries[i].getCachePointer().acquireExclusiveLock();
      entries[i].markDirty();

      entries[i].getCachePointer().getDataPointer()
          .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, 7 }, 0, 8);

      entries[i].getCachePointer().releaseExclusiveLock();
      buffer.release(entries[i]);
    }

    for (int i = 0; i < 4; i++) {
      entries[i] = buffer.load(fileId, i, false);
      entries[i].getCachePointer().acquireExclusiveLock();
      entries[i].markDirty();

      entries[i].getCachePointer().getDataPointer()
          .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, 7 }, 0, 8);
    }

    for (int i = 0; i < 4; i++) {
      entries[i].getCachePointer().releaseExclusiveLock();
      buffer.release(entries[i]);
    }

    int maxSize = buffer.getMaxSize();
    Assert.assertEquals(maxSize, 5);

    OGlobalConfiguration.SERVER_CACHE_INCREASE_ON_DEMAND.setValue(oldIncreaseOnDemand);
  }

  // With on-demand growth disabled, pinning more pages than the cache can hold must fail fast.
  @Test(expectedExceptions = OAllCacheEntriesAreUsedException.class)
  public void testIfAllPagesAreUsedExceptionShouldBeThrown() throws Exception {
    boolean oldIncreaseOnDemand = OGlobalConfiguration.SERVER_CACHE_INCREASE_ON_DEMAND.getValueAsBoolean();
    OGlobalConfiguration.SERVER_CACHE_INCREASE_ON_DEMAND.setValue(false);

    long fileId = buffer.openFile(fileName);

    OCacheEntry[] entries = new OCacheEntry[5];
    try {
      for (int i = 0; i < 5; i++) {
        entries[i] = buffer.load(fileId, i, false);
        entries[i].getCachePointer().acquireExclusiveLock();
        entries[i].markDirty();

        entries[i].getCachePointer().getDataPointer()
            .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, 7 }, 0, 8);

        if (i - 4 >= 0) {
          buffer.load(fileId, i - 4, false);

          entries[i - 4].getCachePointer().getDataPointer()
              .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) (i - 4), 1, 2, seed, 4, 5, 6, 7 }, 0, 8);
        }
      }
    } finally {
      // Only the first 4 entries were successfully pinned; restore the global flag regardless.
      for (int i = 0; i < 4; i++) {
        entries[i].getCachePointer().releaseExclusiveLock();
        buffer.release(entries[i]);
      }

      OGlobalConfiguration.SERVER_CACHE_INCREASE_ON_DEMAND.setValue(oldIncreaseOnDemand);
    }
  }

  // Untouched pages must pass on-disk verification with no reported errors.
  public void testDataVerificationOK() throws Exception {
    long fileId = buffer.openFile(fileName);

    OCacheEntry[] entries = new OCacheEntry[6];

    for (int i = 0; i < 6; i++) {
      entries[i] = buffer.load(fileId, i, false);
      entries[i].getCachePointer().acquireExclusiveLock();
      entries[i].markDirty();

      entries[i].getCachePointer().getDataPointer()
          .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, 7 }, 0, 8);

      entries[i].getCachePointer().releaseExclusiveLock();
      buffer.release(entries[i]);
    }

    Assert.assertTrue(buffer.checkStoredPages(null).length == 0);
  }

  // Corrupting the magic number of pages 2 and 4 on disk must be reported as exactly those two
  // errors, flagged incorrectMagicNumber (not incorrectCheckSum).
  public void testMagicNumberIsBroken() throws Exception {
    long fileId = buffer.openFile(fileName);

    OCacheEntry[] entries = new OCacheEntry[6];

    for (int i = 0; i < 6; i++) {
      entries[i] = buffer.load(fileId, i, false);
      entries[i].getCachePointer().acquireExclusiveLock();
      entries[i].markDirty();

      entries[i].getCachePointer().getDataPointer()
          .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, 7 }, 0, 8);

      entries[i].getCachePointer().releaseExclusiveLock();
      buffer.release(entries[i]);
    }

    buffer.flushBuffer();

    byte[] brokenMagicNumber = new byte[OIntegerSerializer.INT_SIZE];
    OIntegerSerializer.INSTANCE.serializeNative(23, brokenMagicNumber, 0);

    // Overwrite the magic number (page offset 0) directly in the file, bypassing the cache.
    updateFilePage(2, 0, brokenMagicNumber);
    updateFilePage(4, 0, brokenMagicNumber);

    OPageDataVerificationError[] pageErrors = buffer.checkStoredPages(null);
    Assert.assertEquals(2, pageErrors.length);

    Assert.assertTrue(pageErrors[0].incorrectMagicNumber);
    Assert.assertFalse(pageErrors[0].incorrectCheckSum);
    Assert.assertEquals(2, pageErrors[0].pageIndex);
    Assert.assertEquals("readWriteDiskCacheTest.tst", pageErrors[0].fileName);

    Assert.assertTrue(pageErrors[1].incorrectMagicNumber);
    Assert.assertFalse(pageErrors[1].incorrectCheckSum);
    Assert.assertEquals(4, pageErrors[1].pageIndex);
    Assert.assertEquals("readWriteDiskCacheTest.tst", pageErrors[1].fileName);
  }

  // Corrupting data bytes (past the system header) must be reported as checksum errors.
  public void testCheckSumIsBroken() throws Exception {
    long fileId = buffer.openFile(fileName);

    OCacheEntry[] entries = new OCacheEntry[6];

    for (int i = 0; i < 6; i++) {
      entries[i] = buffer.load(fileId, i, false);
      entries[i].getCachePointer().acquireExclusiveLock();
      entries[i].markDirty();

      entries[i].getCachePointer().getDataPointer()
          .set(systemOffset + OWOWCache.PAGE_PADDING, new byte[] { (byte) i, 1, 2, seed, 4, 5, 6, 7 }, 0, 8);

      entries[i].getCachePointer().releaseExclusiveLock();
      buffer.release(entries[i]);
    }

    buffer.flushBuffer();

    byte[] brokenByte = new byte[1];
    brokenByte[0] = 13;

    updateFilePage(2, systemOffset + 2, brokenByte);
    updateFilePage(4, systemOffset + 4, brokenByte);

    OPageDataVerificationError[] pageErrors = buffer.checkStoredPages(null);
    Assert.assertEquals(2, pageErrors.length);

    Assert.assertFalse(pageErrors[0].incorrectMagicNumber);
    Assert.assertTrue(pageErrors[0].incorrectCheckSum);
    Assert.assertEquals(2, pageErrors[0].pageIndex);
    Assert.assertEquals("readWriteDiskCacheTest.tst", pageErrors[0].fileName);

    Assert.assertFalse(pageErrors[1].incorrectMagicNumber);
    Assert.assertTrue(pageErrors[1].incorrectCheckSum);
    Assert.assertEquals(4, pageErrors[1].pageIndex);
    Assert.assertEquals("readWriteDiskCacheTest.tst", pageErrors[1].fileName);
  }

  // Rebuilds the cache with a real WAL attached (2-page write cache). Method continues past this
  // chunk of the file.
  public void testFlushTillLSN() throws Exception {
    closeBufferAndDeleteFile();

    File file = new File(storageLocal.getConfiguration().getDirectory());
    if (!file.exists())
      file.mkdir();

    writeAheadLog = new ODiskWriteAheadLog(1024, -1, 10 * 1024, storageLocal);

    final OStorageSegmentConfiguration segmentConfiguration = new OStorageSegmentConfiguration(storageLocal.getConfiguration(),
        "readWriteDiskCacheTest.tst", 0);
    segmentConfiguration.fileType = OFileClassic.NAME;

    buffer = new OReadWriteDiskCache(4 * (8 + systemOffset + 2 * OWOWCache.PAGE_PADDING),
        2 * (8 + systemOffset + 2 * OWOWCache.PAGE_PADDING), 8 + systemOffset, 10000, -1, storageLocal, writeAheadLog, true,
        false);

    long fileId =
buffer.openFile(fileName); OLogSequenceNumber lsnToFlush = null; for (int i = 0; i < 8; i++) { OCacheEntry cacheEntry = buffer.load(fileId, i, false); OCachePointer dataPointer = cacheEntry.getCachePointer(); dataPointer.acquireExclusiveLock(); OLogSequenceNumber pageLSN = writeAheadLog.log(new WriteAheadLogTest.TestRecord(30, false)); setLsn(dataPointer.getDataPointer(), pageLSN); if (i == 5) lsnToFlush = pageLSN; cacheEntry.markDirty(); dataPointer.releaseExclusiveLock(); buffer.release(cacheEntry); } Assert.assertEquals(writeAheadLog.getFlushedLSN(), lsnToFlush); } private void updateFilePage(long pageIndex, long offset, byte[] value) throws IOException { String path = storageLocal.getConfiguration().getDirectory() + "/readWriteDiskCacheTest.tst"; OFileClassic fileClassic = new OFileClassic(); fileClassic.init(path, "rw"); fileClassic.open(); fileClassic.write(pageIndex * (8 + systemOffset) + offset, value, value.length, 0); fileClassic.synch(); fileClassic.close(); } private void assertFile(long pageIndex, byte[] value, OLogSequenceNumber lsn) throws IOException { String path = storageLocal.getConfiguration().getDirectory() + "/readWriteDiskCacheTest.tst"; OFileClassic fileClassic = new OFileClassic(); fileClassic.init(path, "r"); fileClassic.open(); byte[] content = new byte[8 + systemOffset]; fileClassic.read(pageIndex * (8 + systemOffset), content, 8 + systemOffset); Assert.assertEquals(Arrays.copyOfRange(content, systemOffset, 8 + systemOffset), value); long magicNumber = OLongSerializer.INSTANCE.deserializeNative(content, 0); Assert.assertEquals(magicNumber, OWOWCache.MAGIC_NUMBER); CRC32 crc32 = new CRC32(); crc32.update(content, OIntegerSerializer.INT_SIZE + OLongSerializer.LONG_SIZE, content.length - OIntegerSerializer.INT_SIZE - OLongSerializer.LONG_SIZE); int crc = OIntegerSerializer.INSTANCE.deserializeNative(content, OLongSerializer.LONG_SIZE); Assert.assertEquals(crc, (int) crc32.getValue()); long segment = 
OLongSerializer.INSTANCE.deserializeNative(content, ODurablePage.WAL_SEGMENT_OFFSET); long position = OLongSerializer.INSTANCE.deserializeNative(content, ODurablePage.WAL_POSITION_OFFSET); OLogSequenceNumber readLsn = new OLogSequenceNumber(segment, position); Assert.assertEquals(readLsn, lsn); fileClassic.close(); } private OCacheEntry generateEntry(long fileId, long pageIndex, ODirectMemoryPointer pointer, boolean dirty, OLogSequenceNumber lsn) { return new OCacheEntry(fileId, pageIndex, new OCachePointer(pointer, lsn), dirty); } private OCacheEntry generateRemovedEntry(long fileId, long pageIndex) { return new OCacheEntry(fileId, pageIndex, null, false); } private void setLsn(ODirectMemoryPointer dataPointer, OLogSequenceNumber lsn) { OLongSerializer.INSTANCE.serializeInDirectMemory(lsn.getSegment(), dataPointer, OIntegerSerializer.INT_SIZE + OLongSerializer.LONG_SIZE + OWOWCache.PAGE_PADDING); OLongSerializer.INSTANCE.serializeInDirectMemory(lsn.getPosition(), dataPointer, OIntegerSerializer.INT_SIZE + 2 * OLongSerializer.LONG_SIZE + OWOWCache.PAGE_PADDING); } }
package de.mycrobase.ssim.ed.mesh;

import java.nio.FloatBuffer;
import java.util.Random;

import com.jme3.bounding.BoundingBox;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Vector3f;
import com.jme3.scene.Mesh;
import com.jme3.scene.VertexBuffer;
import com.jme3.scene.VertexBuffer.Format;
import com.jme3.scene.VertexBuffer.Type;
import com.jme3.scene.VertexBuffer.Usage;
import com.jme3.util.BufferUtils;

import de.mycrobase.ssim.ed.util.TempVars;

/**
 * A CPU-animated rain particle system rendered as a line mesh.
 *
 * <p>Each rain drop is one GL line: two vertices (upper end first, then the
 * lower end displaced along the fall direction by the drop length). Per-drop
 * data is kept in three interleaved-per-drop float buffers:
 * <ul>
 *   <li>position: 2 vertices x 3 components per drop (streamed every frame)</li>
 *   <li>color:    2 vertices x 4 components per drop (streamed on respawn)</li>
 *   <li>texcoord: 2 vertices x 1 component per drop, a normalized drop ID
 *       in [0,1) consumed by the shader (static)</li>
 * </ul>
 * Velocities live only on the CPU side ({@link #velocities}).
 *
 * <p>Drops fall inside a column of {@code size} x {@code size} in XZ; when a
 * drop's lower end sinks below {@code minY} it is respawned near {@code maxY}.
 * NOTE(review): the caller is expected to configure all drop* / *Y /
 * windVelocity properties and then call {@link #initFirstDrops()} before the
 * first {@link #update(float)} — the constructor does not do so. TODO confirm
 * against callers.
 *
 * <p>Not thread-safe: buffers and the {@link Random} instance are mutated
 * without synchronization.
 */
public class RainParticles extends Mesh {

    /** Weight applied to the wind velocity when deriving the fall direction. */
    private static final float DeviationScale = 0.075f;

    // Fixed capacity of the system (number of line segments) and XZ extent of
    // the spawn column, both set once in the constructor.
    private int maxNumDrops;
    private float size;

    private Random random;

    // Raw vertex data mirrored into the VBOs below.
    private FloatBuffer positionBuffer;
    private FloatBuffer colorBuffer;
    private FloatBuffer tcBuffer;
    // Per-drop fall velocity vector; CPU-only, never uploaded.
    private Vector3f[] velocities;

    private VertexBuffer positionVBO;
    private VertexBuffer colorVBO;
    private VertexBuffer tcVBO;

    // Tunable appearance/behavior parameters; each *Var field is an additive
    // random variation on top of its base value (see the getVarying* helpers).
    private float dropLength;
    private float dropLengthVar;
    private ColorRGBA dropColor;
    private ColorRGBA dropColorVar;
    private float dropVelocity;
    private float dropVelocityVar;
    private float minY;   // drops below this Y are respawned
    private float maxY;   // respawn height reference
    private float initY;  // base Y offset used only for the initial spawn
    private Vector3f windVelocity;

    /**
     * Creates the mesh and allocates all buffers.
     *
     * @param maxNumDrops fixed number of drops (line segments)
     * @param size XZ edge length of the spawn column
     */
    public RainParticles(int maxNumDrops, float size) {
        this.maxNumDrops = maxNumDrops;
        this.size = size;

        random = new Random();

        initGeometry();
    }

    public int getMaxNumDrops() {
        return maxNumDrops;
    }

    public float getDropLength() {
        return dropLength;
    }

    public void setDropLength(float dropLength) {
        this.dropLength = dropLength;
    }

    public float getDropLengthVar() {
        return dropLengthVar;
    }

    public void setDropLengthVar(float dropLengthVar) {
        this.dropLengthVar = dropLengthVar;
    }

    public ColorRGBA getDropColor() {
        return dropColor;
    }

    public void setDropColor(ColorRGBA dropColor) {
        this.dropColor = dropColor;
    }

    public ColorRGBA getDropColorVar() {
        return dropColorVar;
    }

    public void setDropColorVar(ColorRGBA dropColorVar) {
        this.dropColorVar = dropColorVar;
    }

    public float getDropVelocity() {
        return dropVelocity;
    }

    public void setDropVelocity(float dropVelocity) {
        this.dropVelocity = dropVelocity;
    }

    public float getDropVelocityVar() {
        return dropVelocityVar;
    }

    public void setDropVelocityVar(float dropVelocityVar) {
        this.dropVelocityVar = dropVelocityVar;
    }

    public float getMinY() {
        return minY;
    }

    public void setMinY(float minY) {
        this.minY = minY;
    }

    public float getMaxY() {
        return maxY;
    }

    public void setMaxY(float maxY) {
        this.maxY = maxY;
    }

    public float getInitY() {
        return initY;
    }

    public void setInitY(float initY) {
        this.initY = initY;
    }

    public Vector3f getWindVelocity() {
        return windVelocity;
    }

    public void setWindVelocity(Vector3f windVelocity) {
        this.windVelocity = windVelocity;
    }

    /**
     * Seeds every drop with an initial position, color, texcoord ID and
     * velocity, then uploads all three buffers. Must be called once after the
     * drop parameters have been configured; buffers are written sequentially
     * from position 0 and rewound afterwards.
     */
    public void initFirstDrops() {
        TempVars vars = TempVars.get();
        for(int i = 0; i < maxNumDrops; i++) {
            Vector3f dir = getVaryingDirection(vars.vect1);
            // position (upper end)
            float x = random.nextFloat() * size;
            // NOTE(review): initial Y is spread over (maxY-minY) but offset by
            // initY, not minY — presumably intentional so drops start above
            // the visible band; confirm against callers.
            float y = random.nextFloat() * (maxY-minY) + initY;
            float z = random.nextFloat() * size;
            // drop length in m, will displace lower end with this
            float length = getVaryingLength();
            Vector3f disp = vars.vect2.set(dir).multLocal(length);
            // upper end, then lower end displaced along the fall direction
            positionBuffer.put(x).put(y).put(z);
            positionBuffer.put(x + disp.x).put(y + disp.y).put(z + disp.z);
            // color (both ends)
            ColorRGBA c = getVaryingColor(vars.color1);
            colorBuffer.put(c.r).put(c.g).put(c.b).put(c.a);
            colorBuffer.put(c.r).put(c.g).put(c.b).put(c.a);
            // texture coordinate (one component, used as drop ID in shader)
            float relId = (float) i/maxNumDrops;
            tcBuffer.put(relId).put(relId);
            // velocity (no shader parameter, only used on CPU)
            // This needs new Vector3f instance since velocities[i] might be
            // uninitialized.
            velocities[i] = dir.mult(getVaryingVelocity());
        }
        positionBuffer.rewind();
        colorBuffer.rewind();
        tcBuffer.rewind();
        vars.release();

        positionVBO.updateData(positionBuffer);
        colorVBO.updateData(colorBuffer);
        tcVBO.updateData(tcBuffer);
    }

    /**
     * Advances all drops by {@code dt} seconds: moves both vertices of each
     * drop along its velocity, or — when the lower end has fallen below
     * {@code minY} — respawns the drop near {@code maxY} with fresh random
     * XZ position, length, color and velocity. Re-uploads position and color
     * buffers (texcoords are static).
     *
     * @param dt frame time in seconds
     */
    public void update(float dt) {
        TempVars vars = TempVars.get();
        // n walks the position buffer in float components: 6 floats per drop
        // (2 vertices x 3 components); both branches below advance it by 6.
        int n = 0;
        for(int i = 0; i < maxNumDrops; i++) {
            // get y from second drop position (lower end)
            float curY = positionBuffer.get((i*2+1)*3 + 1);
            // reinitialize drop if it's below minY
            if(curY < minY) {
                Vector3f dir = getVaryingDirection(vars.vect1);
                float x = random.nextFloat() * size;
                // carry the overshoot below minY into the respawn height so
                // drops do not visibly snap to exactly maxY
                float y = maxY + (curY-minY);
                float z = random.nextFloat() * size;
                float length = getVaryingLength();
                Vector3f disp = vars.vect2.set(dir).multLocal(length);

                positionBuffer.put(n, x); n++;
                positionBuffer.put(n, y); n++;
                positionBuffer.put(n, z); n++;
                positionBuffer.put(n, x + disp.x); n++;
                positionBuffer.put(n, y + disp.y); n++;
                positionBuffer.put(n, z + disp.z); n++;

                ColorRGBA c = getVaryingColor(vars.color1);
                // absolute color offset: 8 floats per drop (2 vertices x RGBA)
                int nc = i*2*4;
                colorBuffer.put(nc+0, c.r);
                colorBuffer.put(nc+1, c.g);
                colorBuffer.put(nc+2, c.b);
                colorBuffer.put(nc+3, c.a);
                colorBuffer.put(nc+4, c.r);
                colorBuffer.put(nc+5, c.g);
                colorBuffer.put(nc+6, c.b);
                colorBuffer.put(nc+7, c.a);

                // reuse old Vector3f
                velocities[i].set(dir);
                velocities[i].multLocal(getVaryingVelocity());
            } else {
                // translate both line endpoints by velocity * dt
                Vector3f dist = vars.vect1.set(velocities[i]);
                dist.multLocal(dt);
                positionBuffer.put(n, positionBuffer.get(n) + dist.x); n++;
                positionBuffer.put(n, positionBuffer.get(n) + dist.y); n++;
                positionBuffer.put(n, positionBuffer.get(n) + dist.z); n++;
                positionBuffer.put(n, positionBuffer.get(n) + dist.x); n++;
                positionBuffer.put(n, positionBuffer.get(n) + dist.y); n++;
                positionBuffer.put(n, positionBuffer.get(n) + dist.z); n++;
            }
        }
        vars.release();

        positionVBO.updateData(positionBuffer);
        colorVBO.updateData(colorBuffer);
    }

    /**
     * Allocates vertex buffers and VBOs, configures the mesh as a line list
     * and installs a fixed model bound. Position/color stream every frame;
     * texcoords (drop IDs) are static.
     */
    private void initGeometry() {
        // vertex data
        positionBuffer = BufferUtils.createFloatBuffer(maxNumDrops * 2 * 3);
        colorBuffer = BufferUtils.createFloatBuffer(maxNumDrops * 2 * 4);
        tcBuffer = BufferUtils.createFloatBuffer(maxNumDrops * 2 * 1);
        // CPU only data
        velocities = new Vector3f[maxNumDrops];

        positionVBO = new VertexBuffer(Type.Position);
        positionVBO.setupData(Usage.Stream, 3, Format.Float, positionBuffer);
        setBuffer(positionVBO);

        colorVBO = new VertexBuffer(Type.Color);
        colorVBO.setupData(Usage.Stream, 4, Format.Float, colorBuffer);
        setBuffer(colorVBO);

        tcVBO = new VertexBuffer(Type.TexCoord);
        tcVBO.setupData(Usage.Static, 1, Format.Float, tcBuffer);
        setBuffer(tcVBO);

        setMode(Mode.Lines);
        // NOTE(review): asymmetric bound (-1x..+2x size, 1*minY..2*maxY) —
        // presumably oversized on purpose to avoid culling wind-blown drops;
        // confirm. minY/maxY are still 0 at construction time.
        setBound(new BoundingBox(
            new Vector3f(-1.0f*size, 1f*minY, -1.0f*size),
            new Vector3f(+2.0f*size, 2f*maxY, +2.0f*size)
        ));
    }

    /**
     * Computes the (normalized) fall direction into {@code store}: straight
     * down plus a {@link #DeviationScale}-weighted wind contribution.
     *
     * @param store target vector, or {@code null} to allocate a new one
     * @return {@code store} (or the new vector) for chaining
     */
    private Vector3f getVaryingDirection(Vector3f store) {
        if(store == null) {
            store = new Vector3f();
        }
        // take sum of weighted windVelocity (in m/s) and -UNIT_Y, then
        // normalize, no random variation included atm
        store.set(windVelocity);
        store.multLocal(DeviationScale);
        store.addLocal(0f, -1f, 0f);
        store.normalizeLocal();
        return store;
    }

    /** Base velocity plus a random fraction of the velocity variation. */
    private float getVaryingVelocity() {
        return dropVelocity + random.nextFloat() * dropVelocityVar;
    }

    /** Base length plus a random fraction of the length variation. */
    private float getVaryingLength() {
        return dropLength + random.nextFloat() * dropLengthVar;
    }

    /**
     * Writes a randomly varied drop color into {@code store}. A single random
     * factor scales all four channel variations, so the variation shifts the
     * color along one fixed direction in RGBA space rather than per channel.
     *
     * @param store target color (must not be null)
     * @return {@code store} for chaining
     */
    private ColorRGBA getVaryingColor(ColorRGBA store) {
        float v = random.nextFloat();
        store.r = dropColor.r + v * dropColorVar.r;
        store.g = dropColor.g + v * dropColorVar.g;
        store.b = dropColor.b + v * dropColorVar.b;
        store.a = dropColor.a + v * dropColorVar.a;
        return store;
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.ilm.history;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.OriginSettingClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexAbstraction;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.threadpool.ThreadPool;

import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import static org.elasticsearch.xpack.core.ClientHelper.INDEX_LIFECYCLE_ORIGIN;
import static org.elasticsearch.xpack.core.ilm.LifecycleSettings.LIFECYCLE_HISTORY_INDEX_ENABLED_SETTING;
import static org.elasticsearch.xpack.ilm.history.ILMHistoryTemplateRegistry.INDEX_TEMPLATE_VERSION;
import static org.elasticsearch.xpack.ilm.history.ILMHistoryTemplateRegistry.TEMPLATE_ILM_HISTORY;

/**
 * The {@link ILMHistoryStore} handles indexing {@link ILMHistoryItem} documents into the
 * appropriate index. It sets up a {@link BulkProcessor} for indexing in bulk, and handles creation
 * of the index/alias as needed for ILM policies.
 */
public class ILMHistoryStore implements Closeable {
    private static final Logger logger = LogManager.getLogger(ILMHistoryStore.class);

    public static final String ILM_HISTORY_INDEX_PREFIX = "ilm-history-" + INDEX_TEMPLATE_VERSION + "-";
    public static final String ILM_HISTORY_ALIAS = "ilm-history-" + INDEX_TEMPLATE_VERSION;

    /** Whether history indexing is enabled at all; when false, {@link #putAsync} is a no-op. */
    private final boolean ilmHistoryEnabled;
    private final BulkProcessor processor;
    private final ThreadPool threadPool;

    /**
     * Creates the store and its backing {@link BulkProcessor}. The processor flushes at 100
     * actions, 5mb, or every 5 seconds, using a single concurrent request with exponential
     * backoff on rejections.
     */
    public ILMHistoryStore(Settings nodeSettings, Client client, ClusterService clusterService, ThreadPool threadPool) {
        this.ilmHistoryEnabled = LIFECYCLE_HISTORY_INDEX_ENABLED_SETTING.get(nodeSettings);
        this.threadPool = threadPool;

        this.processor = BulkProcessor.builder(
            new OriginSettingClient(client, INDEX_LIFECYCLE_ORIGIN)::bulk,
            new BulkProcessor.Listener() {
                @Override
                public void beforeBulk(long executionId, BulkRequest request) {
                    // Prior to actually performing the bulk, we should ensure the index exists, and
                    // if we were unable to create it or it was in a bad state, we should not
                    // attempt to index documents.
                    try {
                        final CompletableFuture<Boolean> indexCreated = new CompletableFuture<>();
                        ensureHistoryIndex(client, clusterService.state(), ActionListener.wrap(indexCreated::complete,
                            ex -> {
                                logger.warn("failed to create ILM history store index prior to issuing bulk request", ex);
                                indexCreated.completeExceptionally(ex);
                            }));
                        // Block (bounded) until the index/alias is known to exist; the bulk must
                        // not proceed against a missing or broken history index.
                        indexCreated.get(2, TimeUnit.MINUTES);
                    } catch (Exception e) {
                        logger.warn(new ParameterizedMessage("unable to index the following ILM history items:\n{}",
                            request.requests().stream()
                                .filter(dwr -> (dwr instanceof IndexRequest))
                                .map(dwr -> ((IndexRequest) dwr))
                                .map(IndexRequest::sourceAsMap)
                                .map(Object::toString)
                                .collect(Collectors.joining("\n"))), e);
                        // Rethrow so the BulkProcessor aborts this bulk instead of indexing into
                        // a missing/broken index.
                        throw new ElasticsearchException(e);
                    }
                    if (logger.isTraceEnabled()) {
                        // BUGFIX: was logger.info(...) inside an isTraceEnabled() guard — the
                        // message was emitted at INFO level whenever TRACE was on. Log at trace.
                        logger.trace("about to index: {}",
                            request.requests().stream()
                                .map(dwr -> ((IndexRequest) dwr).sourceAsMap())
                                .map(Objects::toString)
                                .collect(Collectors.joining(",")));
                    }
                }

                @Override
                public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
                    long items = request.numberOfActions();
                    if (logger.isTraceEnabled()) {
                        logger.trace("indexed [{}] items into ILM history index [{}], items: {}", items,
                            Arrays.stream(response.getItems())
                                .map(BulkItemResponse::getIndex)
                                .distinct()
                                .collect(Collectors.joining(",")),
                            request.requests().stream()
                                .map(dwr -> ((IndexRequest) dwr).sourceAsMap())
                                .map(Objects::toString)
                                .collect(Collectors.joining(",")));
                    }
                    if (response.hasFailures()) {
                        Map<String, String> failures = Arrays.stream(response.getItems())
                            .filter(BulkItemResponse::isFailed)
                            .collect(Collectors.toMap(BulkItemResponse::getId, BulkItemResponse::getFailureMessage));
                        logger.error("failures: [{}]", failures);
                    }
                }

                @Override
                public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
                    long items = request.numberOfActions();
                    logger.error(new ParameterizedMessage("failed to index {} items into ILM history index", items), failure);
                }
            })
            .setBulkActions(100)
            .setBulkSize(new ByteSizeValue(5, ByteSizeUnit.MB))
            .setFlushInterval(TimeValue.timeValueSeconds(5))
            .setConcurrentRequests(1)
            .setBackoffPolicy(BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(1000), 3))
            .build();
    }

    /**
     * Attempts to asynchronously index an ILM history entry. A no-op when history indexing is
     * disabled. Serialization or queueing failures are logged, never thrown.
     */
    public void putAsync(ILMHistoryItem item) {
        if (ilmHistoryEnabled == false) {
            logger.trace("not recording ILM history item because [{}] is [false]: [{}]",
                LIFECYCLE_HISTORY_INDEX_ENABLED_SETTING.getKey(), item);
            return;
        }
        logger.trace("queueing ILM history item for indexing [{}]: [{}]", ILM_HISTORY_ALIAS, item);
        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
            item.toXContent(builder, ToXContent.EMPTY_PARAMS);
            IndexRequest request = new IndexRequest(ILM_HISTORY_ALIAS).source(builder);
            // TODO: remove the threadpool wrapping when the .add call is non-blocking
            // (it can currently execute the bulk request occasionally)
            // see: https://github.com/elastic/elasticsearch/issues/50440
            threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> {
                try {
                    processor.add(request);
                } catch (Exception e) {
                    logger.error(new ParameterizedMessage("failed add ILM history item to queue for index [{}]: [{}]",
                        ILM_HISTORY_ALIAS, item), e);
                }
            });
        } catch (IOException exception) {
            logger.error(new ParameterizedMessage("failed to queue ILM history item in index [{}]: [{}]",
                ILM_HISTORY_ALIAS, item), exception);
        }
    }

    /**
     * Checks if the ILM history index exists, and if not, creates it.
     *
     * @param client  The client to use to create the index if needed
     * @param state   The current cluster state, to determine if the alias exists
     * @param listener Called after the index has been created. `onResponse` called with `true` if the index was created,
     *                `false` if it already existed.
     */
    @SuppressWarnings("unchecked")
    static void ensureHistoryIndex(Client client, ClusterState state, ActionListener<Boolean> listener) {
        final String initialHistoryIndexName = ILM_HISTORY_INDEX_PREFIX + "000001";
        final IndexAbstraction ilmHistory = state.metadata().getIndicesLookup().get(ILM_HISTORY_ALIAS);
        final IndexAbstraction initialHistoryIndex = state.metadata().getIndicesLookup().get(initialHistoryIndexName);

        if (ilmHistory == null && initialHistoryIndex == null) {
            // No alias or index exists with the expected names, so create the index with appropriate alias
            logger.debug("creating ILM history index [{}]", initialHistoryIndexName);

            // Template below should be already defined as real index template but it can be deleted. To avoid race condition with its
            // recreation we apply settings and mappings ourselves
            byte[] templateBytes = TEMPLATE_ILM_HISTORY.loadBytes();
            Map<String, Object> templateAsMap = XContentHelper.convertToMap(new BytesArray(templateBytes, 0, templateBytes.length),
                false, XContentType.JSON).v2();

            client.admin().indices().prepareCreate(initialHistoryIndexName)
                .setSettings((Map<String, ?>) templateAsMap.get("settings"))
                .setMapping((Map<String, Object>) templateAsMap.get("mappings"))
                .setWaitForActiveShards(1)
                .addAlias(new Alias(ILM_HISTORY_ALIAS).writeIndex(true).isHidden(true))
                .execute(new ActionListener<>() {
                    @Override
                    public void onResponse(CreateIndexResponse response) {
                        listener.onResponse(true);
                    }

                    @Override
                    public void onFailure(Exception e) {
                        if (e instanceof ResourceAlreadyExistsException) {
                            // The index didn't exist before we made the call, there was probably a race - just ignore this
                            logger.debug("index [{}] was created after checking for its existence, likely due to a concurrent call",
                                initialHistoryIndexName);
                            listener.onResponse(false);
                        } else {
                            listener.onFailure(e);
                        }
                    }
                });
        } else if (ilmHistory == null) {
            // alias does not exist but initial index does, something is broken
            listener.onFailure(new IllegalStateException("ILM history index [" + initialHistoryIndexName +
                "] already exists but does not have alias [" + ILM_HISTORY_ALIAS + "]"));
        } else if (ilmHistory.getType() == IndexAbstraction.Type.ALIAS) {
            if (ilmHistory.getWriteIndex() != null) {
                // The alias exists and has a write index, so we're good
                listener.onResponse(false);
            } else {
                // The alias does not have a write index, so we can't index into it
                // BUGFIX: the message was missing the closing "] " after the alias name.
                listener.onFailure(new IllegalStateException("ILM history alias [" + ILM_HISTORY_ALIAS +
                    "] does not have a write index"));
            }
        } else {
            // Not an alias at all — a concrete index occupies the alias name, error out.
            // (Previously this was an `else if (type != ALIAS)` followed by an unreachable
            // `else` containing a dead logger.error/assert; the dead branch has been removed.)
            listener.onFailure(new IllegalStateException("ILM history alias [" + ILM_HISTORY_ALIAS +
                "] already exists as concrete index"));
        }
    }

    /**
     * Flushes and shuts down the bulk processor, waiting up to 10 seconds for in-flight
     * requests; logs (does not rethrow) if interrupted while waiting.
     */
    @Override
    public void close() {
        try {
            processor.awaitClose(10, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            logger.warn("failed to shut down ILM history bulk processor after 10 seconds", e);
        }
    }
}
package org.apache.helix.manager.zk; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import static org.apache.helix.HelixConstants.ChangeType.CONFIG; import static org.apache.helix.HelixConstants.ChangeType.CURRENT_STATE; import static org.apache.helix.HelixConstants.ChangeType.EXTERNAL_VIEW; import static org.apache.helix.HelixConstants.ChangeType.IDEAL_STATE; import static org.apache.helix.HelixConstants.ChangeType.LIVE_INSTANCE; import static org.apache.helix.HelixConstants.ChangeType.MESSAGE; import static org.apache.helix.HelixConstants.ChangeType.MESSAGES_CONTROLLER; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; import org.I0Itec.zkclient.IZkChildListener; import org.I0Itec.zkclient.IZkDataListener; import org.I0Itec.zkclient.exception.ZkNoNodeException; import org.apache.helix.BaseDataAccessor; import org.apache.helix.ControllerChangeListener; import org.apache.helix.CurrentStateChangeListener; import org.apache.helix.ExternalViewChangeListener; import org.apache.helix.HelixConnection; import org.apache.helix.HelixConstants.ChangeType; import org.apache.helix.HelixDataAccessor; import org.apache.helix.HelixException; import 
org.apache.helix.HelixManager; import org.apache.helix.HelixProperty; import org.apache.helix.HelixRole; import org.apache.helix.IdealStateChangeListener; import org.apache.helix.InstanceConfigChangeListener; import org.apache.helix.LiveInstanceChangeListener; import org.apache.helix.MessageListener; import org.apache.helix.NotificationContext; import org.apache.helix.NotificationContext.Type; import org.apache.helix.PropertyKey; import org.apache.helix.PropertyPathConfig; import org.apache.helix.ScopedConfigChangeListener; import org.apache.helix.ZNRecord; import org.apache.helix.model.CurrentState; import org.apache.helix.model.ExternalView; import org.apache.helix.model.IdealState; import org.apache.helix.model.InstanceConfig; import org.apache.helix.model.LiveInstance; import org.apache.helix.model.Message; import org.apache.log4j.Logger; import org.apache.zookeeper.Watcher.Event.EventType; /** * We need to synchronize on {@link ZkHelixConnection} instead of {@link HelixManager} to avoid * dead-lock. 
Otherwise an example deadlock scenario would be: * 1) main-thread calls ZkHelixConnection#disconnect(), results in: * - ZkHelixController#reset(), holding ZkHelixConnection, waiting HelixConnectionAdaptor * 2) zk-event-thread calls CallbackHandler#handleChildChange(), results in: * - CallbackHandler#invoke(), holding HelixConnectionAdaptor, waiting ZkHelixConnection */ public class ZkCallbackHandler implements IZkChildListener, IZkDataListener { private static Logger logger = Logger.getLogger(ZkCallbackHandler.class); /** * define the next possible notification types */ private static Map<Type, List<Type>> nextNotificationType = new HashMap<Type, List<Type>>(); static { nextNotificationType.put(Type.INIT, Arrays.asList(Type.CALLBACK, Type.FINALIZE)); nextNotificationType.put(Type.CALLBACK, Arrays.asList(Type.CALLBACK, Type.FINALIZE)); nextNotificationType.put(Type.FINALIZE, Arrays.asList(Type.INIT)); } private final String _path; private final Object _listener; private final EventType[] _eventTypes; private final ChangeType _changeType; private final ZkClient _zkClient; private final AtomicLong _lastNotificationTimeStamp; private final HelixRole _role; private final HelixManager _manager; private final String _instanceName; private final HelixConnection _connection; private final HelixDataAccessor _accessor; private final PropertyKey _propertyKey; /** * maintain the expected notification types * this is fix for HELIX-195: race condition between FINALIZE callbacks and Zk callbacks */ private List<NotificationContext.Type> _expectTypes = nextNotificationType.get(Type.FINALIZE); public ZkCallbackHandler(HelixRole role, ZkClient client, PropertyKey propertyKey, Object listener, EventType[] eventTypes, ChangeType changeType) { if (listener == null) { throw new HelixException("listener could not be null"); } _role = role; _manager = new ZKHelixManager(role); _instanceName = role.getId().stringify(); _connection = role.getConnection(); _accessor = 
// NOTE(review): this chunk begins inside a constructor; its signature -- and
// possibly the left-hand side of the first statement (e.g. "_accessor = ...")
// -- is above the visible region. TODO confirm against the full file.
_connection.createDataAccessor(role.getClusterId());
    _zkClient = client;
    _propertyKey = propertyKey;
    _path = propertyKey.getPath();
    _listener = listener;
    _eventTypes = eventTypes;
    _changeType = changeType;
    // Seed the last-notification timestamp so updateNotificationTime() has a baseline.
    _lastNotificationTimeStamp = new AtomicLong(System.nanoTime());
    // Fire an INIT callback immediately so the listener picks up any existing state.
    init();
  }

  /** @return the wrapped Helix listener object this handler dispatches to. */
  public Object getListener() {
    return _listener;
  }

  /** @return the ZooKeeper path this handler watches. */
  public String getPath() {
    return _path;
  }

  /**
   * Dispatches one notification to the listener: verifies the notification type is
   * currently expected, (re)subscribes the relevant ZK watches, reads the children
   * under {@code _propertyKey}, and invokes the listener interface that matches
   * {@code _changeType}. Serialized on {@code _connection} so the listener sees one
   * change at a time.
   *
   * @param changeContext carries the notification type (INIT/CALLBACK/FINALIZE)
   * @throws Exception whatever the underlying listener callback throws
   */
  public void invoke(NotificationContext changeContext) throws Exception {
    // This allows the listener to work with one change at a time
    synchronized (_connection) {
      Type type = changeContext.getType();
      // Drop notifications that arrive out of the expected lifecycle order.
      if (!_expectTypes.contains(type)) {
        logger.warn("Skip processing callbacks for listener: " + _listener + ", path: " + _path
            + ", expected types: " + _expectTypes + " but was " + type);
        return;
      }
      // Advance the state machine: which types are legal after this one.
      _expectTypes = nextNotificationType.get(type);

      // Builder keyBuilder = _accessor.keyBuilder();
      long start = System.currentTimeMillis();
      if (logger.isInfoEnabled()) {
        logger.info(Thread.currentThread().getId() + " START:INVOKE " + _path + " listener:"
            + _listener.getClass().getCanonicalName());
      }

      // Dispatch per change type. Each branch re-subscribes watches FIRST, then reads
      // the current children, so no change between read and watch-set is lost.
      if (_changeType == IDEAL_STATE) {
        IdealStateChangeListener idealStateChangeListener = (IdealStateChangeListener) _listener;
        subscribeForChanges(changeContext, _path, true, true);
        List<IdealState> idealStates = _accessor.getChildValues(_propertyKey);
        idealStateChangeListener.onIdealStateChange(idealStates, changeContext);
      } else if (_changeType == ChangeType.INSTANCE_CONFIG) {
        subscribeForChanges(changeContext, _path, true, true);
        InstanceConfigChangeListener listener = (InstanceConfigChangeListener) _listener;
        List<InstanceConfig> configs = _accessor.getChildValues(_propertyKey);
        listener.onInstanceConfigChange(configs, changeContext);
      } else if (_changeType == CONFIG) {
        subscribeForChanges(changeContext, _path, true, true);
        ScopedConfigChangeListener listener = (ScopedConfigChangeListener) _listener;
        List<HelixProperty> configs = _accessor.getChildValues(_propertyKey);
        listener.onConfigChange(configs, changeContext);
      } else if (_changeType == LIVE_INSTANCE) {
        LiveInstanceChangeListener liveInstanceChangeListener = (LiveInstanceChangeListener) _listener;
        subscribeForChanges(changeContext, _path, true, true);
        List<LiveInstance> liveInstances = _accessor.getChildValues(_propertyKey);
        liveInstanceChangeListener.onLiveInstanceChange(liveInstances, changeContext);
      } else if (_changeType == CURRENT_STATE) {
        CurrentStateChangeListener currentStateChangeListener = (CurrentStateChangeListener) _listener;
        subscribeForChanges(changeContext, _path, true, true);
        // The instance name is encoded in the watched path for per-instance state.
        String instanceName = PropertyPathConfig.getInstanceNameFromPath(_path);
        List<CurrentState> currentStates = _accessor.getChildValues(_propertyKey);
        currentStateChangeListener.onStateChange(instanceName, currentStates, changeContext);
      } else if (_changeType == MESSAGE) {
        MessageListener messageListener = (MessageListener) _listener;
        // watchChild=false: message children are not individually data-watched here.
        subscribeForChanges(changeContext, _path, true, false);
        String instanceName = PropertyPathConfig.getInstanceNameFromPath(_path);
        List<Message> messages = _accessor.getChildValues(_propertyKey);
        messageListener.onMessage(instanceName, messages, changeContext);
      } else if (_changeType == MESSAGES_CONTROLLER) {
        MessageListener messageListener = (MessageListener) _listener;
        subscribeForChanges(changeContext, _path, true, false);
        List<Message> messages = _accessor.getChildValues(_propertyKey);
        messageListener.onMessage(_instanceName, messages, changeContext);
      } else if (_changeType == EXTERNAL_VIEW) {
        ExternalViewChangeListener externalViewListener = (ExternalViewChangeListener) _listener;
        subscribeForChanges(changeContext, _path, true, true);
        List<ExternalView> externalViewList = _accessor.getChildValues(_propertyKey);
        externalViewListener.onExternalViewChange(externalViewList, changeContext);
      } else if (_changeType == ChangeType.CONTROLLER) {
        ControllerChangeListener controllerChangelistener = (ControllerChangeListener) _listener;
        subscribeForChanges(changeContext, _path, true, false);
        controllerChangelistener.onControllerChange(changeContext);
      }

      long end = System.currentTimeMillis();
      if (logger.isInfoEnabled()) {
        logger.info(Thread.currentThread().getId() + " END:INVOKE " + _path + " listener:"
            + _listener.getClass().getCanonicalName() + " Took: " + (end - start) + "ms");
      }
    }
  }

  /**
   * Adds or removes a ZK child-change watch on {@code path} depending on the
   * notification phase: subscribe on INIT/CALLBACK, unsubscribe on FINALIZE.
   */
  private void subscribeChildChange(String path, NotificationContext context) {
    NotificationContext.Type type = context.getType();
    if (type == NotificationContext.Type.INIT || type == NotificationContext.Type.CALLBACK) {
      logger.info(_instanceName + " subscribes child-change. path: " + path + ", listener: " + _listener);
      _zkClient.subscribeChildChanges(path, this);
    } else if (type == NotificationContext.Type.FINALIZE) {
      logger.info(_instanceName + " unsubscribe child-change. path: " + path + ", listener: " + _listener);
      _zkClient.unsubscribeChildChanges(path, this);
    }
  }

  /**
   * Adds or removes a ZK data-change watch on {@code path} depending on the
   * notification phase: subscribe on INIT/CALLBACK, unsubscribe on FINALIZE.
   */
  private void subscribeDataChange(String path, NotificationContext context) {
    NotificationContext.Type type = context.getType();
    if (type == NotificationContext.Type.INIT || type == NotificationContext.Type.CALLBACK) {
      if (logger.isDebugEnabled()) {
        logger.debug(_instanceName + " subscribe data-change. path: " + path + ", listener: " + _listener);
      }
      _zkClient.subscribeDataChanges(path, this);
    } else if (type == NotificationContext.Type.FINALIZE) {
      logger.info(_instanceName + " unsubscribe data-change. path: " + path + ", listener: " + _listener);
      _zkClient.unsubscribeDataChanges(path, this);
    }
  }

  // TODO: watchParent is always true at every call site in this file; consider removing it.
  /**
   * Installs the watch set for this handler's change type: optionally a child-change
   * watch on {@code path} itself, and optionally data-change watches on each child.
   * For CURRENT_STATE/IDEAL_STATE/EXTERNAL_VIEW, children may be "bucketized"
   * (split across grandchild nodes), which requires extra watches.
   */
  private void subscribeForChanges(NotificationContext context, String path, boolean watchParent,
      boolean watchChild) {
    if (watchParent) {
      subscribeChildChange(path, context);
    }

    if (watchChild) {
      try {
        switch (_changeType) {
        case CURRENT_STATE:
        case IDEAL_STATE:
        case EXTERNAL_VIEW: {
          // check if bucketized
          BaseDataAccessor<ZNRecord> baseAccessor = new ZkBaseDataAccessor<ZNRecord>(_zkClient);
          List<ZNRecord> records = baseAccessor.getChildren(path, null, 0);
          for (ZNRecord record : records) {
            HelixProperty property = new HelixProperty(record);
            String childPath = path + "/" + record.getId();

            int bucketSize = property.getBucketSize();
            if (bucketSize > 0) {
              // subscribe both data-change and child-change on bucketized parent node
              // data-change gives a delete-callback which is used to remove watch
              subscribeChildChange(childPath, context);
              subscribeDataChange(childPath, context);

              // subscribe data-change on bucketized child
              List<String> bucketizedChildNames = _zkClient.getChildren(childPath);
              if (bucketizedChildNames != null) {
                for (String bucketizedChildName : bucketizedChildNames) {
                  String bucketizedChildPath = childPath + "/" + bucketizedChildName;
                  subscribeDataChange(bucketizedChildPath, context);
                }
              }
            } else {
              subscribeDataChange(childPath, context);
            }
          }
          break;
        }
        default: {
          List<String> childNames = _zkClient.getChildren(path);
          if (childNames != null) {
            for (String childName : childNames) {
              String childPath = path + "/" + childName;
              subscribeDataChange(childPath, context);
            }
          }
          break;
        }
        }
      } catch (ZkNoNodeException e) {
        // Node vanished between listing and subscribing -- benign race; the parent
        // child-change watch (if any) will re-trigger subscription.
        logger.warn("fail to subscribe child/data change. path: " + path + ", listener: " + _listener, e);
      }
    }
  }

  /** @return the ZK event types this handler was registered for. */
  public EventType[] getEventTypes() {
    return _eventTypes;
  }

  /**
   * Invoke the listener so that it sets up the initial values from the zookeeper if any
   * exists
   */
  public void init() {
    updateNotificationTime(System.nanoTime());
    try {
      NotificationContext changeContext = new NotificationContext(_manager);
      changeContext.setType(NotificationContext.Type.INIT);
      changeContext.setPathChanged(_path);
      invoke(changeContext);
    } catch (Exception e) {
      String msg = "Exception while invoking init callback for listener:" + _listener;
      ZKExceptionHandler.getInstance().handle(msg, e);
    }
  }

  /** ZK data-change callback: re-invokes the listener if the changed path is under {@code _path}. */
  @Override
  public void handleDataChange(String dataPath, Object data) {
    try {
      updateNotificationTime(System.nanoTime());
      if (dataPath != null && dataPath.startsWith(_path)) {
        NotificationContext changeContext = new NotificationContext(_manager);
        changeContext.setType(NotificationContext.Type.CALLBACK);
        changeContext.setPathChanged(_path);
        invoke(changeContext);
      }
    } catch (Exception e) {
      String msg = "exception in handling data-change. path: " + dataPath + ", listener: " + _listener;
      ZKExceptionHandler.getInstance().handle(msg, e);
    }
  }

  /**
   * ZK data-deleted callback: removes this handler's watches on the deleted node.
   * Does NOT re-invoke the listener -- the child-change callback on the parent
   * covers the deletion.
   */
  @Override
  public void handleDataDeleted(String dataPath) {
    try {
      updateNotificationTime(System.nanoTime());
      if (dataPath != null && dataPath.startsWith(_path)) {
        logger.info(_instanceName + " unsubscribe data-change. path: " + dataPath + ", listener: " + _listener);
        _zkClient.unsubscribeDataChanges(dataPath, this);

        // only needed for bucketized parent, but OK if we don't have child-change
        // watch on the bucketized parent path
        logger.info(_instanceName + " unsubscribe child-change. path: " + dataPath + ", listener: " + _listener);
        _zkClient.unsubscribeChildChanges(dataPath, this);

        // No need to invoke() since this event will handled by child-change on parent-node
        // NotificationContext changeContext = new NotificationContext(_manager);
        // changeContext.setType(NotificationContext.Type.CALLBACK);
        // invoke(changeContext);
      }
    } catch (Exception e) {
      String msg = "exception in handling data-delete-change. path: " + dataPath + ", listener: " + _listener;
      ZKExceptionHandler.getInstance().handle(msg, e);
    }
  }

  /**
   * ZK child-change callback. A {@code null} child list means the parent itself was
   * removed: if that parent is {@code _path}, this listener is deregistered and the
   * listener gets a FINALIZE invocation; otherwise a normal CALLBACK is dispatched.
   */
  @Override
  public void handleChildChange(String parentPath, List<String> currentChilds) {
    try {
      updateNotificationTime(System.nanoTime());
      if (parentPath != null && parentPath.startsWith(_path)) {
        NotificationContext changeContext = new NotificationContext(_manager);

        if (currentChilds == null) {
          // parentPath has been removed
          if (parentPath.equals(_path)) {
            // _path has been removed, remove this listener
            _manager.removeListener(_propertyKey, _listener);
          }
          changeContext.setType(NotificationContext.Type.FINALIZE);
        } else {
          changeContext.setType(NotificationContext.Type.CALLBACK);
        }
        changeContext.setPathChanged(_path);
        invoke(changeContext);
      }
    } catch (Exception e) {
      String msg = "exception in handling child-change. instance: " + _instanceName + ", parentPath: "
          + parentPath + ", listener: " + _listener;
      ZKExceptionHandler.getInstance().handle(msg, e);
    }
  }

  /**
   * Invoke the listener for the last time so that the listener could clean up resources
   */
  public void reset() {
    try {
      NotificationContext changeContext = new NotificationContext(_manager);
      changeContext.setType(NotificationContext.Type.FINALIZE);
      changeContext.setPathChanged(_path);
      invoke(changeContext);
    } catch (Exception e) {
      String msg = "Exception while resetting the listener:" + _listener;
      ZKExceptionHandler.getInstance().handle(msg, e);
    }
  }

  /**
   * Monotonically advances {@code _lastNotificationTimeStamp} to {@code nanoTime}
   * via a CAS loop; lower (stale) timestamps are ignored.
   */
  private void updateNotificationTime(long nanoTime) {
    long l = _lastNotificationTimeStamp.get();
    while (nanoTime > l) {
      boolean b = _lastNotificationTimeStamp.compareAndSet(l, nanoTime);
      if (b) {
        break;
      } else {
        // Lost the race to another updater; reread and retry if still newer.
        l = _lastNotificationTimeStamp.get();
      }
    }
  }
}
/*
 * Copyright 2012 - 2016 Anton Tananaev (anton@traccar.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.traccar.protocol;

import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.Channel;
import org.traccar.BaseProtocolDecoder;
import org.traccar.DeviceSession;
import org.traccar.helper.BcdUtil;
import org.traccar.helper.DateBuilder;
import org.traccar.helper.Parser;
import org.traccar.helper.PatternBuilder;
import org.traccar.helper.UnitsConverter;
import org.traccar.model.Position;

import java.net.SocketAddress;
import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;

/**
 * Decoder for the JT600 GPS tracker protocol. Messages starting with '$' are
 * binary frames; messages starting with '(' are ASCII sentences, of which the
 * "W01" variant and a family of "Uxx" variants are handled separately.
 */
public class Jt600ProtocolDecoder extends BaseProtocolDecoder {

    public Jt600ProtocolDecoder(Jt600Protocol protocol) {
        super(protocol);
    }

    /**
     * Converts a packed decimal coordinate to decimal degrees. The packed value
     * encodes degrees in the digits above 10^6 and minutes (x10^4) below, i.e.
     * dd/ddd followed by mm.mmmm.
     */
    private static double convertCoordinate(int raw) {
        int degrees = raw / 1000000;
        double minutes = (raw % 1000000) / 10000.0;
        return degrees + minutes / 60;
    }

    /**
     * Decodes a '$'-prefixed binary frame. Field layout is positional, so the
     * read order below is the protocol contract -- do not reorder.
     * Returns null when the device id is unknown to the session registry.
     */
    private Position decodeBinary(ChannelBuffer buf, Channel channel, SocketAddress remoteAddress) {

        Position position = new Position();
        position.setProtocol(getProtocolName());

        buf.readByte(); // header

        // Peek (not consume) the byte after the 5-byte id: 0x75 marks the long format.
        boolean longFormat = buf.getUnsignedByte(buf.readerIndex()) == 0x75;

        // Device id is 5 BCD-ish bytes rendered via hex dump, e.g. 0x12 0x34 ... -> "1234...".
        String id = String.valueOf(Long.parseLong(ChannelBuffers.hexDump(buf.readBytes(5))));
        DeviceSession deviceSession = getDeviceSession(channel, remoteAddress, id);
        if (deviceSession == null) {
            return null;
        }
        position.setDeviceId(deviceSession.getDeviceId());

        if (longFormat) {
            buf.readUnsignedByte(); // protocol
        }

        // High nibble of this byte selects the short-format version (1 or 2 below).
        int version = buf.readUnsignedByte() >> 4;
        buf.readUnsignedShort(); // length

        DateBuilder dateBuilder = new DateBuilder()
                .setDay(BcdUtil.readInteger(buf, 2))
                .setMonth(BcdUtil.readInteger(buf, 2))
                .setYear(BcdUtil.readInteger(buf, 2))
                .setHour(BcdUtil.readInteger(buf, 2))
                .setMinute(BcdUtil.readInteger(buf, 2))
                .setSecond(BcdUtil.readInteger(buf, 2));
        position.setTime(dateBuilder.getDate());

        // Latitude: 8 BCD digits; longitude: 9 (extra digit for 3-digit degrees).
        double latitude = convertCoordinate(BcdUtil.readInteger(buf, 8));
        double longitude = convertCoordinate(BcdUtil.readInteger(buf, 9));

        // Flag bits: 0x1 = valid fix, 0x2 = north (else south), 0x4 = east (else west).
        byte flags = buf.readByte();
        position.setValid((flags & 0x1) == 0x1);
        if ((flags & 0x2) == 0) {
            latitude = -latitude;
        }
        position.setLatitude(latitude);
        if ((flags & 0x4) == 0) {
            longitude = -longitude;
        }
        position.setLongitude(longitude);

        // NOTE(review): speed stored as 2 BCD digits with no unit conversion applied
        // here -- presumably already in knots; confirm against the JT600 spec.
        position.setSpeed(BcdUtil.readInteger(buf, 2));
        // Course transmitted in 2-degree units.
        position.setCourse(buf.readUnsignedByte() * 2.0);

        if (longFormat) {

            position.set(Position.KEY_ODOMETER, buf.readUnsignedInt() * 1000);
            position.set(Position.KEY_SATELLITES, buf.readUnsignedByte());

            buf.readUnsignedInt(); // vehicle id combined

            position.set(Position.KEY_STATUS, buf.readUnsignedShort());

            // 0xff battery reading means "externally charging" rather than a level.
            int battery = buf.readUnsignedByte();
            if (battery == 0xff) {
                position.set(Position.KEY_CHARGE, true);
            } else {
                position.set(Position.KEY_BATTERY, battery + "%");
            }

            position.set(Position.KEY_CID, buf.readUnsignedShort());
            position.set(Position.KEY_LAC, buf.readUnsignedShort());
            position.set(Position.KEY_GSM, buf.readUnsignedByte());
            position.set(Position.KEY_INDEX, buf.readUnsignedByte());

        } else if (version == 1) {

            position.set(Position.KEY_SATELLITES, buf.readUnsignedByte());
            position.set(Position.KEY_POWER, buf.readUnsignedByte());

            buf.readByte(); // other flags and sensors

            position.setAltitude(buf.readUnsignedShort());

            // Cell tower info only trusted when both cell id and LAC are non-zero.
            int cid = buf.readUnsignedShort();
            int lac = buf.readUnsignedShort();
            if (cid != 0 && lac != 0) {
                position.set(Position.KEY_CID, cid);
                position.set(Position.KEY_LAC, lac);
            }

            position.set(Position.KEY_GSM, buf.readUnsignedByte());

        } else if (version == 2) {

            // Fuel value is split: high byte here, low byte after status/odometer.
            int fuel = buf.readUnsignedByte() << 8;

            position.set(Position.KEY_STATUS, buf.readUnsignedInt());
            position.set(Position.KEY_ODOMETER, buf.readUnsignedInt() * 1000);

            fuel += buf.readUnsignedByte();
            position.set(Position.KEY_FUEL, fuel);

        }

        return position;
    }

    private static final Pattern PATTERN_W01 = new PatternBuilder()
            .text("(")
            .number("(d+),")             // id
            .text("W01,")                // type
            .number("(ddd)(dd.dddd),")   // longitude
            .expression("([EW]),")
            .number("(dd)(dd.dddd),")    // latitude
            .expression("([NS]),")
            .expression("([AV]),")       // validity
            .number("(dd)(dd)(dd),")     // date (ddmmyy)
            .number("(dd)(dd)(dd),")     // time
            .number("(d+),")             // speed
            .number("(d+),")             // course
            .number("(d+),")             // power
            .number("(d+),")             // gps signal
            .number("(d+),")             // gsm signal
            .number("(d+),")             // alert type
            .any()
            .compile();

    /**
     * Decodes the ASCII "W01" sentence. Only position, time, speed, course and
     * power are consumed; the trailing gps/gsm/alert groups are matched but unread.
     * Returns null on pattern mismatch or unknown device.
     */
    private Position decodeW01(String sentence, Channel channel, SocketAddress remoteAddress) {

        Parser parser = new Parser(PATTERN_W01, sentence);
        if (!parser.matches()) {
            return null;
        }

        DeviceSession deviceSession = getDeviceSession(channel, remoteAddress, parser.next());
        if (deviceSession == null) {
            return null;
        }

        Position position = new Position();
        position.setProtocol(getProtocolName());
        position.setDeviceId(deviceSession.getDeviceId());

        position.setLongitude(parser.nextCoordinate());
        position.setLatitude(parser.nextCoordinate());
        position.setValid(parser.next().equals("A"));

        DateBuilder dateBuilder = new DateBuilder()
                .setDateReverse(parser.nextInt(), parser.nextInt(), parser.nextInt())
                .setTime(parser.nextInt(), parser.nextInt(), parser.nextInt());
        position.setTime(dateBuilder.getDate());

        // NOTE(review): W01 speed treated as km/h (converted to knots) -- confirm.
        position.setSpeed(UnitsConverter.knotsFromKph(parser.nextDouble()));
        position.setCourse(parser.nextDouble());
        position.set(Position.KEY_POWER, parser.nextDouble());

        return position;
    }

    private static final Pattern PATTERN_U01 = new PatternBuilder()
            .text("(")
            .number("(d+),")             // id
            .number("(Udd),")            // type
            .number("d+,").optional()    // alarm
            .number("(dd)(dd)(dd),")     // date (ddmmyy)
            .number("(dd)(dd)(dd),")     // time
            .expression("([TF]),")       // validity
            .number("(d+.d+),([NS]),")   // latitude
            .number("(d+.d+),([EW]),")   // longitude
            .number("(d+.?d*),")         // speed
            .number("(d+),")             // course
            .number("(d+),")             // satellites
            .number("(d+%),")            // battery
            .expression("([01]+),")      // status
            .number("(d+),")             // cid
            .number("(d+),")             // lac
            .number("(d+),")             // gsm signal
            .number("(d+),")             // odometer
            .number("(d+)")              // serial number
            .number(",(xx)").optional()  // checksum
            .any()
            .compile();

    /**
     * Decodes the ASCII "Uxx" sentence family and sends the per-type
     * acknowledgement back on the channel. Returns null on pattern mismatch or
     * unknown device.
     */
    private Position decodeU01(String sentence, Channel channel, SocketAddress remoteAddress) {

        Parser parser = new Parser(PATTERN_U01, sentence);
        if (!parser.matches()) {
            return null;
        }

        DeviceSession deviceSession = getDeviceSession(channel, remoteAddress, parser.next());
        if (deviceSession == null) {
            return null;
        }

        String type = parser.next();

        Position position = new Position();
        position.setProtocol(getProtocolName());
        position.setDeviceId(deviceSession.getDeviceId());

        DateBuilder dateBuilder = new DateBuilder()
                .setDateReverse(parser.nextInt(), parser.nextInt(), parser.nextInt())
                .setTime(parser.nextInt(), parser.nextInt(), parser.nextInt());
        position.setTime(dateBuilder.getDate());

        // U-sentences flag validity with T(rue)/F(alse) instead of A/V.
        position.setValid(parser.next().equals("T"));

        position.setLatitude(parser.nextCoordinate(Parser.CoordinateFormat.DEG_HEM));
        position.setLongitude(parser.nextCoordinate(Parser.CoordinateFormat.DEG_HEM));

        // NOTE(review): U01 speed treated as mph (converted to knots) -- confirm.
        position.setSpeed(UnitsConverter.knotsFromMph(parser.nextDouble()));
        position.setCourse(parser.nextDouble());

        position.set(Position.KEY_SATELLITES, parser.nextInt());
        position.set(Position.KEY_BATTERY, parser.next()); // kept verbatim, includes '%'
        position.set(Position.KEY_STATUS, parser.nextInt(2)); // status field is binary-encoded
        position.set(Position.KEY_CID, parser.nextInt());
        position.set(Position.KEY_LAC, parser.nextInt());
        position.set(Position.KEY_GSM, parser.nextInt());
        position.set(Position.KEY_ODOMETER, parser.nextLong() * 1000);
        position.set(Position.KEY_INDEX, parser.nextInt());

        // Per-type acknowledgement expected by the device.
        if (channel != null) {
            if (type.equals("U01") || type.equals("U02") || type.equals("U03")) {
                channel.write("(S39)");
            } else if (type.equals("U06")) {
                channel.write("(S20)");
            }
        }

        return position;
    }

    /**
     * Entry point: routes the frame by its first byte -- '$' to the binary
     * decoder, '(' to the matching ASCII decoder (W01 vs Uxx).
     */
    @Override
    protected Object decode(
            Channel channel, SocketAddress remoteAddress, Object msg) throws Exception {

        ChannelBuffer buf = (ChannelBuffer) msg;
        char first = (char) buf.getByte(0);

        if (first == '$') {
            return decodeBinary(buf, channel, remoteAddress);
        } else if (first == '(') {
            String sentence = buf.toString(StandardCharsets.US_ASCII);
            if (sentence.contains("W01")) {
                return decodeW01(sentence, channel, remoteAddress);
            } else {
                return decodeU01(sentence, channel, remoteAddress);
            }
        }

        return null;
    }

}
package net.mgsx.game.core.ui; import com.badlogic.gdx.Input; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.math.Frustum; import com.badlogic.gdx.math.Matrix3; import com.badlogic.gdx.math.Matrix4; import com.badlogic.gdx.math.Quaternion; import com.badlogic.gdx.math.Vector2; import com.badlogic.gdx.math.Vector3; import com.badlogic.gdx.scenes.scene2d.Actor; import com.badlogic.gdx.scenes.scene2d.InputEvent; import com.badlogic.gdx.scenes.scene2d.ui.Button; import com.badlogic.gdx.scenes.scene2d.ui.Label; import com.badlogic.gdx.scenes.scene2d.ui.Label.LabelStyle; import com.badlogic.gdx.scenes.scene2d.ui.SelectBox; import com.badlogic.gdx.scenes.scene2d.ui.Skin; import com.badlogic.gdx.scenes.scene2d.ui.Table; import com.badlogic.gdx.scenes.scene2d.ui.TextButton; import com.badlogic.gdx.scenes.scene2d.ui.TextField; import com.badlogic.gdx.scenes.scene2d.utils.ChangeListener; import com.badlogic.gdx.scenes.scene2d.utils.ClickListener; import com.badlogic.gdx.scenes.scene2d.utils.DragListener; import com.badlogic.gdx.utils.Array; import com.badlogic.gdx.utils.ObjectMap; import com.badlogic.gdx.utils.ObjectSet; import net.mgsx.game.core.annotations.Editable; import net.mgsx.game.core.annotations.EditableComponent; import net.mgsx.game.core.annotations.EditableSystem; import net.mgsx.game.core.helpers.ReflectionHelper; import net.mgsx.game.core.ui.accessors.Accessor; import net.mgsx.game.core.ui.accessors.AccessorScanner; import net.mgsx.game.core.ui.accessors.FieldAccessorWrapper; import net.mgsx.game.core.ui.events.AccessorHelpEvent; import net.mgsx.game.core.ui.widgets.BitsWidget; import net.mgsx.game.core.ui.widgets.BlendWidget; import net.mgsx.game.core.ui.widgets.BooleanWidget; import net.mgsx.game.core.ui.widgets.ColorWidget; import net.mgsx.game.core.ui.widgets.FloatWidget; import net.mgsx.game.core.ui.widgets.IntegerWidget; import net.mgsx.game.core.ui.widgets.VoidWidget; public class EntityEditor extends Table { public static 
ObjectSet<Class> excludedTypes = new ObjectSet<Class>(); static{ excludedTypes.addAll(Matrix4.class, Matrix3.class, Frustum.class); } public static final ObjectMap<Class, FieldEditor> defaultTypeEditors = new ObjectMap<Class, FieldEditor>(); public static class Config{ public ObjectMap<Accessor, FieldEditor> accessorEditors = new ObjectMap<Accessor, FieldEditor>(); public ObjectMap<Class, FieldEditor> typeEditors = new ObjectMap<Class, FieldEditor>(); public Config() { typeEditors.putAll(defaultTypeEditors); } } final public Config config; private Array<Object> stack = new Array<Object>(); private final boolean annotationBased; public EntityEditor(Skin skin) { this(skin, false); } public EntityEditor(Skin skin, boolean annotationBased) { this(null, annotationBased, skin); } public EntityEditor(Object entity, Skin skin) { this(entity, false, skin); } public EntityEditor(Object entity, boolean annotationBased, Skin skin) { super(skin); this.annotationBased = annotationBased; this.config = new Config(); setEntity(entity); } private EntityEditor(Object entity, boolean annotationBased, Skin skin, Array<Object> stack, Config config) { super(skin); this.annotationBased = annotationBased; this.stack = stack; this.config = config; generate(entity, this); } public void setEntity(Object entity) { stack.clear(); clearChildren(); if(entity != null) { generate(entity, this); } } public void generate(final Object entity, final Table table) { // prevent cycles if(entity == null || stack.contains(entity, true)) return; stack.add(entity); if(annotationBased){ boolean match = false; if(entity.getClass().getAnnotation(Editable.class) != null) match = true; if(entity.getClass().getAnnotation(EditableSystem.class) != null) match = true; if(entity.getClass().getAnnotation(EditableComponent.class) != null) match = true; if(net.mgsx.game.core.tools.Tool.class.isAssignableFrom(entity.getClass())) match = true; if(!match) return; } // scan class to get all accessors for(final Accessor 
accessor : AccessorScanner.scan(entity, annotationBased)) { // filter arrays type and some predefined types. if(accessor.getType().isArray()) continue; if(excludedTypes.contains(accessor.getType())) continue; Label accessorLabel = new Label(accessor.getName(), table.getSkin()); table.add(accessorLabel).fill().left(); if(accessor.config() == null || accessor.config().doc().isEmpty()){ table.add().fill(); }else{ final TextButton btHelp = new TextButton("?", table.getSkin()); table.add(btHelp).fill(); btHelp.addListener(new ChangeListener() { @Override public void changed(ChangeEvent event, Actor actor) { btHelp.fire(new AccessorHelpEvent(accessor)); } }); } // TODO this method never return false ... if(!createControl(table, entity, accessor, stack, config)) { // create recursively on missing type (object) // TODO background ? accessorLabel.setStyle(table.getSkin().get("tab-left", LabelStyle.class)); Table sub = new Table(getSkin()); sub.setBackground(getSkin().getDrawable("default-window-body-right")); table.add(sub).expand().fill().left(); generate(accessor.get(), sub); } table.row(); } } private static void createSlider2D(Table table, Object entity, String name, final Quaternion q) { Label ctrl = new Label("CTRL", table.getSkin()); // TODO ? 
ctrl.setTouchable(Touchable.enabled); ctrl.addListener(new DragListener(){ Quaternion m = new Quaternion(); @Override public void drag(InputEvent event, float x, float y, int pointer) { float dx = getDeltaX(); float dy = getDeltaY(); q.mul(m.setEulerAngles(dx, dy,0)); event.getStage().cancelTouchFocusExcept(this, event.getTarget()); event.cancel(); } }); table.add(ctrl); } static private void createSlider(final Table table, final Object rootEntity, final Accessor rootField, final Object entity, final Accessor accessor){ boolean dynamic = rootField.config() != null && rootField.config().realtime(); boolean readonly = rootField.config() != null && rootField.config().readonly(); final Label label = new FloatWidget(accessor, dynamic, readonly, table.getSkin()); table.add(label); } public static Button createBoolean(Skin skin, boolean value) { final TextButton btCheck = new TextButton(String.valueOf(value), skin, "toggle"); btCheck.setChecked(value); btCheck.addListener(new ChangeListener() { @Override public void changed(ChangeEvent event, Actor actor) { btCheck.setText(String.valueOf(btCheck.isChecked())); } }); return btCheck; } public static boolean createControl(final Table table, final Object entity, final Accessor accessor) { return createControl(table, entity, accessor, new Array<Object>(), new Config()); } private static boolean createControl(final Table table, final Object entity, final Accessor accessor, Array<Object> stack, Config config) { Skin skin = table.getSkin(); // find appropriate control in following order : // 1 - explicit editor for accessor // 2 - annotation on accessor (predefined types) // 3 - primitive types FieldEditor accessorEditor = config.accessorEditors.get(accessor); if(accessorEditor != null){ table.add(accessorEditor.create(accessor, skin)); return true; } FieldEditor typeEditor = config.typeEditors.get(accessor.getType()); if(typeEditor != null){ table.add(typeEditor.create(accessor, skin)); return true; } Editable accessorConfig = 
accessor.config(); if(accessorConfig != null && accessorConfig.editor() != DefaultFieldEditor.class){ // TODO cache factory as singleton ... FieldEditor editor = ReflectionHelper.newInstance(accessorConfig.editor()); table.add(editor.create(accessor, skin)); return true; } // XXX doesn't support inherited ... Editable typeConfig = (Editable)accessor.getType().getAnnotation(Editable.class); if(typeConfig != null && typeConfig.editor() != DefaultFieldEditor.class){ // TODO cache factory as singleton ... FieldEditor editor = ReflectionHelper.newInstance(typeConfig.editor()); table.add(editor.create(accessor, skin)); return true; } if(accessorConfig != null ){ switch(accessorConfig.type()){ case BITS: table.add(BitsWidget.instance.create(accessor, skin)); return true; case BLEND_MODE: table.add(BlendWidget.instance.create(accessor, skin)); return true; // TODO others ... default: } } if(accessor.getType() == void.class){ table.add(VoidWidget.instance.create(accessor, skin)); }else if(accessor.getType() == int.class){ table.add(IntegerWidget.unlimited.create(accessor, skin)).fill(); }else if(accessor.getType() == long.class){ table.add(IntegerWidget.unlimited.create(accessor, skin)).fill(); }else if(accessor.getType() == short.class){ table.add(IntegerWidget.unsignedShort.create(accessor, skin)).fill(); }else if(accessor.getType() == float.class){ createSlider(table, entity, accessor, entity, accessor); }else if(accessor.getType() == String.class){ final TextField field = new TextField(String.valueOf(accessor.get()), skin); table.add(field); field.addListener(new ChangeListener(){ @Override public void changed(ChangeEvent event, Actor actor) { accessor.set(field.getText()); } }); field.addListener(new ClickListener(){ @Override public void touchDragged(InputEvent event, float x, float y, int pointer) { field.getStage().cancelTouchFocusExcept(field.getDefaultInputListener(), field); super.touchDragged(event, x, y, pointer); } @Override public boolean keyDown(InputEvent 
event, int keycode) { if(keycode == Input.Keys.ENTER) field.getStage().setKeyboardFocus(null); return super.keyDown(event, keycode); } }); }else if(accessor.getType() == boolean.class){ table.add(BooleanWidget.instance.create(accessor, skin)); }else if(accessor.getType() == Vector2.class){ Vector2 v = (Vector2)accessor.get(); Table sub = new Table(table.getSkin()); sub.add("("); createSlider(sub, entity, accessor, v, new FieldAccessorWrapper(accessor, "x")); sub.add(","); createSlider(sub, entity, accessor, v, new FieldAccessorWrapper(accessor, "y")); sub.add(")"); table.add(sub); }else if(accessor.getType() == Vector3.class){ Vector3 v = (Vector3)accessor.get(); Table sub = new Table(table.getSkin()); sub.add("("); createSlider(sub, entity, accessor, v, new FieldAccessorWrapper(accessor, "x")); sub.add(","); createSlider(sub, entity, accessor, v, new FieldAccessorWrapper(accessor, "y")); sub.add(","); createSlider(sub, entity, accessor, v, new FieldAccessorWrapper(accessor, "z")); sub.add(")"); table.add(sub); }else if(accessor.getType() == Quaternion.class){ Quaternion q = (Quaternion)accessor.get(); createSlider2D(table, entity, accessor.getName(), q); }else if(accessor.getType() == Color.class){ table.add(ColorWidget.instance.create(accessor, skin)); }else if(accessor.getType().isEnum()){ // TODO EnumWidget final SelectBox<Object> selector = new SelectBox<Object>(skin); Array<Object> values = new Array<Object>(); for(Object o : accessor.getType().getEnumConstants()) values.add(o); selector.setItems(values); selector.setSelected(accessor.get()); selector.addListener(new ChangeListener() { @Override public void changed(ChangeEvent event, Actor actor) { accessor.set(selector.getSelected()); } }); table.add(selector); }else{ // allow non editable type scan since current accessor has Editable annotation. boolean editableTypeOnly = false; table.add(new EntityEditor(accessor.get(), editableTypeOnly, skin, stack, config)).row(); } return true; } }
/* * Copyright 2020 The Bazel Authors. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.devtools.build.android.desugar.langmodel; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import com.google.auto.value.AutoValue; import com.google.common.collect.ImmutableBiMap; import com.google.common.collect.ImmutableMap; import java.util.Arrays; import java.util.Collection; import java.util.function.Predicate; import org.objectweb.asm.Type; /** * Represents the identifiable name of a Java class or interface with convenient conversions among * different names. 
*/ @AutoValue public abstract class ClassName implements TypeMappable<ClassName>, Comparable<ClassName> { public static final String IN_PROCESS_LABEL = "__desugar__/"; private static final String IMMUTABLE_LABEL_LABEL = "__final__/"; public static final String TYPE_ADAPTER_PACKAGE_ROOT = "com/google/devtools/build/android/desugar/typeadapter/"; public static final TypeMapper IN_PROCESS_LABEL_STRIPPER = new TypeMapper(className -> className.stripPackagePrefix(IN_PROCESS_LABEL)); public static final TypeMapper IMMUTABLE_LABEL_STRIPPER = new TypeMapper(className -> className.stripPackagePrefix(IMMUTABLE_LABEL_LABEL)); private static final String TYPE_ADAPTER_SUFFIX = "Adapter"; public static final String TYPE_CONVERTER_SUFFIX = "Converter"; /** * The primitive type as specified at * https://docs.oracle.com/javase/specs/jvms/se11/html/jvms-2.html#jvms-2.3 */ private static final ImmutableMap<String, Type> PRIMITIVES_TYPES = ImmutableMap.<String, Type>builder() .put("V", Type.VOID_TYPE) .put("Z", Type.BOOLEAN_TYPE) .put("C", Type.CHAR_TYPE) .put("B", Type.BYTE_TYPE) .put("S", Type.SHORT_TYPE) .put("I", Type.INT_TYPE) .put("F", Type.FLOAT_TYPE) .put("J", Type.LONG_TYPE) .put("D", Type.DOUBLE_TYPE) .buildOrThrow(); /** * The primitive type as specified at * https://docs.oracle.com/javase/specs/jvms/se11/html/jvms-2.html#jvms-2.3 */ private static final ImmutableMap<ClassName, ClassName> PRIMITIVES_TO_BOXED_TYPES = ImmutableMap.<ClassName, ClassName>builder() .put(ClassName.create(Type.VOID_TYPE), ClassName.create("java/lang/Void")) .put(ClassName.create(Type.BOOLEAN_TYPE), ClassName.create("java/lang/Boolean")) .put(ClassName.create(Type.CHAR_TYPE), ClassName.create("java/lang/Character")) .put(ClassName.create(Type.BYTE_TYPE), ClassName.create("java/lang/Byte")) .put(ClassName.create(Type.SHORT_TYPE), ClassName.create("java/lang/Short")) .put(ClassName.create(Type.INT_TYPE), ClassName.create("java/lang/Integer")) .put(ClassName.create(Type.FLOAT_TYPE), 
ClassName.create("java/lang/Float")) .put(ClassName.create(Type.LONG_TYPE), ClassName.create("java/lang/Long")) .put(ClassName.create(Type.DOUBLE_TYPE), ClassName.create("java/lang/Double")) .buildOrThrow(); private static final ImmutableBiMap<String, String> SHADOWED_TO_MIRRORED_TYPE_PREFIX_MAPPINGS = ImmutableBiMap.<String, String>builder() .put("java/time/", "j$/time/") .put("java/lang/Desugar", "j$/lang/Desugar") .put("java/io/Desugar", "j$/io/Desugar") .put("java/io/UncheckedIOException", "j$/io/UncheckedIOException") .put("java/util/stream/", "j$/util/stream/") .put("java/util/function/", "j$/util/function/") .put("java/util/Desugar", "j$/util/Desugar") .put("java/util/DoubleSummaryStatistics", "j$/util/DoubleSummaryStatistics") .put("java/util/IntSummaryStatistics", "j$/util/IntSummaryStatistics") .put("java/util/LongSummaryStatistics", "j$/util/LongSummaryStatistics") .put("java/util/Objects", "j$/util/Objects") .put("java/util/Optional", "j$/util/Optional") .put("java/util/PrimitiveIterator", "j$/util/PrimitiveIterator") .put("java/util/Spliterator", "j$/util/Spliterator") .put("java/util/StringJoiner", "j$/util/StringJoiner") .put("java/util/concurrent/ConcurrentHashMap", "j$/util/concurrent/ConcurrentHashMap") .put("java/util/concurrent/ThreadLocalRandom", "j$/util/concurrent/ThreadLocalRandom") .put( "java/util/concurrent/atomic/DesugarAtomic", "j$/util/concurrent/atomic/DesugarAtomic") .put("javadesugar/testing/", "jd$/testing/") .put("sun/misc/Desugar", "j$/sun/misc/Desugar") .put("jdk/internal/util/", "j$/jdk/internal/util/") .buildOrThrow(); public static final TypeMapper SHADOWED_TO_MIRRORED_TYPE_MAPPER = new TypeMapper(ClassName::shadowedToMirrored); public static final TypeMapper IMMUTABLE_LABEL_ATTACHER = new TypeMapper(ClassName::withCoreTypeImmutableLabel); public static ClassName create(String binaryName) { checkArgument( !binaryName.contains("."), "Expected a binary/internal class name ('/'-delimited) instead of a qualified name." 
+ " Actual: (%s)", binaryName); return new AutoValue_ClassName(binaryName); } public static ClassName create(Class<?> clazz) { return create(Type.getType(clazz)); } public static ClassName create(Type asmType) { return create(asmType.getInternalName()); } public static ClassName fromClassFileName(String fileName) { checkArgument( fileName.endsWith(".class"), "Expected a class file (*.class). Actual: (%s).", fileName); return ClassName.create(fileName.substring(0, fileName.length() - ".class".length())); } private static void checkPackagePrefixFormat(String prefix) { checkArgument( prefix.isEmpty() || prefix.endsWith("/"), "Expected (%s) to be a package prefix of ending with '/'.", prefix); checkArgument( !prefix.contains("."), "Expected a '/'-delimited binary name instead of a '.'-delimited qualified name for %s", prefix); } /** * The textual binary name used to index the class name, as defined at, * https://docs.oracle.com/javase/specs/jvms/se11/html/jvms-4.html#jvms-4.2.1 */ public abstract String binaryName(); public final Type toAsmObjectType() { return isPrimitive() ? PRIMITIVES_TYPES.get(binaryName()) : Type.getObjectType(binaryName()); } public final ClassName toBoxedType() { checkState(isPrimitive(), "Expected a primitive type for type boxing, but got %s", this); return PRIMITIVES_TO_BOXED_TYPES.get(this); } public final boolean isPrimitive() { return PRIMITIVES_TYPES.containsKey(binaryName()); } public final boolean isWideType() { return "D".equals(binaryName()) || "J".equals(binaryName()); } public final boolean isBoxedType() { return PRIMITIVES_TO_BOXED_TYPES.containsValue(this); } public final String qualifiedName() { return binaryName().replace('/', '.'); } public ClassName innerClass(String innerClassSimpleName) { return ClassName.create(binaryName() + '$' + innerClassSimpleName); } public final String getPackageName() { String binaryName = binaryName(); int i = binaryName.lastIndexOf('/'); return i < 0 ? 
"" : binaryName.substring(0, i + 1); } public final String simpleName() { String binaryName = binaryName(); int i = binaryName.lastIndexOf('/'); return i < 0 ? binaryName : binaryName.substring(i + 1); } public final ClassName withSimpleNameSuffix(String suffix) { return ClassName.create(binaryName() + suffix); } public final String classFilePathName() { return binaryName() + ".class"; } public final boolean hasInProcessLabel() { return hasPackagePrefix(IN_PROCESS_LABEL); } public final boolean hasImmutableLabel() { return hasPackagePrefix(IMMUTABLE_LABEL_LABEL); } /** * Returns a new instance of {@link ClassName} that represents the owner class of a single adapter * method for an Android SDK API. * * <p>The implementation has to guarantee generating different class names for different target * methods to be adapted, including overloaded API methods, in order to avoid adapter class name * clashing from separate compilation units. */ final ClassName typeAdapterOwner(int invocationSiteTag) { checkState( !hasInProcessLabel() && !hasImmutableLabel(), "Expected a label-free type: Actual(%s)", this); checkState( isAndroidDomainType(), "Expected an Android SDK type to have an adapter: Actual (%s)", this); String binaryName = String.format( "%s%s$%x$%s", TYPE_ADAPTER_PACKAGE_ROOT, binaryName(), invocationSiteTag, TYPE_ADAPTER_SUFFIX); return ClassName.create(binaryName); } /** * Returns a new instance of {@code ClassName} that represents the owner class with conversion * methods between JDK built-in types and desguar-mirrored types. 
*/ public final ClassName typeConverterOwner() { checkState( !hasInProcessLabel() && !hasImmutableLabel(), "Expected a label-free type: Actual(%s)", this); checkState( isDesugarShadowedType(), "Expected an JDK built-in type to have an converter: Actual (%s)", this); return withSimpleNameSuffix(TYPE_CONVERTER_SUFFIX).withPackagePrefix(TYPE_ADAPTER_PACKAGE_ROOT); } /** * Returns a new instance of {@code ClassName} attached with an immutable label which marks the * type is not subject to further desugar operations until the final label striping. */ public final ClassName withCoreTypeImmutableLabel() { return isDesugarShadowedType() ? withPackagePrefix(IMMUTABLE_LABEL_LABEL) : this; } /** * Returns a new instance of {@code ClassName} that is the desugar-mirrored core type (e.g. {@code * j$/time/MonthDay}) of the current shadowed built-in core type, assuming {@code this} instance * is a desugared-shadowed built-in core type. */ public final ClassName shadowedToMirrored() { return SHADOWED_TO_MIRRORED_TYPE_PREFIX_MAPPINGS.keySet().stream() .filter(this::hasPackagePrefix) .map( prefix -> replacePackagePrefix(prefix, SHADOWED_TO_MIRRORED_TYPE_PREFIX_MAPPINGS.get(prefix))) .findAny() .orElse(this); } /** * Returns a new instance of {@code ClassName} that is a shadowed built-in core type (e.g. {@code * java/time/MonthDay}) of the current desugar-mirrored core type, assuming {@code this} instance * is a desugar-mirrored core type. 
*/ public final ClassName mirroredToShadowed() { ImmutableBiMap<String, String> verbatimTypeMappings = SHADOWED_TO_MIRRORED_TYPE_PREFIX_MAPPINGS.inverse(); return verbatimTypeMappings.keySet().stream() .filter(this::hasPackagePrefix) .map(prefix -> replacePackagePrefix(prefix, verbatimTypeMappings.get(prefix))) .findAny() .orElse(this); } public final ClassName withPackagePrefix(String prefix) { checkPackagePrefixFormat(prefix); return ClassName.create(prefix + binaryName()); } public final boolean hasPackagePrefix(String prefix) { return binaryName().startsWith(prefix); } public final boolean hasAnyPackagePrefix(String... prefixes) { return Arrays.stream(prefixes).anyMatch(this::hasPackagePrefix); } public final boolean hasAnyPackagePrefix(Collection<String> prefixes) { return prefixes.stream().anyMatch(this::hasPackagePrefix); } public final boolean isDesugarEligible() { return !isInDesugarRuntimeLibrary(); } public final boolean isAndroidDomainType() { return hasAnyPackagePrefix("android/", "androidx/"); } public final boolean isInDesugarRuntimeLibrary() { return hasAnyPackagePrefix( "com/google/devtools/build/android/desugar/runtime/", TYPE_ADAPTER_PACKAGE_ROOT); } public final boolean isDesugarShadowedType() { return hasAnyPackagePrefix(SHADOWED_TO_MIRRORED_TYPE_PREFIX_MAPPINGS.keySet()); } public final boolean isDesugarMirroredType() { return hasAnyPackagePrefix(SHADOWED_TO_MIRRORED_TYPE_PREFIX_MAPPINGS.values()); } private ClassName stripPackagePrefix(String prefix) { return hasPackagePrefix(prefix) ? 
stripRequiredPackagePrefix(prefix) : this; } private ClassName stripRequiredPackagePrefix(String prefix) { return replacePackagePrefix(/* originalPrefix= */ prefix, /* targetPrefix= */ ""); } private ClassName replacePackagePrefix(String originalPrefix, String targetPrefix) { checkState( hasPackagePrefix(originalPrefix), "Expected %s to have a package prefix of (%s) before stripping.", this, originalPrefix); // checkPackagePrefixFormat(targetPrefix); return ClassName.create(targetPrefix + binaryName().substring(originalPrefix.length())); } public boolean acceptTypeFilter(Predicate<ClassName> typeFilter) { return typeFilter.test(this); } @Override public ClassName acceptTypeMapper(TypeMapper typeMapper) { return typeMapper.map(this); } @Override public int compareTo(ClassName other) { return binaryName().compareTo(other.binaryName()); } }
/*
 * Copyright 2015 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hawkular.alerts.actions.file;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.hawkular.alerts.actions.api.ActionMessage;
import org.hawkular.alerts.actions.api.ActionPluginSender;
import org.hawkular.alerts.actions.api.ActionResponseMessage;
import org.hawkular.alerts.api.model.action.Action;
import org.hawkular.alerts.api.model.condition.AvailabilityCondition;
import org.hawkular.alerts.api.model.condition.AvailabilityConditionEval;
import org.hawkular.alerts.api.model.condition.Condition;
import org.hawkular.alerts.api.model.condition.ConditionEval;
import org.hawkular.alerts.api.model.condition.ThresholdCondition;
import org.hawkular.alerts.api.model.condition.ThresholdConditionEval;
import org.hawkular.alerts.api.model.dampening.Dampening;
import org.hawkular.alerts.api.model.data.AvailabilityType;
import org.hawkular.alerts.api.model.data.Data;
import org.hawkular.alerts.api.model.event.Alert;
import org.hawkular.alerts.api.model.trigger.Mode;
import org.hawkular.alerts.api.model.trigger.Trigger;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Exercises {@link FilePlugin} with manually built alert lifecycles (OPEN,
 * ACKNOWLEDGED, RESOLVED) for three trigger shapes: a threshold condition,
 * an availability condition, and a mixed trigger with both.
 *
 * @author Lucas Ponce
 */
public class FilePluginTest {

    private FilePlugin filePlugin;

    // Messages are built once in prepareMessages() and reused by each test.
    private static ActionMessage openThresholdMsg;
    private static ActionMessage ackThresholdMsg;
    private static ActionMessage resolvedThresholdMsg;

    private static ActionMessage openAvailMsg;
    private static ActionMessage ackAvailMsg;
    private static ActionMessage resolvedAvailMsg;

    private static ActionMessage openTwoCondMsg;
    private static ActionMessage ackTwoCondMsg;
    private static ActionMessage resolvedTwoCondMsg;

    /** Minimal {@link ActionMessage} wrapper around a prebuilt {@link Action}. */
    public static class TestActionMessage implements ActionMessage {
        Action action;

        public TestActionMessage(Action action) {
            this.action = action;
        }

        @Override
        public Action getAction() {
            return action;
        }
    }

    @Before
    public void preparePlugin() {
        filePlugin = new FilePlugin();
        // Replace the container-injected sender with a no-op test double.
        filePlugin.sender = new TestActionSender();
    }

    @BeforeClass
    public static void prepareMessages() {
        final String tenantId = "test-tenant";
        final String rtTriggerId = "rt-trigger-jboss";
        final String rtDataId = "rt-jboss-data";
        final String avTriggerId = "av-trigger-jboss";
        final String avDataId = "av-jboss-data";
        final String mixTriggerId = "mix-trigger-jboss";

        /* Alert definition for threshold */
        Trigger rtTrigger = new Trigger(tenantId, rtTriggerId, "http://www.jboss.org");
        ThresholdCondition rtFiringCondition = new ThresholdCondition(tenantId, rtTriggerId, Mode.FIRING,
                rtDataId, ThresholdCondition.Operator.GT, 1000d);
        ThresholdCondition rtResolveCondition = new ThresholdCondition(tenantId, rtTriggerId, Mode.AUTORESOLVE,
                rtDataId, ThresholdCondition.Operator.LTE, 1000d);
        Dampening rtFiringDampening = Dampening.forStrictTime(tenantId, rtTriggerId, Mode.FIRING, 10000);

        /* Demo bad data for threshold */
        Data rtBadData = Data.forNumeric(tenantId, rtDataId, System.currentTimeMillis(), 1001d);

        /* Manual alert creation for threshold */
        Alert rtAlertOpen = new Alert(rtTrigger.getTenantId(), rtTrigger,
                getEvalList(rtFiringCondition, rtBadData));
        rtAlertOpen.setDampening(rtFiringDampening);
        rtAlertOpen.setStatus(Alert.Status.OPEN);

        /* Manual Action creation for threshold */
        Map<String, String> props = new HashMap<>();
        props.put("path", "target/file-tests");

        Action openThresholdAction = new Action(tenantId, "email", "email-to-test", rtAlertOpen);
        openThresholdAction.setProperties(props);
        openThresholdMsg = new TestActionMessage(openThresholdAction);

        Alert rtAlertAck = new Alert(rtTrigger.getTenantId(), rtTrigger,
                getEvalList(rtFiringCondition, rtBadData));
        rtAlertAck.setDampening(rtFiringDampening);
        rtAlertAck.setStatus(Alert.Status.ACKNOWLEDGED);
        rtAlertAck.setAckBy("Test ACK user");
        rtAlertAck.setAckTime(System.currentTimeMillis() + 10000);
        rtAlertAck.addNote("Test ACK user", "Test ACK notes");

        Action ackThresholdAction = new Action(tenantId, "email", "email-to-test", rtAlertAck);
        ackThresholdAction.setProperties(props);
        ackThresholdMsg = new TestActionMessage(ackThresholdAction);

        /* Demo good data to resolve a threshold alert */
        Data rtGoodData = Data.forNumeric(tenantId, rtDataId, System.currentTimeMillis() + 20000, 998d);

        Alert rtAlertResolved = new Alert(rtTrigger.getTenantId(), rtTrigger,
                getEvalList(rtFiringCondition, rtBadData));
        rtAlertResolved.setDampening(rtFiringDampening);
        rtAlertResolved.setStatus(Alert.Status.RESOLVED);
        rtAlertResolved.setResolvedBy("Test RESOLVED user");
        rtAlertResolved.setResolvedTime(System.currentTimeMillis() + 20000);
        rtAlertResolved.addNote("Test RESOLVED user", "Test RESOLVED notes");
        rtAlertResolved.setResolvedEvalSets(getEvalList(rtResolveCondition, rtGoodData));

        Action resolvedThresholdAction = new Action(tenantId, "email", "email-to-test", rtAlertResolved);
        resolvedThresholdAction.setProperties(props);
        resolvedThresholdMsg = new TestActionMessage(resolvedThresholdAction);

        /* Alert definition for availability */
        Trigger avTrigger = new Trigger(tenantId, avTriggerId, "http://www.jboss.org");
        AvailabilityCondition avFiringCondition = new AvailabilityCondition(tenantId, avTriggerId, Mode.FIRING,
                avDataId, AvailabilityCondition.Operator.NOT_UP);
        AvailabilityCondition avResolveCondition = new AvailabilityCondition(tenantId, avTriggerId,
                Mode.AUTORESOLVE, avDataId, AvailabilityCondition.Operator.UP);
        Dampening avFiringDampening = Dampening.forStrictTime(tenantId, avTriggerId, Mode.FIRING, 10000);

        /* Demo bad data for availability */
        Data avBadData = Data
                .forAvailability(tenantId, avDataId, System.currentTimeMillis(), AvailabilityType.DOWN);

        /* Manual alert creation for availability */
        Alert avAlertOpen = new Alert(avTrigger.getTenantId(), avTrigger,
                getEvalList(avFiringCondition, avBadData));
        avAlertOpen.setDampening(avFiringDampening);
        avAlertOpen.setStatus(Alert.Status.OPEN);

        /* Manual Action creation for availability */
        props = new HashMap<>();
        props.put("path", "target/file-tests");

        Action openAvailabilityAction = new Action(tenantId, "email", "email-to-test", avAlertOpen);
        openAvailabilityAction.setProperties(props);
        openAvailMsg = new TestActionMessage(openAvailabilityAction);

        Alert avAlertAck = new Alert(avTrigger.getTenantId(), avTrigger,
                getEvalList(avFiringCondition, avBadData));
        avAlertAck.setDampening(avFiringDampening);
        avAlertAck.setStatus(Alert.Status.ACKNOWLEDGED);
        avAlertAck.setAckBy("Test ACK user");
        avAlertAck.setAckTime(System.currentTimeMillis() + 10000);
        avAlertAck.addNote("Test ACK user", "Test ACK notes");

        Action ackAvailabilityAction = new Action(tenantId, "email", "email-to-test", avAlertAck);
        ackAvailabilityAction.setProperties(props);
        ackAvailMsg = new TestActionMessage(ackAvailabilityAction);

        /* Demo good data to resolve an availability alert */
        Data avGoodData = Data.forAvailability(tenantId, avDataId, System.currentTimeMillis() + 20000,
                AvailabilityType.UP);

        Alert avAlertResolved = new Alert(avTrigger.getTenantId(), avTrigger,
                getEvalList(avFiringCondition, avBadData));
        avAlertResolved.setDampening(avFiringDampening);
        avAlertResolved.setStatus(Alert.Status.RESOLVED);
        avAlertResolved.setResolvedBy("Test RESOLVED user");
        avAlertResolved.setResolvedTime(System.currentTimeMillis() + 20000);
        avAlertResolved.addNote("Test RESOLVED user", "Test RESOLVED notes");
        avAlertResolved.setResolvedEvalSets(getEvalList(avResolveCondition, avGoodData));

        Action resolvedAvailabilityAction = new Action(tenantId, "email", "email-to-test", avAlertResolved);
        resolvedAvailabilityAction.setProperties(props);
        resolvedAvailMsg = new TestActionMessage(resolvedAvailabilityAction);

        /* Alert definition for two conditions */
        Trigger mixTrigger = new Trigger(tenantId, mixTriggerId, "http://www.jboss.org");
        ThresholdCondition mixRtFiringCondition = new ThresholdCondition(tenantId, mixTriggerId, Mode.FIRING,
                rtDataId, ThresholdCondition.Operator.GT, 1000d);
        ThresholdCondition mixRtResolveCondition = new ThresholdCondition(tenantId, mixTriggerId,
                Mode.AUTORESOLVE, rtDataId, ThresholdCondition.Operator.LTE, 1000d);
        AvailabilityCondition mixAvFiringCondition = new AvailabilityCondition(tenantId, mixTriggerId,
                Mode.FIRING, avDataId, AvailabilityCondition.Operator.NOT_UP);
        AvailabilityCondition mixAvResolveCondition = new AvailabilityCondition(tenantId, mixTriggerId,
                Mode.AUTORESOLVE, avDataId, AvailabilityCondition.Operator.UP);
        Dampening mixFiringDampening = Dampening.forStrictTime(tenantId, mixTriggerId, Mode.FIRING, 10000);

        /* Demo bad data for two conditions */
        rtBadData = Data.forNumeric(tenantId, rtDataId, System.currentTimeMillis(), 1003d);
        avBadData = Data.forAvailability(tenantId, avDataId, System.currentTimeMillis(),
                AvailabilityType.DOWN);

        /* Manual alert creation for two conditions */
        List<Condition> mixConditions = new ArrayList<>();
        mixConditions.add(mixRtFiringCondition);
        mixConditions.add(mixAvFiringCondition);
        List<Data> mixBadData = new ArrayList<>();
        mixBadData.add(rtBadData);
        mixBadData.add(avBadData);
        Alert mixAlertOpen = new Alert(mixTrigger.getTenantId(), mixTrigger,
                getEvalList(mixConditions, mixBadData));
        mixAlertOpen.setDampening(mixFiringDampening);
        mixAlertOpen.setStatus(Alert.Status.OPEN);

        /* Manual Action creation for two conditions */
        props = new HashMap<>();
        props.put("path", "target/file-tests");

        Action openTwoCondAction = new Action(tenantId, "email", "email-to-test", mixAlertOpen);
        openTwoCondAction.setProperties(props);
        openTwoCondMsg = new TestActionMessage(openTwoCondAction);

        Alert mixAlertAck = new Alert(mixTrigger.getTenantId(), mixTrigger,
                getEvalList(mixConditions, mixBadData));
        mixAlertAck.setDampening(mixFiringDampening);
        mixAlertAck.setStatus(Alert.Status.ACKNOWLEDGED);
        mixAlertAck.setAckBy("Test ACK user");
        mixAlertAck.setAckTime(System.currentTimeMillis() + 10000);
        mixAlertAck.addNote("Test ACK user", "Test ACK notes");

        Action ackTwoCondAction = new Action(tenantId, "email", "email-to-test", mixAlertAck);
        ackTwoCondAction.setProperties(props);
        ackTwoCondMsg = new TestActionMessage(ackTwoCondAction);

        /* Demo good data for two conditions */
        rtGoodData = Data.forNumeric(tenantId, rtDataId, System.currentTimeMillis() + 20000, 997d);
        avGoodData = Data.forAvailability(tenantId, avDataId, System.currentTimeMillis() + 20000,
                AvailabilityType.UP);
        List<Condition> mixResolveConditions = new ArrayList<>();
        mixResolveConditions.add(mixRtResolveCondition);
        mixResolveConditions.add(mixAvResolveCondition);
        List<Data> mixGoodData = new ArrayList<>();
        mixGoodData.add(rtGoodData);
        mixGoodData.add(avGoodData);

        Alert mixAlertResolved = new Alert(mixTrigger.getTenantId(), mixTrigger,
                getEvalList(mixConditions, mixBadData));
        mixAlertResolved.setDampening(mixFiringDampening);
        // Fixed: set RESOLVED directly, matching rtAlertResolved/avAlertResolved above.
        // The previous code set ACKNOWLEDGED and immediately overwrote it with RESOLVED.
        mixAlertResolved.setStatus(Alert.Status.RESOLVED);
        mixAlertResolved.setResolvedBy("Test RESOLVED user");
        mixAlertResolved.setResolvedTime(System.currentTimeMillis() + 20000);
        mixAlertResolved.addNote("Test RESOLVED user", "Test RESOLVED notes");
        mixAlertResolved.setResolvedEvalSets(getEvalList(mixResolveConditions, mixGoodData));

        Action resolvedTwoCondAction = new Action(tenantId, "email", "email-to-test", mixAlertResolved);
        resolvedTwoCondAction.setProperties(props);
        resolvedTwoCondMsg = new TestActionMessage(resolvedTwoCondAction);
    }

    /**
     * Builds a single-entry eval-set list for one condition/datum pair.
     * Only threshold and availability conditions are supported here.
     */
    private static List<Set<ConditionEval>> getEvalList(Condition condition, Data data) {
        ConditionEval eval = null;
        if (condition instanceof ThresholdCondition) {
            eval = new ThresholdConditionEval((ThresholdCondition) condition, data);
        }
        if (condition instanceof AvailabilityCondition) {
            eval = new AvailabilityConditionEval((AvailabilityCondition) condition, data);
        }
        Set<ConditionEval> tEvalsSet = new HashSet<>();
        tEvalsSet.add(eval);
        List<Set<ConditionEval>> tEvalsList = new ArrayList<>();
        tEvalsList.add(tEvalsSet);
        return tEvalsList;
    }

    /**
     * Builds a single eval set from parallel condition/data lists; entry i of
     * {@code condition} is paired with entry i of {@code data}.
     */
    private static List<Set<ConditionEval>> getEvalList(List<Condition> condition, List<Data> data) {
        Set<ConditionEval> tEvalsSet = new HashSet<>();
        for (int i = 0; i < condition.size(); i++) {
            ConditionEval eval = null;
            if (condition.get(i) instanceof ThresholdCondition) {
                eval = new ThresholdConditionEval((ThresholdCondition) condition.get(i), data.get(i));
            }
            if (condition.get(i) instanceof AvailabilityCondition) {
                eval = new AvailabilityConditionEval((AvailabilityCondition) condition.get(i), data.get(i));
            }
            // Fixed: skip unsupported condition types instead of adding a null eval
            // (previously an unrecognized type would insert null into the set).
            if (eval != null) {
                tEvalsSet.add(eval);
            }
        }
        List<Set<ConditionEval>> tEvalsList = new ArrayList<>();
        tEvalsList.add(tEvalsSet);
        return tEvalsList;
    }

    @Test
    public void thresholdTest() throws Exception {
        filePlugin.process(openThresholdMsg);
        filePlugin.process(ackThresholdMsg);
        filePlugin.process(resolvedThresholdMsg);
    }

    @Test
    public void availabilityTest() throws Exception {
        filePlugin.process(openAvailMsg);
        filePlugin.process(ackAvailMsg);
        filePlugin.process(resolvedAvailMsg);
    }

    @Test
    public void mixedTest() throws Exception {
        filePlugin.process(openTwoCondMsg);
        filePlugin.process(ackTwoCondMsg);
        filePlugin.process(resolvedTwoCondMsg);
    }

    /** Test double for plugin responses; defaults to a RESULT operation with an empty payload. */
    public class TestActionResponseMessage implements ActionResponseMessage {

        ActionResponseMessage.Operation operation;
        Map<String, String> payload;

        public TestActionResponseMessage() {
            this.operation = ActionResponseMessage.Operation.RESULT;
            this.payload = new HashMap<>();
        }

        public TestActionResponseMessage(ActionResponseMessage.Operation operation) {
            this.operation = operation;
            this.payload = new HashMap<>();
        }

        @Override
        public Operation getOperation() {
            return operation;
        }

        @Override
        public Map<String, String> getPayload() {
            return payload;
        }
    }

    /** Test double sender: creates response messages but sends nowhere. */
    public class TestActionSender implements ActionPluginSender {

        @Override
        public ActionResponseMessage createMessage(ActionResponseMessage.Operation operation) {
            return new TestActionResponseMessage(operation);
        }

        @Override
        public void send(ActionResponseMessage msg) throws Exception {
            // Nothing to do
        }
    }
}
// Copyright (c) 2014 Darach Ennis < darach at gmail dot com >.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.

package ebnfdoc;

import java.nio.ByteBuffer;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.InputStream;
import java.io.IOException;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.util.List;
import java.util.LinkedList;
import java.util.Properties;

import org.stringtemplate.v4.ST;
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.STGroupFile;

/**
 * EBNF grammar visitor that renders documentation through a StringTemplate
 * group file (default: "ebnfdoc/md.stg") into one Markdown file per grammar
 * source file, written under {@code destDir} (default: "doc").
 */
public class EbnfStVisitor implements Ebnf.Visitor {

    private final Properties options;
    private final File destDir;       // output directory for generated docs
    private final String tplFile;     // StringTemplate group file path
    private final STGroup stGroup;
    private FileWriter fos = null;    // writer for the file currently being generated
    private int prodId = 0;           // 1-based production counter across statements

    /**
     * @param options optional configuration; recognized keys are
     *                "destDir" and "tplFile". May be null for defaults.
     * @throws EbnfDocException when the destination directory cannot be created
     */
    public EbnfStVisitor(final Properties options) {
        this.options = options;
        this.destDir = new File(options == null ? "doc" : options.getProperty("destDir", "doc"));
        this.tplFile = options == null ? "ebnfdoc/md.stg" : options.getProperty("tplFile", "ebnfdoc/md.stg");
        stGroup = new STGroupFile(tplFile);
        if (!destDir.exists()) {
            if (!destDir.mkdir()) {
                throw new EbnfDocException("Cannot create destination doc directory " + destDir);
            }
        }
    }

    /**
     * Opens the output file for a grammar source, emits prolog, grammar-level
     * doc comment and the productions header, visits every statement, then
     * emits the epilog and closes the file.
     */
    public void visitHeader(final File srcFile, final EbnfParser.EbnfContext node) throws IOException {
        final String doc = doc(node.doc());
        fos = new FileWriter(destDir + "/" + srcFile.getName() + ".md");
        ST prologTpl = stGroup.getInstanceOf("prolog");
        fos.append(prologTpl.render());
        ST docTpl = stGroup.getInstanceOf("doc");
        docTpl.add("doc", doc);
        fos.append(docTpl.render());
        ST productionsTpl = stGroup.getInstanceOf("productions");
        fos.append(productionsTpl.render());
        for (EbnfParser.StatementContext child : node.statement()) {
            visitStatement(child);
        }
        ST epilogTpl = stGroup.getInstanceOf("epilog");
        // Fixed: previously rendered prologTpl here, so the epilog template was
        // never emitted and the prolog appeared twice in the output.
        fos.append(epilogTpl.render());
        try {
            fos.close();
        } catch (IOException ignore) {
            // best effort: output is already flushed by append; close failure is non-fatal
        }
    }

    /** Renders one production: prolog with number/name, its RHS, semicolon, doc, epilog. */
    public void visitStatement(final EbnfParser.StatementContext node) throws IOException {
        final String doc = doc(node.doc());
        final String lhsId = node.lhs().Id().getText();
        ++prodId; // 1-based
        ST statementPrologTpl = stGroup.getInstanceOf("statementProlog");
        statementPrologTpl.add("num", "" + prodId);
        statementPrologTpl.add("name", "" + lhsId);
        fos.append(statementPrologTpl.render());
        visitRhs(node.rhs());
        ST semicolonTpl = stGroup.getInstanceOf("semicolon");
        fos.append(semicolonTpl.render());
        ST docTpl = stGroup.getInstanceOf("doc");
        docTpl.add("doc", doc);
        fos.append(docTpl.render());
        ST statementEpilogTpl = stGroup.getInstanceOf("statementEpilog");
        fos.append(statementEpilogTpl.render());
    }

    /**
     * Renders a right-hand side: optional doc, then clauses interleaved with
     * their operators (op(i-1) sits between clause(i-1) and clause(i)).
     */
    public void visitRhs(final EbnfParser.RhsContext node) throws IOException {
        if (node.doc() != null) {
            final String doc = doc(node.doc());
            ST docTpl = stGroup.getInstanceOf("doc");
            docTpl.add("doc", doc);
            fos.append(docTpl.render());
        }
        visitClause(node.clause(0));
        for (int i = 1; i < node.clause().size(); i++) {
            EbnfParser.OpContext op = node.op(i - 1);
            if (op.PipePipe() != null) {
                ST alternativeTpl = stGroup.getInstanceOf("long_alternative");
                fos.append(alternativeTpl.render());
            }
            if (op.Pipe() != null) {
                ST alternativeTpl = stGroup.getInstanceOf("short_alternative");
                fos.append(alternativeTpl.render());
            }
            if (op.Comma() != null) {
                ST concatenativeTpl = stGroup.getInstanceOf("concatenative");
                fos.append(concatenativeTpl.render());
            }
            visitClause(node.clause(i));
        }
    }

    /**
     * Renders one clause: a bracketed/braced/parenthesized sub-RHS, a
     * question-delimited identifier, or a plain literal.
     */
    public void visitClause(final EbnfParser.ClauseContext node) throws IOException {
        final boolean hasBrace = node.LBrace() != null;
        final boolean hasSquigly = node.LSquigly() != null;
        final boolean hasCurly = node.LCurly() != null;
        final boolean hasQuestion = node.Question(0) != null;
        final boolean hasLiteral = node.literal() != null;
        if (hasBrace) {
            ST boTpl = stGroup.getInstanceOf("braceOpen");
            fos.append(boTpl.render());
            visitRhs(node.rhs());
            ST bcTpl = stGroup.getInstanceOf("braceClose");
            fos.append(bcTpl.render());
        }
        if (hasSquigly) {
            ST soTpl = stGroup.getInstanceOf("squiglyOpen");
            fos.append(soTpl.render());
            visitRhs(node.rhs());
            ST scTpl = stGroup.getInstanceOf("squiglyClose");
            fos.append(scTpl.render());
        }
        if (hasCurly) {
            ST coTpl = stGroup.getInstanceOf("curlyOpen");
            fos.append(coTpl.render());
            visitRhs(node.rhs());
            ST ccTpl = stGroup.getInstanceOf("curlyClose");
            fos.append(ccTpl.render());
        }
        if (hasQuestion) {
            ST qoTpl = stGroup.getInstanceOf("questionOpen");
            fos.append(qoTpl.render());
            ST identifierTpl = stGroup.getInstanceOf("identifier");
            identifierTpl.add("text", node.Id().getText());
            fos.append(identifierTpl.render());
            ST qcTpl = stGroup.getInstanceOf("questionClose");
            fos.append(qcTpl.render());
        }
        if (hasLiteral) {
            visitLiteral(node.literal());
        }
    }

    /** Renders a literal: an identifier or a terminal, via its template. */
    public void visitLiteral(EbnfParser.LiteralContext node) throws IOException {
        if (node.Id() != null) {
            ST identifierTpl = stGroup.getInstanceOf("identifier");
            identifierTpl.add("text", node.Id().getText());
            fos.append(identifierTpl.render());
        }
        if (node.Terminal() != null) {
            ST terminalTpl = stGroup.getInstanceOf("terminal");
            terminalTpl.add("text", node.Terminal().getText());
            // Fixed: previously appended the raw terminal text, discarding the
            // populated "terminal" template entirely.
            fos.append(terminalTpl.render());
        }
    }

    /**
     * Extracts and processes a doc comment, stripping the 3-character comment
     * opener. Returns "" when no doc comment is present, otherwise the
     * processed text followed by a blank line.
     */
    private String doc(final EbnfParser.DocContext doc) throws IOException {
        final String docc = doc == null ? null : doc.DOC_COMMENT().getText();
        final String docx = docc == null ? null : docc.substring(3);
        return docx == null ? "" : EbnfDocComment.process(stGroup, docx) + "\n\n";
    }

    /**
     * Copies an input stream to a file via NIO channels using a 4 KiB buffer.
     * Fixed: the old finally block called close() unconditionally, throwing
     * NPE if channel creation failed, and skipped closing the output channel
     * when closing the input channel threw.
     */
    private static void copyStreamToFile(InputStream source, File dest) throws IOException {
        ReadableByteChannel ic = null;
        FileChannel oc = null;
        final ByteBuffer bf = ByteBuffer.allocate(4096);
        try {
            ic = Channels.newChannel(source);
            oc = new FileOutputStream(dest).getChannel();
            while (ic.read(bf) != -1) {
                bf.flip();
                // FileChannel.write may write fewer bytes than remaining; loop until drained.
                while (bf.hasRemaining()) {
                    oc.write(bf);
                }
                bf.clear();
            }
        } finally {
            if (ic != null) {
                try {
                    ic.close();
                } catch (IOException ignore) {
                    // prefer closing the output channel below over propagating this
                }
            }
            if (oc != null) {
                oc.close();
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.kafka; import java.net.URI; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Properties; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import org.apache.camel.AsyncCallback; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.component.kafka.serde.KafkaHeaderSerializer; import org.apache.camel.spi.HeaderFilterStrategy; import org.apache.camel.support.DefaultAsyncProducer; import org.apache.camel.util.KeyValueHolder; import org.apache.camel.util.URISupport; import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.header.Header; import org.apache.kafka.common.header.internals.RecordHeader; import 
org.apache.kafka.common.utils.Bytes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class KafkaProducer extends DefaultAsyncProducer { private static final Logger LOG = LoggerFactory.getLogger(KafkaProducer.class); @SuppressWarnings("rawtypes") private org.apache.kafka.clients.producer.KafkaProducer kafkaProducer; private final KafkaEndpoint endpoint; private ExecutorService workerPool; private boolean shutdownWorkerPool; private volatile boolean closeKafkaProducer; public KafkaProducer(KafkaEndpoint endpoint) { super(endpoint); this.endpoint = endpoint; } Properties getProps() { Properties props = endpoint.getConfiguration().createProducerProperties(); endpoint.updateClassProperties(props); String brokers = endpoint.getComponent().getKafkaClientFactory().getBrokers(endpoint.getConfiguration()); if (brokers != null) { props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); } return props; } @SuppressWarnings("rawtypes") public org.apache.kafka.clients.producer.KafkaProducer getKafkaProducer() { return kafkaProducer; } /** * To use a custom {@link org.apache.kafka.clients.producer.KafkaProducer} instance. 
*/ @SuppressWarnings("rawtypes") public void setKafkaProducer(org.apache.kafka.clients.producer.KafkaProducer kafkaProducer) { this.kafkaProducer = kafkaProducer; } public ExecutorService getWorkerPool() { return workerPool; } public void setWorkerPool(ExecutorService workerPool) { this.workerPool = workerPool; } @Override @SuppressWarnings("rawtypes") protected void doStart() throws Exception { Properties props = getProps(); if (kafkaProducer == null) { ClassLoader threadClassLoader = Thread.currentThread().getContextClassLoader(); try { // Kafka uses reflection for loading authentication settings, // use its classloader Thread.currentThread() .setContextClassLoader(org.apache.kafka.clients.producer.KafkaProducer.class.getClassLoader()); LOG.trace("Creating KafkaProducer"); kafkaProducer = endpoint.getComponent().getKafkaClientFactory().getProducer(props); closeKafkaProducer = true; } finally { Thread.currentThread().setContextClassLoader(threadClassLoader); } LOG.debug("Created KafkaProducer: {}", kafkaProducer); } // if we are in asynchronous mode we need a worker pool if (!endpoint.getConfiguration().isSynchronous() && workerPool == null) { workerPool = endpoint.createProducerExecutor(); // we create a thread pool so we should also shut it down shutdownWorkerPool = true; } } @Override protected void doStop() throws Exception { if (kafkaProducer != null && closeKafkaProducer) { LOG.debug("Closing KafkaProducer: {}", kafkaProducer); kafkaProducer.close(); kafkaProducer = null; } if (shutdownWorkerPool && workerPool != null) { int timeout = endpoint.getConfiguration().getShutdownTimeout(); LOG.debug("Shutting down Kafka producer worker threads with timeout {} millis", timeout); endpoint.getCamelContext().getExecutorServiceManager().shutdownGraceful(workerPool, timeout); workerPool = null; } } @SuppressWarnings({ "unchecked", "rawtypes" }) protected Iterator<KeyValueHolder<Object, ProducerRecord>> createRecorder(Exchange exchange) throws Exception { String topic = 
// NOTE(review): this fragment starts mid-method — the enclosing method header is outside the
// visible chunk. From the call sites below (both process() overloads call createRecorder(exchange)
// and iterate the result) this is the tail of createRecorder(Exchange), which resolves the target
// topic, converts the body into one or more Kafka ProducerRecords, and pairs each record with the
// object (Exchange/Message/body) whose metadata headers should be updated after the send.
endpoint.getConfiguration().getTopic();
// must remove header so its not propagated onward after it has been consumed here
Object overrideTopic = exchange.getIn().removeHeader(KafkaConstants.OVERRIDE_TOPIC);
if (overrideTopic != null) {
    LOG.debug("Using override topic: {}", overrideTopic);
    topic = overrideTopic.toString();
}
if (topic == null) {
    // if topic property was not received from configuration or header
    // parameters take it from the remaining URI
    topic = URISupport.extractRemainderPath(new URI(endpoint.getEndpointUri()), true);
}
// extracting headers which need to be propagated to the Kafka record
List<Header> propagatedHeaders = getPropagatedHeaders(exchange, endpoint.getConfiguration());
Object msg = exchange.getIn().getBody();
// is the message body a list or something that contains multiple values
Iterator<Object> iterator = null;
if (msg instanceof Iterable) {
    iterator = ((Iterable<Object>) msg).iterator();
} else if (msg instanceof Iterator) {
    iterator = (Iterator<Object>) msg;
}
if (iterator != null) {
    // multi-value body: lazily map each element to a (source, ProducerRecord) pair
    final Iterator<Object> msgList = iterator;
    final String msgTopic = topic;
    return new Iterator<KeyValueHolder<Object, ProducerRecord>>() {
        @Override
        public boolean hasNext() {
            return msgList.hasNext();
        }

        @Override
        public KeyValueHolder<Object, ProducerRecord> next() {
            // must convert each entry of the iterator into the value
            // according to the serializer
            Object next = msgList.next();
            String innerTopic = msgTopic;
            Object innerKey = null;
            Integer innerPartitionKey = null;
            boolean hasPartitionKey = false;
            boolean hasMessageKey = false;
            Object value = next;
            Exchange ex = null;
            Object body = next;
            if (next instanceof Exchange || next instanceof Message) {
                // element carries its own headers: allow per-element topic/partition/key overrides
                Exchange innerExchange = null;
                Message innerMmessage = null;
                if (next instanceof Exchange) {
                    innerExchange = (Exchange) next;
                    innerMmessage = innerExchange.getIn();
                } else {
                    innerMmessage = (Message) next;
                }
                if (innerMmessage.getHeader(KafkaConstants.OVERRIDE_TOPIC) != null) {
                    innerTopic = (String) innerMmessage.removeHeader(KafkaConstants.OVERRIDE_TOPIC);
                }
                if (innerMmessage.getHeader(KafkaConstants.PARTITION_KEY) != null) {
                    // endpoint configuration takes precedence over the header value
                    innerPartitionKey = endpoint.getConfiguration().getPartitionKey() != null
                            ? endpoint.getConfiguration().getPartitionKey()
                            : innerMmessage.getHeader(KafkaConstants.PARTITION_KEY, Integer.class);
                    hasPartitionKey = innerPartitionKey != null;
                }
                if (innerMmessage.getHeader(KafkaConstants.KEY) != null) {
                    // endpoint configuration takes precedence over the header value
                    innerKey = endpoint.getConfiguration().getKey() != null
                            ? endpoint.getConfiguration().getKey()
                            : innerMmessage.getHeader(KafkaConstants.KEY);
                    // NOTE(review): when next is a bare Message, innerExchange is null here, so
                    // tryConvertToSerializedType returns the key unconverted (it guards on a null
                    // exchange) — confirm whether a Message element's key should really skip
                    // key-serializer conversion.
                    final Object messageKey = innerKey != null
                            ? tryConvertToSerializedType(innerExchange, innerKey, endpoint.getConfiguration().getKeySerializer())
                            : null;
                    hasMessageKey = messageKey != null;
                }
                ex = innerExchange == null ? exchange : innerExchange;
                value = tryConvertToSerializedType(ex, innerMmessage.getBody(), endpoint.getConfiguration().getValueSerializer());
            }
            // partition may only be set together with a key; timestamp is always null
            if (hasPartitionKey && hasMessageKey) {
                return new KeyValueHolder(
                        body,
                        new ProducerRecord(innerTopic, innerPartitionKey, null, innerKey, value, propagatedHeaders));
            } else if (hasMessageKey) {
                return new KeyValueHolder(
                        body,
                        new ProducerRecord(innerTopic, null, null, innerKey, value, propagatedHeaders));
            } else {
                return new KeyValueHolder(
                        body,
                        new ProducerRecord(innerTopic, null, null, null, value, propagatedHeaders));
            }
        }

        @Override
        public void remove() {
            msgList.remove();
        }
    };
}
// single-value body from here on.
// endpoint take precedence over header configuration
final Integer partitionKey = endpoint.getConfiguration().getPartitionKey() != null
        ? endpoint.getConfiguration().getPartitionKey()
        : exchange.getIn().getHeader(KafkaConstants.PARTITION_KEY, Integer.class);
final boolean hasPartitionKey = partitionKey != null;
// endpoint take precedence over header configuration
Object key = endpoint.getConfiguration().getKey() != null
        ? endpoint.getConfiguration().getKey()
        : exchange.getIn().getHeader(KafkaConstants.KEY);
final Object messageKey = key != null
        ? tryConvertToSerializedType(exchange, key, endpoint.getConfiguration().getKeySerializer())
        : null;
final boolean hasMessageKey = messageKey != null;
// must convert each entry of the iterator into the value according to
// the serializer
Object value = tryConvertToSerializedType(exchange, msg, endpoint.getConfiguration().getValueSerializer());
ProducerRecord record;
if (hasPartitionKey && hasMessageKey) {
    record = new ProducerRecord(topic, partitionKey, null, key, value, propagatedHeaders);
} else if (hasMessageKey) {
    record = new ProducerRecord(topic, null, null, key, value, propagatedHeaders);
} else {
    record = new ProducerRecord(topic, null, null, null, value, propagatedHeaders);
}
return Collections.singletonList(new KeyValueHolder<Object, ProducerRecord>((Object) exchange, record)).iterator();
}

/**
 * Collects the Camel headers that should be carried over onto the Kafka record:
 * every header the endpoint's {@code HeaderFilterStrategy} does not filter out,
 * serialized through the configured {@code KafkaHeaderSerializer}. Headers whose
 * serialization yields {@code null} are dropped.
 */
private List<Header> getPropagatedHeaders(Exchange exchange, KafkaConfiguration getConfiguration) {
    HeaderFilterStrategy headerFilterStrategy = getConfiguration.getHeaderFilterStrategy();
    KafkaHeaderSerializer headerSerializer = getConfiguration.getHeaderSerializer();
    return exchange.getIn().getHeaders().entrySet().stream()
            .filter(entry -> shouldBeFiltered(entry, exchange, headerFilterStrategy))
            .map(entry -> getRecordHeader(entry, headerSerializer)).filter(Objects::nonNull).collect(Collectors.toList());
}

// Returns true when the header should be KEPT (i.e. the strategy did NOT filter it) —
// despite the name, this is a keep-predicate, as shown by its use in filter() above.
private boolean shouldBeFiltered(
        Map.Entry<String, Object> entry, Exchange exchange, HeaderFilterStrategy headerFilterStrategy) {
    return !headerFilterStrategy.applyFilterToCamelHeaders(entry.getKey(), entry.getValue(), exchange);
}

// Serializes one Camel header into a Kafka RecordHeader; null if the serializer
// cannot represent the value (caller drops such entries).
private RecordHeader getRecordHeader(Map.Entry<String, Object> entry, KafkaHeaderSerializer headerSerializer) {
    byte[] headerValue = headerSerializer.serialize(entry.getKey(), entry.getValue());
    if (headerValue == null) {
        return null;
    }
    return new RecordHeader(entry.getKey(), headerValue);
}

@Override
@SuppressWarnings({ "unchecked", "rawtypes" })
// Camel calls this method if the endpoint isSynchronous(), as the
// KafkaEndpoint creates a SynchronousDelegateProducer for it
public void process(Exchange exchange) throws Exception {
    Iterator<KeyValueHolder<Object, ProducerRecord>> c = createRecorder(exchange);
    List<KeyValueHolder<Object, Future<RecordMetadata>>> futures = new LinkedList<>();
    List<RecordMetadata> recordMetadatas = new ArrayList<>();
    // expose the (still-empty) metadata list up front; it is filled in as sends complete
    if (endpoint.getConfiguration().isRecordMetadata()) {
        if (exchange.hasOut()) {
            exchange.getOut().setHeader(KafkaConstants.KAFKA_RECORDMETA, recordMetadatas);
        } else {
            exchange.getIn().setHeader(KafkaConstants.KAFKA_RECORDMETA, recordMetadatas);
        }
    }
    // fire off all sends first ...
    while (c.hasNext()) {
        KeyValueHolder<Object, ProducerRecord> exrec = c.next();
        ProducerRecord rec = exrec.getValue();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Sending message to topic: {}, partition: {}, key: {}", rec.topic(), rec.partition(), rec.key());
        }
        futures.add(new KeyValueHolder(exrec.getKey(), kafkaProducer.send(rec)));
    }
    // ... then block until each completes, propagating per-record metadata back to the
    // element (Exchange or Message) it originated from
    for (KeyValueHolder<Object, Future<RecordMetadata>> f : futures) {
        // wait for them all to be sent
        List<RecordMetadata> metadata = Collections.singletonList(f.getValue().get());
        recordMetadatas.addAll(metadata);
        Exchange innerExchange = null;
        if (f.getKey() instanceof Exchange) {
            innerExchange = (Exchange) f.getKey();
            if (innerExchange != null) {
                if (endpoint.getConfiguration().isRecordMetadata()) {
                    if (innerExchange.hasOut()) {
                        innerExchange.getOut().setHeader(KafkaConstants.KAFKA_RECORDMETA, metadata);
                    } else {
                        innerExchange.getIn().setHeader(KafkaConstants.KAFKA_RECORDMETA, metadata);
                    }
                }
            }
        }
        Message innerMessage = null;
        if (f.getKey() instanceof Message) {
            innerMessage = (Message) f.getKey();
            if (innerMessage != null) {
                if (endpoint.getConfiguration().isRecordMetadata()) {
                    innerMessage.setHeader(KafkaConstants.KAFKA_RECORDMETA, metadata);
                }
            }
        }
    }
}

@Override
@SuppressWarnings({ "unchecked", "rawtypes" })
public boolean process(Exchange exchange, AsyncCallback callback) {
    try {
        Iterator<KeyValueHolder<Object, ProducerRecord>> c = createRecorder(exchange);
        // cb tracks the whole exchange; one increment per record so allSent() only
        // completes the callback once every send has been handed to the producer
        KafkaProducerCallBack cb = new KafkaProducerCallBack(exchange, callback);
        while (c.hasNext()) {
            cb.increment();
            KeyValueHolder<Object, ProducerRecord> exrec = c.next();
            ProducerRecord rec = exrec.getValue();
            if (LOG.isDebugEnabled()) {
                LOG.debug("Sending message to topic: {}, partition: {}, key: {}", rec.topic(), rec.partition(), rec.key());
            }
            // also notify a per-element callback so inner Exchanges/Messages get their
            // own metadata/exception propagation
            List<Callback> delegates = new ArrayList<>(Arrays.asList(cb));
            if (exrec.getKey() != null) {
                delegates.add(new KafkaProducerCallBack(exrec.getKey()));
            }
            kafkaProducer.send(rec, new DelegatingCallback(delegates.toArray(new Callback[0])));
        }
        // true = all sends already completed synchronously; false = completion is async
        return cb.allSent();
    } catch (Exception ex) {
        exchange.setException(ex);
    }
    // failure path: report synchronous completion with the exception set on the exchange
    callback.done(true);
    return true;
}

/**
 * Attempts to convert the object to the same type as the value serializer specified.
 * Only the four stock Kafka serializers are recognized; for any other serializer —
 * or when conversion fails or {@code exchange} is null — the original object is
 * returned unchanged.
 */
protected Object tryConvertToSerializedType(Exchange exchange, Object object, String valueSerializer) {
    Object answer = null;
    if (exchange == null) {
        return object;
    }
    if (KafkaConstants.KAFKA_DEFAULT_SERIALIZER.equals(valueSerializer)) {
        answer = exchange.getContext().getTypeConverter().tryConvertTo(String.class, exchange, object);
    } else if ("org.apache.kafka.common.serialization.ByteArraySerializer".equals(valueSerializer)) {
        answer = exchange.getContext().getTypeConverter().tryConvertTo(byte[].class, exchange, object);
    } else if ("org.apache.kafka.common.serialization.ByteBufferSerializer".equals(valueSerializer)) {
        answer = exchange.getContext().getTypeConverter().tryConvertTo(ByteBuffer.class, exchange, object);
    } else if ("org.apache.kafka.common.serialization.BytesSerializer".equals(valueSerializer)) {
        // we need to convert to byte array first
        byte[] array = exchange.getContext().getTypeConverter().tryConvertTo(byte[].class, exchange, object);
        if (array != null) {
            answer = new Bytes(array);
        }
    }
    return answer != null ? answer : object;
}

/** Fans a single Kafka completion out to several callbacks, in registration order. */
private final class DelegatingCallback implements Callback {

    private final List<Callback> callbacks;

    public DelegatingCallback(Callback... callbacks) {
        this.callbacks = Arrays.asList(callbacks);
    }

    @Override
    public void onCompletion(RecordMetadata metadata, Exception exception) {
        callbacks.forEach(c -> c.onCompletion(metadata, exception));
    }
}

/**
 * Completion tracker for one logical send target (an Exchange, a Message, or a plain body).
 * count starts at 1 and is incremented once per record; allSent()/onCompletion each decrement,
 * so the Camel AsyncCallback fires exactly once, after the producer loop has finished AND
 * every record has completed.
 */
private final class KafkaProducerCallBack implements Callback {

    private final Object body;
    private final AsyncCallback callback;
    private final AtomicInteger count = new AtomicInteger(1);
    private final List<RecordMetadata> recordMetadatas = new ArrayList<>();

    KafkaProducerCallBack(Object body, AsyncCallback callback) {
        this.body = body;
        this.callback = callback;
        // expose the live metadata list immediately; entries are appended as sends complete
        if (endpoint.getConfiguration().isRecordMetadata()) {
            if (body instanceof Exchange) {
                Exchange ex = (Exchange) body;
                if (ex.hasOut()) {
                    ex.getOut().setHeader(KafkaConstants.KAFKA_RECORDMETA, recordMetadatas);
                } else {
                    ex.getIn().setHeader(KafkaConstants.KAFKA_RECORDMETA, recordMetadatas);
                }
            }
            if (body instanceof Message) {
                Message msg = (Message) body;
                msg.setHeader(KafkaConstants.KAFKA_RECORDMETA, recordMetadatas);
            }
        }
    }

    public KafkaProducerCallBack(Exchange exchange) {
        this(exchange, null);
    }

    public KafkaProducerCallBack(Message message) {
        this(message, null);
    }

    public KafkaProducerCallBack(Object body) {
        this(body, null);
    }

    void increment() {
        count.incrementAndGet();
    }

    boolean allSent() {
        if (count.decrementAndGet() == 0) {
            LOG.trace("All messages sent, continue routing.");
            // was able to get all the work done while queuing the requests
            if (callback != null) {
                callback.done(true);
            }
            return true;
        }
        return false;
    }

    @Override
    public void onCompletion(RecordMetadata recordMetadata, Exception e) {
        // surface the failure on the owning exchange (directly, or via the message's exchange)
        if (e != null) {
            if (body instanceof Exchange) {
                ((Exchange) body).setException(e);
            }
            if (body instanceof Message && ((Message) body).getExchange() != null) {
                ((Message) body).getExchange().setException(e);
            }
        }
        recordMetadatas.add(recordMetadata);
        if (count.decrementAndGet() == 0) {
            // use worker pool to continue routing the exchange
            // as this thread is from Kafka Callback and should not be used
            // by Camel routing
            workerPool.submit(new Runnable() {
                @Override
                public void run() {
                    LOG.trace("All messages sent, continue routing.");
                    if (callback != null) {
                        callback.done(false);
                    }
                }
            });
        }
    }
}
}
/*
 * VCollapse.java.java
 *
 * Created on 01-03-2010 01:34:45 PM
 *
 * Copyright 2010 Jonathan Colt
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jonathancolt.nicity.view.value;

/*
 * #%L
 * nicity-view
 * %%
 * Copyright (C) 2013 Jonathan Colt
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
import com.jonathancolt.nicity.core.collection.CSet;
import com.jonathancolt.nicity.core.lang.ICallback;
import com.jonathancolt.nicity.core.lang.UCompare;
import com.jonathancolt.nicity.view.border.ButtonBorder;
import com.jonathancolt.nicity.view.core.AColor;
import com.jonathancolt.nicity.view.core.UV;
import com.jonathancolt.nicity.view.core.VChain;
import com.jonathancolt.nicity.view.core.VString;
import com.jonathancolt.nicity.view.core.ViewColor;
import com.jonathancolt.nicity.view.core.Viewer;
import com.jonathancolt.nicity.view.interfaces.IEvent;
import com.jonathancolt.nicity.view.interfaces.IView;
import com.jonathancolt.nicity.view.list.AItem;

/**
 * A collapsible list item: a clickable "+"/"-" handle followed by lazily built
 * content. Open/close state can be linked across instances via the four CSet
 * groups (openTogether, closeTogether, closeWhenOpening, openWhenClosing).
 *
 * @author Administrator
 */
public class VCollapse extends AItem implements Comparable {

    /**
     * Extension hook: subclasses may supply the VChain that will hold the handle
     * and content; the base implementation returns null, which makes rebuild()
     * create a default chain (vertical or horizontal per the 'vertical' flag).
     *
     * @return the chain to populate, or null to use the default
     */
    public VChain handleAndContent() {
        return null;
    }

    // Label shown in the handle; may be a plain object (rendered via VString) or an IView.
    protected Object name;
    // Optional hook invoked (with this) at the start of every rebuild().
    protected ICallback picked;
    // Produces the content view on demand when the item is opened.
    protected ICallback contentCallback;
    // Current open/closed state.
    protected boolean open = false;
    // Layout direction for the default chain created by rebuild().
    protected boolean vertical = false;
    // NOTE(review): assigned nowhere in this class — appears unused; confirm before removing.
    protected IView content;
    // Sort key used by compareTo(); -1 means unordered.
    protected int order = -1;
    // Span counts applied when open / closed (0 disables the span juggling in rebuild()).
    protected int openSpans = 0;
    protected int closeSpans = 0;
    // Peers forced open when this opens.
    public CSet openTogether = new CSet();
    // Peers forced closed when this closes.
    public CSet closeTogether = new CSet();
    // Peers closed when this opens (accordion behavior).
    public CSet closeWhenOpening = new CSet();
    // Peers opened when this closes.
    public CSet openWhenClosing = new CSet();
    // Theme colors for the handle rendering.
    public AColor dark = ViewColor.cThemeShadow;
    public AColor color = ViewColor.cTheme;
    public AColor light = ViewColor.cThemeHighlight;

    /**
     * Convenience constructor wrapping a fixed view in a callback.
     *
     * @param _name  handle label (Object or IView)
     * @param view   fixed content view
     * @param _open  initial open state
     */
    public VCollapse(Object _name, final IView view, boolean _open) {
        this(_name, new ICallback<Object, Object>() {
            @Override
            public Object callback(Object i) {
                return view;
            }
        }, _open, false);
    }

    /**
     * Horizontal collapse with lazily produced content.
     *
     * @param _name            handle label
     * @param _contentCallback produces the content view when opened
     * @param _open            initial open state
     */
    public VCollapse(Object _name, ICallback _contentCallback, boolean _open) {
        this(_name, _contentCallback, _open, false);
    }

    /**
     * Collapse with explicit open/close span counts and default theme colors.
     *
     * @param _name            handle label
     * @param _contentCallback produces the content view when opened
     * @param _open            initial open state
     * @param _openSpans       spans applied while open
     * @param _closeSpans      spans applied while closed
     */
    public VCollapse(Object _name, ICallback _contentCallback, boolean _open, int _openSpans, int _closeSpans) {
        this(_name, _contentCallback, _open, _openSpans, _closeSpans,
                ViewColor.cThemeShadow, ViewColor.cTheme, ViewColor.cThemeHighlight);
    }

    /**
     * Fully specified constructor.
     *
     * @param _name            handle label
     * @param _contentCallback produces the content view when opened
     * @param _open            initial open state
     * @param _openSpans       spans applied while open
     * @param _closeSpans      spans applied while closed
     * @param _dark            shadow color
     * @param _color           base color
     * @param _light           highlight color
     */
    public VCollapse(Object _name, ICallback _contentCallback, boolean _open,
            int _openSpans, int _closeSpans, AColor _dark, AColor _color, AColor _light) {
        name = _name;
        contentCallback = _contentCallback;
        open = _open;
        openSpans = _openSpans;
        closeSpans = _closeSpans;
        dark = _dark;
        color = _color;
        light = _light;
        rebuild(null);
    }

    /**
     * Constructor selecting the layout direction of the default chain.
     *
     * @param _name            handle label
     * @param _contentCallback produces the content view when opened
     * @param _open            initial open state
     * @param _vertical        true for a vertical chain
     */
    public VCollapse(Object _name, ICallback _contentCallback, boolean _open, boolean _vertical) {
        name = _name;
        contentCallback = _contentCallback;
        open = _open;
        vertical = _vertical;
        rebuild(null);
    }

    /**
     * @return true when currently open
     */
    public boolean isTrue() {
        return open == true;
    }

    /**
     * Opens the item (no-op if already open) and applies linked-group side effects.
     */
    public void open() {
        if (open) {
            return;
        }
        open = true;
        changed();
    }

    /**
     * Closes the item (no-op if already closed) and applies linked-group side effects.
     */
    public void close() {
        if (!open) {
            return;
        }
        open = false;
        changed();
    }

    /**
     * Propagates the new open/closed state to the linked CSet groups, then rebuilds.
     * NOTE(review): both branches pass the same (true, false) transition for the first
     * two groups; for openWhenClosing the transition is (false, true) — i.e. closed
     * peers get opened. Confirm the closeWhenOpening arguments are intentional.
     */
    public void changed() {
        if (open) {
            handleOthers(openTogether, true, false);
            handleOthers(closeWhenOpening, true, false);
        } else {
            handleOthers(closeTogether, true, false);
            handleOthers(openWhenClosing, false, true);
        }
        rebuild(null);
    }

    /**
     * Rebuilds the composite view: handle plus (when open) the content, then applies
     * the open/close span counts and repaints.
     *
     * @param _content explicit content to show; null means use contentCallback
     *                 (and a "Generating Contents" placeholder if that also yields null).
     *                 Passing non-null content forces the item open.
     */
    public void rebuild(IView _content) {
        if (picked != null) {
            picked.callback(this);
        }
        VChain c = handleAndContent();
        if (c == null) {
            if (vertical) {
                c = new VChain(UV.cNENW);
            } else {
                c = new VChain(UV.cSWNW);
            }
        }
        Handle handle = new Handle(_content);
        c.add(handle);
        if (_content != null || open) {
            open = true;
            if (_content == null) {
                _content = (IView) contentCallback.callback(this);
            }
            if (_content == null) {
                _content = new VString("Generating Contents", color.invert());
            }
            c.add(_content);
        }
        Viewer m = new Viewer(c);
        // Swap span counts on chain, viewer and this item together (author flags this as hacky).
        if (!open) {
            if (closeSpans != 0) { //!! hacky
                c.unspans(openSpans);
                m.unspans(openSpans);
                unspans(openSpans);
                c.spans(closeSpans);
                m.spans(closeSpans);
                spans(closeSpans);
            }
        } else {
            if (openSpans != 0) { //!! hacky
                c.unspans(closeSpans);
                m.unspans(closeSpans);
                unspans(closeSpans);
                c.spans(openSpans);
                m.spans(openSpans);
                spans(openSpans);
            }
        }
        setBorder(null);
        setContent(m);
        paint();
    }

    // Builds the " + " (expand) button; spans() is overridden to a no-op so the
    // span juggling in rebuild() never resizes the button itself.
    private IView openable() {
        VString v = new VString(" + ", UV.fonts[UV.cSmall]) {
            @Override
            public IView spans(int _spans) {
                return this;
            }
        };
        v.setBorder(new ButtonBorder(1));
        return v;
    }

    // Builds the " - " (collapse) button; same no-op spans() as openable().
    private IView closeable() {
        VString v = new VString(" - ", UV.fonts[UV.cSmall]) {
            @Override
            public IView spans(int _spans) {
                return this;
            }
        };
        v.setBorder(new ButtonBorder(1));
        return v;
    }

    /**
     * The clickable header row: +/- button plus the name label. Clicking toggles
     * the enclosing VCollapse.
     */
    class Handle extends AItem {

        Handle(IView _content) {
            if (open) {
                if (name instanceof IView) {
                    VChain c = new VChain(UV.cEW, closeable(), (IView) name);
                    c.spans(openSpans);
                    setContent(c);
                } else {
                    setContent(new VChain(UV.cEW, closeable(), new VString(name, color.invert().bw())));
                }
            } else {
                if (name instanceof IView) {
                    VChain c = new VChain(UV.cEW, openable(), (IView) name);
                    c.spans(closeSpans);
                    setContent(c);
                } else {
                    setContent(new VChain(UV.cEW, openable(), new VString(name, color.invert().bw())));
                }
            }
        }

        @Override
        public Object getParcel() {
            // delegate to the owning collapse so drag/drop sees the outer item's parcel
            return VCollapse.this.getParcel();
        }

        @Override
        public void picked(IEvent _e) {
            open = !open;
            changed();
        }

        @Override
        public void selected(IEvent _e) {
            picked(_e);
        }
    }

    // For every peer in _set currently in _ifState, force it to _desiredState and rebuild it.
    private void handleOthers(CSet _set, boolean _ifState, boolean _desiredState) {
        Object[] all = _set.getAll(Object.class);
        for (Object a : all) {
            VCollapse c = (VCollapse) a;
            if (c.open == _ifState) {
                c.open = _desiredState;
                c.rebuild(null);
            }
        }
    }

    /**
     * @return this item itself as its value
     */
    @Override
    public Object getValue() {
        return this;
    }

    // Comparable: orders items by their 'order' field via UCompare.
    @Override
    public int compareTo(Object o) {
        return (int) UCompare.compare(this.order, ((VCollapse) o).order);
    }
}
/*
 * Copyright 2014, Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 *    * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *    * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following disclaimer
 * in the documentation and/or other materials provided with the
 * distribution.
 *
 *    * Neither the name of Google Inc. nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package io.grpc.transport.netty;

import static io.netty.channel.ChannelOption.SO_KEEPALIVE;

import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;

import io.grpc.Metadata;
import io.grpc.MethodDescriptor;
import io.grpc.transport.ClientStream;
import io.grpc.transport.ClientStreamListener;
import io.grpc.transport.ClientTransport;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.AsciiString;
import io.netty.handler.codec.http2.DefaultHttp2Connection;
import io.netty.handler.codec.http2.DefaultHttp2FrameReader;
import io.netty.handler.codec.http2.DefaultHttp2FrameWriter;
import io.netty.handler.codec.http2.DefaultHttp2LocalFlowController;
import io.netty.handler.codec.http2.DefaultHttp2StreamRemovalPolicy;
import io.netty.handler.codec.http2.Http2Connection;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.Http2FrameLogger;
import io.netty.handler.codec.http2.Http2FrameReader;
import io.netty.handler.codec.http2.Http2FrameWriter;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.Http2InboundFrameLogger;
import io.netty.handler.codec.http2.Http2OutboundFrameLogger;
import io.netty.handler.codec.http2.Http2StreamRemovalPolicy;
import io.netty.handler.ssl.SslContext;
import io.netty.util.internal.logging.InternalLogLevel;

import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.annotation.concurrent.GuardedBy;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLParameters;

/**
 * A Netty-based {@link ClientTransport} implementation.
 *
 * <p>Lifecycle: {@link #start} connects and kicks off protocol negotiation
 * (plaintext, h2c upgrade, or TLS/ALPN); {@link #newStream} blocks until
 * negotiation settles; channel close drives shutdown/termination notification.
 * State transitions use this object's monitor with wait/notifyAll.
 */
class NettyClientTransport implements ClientTransport {
    private static final Logger log = Logger.getLogger(NettyClientTransport.class.getName());

    private final SocketAddress address;
    private final Class<? extends Channel> channelType;
    private final EventLoopGroup group;
    private final Http2Negotiator.Negotiation negotiation;
    private final NettyClientHandler handler;
    private final boolean ssl;
    private final AsciiString authority;
    private final int connectionWindowSize;
    private final int streamWindowSize;
    // We should not send on the channel until negotiation completes. This is a hard requirement
    // by SslHandler but is appropriate for HTTP/1.1 Upgrade as well.
    private Channel channel;
    private Listener listener;
    /**
     * Whether the transport started or failed during starting. Only transitions to true. When
     * changed, this.notifyAll() must be called.
     */
    private volatile boolean started;
    /** Guaranteed to be true when RUNNING. */
    private volatile boolean negotiationComplete;
    /** Whether the transport started shutting down. */
    @GuardedBy("this")
    private boolean shutdown;
    // Cause recorded at shutdown; rethrown to callers of newStream() when negotiation failed.
    private Throwable shutdownCause;
    /** Whether the transport completed shutting down. */
    @GuardedBy("this")
    private boolean terminated;

    /**
     * Builds the transport, deriving the :authority string from the address and wiring up the
     * negotiation strategy (PLAINTEXT, PLAINTEXT_UPGRADE, or TLS with endpoint identification).
     *
     * @throws RuntimeException wrapping SSLException if a default client SslContext cannot be built
     */
    NettyClientTransport(SocketAddress address, Class<? extends Channel> channelType,
            NegotiationType negotiationType, EventLoopGroup group, SslContext sslContext,
            int connectionWindowSize, int streamWindowSize) {
        Preconditions.checkNotNull(negotiationType, "negotiationType");
        this.address = Preconditions.checkNotNull(address, "address");
        this.group = Preconditions.checkNotNull(group, "group");
        this.channelType = Preconditions.checkNotNull(channelType, "channelType");
        this.connectionWindowSize = connectionWindowSize;
        this.streamWindowSize = streamWindowSize;

        InetSocketAddress inetAddress = null;
        if (address instanceof InetSocketAddress) {
            inetAddress = (InetSocketAddress) address;
            authority = new AsciiString(inetAddress.getHostString() + ":" + inetAddress.getPort());
        } else {
            Preconditions.checkState(negotiationType != NegotiationType.TLS,
                    "TLS not supported for non-internet socket types");
            // Specialized address types are allowed to support custom Channel types so just assume their
            // toString() values are valid :authority values
            authority = new AsciiString(address.toString());
        }

        DefaultHttp2StreamRemovalPolicy streamRemovalPolicy = new DefaultHttp2StreamRemovalPolicy();
        handler = newHandler(streamRemovalPolicy);
        switch (negotiationType) {
            case PLAINTEXT:
                negotiation = Http2Negotiator.plaintext(handler);
                ssl = false;
                break;
            case PLAINTEXT_UPGRADE:
                negotiation = Http2Negotiator.plaintextUpgrade(handler);
                ssl = false;
                break;
            case TLS:
                if (sslContext == null) {
                    try {
                        sslContext = SslContext.newClientContext();
                    } catch (SSLException ex) {
                        throw new RuntimeException(ex);
                    }
                }
                // TODO(ejona86): specify allocator. The method currently ignores it though.
                SSLEngine sslEngine = sslContext.newEngine(null, inetAddress.getHostString(),
                        inetAddress.getPort());
                SSLParameters sslParams = new SSLParameters();
                sslParams.setEndpointIdentificationAlgorithm("HTTPS");
                sslEngine.setSSLParameters(sslParams);
                negotiation = Http2Negotiator.tls(sslEngine, streamRemovalPolicy, handler);
                ssl = true;
                break;
            default:
                throw new IllegalArgumentException("Unsupported negotiationType: " + negotiationType);
        }
    }

    /**
     * Creates a new HTTP/2 stream for the given method. Blocks until transport startup
     * (success or failure) is known; throws IllegalStateException if negotiation failed.
     * The stream is cancelled if writing the creation command fails.
     */
    @Override
    public ClientStream newStream(MethodDescriptor<?, ?> method, Metadata.Headers headers,
            ClientStreamListener listener) {
        Preconditions.checkNotNull(method, "method");
        Preconditions.checkNotNull(headers, "headers");
        Preconditions.checkNotNull(listener, "listener");

        // We can't write to the channel until negotiation is complete.
        awaitStarted();
        if (!negotiationComplete) {
            throw new IllegalStateException("Negotiation failed to complete", shutdownCause);
        }

        // Create the stream.
        NettyClientStream stream = new NettyClientStream(listener, channel, handler);

        try {
            // Convert the headers into Netty HTTP/2 headers.
            AsciiString defaultPath = new AsciiString("/" + method.getName());
            Http2Headers http2Headers = Utils.convertClientHeaders(headers, ssl, defaultPath, authority);

            // Write the request and await creation of the stream.
            channel.writeAndFlush(new CreateStreamCommand(http2Headers, stream)).get();
        } catch (InterruptedException e) {
            // Restore the interrupt.
            Thread.currentThread().interrupt();
            stream.cancel();
            throw new RuntimeException(e);
        } catch (ExecutionException e) {
            stream.cancel();
            throw new RuntimeException(e.getCause() != null ? e.getCause() : e);
        }

        return stream;
    }

    /**
     * Connects to the server and starts protocol negotiation; transport events are
     * reported through the given listener. Channel close (clean or not) ultimately
     * triggers notifyTerminated().
     */
    @Override
    public void start(Listener transportListener) {
        listener = Preconditions.checkNotNull(transportListener, "listener");
        Bootstrap b = new Bootstrap();
        b.group(group);
        b.channel(channelType);
        if (NioSocketChannel.class.isAssignableFrom(channelType)) {
            b.option(SO_KEEPALIVE, true);
        }
        b.handler(negotiation.initializer());

        // Start the connection operation to the server.
        final ChannelFuture connectFuture = b.connect(address);
        channel = connectFuture.channel();
        connectFuture.addListener(new ChannelFutureListener() {
            @Override
            public void operationComplete(ChannelFuture future) throws Exception {
                if (!future.isSuccess()) {
                    // The connection attempt failed.
                    notifyTerminated(future.cause());
                    return;
                }

                // Connected successfully, start the protocol negotiation.
                negotiation.onConnected(channel);
            }
        });

        Futures.addCallback(negotiation.completeFuture(), new FutureCallback<Void>() {
            @Override
            public void onSuccess(Void result) {
                // The negotiation was successful.
                negotiationComplete = true;
                notifyStarted();
            }

            @Override
            public void onFailure(Throwable t) {
                // The negotiation failed.
                notifyTerminated(t);
            }
        });

        // Handle transport shutdown when the channel is closed.
        channel.closeFuture().addListener(new ChannelFutureListener() {
            @Override
            public void operationComplete(ChannelFuture future) throws Exception {
                if (!future.isSuccess()) {
                    // The close failed. Just notify that transport shutdown failed.
                    notifyTerminated(future.cause());
                    return;
                }

                if (handler.connectionError() != null) {
                    // The handler encountered a connection error.
                    notifyTerminated(handler.connectionError());
                } else {
                    // Normal termination of the connection.
                    notifyTerminated(null);
                }
            }
        });
    }

    /** Initiates an orderly shutdown by closing the channel; termination follows asynchronously. */
    @Override
    public void shutdown() {
        notifyShutdown(null);
        // Notifying of termination is automatically done when the channel closes.
        if (channel != null && channel.isOpen()) {
            channel.close();
        }
    }

    /**
     * Waits until started. Does not throw an exception if the transport has now failed.
     */
    private void awaitStarted() {
        if (!started) {
            try {
                synchronized (this) {
                    while (!started) {
                        wait();
                    }
                }
            } catch (InterruptedException ex) {
                Thread.currentThread().interrupt();
                throw new RuntimeException("Interrupted while waiting for transport to start", ex);
            }
        }
    }

    // Marks startup complete (success OR failure) and wakes all awaitStarted() waiters.
    private synchronized void notifyStarted() {
        started = true;
        notifyAll();
    }

    // Records the first shutdown cause, unblocks starters, and notifies the listener
    // exactly once (idempotent via the 'shutdown' flag).
    private void notifyShutdown(Throwable t) {
        if (t != null) {
            log.log(Level.SEVERE, "Transport failed", t);
        }
        boolean notifyShutdown;
        synchronized (this) {
            notifyShutdown = !shutdown;
            if (!shutdown) {
                shutdownCause = t;
                shutdown = true;
                notifyStarted();
            }
        }
        if (notifyShutdown) {
            listener.transportShutdown();
        }
    }

    // Ensures shutdown has been signaled, then notifies termination exactly once.
    private void notifyTerminated(Throwable t) {
        notifyShutdown(t);
        boolean notifyTerminated;
        synchronized (this) {
            notifyTerminated = !terminated;
            terminated = true;
        }
        if (notifyTerminated) {
            listener.transportTerminated();
        }
    }

    // Assembles the HTTP/2 codec stack (client-side connection, logged frame reader/writer,
    // inbound flow controller sized by streamWindowSize) into a NettyClientHandler.
    private NettyClientHandler newHandler(Http2StreamRemovalPolicy streamRemovalPolicy) {
        try {
            Http2Connection connection = new DefaultHttp2Connection(false, streamRemovalPolicy);
            Http2FrameReader frameReader = new DefaultHttp2FrameReader();
            Http2FrameWriter frameWriter = new DefaultHttp2FrameWriter();
            Http2FrameLogger frameLogger = new Http2FrameLogger(InternalLogLevel.DEBUG);
            frameReader = new Http2InboundFrameLogger(frameReader, frameLogger);
            frameWriter = new Http2OutboundFrameLogger(frameWriter, frameLogger);
            DefaultHttp2LocalFlowController inboundFlow =
                    new DefaultHttp2LocalFlowController(connection, frameWriter);
            // Set the initial window size for new streams.
            inboundFlow.initialWindowSize(streamWindowSize);
            return new NettyClientHandler(connection, frameReader, frameWriter, inboundFlow,
                    connectionWindowSize);
        } catch (Http2Exception e) {
            throw new RuntimeException(e);
        }
    }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.samples.exposurenotification.storage;

import android.content.Context;

import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;

import com.google.common.collect.Iterables;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.samples.exposurenotification.Log;
import com.google.samples.exposurenotification.nearby.ScannedPacket;
import com.google.samples.exposurenotification.nearby.ScannedPacket.ScannedPacketBuilder;
import com.google.samples.exposurenotification.nearby.ScannedPacket.ScannedPacketContent;
import com.google.samples.exposurenotification.nearby.ScannedPacket.ScannedPacketContent.ScannedPacketContentBuilder;
import com.google.samples.exposurenotification.data.DayNumber;
import com.google.samples.exposurenotification.data.RollingProximityId;
import com.google.samples.exposurenotification.features.ContactTracingFeature;

import org.joda.time.Instant;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import static java.util.concurrent.TimeUnit.MILLISECONDS;

/**
 * The data store for contact records. Contact records are bundled BLE scan results stored in
 * key-value pairs. The key is a concatenation of {@link DayNumber} and {@link RollingProximityId}.
 * The value is a {@link ContactRecordValue};
 */
@SuppressWarnings("NewApi")
public class ContactRecordDataStore implements AutoCloseable {

    // In-memory backing store (this sample replaces the real persistent store).
    // NOTE(review): byte[] keys use identity equals/hashCode in HashMap, so a key
    // re-created from the same bytes will never match an existing entry; a
    // ByteString or wrapper with value equality is needed for lookups to work.
    private final Map<byte[], byte[]> store = new HashMap<>();

    private ContactRecordDataStore(Context context) throws StorageException {
    }

    // Factory kept synchronized to mirror the real single-open store semantics.
    public static synchronized ContactRecordDataStore open(Context context) throws StorageException {
        return new ContactRecordDataStore(context);
    }

    @Override
    public synchronized void close() {
    }

    /**
     * Gets the contact records that match the given {@code dayNumber} and {@code rollingProximityId}.
     *
     * @return the record if a match is found, or null otherwise.
     */
    @Nullable
    public ContactRecord getRecord(DayNumber dayNumber, RollingProximityId rollingProximityId) {
        ContactRecordKey key = new ContactRecordKey(dayNumber, rollingProximityId);
        try {
            // NOTE(review): 'key' is a ContactRecordKey but the map is keyed by byte[]
            // (putRecord stores key.getBytes()), so this lookup always returns null —
            // Map.get(Object) compiles but can never match. Should be store.get(key.getBytes())
            // plus a value-equality key type; confirm against ContactRecordKey.
            byte[] valueBytes = store.get(key);
            if (valueBytes == null) {
                return null;
            }
            ContactRecordValue parsedValue = ContactRecordValue.parseFrom(valueBytes);
            return new ContactRecord(
                    key,
                    ContactTracingFeature.contactRecordStoreCompactFormatEnabled()
                            ? convertToCompactFormat(parsedValue)
                            : parsedValue);
        } catch (InvalidProtocolBufferException e) {
            Log.log.atSevere().withCause(e).log("Error getting record");
        }
        return null;
    }

    /**
     * Returns every stored contact record, parsing each value and converting it to the
     * compact format when that feature is enabled. Entries that fail to parse abort the
     * scan (logged, partial list returned).
     */
    public List<ContactRecord> getAllRecords() {
        List<ContactRecord> records = new ArrayList<>();
        synchronized (store) {
            try {
                for (Entry<byte[], byte[]> iterator : store.entrySet()) {
                    if (iterator.getKey() == null) {
                        continue;
                    }
                    ContactRecordValue parsedValue = ContactRecordValue.parseFrom(iterator.getValue());
                    records.add(
                            new ContactRecord(
                                    ContactRecordKey.fromBytes(iterator.getKey()),
                                    ContactTracingFeature.contactRecordStoreCompactFormatEnabled()
                                            ? convertToCompactFormat(parsedValue)
                                            : parsedValue));
                }
            } catch (InvalidProtocolBufferException e) {
                Log.log.atSevere().withCause(e).log("Error fetching record");
            }
        }
        return records;
    }

    /**
     * Gets all scan records in {@link ScannedPacket} format, to pass the records from wearable to
     * phone. Should be used in wearable EN module only.
     */
    public List<ScannedPacket> getAllRecordsFromWearable() {
        ArrayList<ScannedPacket> scannedPacketArrayList = new ArrayList<>();
        List<ContactRecord> contactRecordList = getAllRecords();
        Log.log
                .atInfo()
                .log(
                        "getAllRecordsFormWearable called, contactRecordList.size=%d",
                        contactRecordList.size());
        for (ContactRecord contactRecord : contactRecordList) {
            if (contactRecord.getValue().getSightingRecordsCount() <= 0) {
                continue;
            }
            ArrayList<ScannedPacketContent> scannedPacketContents = new ArrayList<>();
            // the encrypted metadata of the FIRST sighting is used for the whole packet
            byte[] metadata = null;
            for (SightingRecord sightingRecord : contactRecord.value.getSightingRecordsList()) {
                if (metadata == null) {
                    metadata = sightingRecord.getAssociatedEncryptedMetadata().toByteArray();
                }
                scannedPacketContents.add(
                        new ScannedPacketContentBuilder()
                                .setEpochSeconds(sightingRecord.getEpochSeconds())
                                .setRssi(sightingRecord.getRssi())
                                .setPreviousScanEpochSeconds(sightingRecord.getPreviousScanEpochSeconds())
                                .build());
            }
            if (metadata == null) {
                continue;
            }
            scannedPacketArrayList.add(
                    new ScannedPacketBuilder()
                            .setId(contactRecord.key.rollingProximityId.getDirect())
                            .setEncryptedMetadata(metadata)
                            .setScannedPacketContents(scannedPacketContents.toArray(new ScannedPacketContent[0]))
                            .build());
        }
        Log.log
                .atInfo()
                .log(
                        "getAllRecordsFormWearable done, %d scanned packet found",
                        scannedPacketArrayList.size());
        return scannedPacketArrayList;
    }

    /**
     * Gets all the scanned IDs, for each contact record, only return the 16-byte raw ID.
     */
    public List<byte[]> getAllRawIds() {
        List<byte[]> rawIds = new ArrayList<>();
        synchronized (store) {
            for (Entry<byte[], byte[]> iterator : store.entrySet()) {
                if (iterator.getKey() == null) {
                    continue;
                }
                rawIds.add(ContactRecordKey.getRollingProximityId(iterator.getKey()));
            }
        }
        return rawIds;
    }

    /**
     * Adds or updates a contact record value with the key given by {@code dayNumber} and {@code
     * rollingProximityId}.
     */
    @VisibleForTesting
    public void putRecord(
            DayNumber dayNumber, RollingProximityId rollingProximityId, ContactRecordValue value) {
        ContactRecordKey key = new ContactRecordKey(dayNumber, rollingProximityId);
        synchronized (store) {
            // NOTE(review): each call creates a fresh byte[]; with identity-equality keys this
            // inserts a new entry instead of updating the old one (see class-level note).
            store.put(key.getBytes(), value.toByteArray());
        }
    }

    /**
     * Appends a {@link SightingRecord} to the {@link ContactRecordValue} keyed by the {@code
     * dayNumber} and {@code rollingProximityId}.
     */
    // NOTE(review): this method is truncated at the end of the visible chunk; the compact-format
    // branch below continues past this view.
    public void appendSightingRecord(
            Instant sightTime,
            byte[] id,
            int rssi,
            byte[] associatedEncryptedMetadata,
            int previousScanEpochSeconds) {
        DayNumber dayNumber = new DayNumber(sightTime);
        int sightTimeSeconds = (int) MILLISECONDS.toSeconds(sightTime.getMillis());
        RollingProximityId rollingProximityId = new RollingProximityId(id);
        if (!ContactTracingFeature.contactRecordStoreCompactFormatEnabled()) {
            // legacy format: append the new sighting to the existing value verbatim
            SightingRecord sightingRecord =
                    SightingRecord.newBuilder()
                            .setEpochSeconds(sightTimeSeconds)
                            .setRssi(rssi)
                            .setAssociatedEncryptedMetadata(ByteString.copyFrom(associatedEncryptedMetadata))
                            .setPreviousScanEpochSeconds(previousScanEpochSeconds)
                            .build();
            ContactRecord contactRecord = getRecord(dayNumber, rollingProximityId);
            ContactRecordValue contactRecordValue =
                    contactRecord == null ? ContactRecordValue.getDefaultInstance() : contactRecord.getValue();
            ContactRecordValue updatedContactRecordValue =
                    contactRecordValue.toBuilder().addSightingRecords(sightingRecord).build();
            putRecord(dayNumber, rollingProximityId, updatedContactRecordValue);
            return;
        }
        ContactRecord contactRecord = getRecord(dayNumber, rollingProximityId);
        ContactRecordValue existingValue =
                contactRecord == null ? null : convertToCompactFormat(contactRecord.getValue());
        ContactRecordValue updatedValue;
        if (existingValue == null || existingValue.getSightingRecordsCount() == 0) {
            // No records for this RPI, or the data is corrupted. We create new ContactRecordValue
            // completely.
updatedValue = ContactRecordValue.newBuilder() .setEncryptedMetadata(ByteString.copyFrom(associatedEncryptedMetadata)) .addSightingRecords( SightingRecord.newBuilder() .setEpochSeconds(sightTimeSeconds) .setPreviousScanEpochSeconds(previousScanEpochSeconds) .setRssiValues(wrapSingleRssi(rssi))) .build(); } else { SightingRecord.Builder sightingRecordBuilder = Iterables.getLast(existingValue.getSightingRecordsList()).toBuilder(); if (isSameScanCycle(sightingRecordBuilder, sightTimeSeconds)) { sightingRecordBuilder.setRssiValues( sightingRecordBuilder.getRssiValues().concat(wrapSingleRssi(rssi))); backFillEncryptedMetadataIfRequired( sightingRecordBuilder, existingValue.getEncryptedMetadata(), ByteString.copyFrom(associatedEncryptedMetadata)); updatedValue = existingValue.toBuilder() .setSightingRecords( existingValue.getSightingRecordsCount() - 1, sightingRecordBuilder) .build(); } else { updatedValue = existingValue.toBuilder() .addSightingRecords( SightingRecord.newBuilder() .setEpochSeconds(sightTimeSeconds) .setPreviousScanEpochSeconds(previousScanEpochSeconds) .setRssiValues(wrapSingleRssi(rssi))) .build(); } } putRecord(dayNumber, rollingProximityId, updatedValue); } private static boolean isSameScanCycle( SightingRecord.Builder sightingRecordBuilder, int sightingTimeSeconds) { // Simple heuristic that considering a new sighting packet to be in the same scan cycle of a // SightingRecord if the elapsed seconds since the beginning of SightingRecord is no bigger than // 1.5x maximum possible scan duration. return sightingTimeSeconds <= sightingRecordBuilder.getEpochSeconds() + (ContactTracingFeature.scanTimeSeconds() + ContactTracingFeature.scanTimeExtendForProfileInUseSeconds()) * 1.5; } /** * Ensures that `sightingRecordBuilder.encryptedMetadata` has the same (or larger) length as * `sightingRecordBuilder.rssiValues`. If not, fill the slots with the `encryptedMetadata`. 
     */
    private static void fillMissingEncryptedMetadata(
            SightingRecord.Builder sightingRecordBuilder, ByteString encryptedMetadata) {
        // Nothing to align against if no RSSI bytes have been recorded yet.
        if (!sightingRecordBuilder.hasRssiValues()) {
            return;
        }
        // Pad the metadata list until it is at least as long as the RSSI byte string, so each
        // RSSI byte has a corresponding metadata slot.
        while (sightingRecordBuilder.getEncryptedMetadataCount()
                < sightingRecordBuilder.getRssiValues().size()) {
            sightingRecordBuilder.addEncryptedMetadata(encryptedMetadata);
        }
    }

    // Encodes a single RSSI reading as a one-byte ByteString (compact-format representation).
    @VisibleForTesting
    static ByteString wrapSingleRssi(int rssi) {
        return ByteString.copyFrom(new byte[]{(byte) rssi});
    }

    /**
     * Back fills {@link SightingRecord.Builder#getEncryptedMetadataList()} per format requirement: If
     * {@code currentEncryptedMetadata} is not equal to {@code baseEncryptedMetadata}, we must fill
     * encrypted metadata for this sighting packet as well as all previous packets if absent. If
     * equal, we do not need to fill it as long as the whole {@link
     * SightingRecord.Builder#getEncryptedMetadataList()} is empty, otherwise we still need to set it.
     *
     * @param sightingRecordBuilder the builder to update
     * @param baseEncryptedMetadata the encrypted metadata in {@link ContactRecordValue}.
     * @param currentEncryptedMetadata the encrypted metadata associated with the last element of
     *     {@link SightingRecord.Builder#getRssiValues()}.
     */
    private static void backFillEncryptedMetadataIfRequired(
            SightingRecord.Builder sightingRecordBuilder,
            ByteString baseEncryptedMetadata,
            ByteString currentEncryptedMetadata) {
        // Fill only when the per-packet list is already in use, or when this packet's metadata
        // diverges from the record-level base metadata (which forces the explicit list format).
        if (sightingRecordBuilder.getEncryptedMetadataCount() != 0
                || !currentEncryptedMetadata.equals(baseEncryptedMetadata)) {
            fillMissingEncryptedMetadata(sightingRecordBuilder, baseEncryptedMetadata);
            // The last slot always reflects the newest packet's metadata.
            sightingRecordBuilder.setEncryptedMetadata(
                    sightingRecordBuilder.getEncryptedMetadataCount() - 1, currentEncryptedMetadata);
        }
    }

    /**
     * Converts a {@link ContactRecordValue} to the "compact format" if it is not already in that
     * format.
     *
     * <p>We can remove the conversion code once the build using compact format has been rolled out
     * for a while.
     */
    private static ContactRecordValue convertToCompactFormat(ContactRecordValue originalValue) {
        // Presence of record-level encrypted metadata marks a value already in compact format.
        if (originalValue.hasEncryptedMetadata()) {
            return originalValue;
        }
        ContactRecordValue.Builder updatedValueBuilder = ContactRecordValue.newBuilder();
        // Builder for the compact SightingRecord currently being accumulated; null until the
        // first legacy record is seen.
        SightingRecord.Builder sightingRecordBuilder = null;
        for (SightingRecord sightingRecord : originalValue.getSightingRecordsList()) {
            // Set the base encrypted metadata to be the first one we see from the list.
            if (!updatedValueBuilder.hasEncryptedMetadata()) {
                updatedValueBuilder.setEncryptedMetadata(sightingRecord.getAssociatedEncryptedMetadata());
            }
            if (sightingRecordBuilder != null
                    && isSameScanCycle(sightingRecordBuilder, sightingRecord.getEpochSeconds())) {
                // Same scan cycle: merge this legacy record's RSSI into the accumulating record.
                sightingRecordBuilder.setRssiValues(
                        sightingRecordBuilder.getRssiValues().concat(wrapSingleRssi(sightingRecord.getRssi())));
            } else {
                // New scan cycle: flush the accumulated record (if any) and start a new one,
                // moving the legacy per-record fields into their compact equivalents.
                if (sightingRecordBuilder != null) {
                    updatedValueBuilder.addSightingRecords(sightingRecordBuilder);
                }
                sightingRecordBuilder =
                        sightingRecord.toBuilder()
                                .setRssiValues(wrapSingleRssi(sightingRecord.getRssi()))
                                .clearAssociatedEncryptedMetadata()
                                .clearRssi();
            }
            backFillEncryptedMetadataIfRequired(
                    sightingRecordBuilder,
                    updatedValueBuilder.getEncryptedMetadata(),
                    sightingRecord.getAssociatedEncryptedMetadata());
        }
        // Flush the final accumulated record.
        if (sightingRecordBuilder != null) {
            updatedValueBuilder.addSightingRecords(sightingRecordBuilder);
        }
        return updatedValueBuilder.build();
    }

    /**
     * Deletes all contact records on a day given by the {@link DayNumber}.
     *
     * @return the total number of entries deleted
     */
    public int deletesRecords(DayNumber dayNumber) throws StorageException {
        // FIXME: delete records for a given day number.
        return 0;
    }

    /**
     * Deletes all contact records prior (inclusive) to a day given by the {@link DayNumber}.
     *
     * @return the total number of entries deleted
     */
    public int deletePriorRecords(DayNumber lastDayNumberToDelete) throws StorageException {
        // FIXME: Delete prescribed records.
        return 0;
    }

    /**
     * An immutable data class represents a contact record. The key-value pair data can be accessed
     * through {@link #getKey()} and {@link #getValue()}.
     */
    public static class ContactRecord {
        private final ContactRecordKey key;
        private final ContactRecordValue value;

        public ContactRecord(ContactRecordKey key, ContactRecordValue value) {
            this.key = key;
            this.value = value;
        }

        public ContactRecordKey getKey() {
            return key;
        }

        public ContactRecordValue getValue() {
            return value;
        }
    }

    /**
     * An immutable data class represents the key for a {@link ContactRecord}. The byte array
     * representation of this class is used as the key of underlying data store.
     */
    public static class ContactRecordKey {
        private final DayNumber dayNumber;
        private final RollingProximityId rollingProximityId;

        public ContactRecordKey(DayNumber dayNumber, RollingProximityId rollingProximityId) {
            this.dayNumber = dayNumber;
            this.rollingProximityId = rollingProximityId;
        }

        public DayNumber getDayNumber() {
            return dayNumber;
        }

        public RollingProximityId getRollingProximityId() {
            return rollingProximityId;
        }

        // Extracts the raw rolling proximity ID bytes from a serialized key by stripping the
        // fixed-size DayNumber prefix.
        private static byte[] getRollingProximityId(byte[] contactRecordKeyBytes) {
            return Arrays.copyOfRange(
                    contactRecordKeyBytes, DayNumber.getSizeBytes(), contactRecordKeyBytes.length);
        }

        /**
         * Gets the byte array representation of the key.
         */
        public byte[] getBytes() {
            // Layout: [DayNumber bytes][RollingProximityId bytes].
            ByteBuffer byteBuffer =
                    ByteBuffer.allocate(DayNumber.getSizeBytes() + rollingProximityId.length);
            dayNumber.putIn(byteBuffer);
            rollingProximityId.putIn(byteBuffer);
            return byteBuffer.array();
        }

        // Inverse of getBytes(): parses the DayNumber prefix then the fixed-length ID.
        public static ContactRecordKey fromBytes(byte[] bytes) {
            ByteBuffer byteBuffer = ByteBuffer.wrap(bytes);
            DayNumber dayNumber = DayNumber.getFrom(byteBuffer);
            byte[] rollingProximityIdBytes = new byte[RollingProximityId.MIN_ID.length];
            byteBuffer.get(rollingProximityIdBytes);
            return new ContactRecordKey(dayNumber, new RollingProximityId(rollingProximityIdBytes, true));
        }
    }
}
/* * Copyright 2014-2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.store.flow.impl; import com.google.common.base.Objects; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.util.concurrent.Futures; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Modified; import org.apache.felix.scr.annotations.Property; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferenceCardinality; import org.apache.felix.scr.annotations.Service; import org.onlab.util.KryoNamespace; import org.onlab.util.Tools; import org.onosproject.cfg.ComponentConfigService; import org.onosproject.cluster.ClusterService; import org.onosproject.cluster.NodeId; import org.onosproject.core.CoreService; import org.onosproject.core.IdGenerator; import org.onosproject.mastership.MastershipService; import org.onosproject.net.DeviceId; import org.onosproject.net.device.DeviceService; import org.onosproject.net.flow.CompletedBatchOperation; import org.onosproject.net.flow.DefaultFlowEntry; import org.onosproject.net.flow.FlowEntry; import org.onosproject.net.flow.FlowEntry.FlowEntryState; import org.onosproject.net.flow.FlowId; import 
org.onosproject.net.flow.FlowRule; import org.onosproject.net.flow.FlowRuleBatchEntry; import org.onosproject.net.flow.FlowRuleBatchEntry.FlowRuleOperation; import org.onosproject.net.flow.FlowRuleBatchEvent; import org.onosproject.net.flow.FlowRuleBatchOperation; import org.onosproject.net.flow.FlowRuleBatchRequest; import org.onosproject.net.flow.FlowRuleEvent; import org.onosproject.net.flow.FlowRuleEvent.Type; import org.onosproject.net.flow.FlowRuleService; import org.onosproject.net.flow.FlowRuleStore; import org.onosproject.net.flow.FlowRuleStoreDelegate; import org.onosproject.net.flow.StoredFlowEntry; import org.onosproject.store.AbstractStore; import org.onosproject.store.cluster.messaging.ClusterCommunicationService; import org.onosproject.store.cluster.messaging.ClusterMessage; import org.onosproject.store.cluster.messaging.ClusterMessageHandler; import org.onosproject.store.flow.ReplicaInfoEvent; import org.onosproject.store.flow.ReplicaInfoEventListener; import org.onosproject.store.flow.ReplicaInfoService; import org.onosproject.store.serializers.KryoSerializer; import org.onosproject.store.serializers.StoreSerializer; import org.onosproject.store.serializers.custom.DistributedStoreSerializers; import org.osgi.service.component.ComponentContext; import org.slf4j.Logger; import java.util.Collections; import java.util.Dictionary; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import static com.google.common.base.Strings.isNullOrEmpty; import static org.onlab.util.Tools.get; import static org.onlab.util.Tools.groupedThreads; import static org.onosproject.net.flow.FlowRuleEvent.Type.RULE_REMOVED; import 
static org.onosproject.store.flow.impl.FlowStoreMessageSubjects.*;
import static org.slf4j.LoggerFactory.getLogger;

/**
 * Manages inventory of flow rules using a distributed state management protocol.
 */
@Component(immediate = true, enabled = true)
@Service
public class NewDistributedFlowRuleStore
        extends AbstractStore<FlowRuleBatchEvent, FlowRuleStoreDelegate>
        implements FlowRuleStore {

    private final Logger log = getLogger(getClass());

    private static final int MESSAGE_HANDLER_THREAD_POOL_SIZE = 8;
    private static final boolean DEFAULT_BACKUP_ENABLED = true;
    private static final int DEFAULT_BACKUP_PERIOD_MILLIS = 2000;
    private static final long FLOW_RULE_STORE_TIMEOUT_MILLIS = 5000;
    // number of devices whose flow entries will be backed up in one communication round
    private static final int FLOW_TABLE_BACKUP_BATCH_SIZE = 1;

    @Property(name = "msgHandlerPoolSize", intValue = MESSAGE_HANDLER_THREAD_POOL_SIZE,
            label = "Number of threads in the message handler pool")
    private int msgHandlerPoolSize = MESSAGE_HANDLER_THREAD_POOL_SIZE;

    @Property(name = "backupEnabled", boolValue = DEFAULT_BACKUP_ENABLED,
            label = "Indicates whether backups are enabled or not")
    private boolean backupEnabled = DEFAULT_BACKUP_ENABLED;

    @Property(name = "backupPeriod", intValue = DEFAULT_BACKUP_PERIOD_MILLIS,
            label = "Delay in ms between successive backup runs")
    private int backupPeriod = DEFAULT_BACKUP_PERIOD_MILLIS;

    // Local (master-side) flow table; also listens for replica changes to trigger backups.
    private InternalFlowTable flowTable = new InternalFlowTable();

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected ReplicaInfoService replicaInfoManager;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected ClusterCommunicationService clusterCommunicator;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected ClusterService clusterService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected DeviceService deviceService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected CoreService coreService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected ComponentConfigService configService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected MastershipService mastershipService;

    // Maps a pending batch id to the node that submitted it, so the completion event can be
    // routed back to the originator (see batchOperationComplete).
    private Map<Long, NodeId> pendingResponses = Maps.newConcurrentMap();
    private ExecutorService messageHandlingExecutor;

    private ScheduledFuture<?> backupTask;
    private final ScheduledExecutorService backupSenderExecutor =
            Executors.newSingleThreadScheduledExecutor(groupedThreads("onos/flow", "backup-sender"));

    protected static final StoreSerializer SERIALIZER = new KryoSerializer() {
        @Override
        protected void setupKryoPool() {
            serializerPool = KryoNamespace.newBuilder()
                    .register(DistributedStoreSerializers.STORE_COMMON)
                    .nextId(DistributedStoreSerializers.STORE_CUSTOM_BEGIN)
                    .build();
        }
    };

    private IdGenerator idGenerator;
    private NodeId local;

    // OSGi lifecycle: wire up config, message handlers and (optionally) the periodic backup task.
    @Activate
    public void activate(ComponentContext context) {
        configService.registerProperties(getClass());

        idGenerator = coreService.getIdGenerator(FlowRuleService.FLOW_OP_TOPIC);

        local = clusterService.getLocalNode().id();

        messageHandlingExecutor = Executors.newFixedThreadPool(
                msgHandlerPoolSize, groupedThreads("onos/store/flow", "message-handlers"));

        registerMessageHandlers(messageHandlingExecutor);

        if (backupEnabled) {
            replicaInfoManager.addListener(flowTable);
            backupTask = backupSenderExecutor.scheduleWithFixedDelay(
                    flowTable::backup, 0, backupPeriod, TimeUnit.MILLISECONDS);
        }

        logConfig("Started");
    }

    @Deactivate
    public void deactivate(ComponentContext context) {
        if (backupEnabled) {
            replicaInfoManager.removeListener(flowTable);
            backupTask.cancel(true);
        }
        configService.unregisterProperties(getClass(), false);
        unregisterMessageHandlers();
        messageHandlingExecutor.shutdownNow();
        backupSenderExecutor.shutdownNow();
        log.info("Stopped");
    }

    // Re-reads component configuration and applies changes to pool size, backup enablement and
    // backup period, restarting the backup task / handler pool only when needed.
    @SuppressWarnings("rawtypes")
    @Modified
    public void modified(ComponentContext context) {
        if (context == null) {
            backupEnabled = DEFAULT_BACKUP_ENABLED;
            logConfig("Default config");
            return;
        }

        Dictionary properties = context.getProperties();
        int newPoolSize;
        boolean newBackupEnabled;
        int newBackupPeriod;
        try {
            String s = get(properties, "msgHandlerPoolSize");
            newPoolSize = isNullOrEmpty(s) ? msgHandlerPoolSize : Integer.parseInt(s.trim());

            s = get(properties, "backupEnabled");
            newBackupEnabled = isNullOrEmpty(s) ? backupEnabled : Boolean.parseBoolean(s.trim());

            s = get(properties, "backupPeriod");
            newBackupPeriod = isNullOrEmpty(s) ? backupPeriod : Integer.parseInt(s.trim());
        } catch (NumberFormatException | ClassCastException e) {
            // Malformed config: fall back to compile-time defaults for all three values.
            newPoolSize = MESSAGE_HANDLER_THREAD_POOL_SIZE;
            newBackupEnabled = DEFAULT_BACKUP_ENABLED;
            newBackupPeriod = DEFAULT_BACKUP_PERIOD_MILLIS;
        }

        boolean restartBackupTask = false;

        if (newBackupEnabled != backupEnabled) {
            backupEnabled = newBackupEnabled;
            if (!backupEnabled) {
                replicaInfoManager.removeListener(flowTable);
                if (backupTask != null) {
                    backupTask.cancel(false);
                    backupTask = null;
                }
            } else {
                replicaInfoManager.addListener(flowTable);
            }
            restartBackupTask = backupEnabled;
        }
        if (newBackupPeriod != backupPeriod) {
            backupPeriod = newBackupPeriod;
            restartBackupTask = backupEnabled;
        }
        if (restartBackupTask) {
            if (backupTask != null) {
                // cancel previously running task
                backupTask.cancel(false);
            }
            backupTask = backupSenderExecutor.scheduleWithFixedDelay(
                    flowTable::backup, 0, backupPeriod, TimeUnit.MILLISECONDS);
        }
        if (newPoolSize != msgHandlerPoolSize) {
            msgHandlerPoolSize = newPoolSize;
            ExecutorService oldMsgHandler = messageHandlingExecutor;
            messageHandlingExecutor = Executors.newFixedThreadPool(
                    msgHandlerPoolSize, groupedThreads("onos/store/flow", "message-handlers"));
            // replace previously registered handlers.
registerMessageHandlers(messageHandlingExecutor); oldMsgHandler.shutdown(); } logConfig("Reconfigured"); } private void registerMessageHandlers(ExecutorService executor) { clusterCommunicator.addSubscriber(APPLY_BATCH_FLOWS, new OnStoreBatch(), executor); clusterCommunicator.<FlowRuleBatchEvent>addSubscriber( REMOTE_APPLY_COMPLETED, SERIALIZER::decode, this::notifyDelegate, executor); clusterCommunicator.addSubscriber( GET_FLOW_ENTRY, SERIALIZER::decode, flowTable::getFlowEntry, SERIALIZER::encode, executor); clusterCommunicator.addSubscriber( GET_DEVICE_FLOW_ENTRIES, SERIALIZER::decode, flowTable::getFlowEntries, SERIALIZER::encode, executor); clusterCommunicator.addSubscriber( REMOVE_FLOW_ENTRY, SERIALIZER::decode, this::removeFlowRuleInternal, SERIALIZER::encode, executor); clusterCommunicator.addSubscriber( REMOVE_FLOW_ENTRY, SERIALIZER::decode, this::removeFlowRuleInternal, SERIALIZER::encode, executor); clusterCommunicator.addSubscriber( FLOW_TABLE_BACKUP, SERIALIZER::decode, flowTable::onBackupReceipt, SERIALIZER::encode, executor); } private void unregisterMessageHandlers() { clusterCommunicator.removeSubscriber(REMOVE_FLOW_ENTRY); clusterCommunicator.removeSubscriber(GET_DEVICE_FLOW_ENTRIES); clusterCommunicator.removeSubscriber(GET_FLOW_ENTRY); clusterCommunicator.removeSubscriber(APPLY_BATCH_FLOWS); clusterCommunicator.removeSubscriber(REMOTE_APPLY_COMPLETED); clusterCommunicator.removeSubscriber(FLOW_TABLE_BACKUP); } private void logConfig(String prefix) { log.info("{} with msgHandlerPoolSize = {}; backupEnabled = {}, backupPeriod = {}", prefix, msgHandlerPoolSize, backupEnabled, backupPeriod); } // This is not a efficient operation on a distributed sharded // flow store. We need to revisit the need for this operation or at least // make it device specific. 
    @Override
    public int getFlowRuleCount() {
        // Sums per-device entry counts; each getFlowEntries call may go remote to that
        // device's master, so this is expensive cluster-wide.
        AtomicInteger sum = new AtomicInteger(0);
        deviceService.getDevices().forEach(device -> sum.addAndGet(Iterables.size(getFlowEntries(device.id()))));
        return sum.get();
    }

    @Override
    public FlowEntry getFlowEntry(FlowRule rule) {
        NodeId master = mastershipService.getMasterFor(rule.deviceId());

        if (master == null) {
            log.debug("Failed to getFlowEntry: No master for {}", rule.deviceId());
            return null;
        }

        // Serve locally when this node is the master; otherwise forward to the master and
        // wait up to FLOW_RULE_STORE_TIMEOUT_MILLIS, returning null on timeout/failure.
        if (Objects.equal(local, master)) {
            return flowTable.getFlowEntry(rule);
        }

        log.trace("Forwarding getFlowEntry to {}, which is the primary (master) for device {}",
                master, rule.deviceId());

        return Tools.futureGetOrElse(clusterCommunicator.sendAndReceive(rule,
                FlowStoreMessageSubjects.GET_FLOW_ENTRY,
                SERIALIZER::encode,
                SERIALIZER::decode,
                master),
                FLOW_RULE_STORE_TIMEOUT_MILLIS,
                TimeUnit.MILLISECONDS,
                null);
    }

    @Override
    public Iterable<FlowEntry> getFlowEntries(DeviceId deviceId) {
        NodeId master = mastershipService.getMasterFor(deviceId);

        if (master == null) {
            log.debug("Failed to getFlowEntries: No master for {}", deviceId);
            return Collections.emptyList();
        }

        // Same master-or-forward pattern as getFlowEntry, with an empty list as the
        // timeout/failure fallback.
        if (Objects.equal(local, master)) {
            return flowTable.getFlowEntries(deviceId);
        }

        log.trace("Forwarding getFlowEntries to {}, which is the primary (master) for device {}",
                master, deviceId);

        return Tools.futureGetOrElse(clusterCommunicator.sendAndReceive(deviceId,
                FlowStoreMessageSubjects.GET_DEVICE_FLOW_ENTRIES,
                SERIALIZER::encode,
                SERIALIZER::decode,
                master),
                FLOW_RULE_STORE_TIMEOUT_MILLIS,
                TimeUnit.MILLISECONDS,
                Collections.emptyList());
    }

    @Override
    public void storeFlowRule(FlowRule rule) {
        // Single-rule store is expressed as a one-element ADD batch.
        storeBatch(new FlowRuleBatchOperation(
                Collections.singletonList(new FlowRuleBatchEntry(FlowRuleOperation.ADD, rule)),
                rule.deviceId(), idGenerator.getNewId()));
    }

    @Override
    public void storeBatch(FlowRuleBatchOperation operation) {
        // Empty batch: nothing to apply, complete immediately.
        if (operation.getOperations().isEmpty()) {
            notifyDelegate(FlowRuleBatchEvent.completed(
                    new FlowRuleBatchRequest(operation.id(), Collections.emptySet()),
                    new CompletedBatchOperation(true, Collections.emptySet(), operation.deviceId())));
            return;
        }

        DeviceId deviceId = operation.deviceId();
        NodeId master = mastershipService.getMasterFor(deviceId);

        if (master == null) {
            // No master: record the operations locally (marks removals pending) and report
            // the batch as completed.
            log.warn("No master for {} : flows will be marked for removal", deviceId);

            updateStoreInternal(operation);

            notifyDelegate(FlowRuleBatchEvent.completed(
                    new FlowRuleBatchRequest(operation.id(), Collections.emptySet()),
                    new CompletedBatchOperation(true, Collections.emptySet(), operation.deviceId())));
            return;
        }

        if (Objects.equal(local, master)) {
            storeBatchInternal(operation);
            return;
        }

        log.trace("Forwarding storeBatch to {}, which is the primary (master) for device {}",
                master, deviceId);

        // Forward to the master; on send failure report every target rule as failed.
        clusterCommunicator.unicast(operation,
                APPLY_BATCH_FLOWS,
                SERIALIZER::encode,
                master)
                .whenComplete((result, error) -> {
                    if (error != null) {
                        log.warn("Failed to storeBatch: {} to {}", operation, master, error);

                        Set<FlowRule> allFailures = operation.getOperations()
                                .stream()
                                .map(op -> op.target())
                                .collect(Collectors.toSet());

                        notifyDelegate(FlowRuleBatchEvent.completed(
                                new FlowRuleBatchRequest(operation.id(), Collections.emptySet()),
                                new CompletedBatchOperation(false, allFailures, deviceId)));
                    }
                });
    }

    // Applies a batch on the master: updates the local table, then either completes (no
    // effective ops) or emits a "requested" event for the provider to program the device.
    private void storeBatchInternal(FlowRuleBatchOperation operation) {

        final DeviceId did = operation.deviceId();
        //final Collection<FlowEntry> ft = flowTable.getFlowEntries(did);
        Set<FlowRuleBatchEntry> currentOps = updateStoreInternal(operation);
        if (currentOps.isEmpty()) {
            batchOperationComplete(FlowRuleBatchEvent.completed(
                    new FlowRuleBatchRequest(operation.id(), Collections.emptySet()),
                    new CompletedBatchOperation(true, Collections.emptySet(), did)));
            return;
        }

        notifyDelegate(FlowRuleBatchEvent.requested(new
                FlowRuleBatchRequest(operation.id(),
                currentOps), operation.deviceId()));
    }

    // Mutates the local flow table per the batch and returns the subset of operations that
    // actually had an effect (REMOVE of a missing entry and MODIFY are dropped).
    private Set<FlowRuleBatchEntry> updateStoreInternal(FlowRuleBatchOperation operation) {
        return operation.getOperations().stream().map(
                op -> {
                    StoredFlowEntry entry;
                    switch (op.operator()) {
                        case ADD:
                            entry = new DefaultFlowEntry(op.target());
                            // always add requested FlowRule
                            // Note: 2 equal FlowEntry may have different treatment
                            flowTable.remove(entry.deviceId(), entry);
                            flowTable.add(entry);
                            return op;
                        case REMOVE:
                            entry = flowTable.getFlowEntry(op.target());
                            if (entry != null) {
                                entry.setState(FlowEntryState.PENDING_REMOVE);
                                return op;
                            }
                            break;
                        case MODIFY:
                            //TODO: figure this out at some point
                            break;
                        default:
                            log.warn("Unknown flow operation operator: {}", op.operator());
                    }
                    return null;
                }
        ).filter(op -> op != null).collect(Collectors.toSet());
    }

    @Override
    public void deleteFlowRule(FlowRule rule) {
        // Single-rule delete is expressed as a one-element REMOVE batch.
        storeBatch(
                new FlowRuleBatchOperation(
                        Collections.singletonList(
                                new FlowRuleBatchEntry(
                                        FlowRuleOperation.REMOVE,
                                        rule)), rule.deviceId(), idGenerator.getNewId()));
    }

    @Override
    public FlowRuleEvent addOrUpdateFlowRule(FlowEntry rule) {
        // Only the master may mutate entry state; non-masters log and do nothing.
        NodeId master = mastershipService.getMasterFor(rule.deviceId());
        if (Objects.equal(local, master)) {
            return addOrUpdateFlowRuleInternal(rule);
        }

        log.warn("Tried to update FlowRule {} state,"
                + " while the Node was not the master.", rule);
        return null;
    }

    private FlowRuleEvent addOrUpdateFlowRuleInternal(FlowEntry rule) {
        // check if this new rule is an update to an existing entry
        StoredFlowEntry stored = flowTable.getFlowEntry(rule);
        if (stored != null) {
            // Refresh stats; promote PENDING_ADD to ADDED on the first successful update.
            stored.setBytes(rule.bytes());
            stored.setLife(rule.life());
            stored.setPackets(rule.packets());
            if (stored.state() == FlowEntryState.PENDING_ADD) {
                stored.setState(FlowEntryState.ADDED);
                return new FlowRuleEvent(Type.RULE_ADDED, rule);
            }
            return new FlowRuleEvent(Type.RULE_UPDATED, rule);
        }

        // TODO: Confirm if this behavior is correct. See SimpleFlowRuleStore
        // TODO: also update backup if the behavior is correct.
        flowTable.add(rule);
        return null;
    }

    @Override
    public FlowRuleEvent removeFlowRule(FlowEntry rule) {
        final DeviceId deviceId = rule.deviceId();
        NodeId master = mastershipService.getMasterFor(deviceId);

        if (Objects.equal(local, master)) {
            // bypass and handle it locally
            return removeFlowRuleInternal(rule);
        }

        if (master == null) {
            log.warn("Failed to removeFlowRule: No master for {}", deviceId);
            // TODO: revisit if this should be null (="no-op") or Exception
            return null;
        }

        log.trace("Forwarding removeFlowRule to {}, which is the master for device {}",
                master, deviceId);

        // Blocking forward to the master; timeout surfaces as a RuntimeException.
        return Futures.get(clusterCommunicator.sendAndReceive(
                rule,
                REMOVE_FLOW_ENTRY,
                SERIALIZER::encode,
                SERIALIZER::decode,
                master),
                FLOW_RULE_STORE_TIMEOUT_MILLIS,
                TimeUnit.MILLISECONDS,
                RuntimeException.class);
    }

    private FlowRuleEvent removeFlowRuleInternal(FlowEntry rule) {
        final DeviceId deviceId = rule.deviceId();
        // This is where one could mark a rule as removed and still keep it in the store.
        final boolean removed = flowTable.remove(deviceId, rule);
        //flowEntries.remove(deviceId, rule);
        // Null return means the entry was not present (no event to emit).
        return removed ?
                new FlowRuleEvent(RULE_REMOVED, rule) : null;
    }

    @Override
    public void batchOperationComplete(FlowRuleBatchEvent event) {
        //FIXME: need a per device pending response

        // If the batch originated on a remote node, relay the completion back to it;
        // otherwise notify the local delegate.
        NodeId nodeId = pendingResponses.remove(event.subject().batchId());
        if (nodeId == null) {
            notifyDelegate(event);
        } else {
            // TODO check unicast return value
            clusterCommunicator.unicast(event, REMOTE_APPLY_COMPLETED, SERIALIZER::encode, nodeId);
            //error log: log.warn("Failed to respond to peer for batch operation result");
        }
    }

    /**
     * Handles APPLY_BATCH_FLOWS messages: applies the batch locally when this node is the
     * master, otherwise responds immediately with an all-failed result.
     */
    private final class OnStoreBatch implements ClusterMessageHandler {

        @Override
        public void handle(final ClusterMessage message) {
            FlowRuleBatchOperation operation = SERIALIZER.decode(message.payload());
            log.debug("received batch request {}", operation);

            final DeviceId deviceId = operation.deviceId();
            NodeId master = mastershipService.getMasterFor(deviceId);
            if (!Objects.equal(local, master)) {
                Set<FlowRule> failures = new HashSet<>(operation.size());
                for (FlowRuleBatchEntry op : operation.getOperations()) {
                    failures.add(op.target());
                }
                CompletedBatchOperation allFailed = new CompletedBatchOperation(false, failures, deviceId);
                // This node is no longer the master, respond as all failed.
                // TODO: we might want to wrap response in envelope
                // to distinguish sw programming failure and hand over
                // it make sense in the latter case to retry immediately.
                message.respond(SERIALIZER.encode(allFailed));
                return;
            }

            // Remember the requester so batchOperationComplete can relay the result back.
            pendingResponses.put(operation.id(), message.sender());
            storeBatchInternal(operation);
        }
    }

    /**
     * Master-side in-memory flow table plus backup bookkeeping. Reacts to replica changes by
     * re-sending backups to the (possibly new) backup node.
     */
    private class InternalFlowTable implements ReplicaInfoEventListener {

        // deviceId -> (flowId -> entries); concurrent outer map, copy-on-write inner sets.
        private final Map<DeviceId, Map<FlowId, Set<StoredFlowEntry>>>
                flowEntries = Maps.newConcurrentMap();

        // Per-device bookkeeping used to decide what (and where) to back up.
        private final Map<DeviceId, Long> lastBackupTimes = Maps.newConcurrentMap();
        private final Map<DeviceId, Long> lastUpdateTimes = Maps.newConcurrentMap();
        private final Map<DeviceId, NodeId> lastBackupNodes = Maps.newConcurrentMap();

        @Override
        public void event(ReplicaInfoEvent event) {
            if (!backupEnabled) {
                return;
            }
            if (event.type() == ReplicaInfoEvent.Type.BACKUPS_CHANGED) {
                DeviceId deviceId = event.subject();
                NodeId master = mastershipService.getMasterFor(deviceId);
                if (!Objects.equal(local, master)) {
                    // ignore since this event is for a device this node does not manage.
                    return;
                }
                NodeId newBackupNode = getBackupNode(deviceId);
                NodeId currentBackupNode = lastBackupNodes.get(deviceId);
                if (Objects.equal(newBackupNode, currentBackupNode)) {
                    // ignore since backup location hasn't changed.
                    return;
                }
                if (currentBackupNode != null && newBackupNode == null) {
                    // Current backup node is most likely down and no alternate backup node
                    // has been chosen. Clear current backup location so that we can resume
                    // backups when either current backup comes online or a different backup node
                    // is chosen.
                    log.warn("Lost backup location {} for deviceId {} and no alternate backup node exists. "
                            + "Flows can be lost if the master goes down", currentBackupNode, deviceId);
                    lastBackupNodes.remove(deviceId);
                    lastBackupTimes.remove(deviceId);
                    return;
                    // TODO: Pick any available node as backup and ensure hand-off occurs when
                    // a new master is elected.
} log.debug("Backup location for {} has changed from {} to {}.", deviceId, currentBackupNode, newBackupNode); backupSenderExecutor.schedule(() -> backupFlowEntries(newBackupNode, Sets.newHashSet(deviceId)), 0, TimeUnit.SECONDS); } } private void sendBackups(NodeId nodeId, Set<DeviceId> deviceIds) { // split up the devices into smaller batches and send them separately. Iterables.partition(deviceIds, FLOW_TABLE_BACKUP_BATCH_SIZE) .forEach(ids -> backupFlowEntries(nodeId, Sets.newHashSet(ids))); } private void backupFlowEntries(NodeId nodeId, Set<DeviceId> deviceIds) { if (deviceIds.isEmpty()) { return; } log.debug("Sending flowEntries for devices {} to {} as backup.", deviceIds, nodeId); Map<DeviceId, Map<FlowId, Set<StoredFlowEntry>>> deviceFlowEntries = Maps.newConcurrentMap(); deviceIds.forEach(id -> deviceFlowEntries.put(id, ImmutableMap.copyOf(getFlowTable(id)))); clusterCommunicator.<Map<DeviceId, Map<FlowId, Set<StoredFlowEntry>>>, Set<DeviceId>>sendAndReceive( deviceFlowEntries, FLOW_TABLE_BACKUP, SERIALIZER::encode, SERIALIZER::decode, nodeId) .whenComplete((backedupDevices, error) -> { Set<DeviceId> devicesNotBackedup = error != null ? deviceFlowEntries.keySet() : Sets.difference(deviceFlowEntries.keySet(), backedupDevices); if (devicesNotBackedup.size() > 0) { log.warn("Failed to backup devices: {}. Reason: {}", devicesNotBackedup, error.getMessage()); } if (backedupDevices != null) { backedupDevices.forEach(id -> { lastBackupTimes.put(id, System.currentTimeMillis()); lastBackupNodes.put(id, nodeId); }); } }); } /** * Returns the flow table for specified device. * * @param deviceId identifier of the device * @return Map representing Flow Table of given device. 
*/ private Map<FlowId, Set<StoredFlowEntry>> getFlowTable(DeviceId deviceId) { return flowEntries.computeIfAbsent(deviceId, id -> Maps.newConcurrentMap()); } private Set<StoredFlowEntry> getFlowEntriesInternal(DeviceId deviceId, FlowId flowId) { return getFlowTable(deviceId).computeIfAbsent(flowId, id -> Sets.newCopyOnWriteArraySet()); } private StoredFlowEntry getFlowEntryInternal(FlowRule rule) { Set<StoredFlowEntry> flowEntries = getFlowEntriesInternal(rule.deviceId(), rule.id()); return flowEntries.stream() .filter(entry -> Objects.equal(entry, rule)) .findAny() .orElse(null); } private Set<FlowEntry> getFlowEntriesInternal(DeviceId deviceId) { Set<FlowEntry> result = Sets.newHashSet(); getFlowTable(deviceId).values().forEach(result::addAll); return result; } public StoredFlowEntry getFlowEntry(FlowRule rule) { return getFlowEntryInternal(rule); } public Set<FlowEntry> getFlowEntries(DeviceId deviceId) { return getFlowEntriesInternal(deviceId); } public void add(FlowEntry rule) { getFlowEntriesInternal(rule.deviceId(), rule.id()).add((StoredFlowEntry) rule); lastUpdateTimes.put(rule.deviceId(), System.currentTimeMillis()); } public boolean remove(DeviceId deviceId, FlowEntry rule) { try { return getFlowEntriesInternal(deviceId, rule.id()).remove(rule); } finally { lastUpdateTimes.put(deviceId, System.currentTimeMillis()); } } private NodeId getBackupNode(DeviceId deviceId) { List<NodeId> deviceStandbys = replicaInfoManager.getReplicaInfoFor(deviceId).backups(); // pick the standby which is most likely to become next master return deviceStandbys.isEmpty() ? null : deviceStandbys.get(0); } private void backup() { if (!backupEnabled) { return; } try { // determine the set of devices that we need to backup during this run. 
Set<DeviceId> devicesToBackup = mastershipService.getDevicesOf(local) .stream() .filter(deviceId -> { Long lastBackupTime = lastBackupTimes.get(deviceId); Long lastUpdateTime = lastUpdateTimes.get(deviceId); NodeId lastBackupNode = lastBackupNodes.get(deviceId); NodeId newBackupNode = getBackupNode(deviceId); return lastBackupTime == null || !Objects.equal(lastBackupNode, newBackupNode) || (lastUpdateTime != null && lastUpdateTime > lastBackupTime); }) .collect(Collectors.toSet()); // compute a mapping from node to the set of devices whose flow entries it should backup Map<NodeId, Set<DeviceId>> devicesToBackupByNode = Maps.newHashMap(); devicesToBackup.forEach(deviceId -> { NodeId backupLocation = getBackupNode(deviceId); if (backupLocation != null) { devicesToBackupByNode.computeIfAbsent(backupLocation, nodeId -> Sets.newHashSet()) .add(deviceId); } }); // send the device flow entries to their respective backup nodes devicesToBackupByNode.forEach(this::sendBackups); } catch (Exception e) { log.error("Backup failed.", e); } } private Set<DeviceId> onBackupReceipt(Map<DeviceId, Map<FlowId, Set<StoredFlowEntry>>> flowTables) { log.debug("Received flowEntries for {} to backup", flowTables.keySet()); Set<DeviceId> backedupDevices = Sets.newHashSet(); try { flowTables.forEach((deviceId, deviceFlowTable) -> { // Only process those devices are that not managed by the local node. if (!Objects.equal(local, mastershipService.getMasterFor(deviceId))) { Map<FlowId, Set<StoredFlowEntry>> backupFlowTable = getFlowTable(deviceId); backupFlowTable.clear(); backupFlowTable.putAll(deviceFlowTable); backedupDevices.add(deviceId); } }); } catch (Exception e) { log.warn("Failure processing backup request", e); } return backedupDevices; } } }
package gov.va.cpe.vpr.web;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyMap;
import static org.mockito.Mockito.when;

import gov.va.cpe.vpr.NotFoundException;
import gov.va.cpe.vpr.Problem;
import gov.va.cpe.vpr.Result;
import gov.va.cpe.vpr.VitalSign;
import gov.va.cpe.vpr.mapping.ILinkService;
import gov.va.cpe.vpr.pom.IGenericPatientObjectDAO;
import gov.va.cpe.vpr.queryeng.query.QueryDef;
import gov.va.cpe.vpr.ws.link.LinkRelation;
import gov.va.hmp.feed.atom.Link;
import gov.va.hmp.healthtime.PointInTime;
import gov.va.hmp.hub.dao.json.JsonAssert;
import gov.va.hmp.jsonc.JsonCCollection;
import gov.va.hmp.web.servlet.view.ModelAndViewFactory;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.joda.time.DateTime;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.web.servlet.ModelAndView;

/**
 * Unit tests for {@link TrendController}: trend-data extraction from
 * {@link Result} objects and the link/render endpoints, with the DAO and
 * link service mocked out.
 */
public class TrendControllerTest {

    private TrendController tc;
    private MockHttpServletRequest mockRequest;
    private ILinkService linkServiceMock;
    private IGenericPatientObjectDAO genericPatientObjectDaoMock;

    @Before
    public void setUp() throws Exception {
        mockRequest = new MockHttpServletRequest();
        linkServiceMock = Mockito.mock(ILinkService.class);
        genericPatientObjectDaoMock = Mockito.mock(IGenericPatientObjectDAO.class);

        tc = new TrendController();
        tc.setGenericPatientObjectDao(genericPatientObjectDaoMock);
        tc.setLinkService(linkServiceMock);
    }

    /** A fully populated, numeric result yields one trend point carrying all fields. */
    @Test
    public void testCreateTrendData() throws Exception {
        // Use parameterized types instead of the raw List/ArrayList the test had before.
        List<Result> list = new ArrayList<>();
        Map<String, Object> map = new LinkedHashMap<>(10);
        map.put("pid", "123");
        map.put("facilityName", "SLC-FO HMP DEV");
        map.put("high", 100);
        map.put("low", "Neg.");
        map.put("observed", "20120529180230.000");
        map.put("result", 12.5f);
        map.put("specimen", "URINE");
        map.put("typeName", "URINE GLUCOSE");
        map.put("uid", "urn:va:lab:F484:8:CH;6879469.819787;690");
        map.put("units", "mg/dL");
        map.put("comment", "foo");
        Result result = new Result(map);
        list.add(result);

        List<Map<String, Object>> trendData = tc.createTrendData(list);
        assertEquals(1, trendData.size());

        // Expected point time corresponds to the "observed" timestamp above.
        DateTime t = new DateTime(2012, 5, 29, 18, 2, 30, 0);
        // (Removed an unused expected-map local and a duplicated size assertion
        // that the original test carried.)
        Map<String, Object> ret = trendData.get(0);
        assertEquals(t.getMillis(), ret.get("x"));
        assertEquals(12.5f, ret.get("y"));
        assertEquals("100", ret.get("high"));
        assertEquals("Neg.", ret.get("low"));
        assertEquals("mg/dL", ret.get("units"));
        assertEquals("foo", ret.get("comment"));
    }

    /** A non-numeric result with no observed time produces no trend point. */
    @Test
    public void testCreateTrendData_ResultMissing() throws Exception {
        List<Result> list = new ArrayList<>();
        LinkedHashMap<String, Object> map = new LinkedHashMap<>(10);
        map.put("pid", "123");
        map.put("facilityName", "SLC-FO HMP DEV");
        map.put("high", 100);
        map.put("low", "Neg.");
        map.put("observed", null);
        map.put("result", "Neg.");
        map.put("specimen", "URINE");
        map.put("typeName", "URINE GLUCOSE");
        map.put("uid", "urn:va:lab:F484:8:CH;6879469.819787;690");
        map.put("units", "mg/dL");
        Result result = new Result(map);
        list.add(result);

        List<Map<String, Object>> trendData = tc.createTrendData(list);
        assertEquals(0, trendData.size());
    }

    /** A result without any "observed" key produces no trend point. */
    @Test
    public void testCreateTrendData_NoObserved() throws Exception {
        List<Result> list = new ArrayList<>();
        LinkedHashMap<String, Object> map = new LinkedHashMap<>(9);
        map.put("pid", "123");
        map.put("facilityName", "SLC-FO HMP DEV");
        map.put("high", 100);
        map.put("low", "Neg.");
        map.put("result", "12");
        map.put("specimen", "URINE");
        map.put("typeName", "URINE GLUCOSE");
        map.put("uid", "urn:va:lab:F484:8:CH;6879469.819787;690");
        map.put("units", "mg/dL");
        Result result = new Result(map);
        list.add(result);

        List<Map<String, Object>> trendData = tc.createTrendData(list);
        assertEquals(0, trendData.size());
    }

    /** An "observed" value that is only year+month (201209) is not plottable. */
    @Test
    public void testCreateTrendData_ObservedIncomplete() throws Exception {
        List<Result> list = new ArrayList<>();
        LinkedHashMap<String, Object> map = new LinkedHashMap<>(10);
        map.put("pid", "123");
        map.put("facilityName", "SLC-FO HMP DEV");
        map.put("observed", 201209);
        map.put("result", "12");
        map.put("high", 100);
        map.put("low", "Neg.");
        map.put("specimen", "URINE");
        map.put("typeName", "URINE GLUCOSE");
        map.put("uid", "urn:va:lab:F484:8:CH;6879469.819787;690");
        map.put("units", "mg/dL");
        Result result = new Result(map);
        list.add(result);

        List<Map<String, Object>> trendData = tc.createTrendData(list);
        assertEquals(0, trendData.size());
    }

    /** Index lookup by item type; unsupported types must raise with a clear message. */
    @Test
    public void testGetIndex() {
        assertEquals(TrendController.LAB_INDEX, tc.getIndex(new Result()));
        assertEquals(TrendController.VITAL_INDEX, tc.getIndex(new VitalSign()));
        try {
            tc.getIndex(new Problem());
            // Use fail() rather than the original assertFalse(true) idiom.
            Assert.fail("should never get here");
        } catch (Exception e) {
            assertEquals("Trend type is invalid. Valid types: result, vitalSign", e.getMessage());
        }
    }

    /** Full date converts to epoch millis at noon; a month-precision PIT yields null. */
    @Test
    public void testPitToJsDate() throws Exception {
        DateTime t = new DateTime(2001, 10, 22, 12, 0, 0, 0);
        assertThat(tc.pitToJsDate(new PointInTime(2001, 10, 22)), is(t.getMillis()));
        assertThat(tc.pitToJsDate(new PointInTime(2001, 10)), nullValue());
    }

    /** A TREND-rel link on the referenced item resolves to a non-null link. */
    @Test
    public void testCreateLink() throws Exception {
        mockRequest.setRequestURI("/vpr/trend/urn:va:lab:F484:8:CH;6879469.819787;690");
        Result result = new Result();
        when(genericPatientObjectDaoMock.findByUID(Result.class,
                "urn:va:lab:F484:8:CH;6879469.819787;690")).thenReturn(result);
        Link link = new Link();
        link.setRel(LinkRelation.TREND.toString());
        when(linkServiceMock.getLinks(result)).thenReturn(Arrays.asList(link));

        assertNotNull(tc.createLink(mockRequest));
    }

    /** Without a TREND-rel link a NotFoundException with the uid is expected. */
    @Test
    public void testCreateLink_NoMatch() throws Exception {
        mockRequest.setRequestURI("/vpr/trend/urn:va:lab:F484:8:CH;6879469.819787;690");
        Result result = new Result();
        when(genericPatientObjectDaoMock.findByUID(Result.class,
                "urn:va:lab:F484:8:CH;6879469.819787;690")).thenReturn(result);
        when(linkServiceMock.getLinks(result)).thenReturn(new ArrayList<Link>(Arrays.asList(new Link())));
        try {
            tc.createLink(mockRequest);
            Assert.fail("should never get here");
        } catch (NotFoundException nfe) {
            assertEquals("No trend found for item with uid=urn:va:lab:F484:8:CH;6879469.819787;690",
                    nfe.getMessage());
        }
    }

    /** The JSON rendering carries name/type metadata and the computed trend points. */
    @Test
    @SuppressWarnings("unchecked") // model lookup returns Object; cast is test-local
    public void testRenderJson() throws Exception {
        mockRequest.setRequestURI("/vpr/trend/urn:va:lab:F484:8:CH;6879469.819787;690");
        Result result = new Result();
        result.setData("pid", "23");
        result.setData("typeName", "GLUCOSE");
        when(genericPatientObjectDaoMock.findByUID(Result.class,
                "urn:va:lab:F484:8:CH;6879469.819787;690")).thenReturn(result);
        Link link = new Link();
        link.setRel(LinkRelation.TREND.toString());
        when(linkServiceMock.getLinks(result)).thenReturn(Arrays.asList(link));

        Result result1 = new Result();
        result1.setData("pid", "23");
        result1.setData("observed", 20120920);
        result1.setData("result", "12");
        when(genericPatientObjectDaoMock.findAllByQuery(any(Class.class), any(QueryDef.class),
                anyMap())).thenReturn(Arrays.asList(result1));

        ModelAndView mv = tc.renderJson(mockRequest);
        assertNotNull(mv);
        JsonCCollection<Map<String, Object>> resp =
                (JsonCCollection<Map<String, Object>>) mv.getModel().get(ModelAndViewFactory.DEFAULT_MODEL_KEY);
        assertNotNull(resp);
        assertEquals("GLUCOSE", resp.getAdditionalData().get("name"));
        assertEquals("line", resp.getAdditionalData().get("type"));

        // Date-only "observed" values plot at noon.
        DateTime t = new DateTime(2012, 9, 20, 12, 0, 0, 0);
        Map<String, Object> map2 = new LinkedHashMap<>(2);
        map2.put("x", t.getMillis());
        map2.put("y", 12.0f);
        assertThat(resp.getItems().get(0), equalTo(map2));
    }

    /** XML rendering redirects to the trend link's href. */
    @Test
    public void testRenderXml() throws Exception {
        mockRequest.setRequestURI("/vpr/trend/urn:va:lab:F484:8:CH;6879469.819787;690");
        Result result = new Result();
        result.setData("pid", "23");
        result.setData("typeName", "GLUCOSE");
        when(genericPatientObjectDaoMock.findByUID(Result.class,
                "urn:va:lab:F484:8:CH;6879469.819787;690")).thenReturn(result);
        Link link = new Link();
        link.setRel(LinkRelation.TREND.toString());
        link.setHref("/vpr/trend/urn:va:lab:F484:8:CH;6879469.819787;690");
        when(linkServiceMock.getLinks(result)).thenReturn(Arrays.asList(link));

        String viewName = tc.renderXml(mockRequest);
        assertThat(viewName, is("redirect:/vpr/trend/urn:va:lab:F484:8:CH;6879469.819787;690"));
    }

    /** ExtJS rendering returns a trendpanel descriptor pointing at the trend URL. */
    @Test
    public void testRenderExtjs() throws Exception {
        mockRequest.setRequestURI("/vpr/trend/urn:va:lab:F484:8:CH;6879469.819787;690");
        Result result = new Result();
        result.setData("pid", "23");
        result.setData("typeName", "GLUCOSE");
        when(genericPatientObjectDaoMock.findByUID(Result.class,
                "urn:va:lab:F484:8:CH;6879469.819787;690")).thenReturn(result);
        Link link = new Link();
        link.setRel(LinkRelation.TREND.toString());
        link.setHref("/vpr/trend/urn:va:lab:F484:8:CH;6879469.819787;690");
        when(linkServiceMock.getLinks(result)).thenReturn(Arrays.asList(link));

        String responseBody = tc.renderExtJs("extjs", mockRequest);
        JsonAssert.assertJsonEquals(
                "{\"xtype\":\"trendpanel\",\"url\":\"/vpr/trend/urn:va:lab:F484:8:CH;6879469.819787;690?format=extjs\"}",
                responseBody);
    }
}
/*******************************************************************************
 *
 * Pentaho Big Data
 *
 * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.trans.steps.hbaserowdecoder;

import java.util.ArrayList;
import java.util.List;

import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
import org.pentaho.hbase.mapping.MappingEditor;
import org.pentaho.hbase.shim.api.Mapping;

/**
 * UI dialog for the HBase row decoder step
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 */
public class HBaseRowDecoderDialog extends BaseStepDialog implements StepDialogInterface {

  private static final Class<?> PKG = HBaseRowDecoderMeta.class;

  /** various UI bits and pieces for the dialog */
  private Label m_stepnameLabel;
  private Text m_stepnameText;

  // The tabs of the dialog
  private CTabFolder m_wTabFolder;
  private CTabItem m_wConfigTab;
  private CTabItem m_editorTab;

  // Combos for choosing the incoming key and Result fields
  private CCombo m_incomingKeyCombo;
  private CCombo m_incomingResultCombo;

  // mapping editor composite
  private MappingEditor m_mappingEditor;

  // Working copy edited by the dialog, and the pristine original used to
  // detect whether anything actually changed on OK.
  private final HBaseRowDecoderMeta m_currentMeta;
  private final HBaseRowDecoderMeta m_originalMeta;

  public HBaseRowDecoderDialog( Shell parent, Object in, TransMeta tr, String name ) {
    super( parent, (BaseStepMeta) in, tr, name );

    m_currentMeta = (HBaseRowDecoderMeta) in;
    m_originalMeta = (HBaseRowDecoderMeta) m_currentMeta.clone();
  }

  /**
   * Builds the dialog (step name field, config tab, mapping editor tab,
   * OK/Cancel buttons), opens it and runs the event loop until it is closed.
   *
   * @return the step name, or null when the user cancelled
   */
  public String open() {
    Shell parent = getParent();
    Display display = parent.getDisplay();

    shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MIN | SWT.MAX );

    props.setLook( shell );
    setShellImage( shell, m_currentMeta );

    // used to listen to a text field (m_wStepname)
    ModifyListener lsMod = new ModifyListener() {
      public void modifyText( ModifyEvent e ) {
        m_currentMeta.setChanged();
      }
    };

    changed = m_currentMeta.hasChanged();

    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = Const.FORM_MARGIN;
    formLayout.marginHeight = Const.FORM_MARGIN;

    shell.setLayout( formLayout );
    shell.setText( BaseMessages.getString( PKG, "HBaseRowDecoderDialog.Shell.Title" ) );

    int middle = props.getMiddlePct();
    int margin = Const.MARGIN;

    // Stepname line
    m_stepnameLabel = new Label( shell, SWT.RIGHT );
    m_stepnameLabel.setText( BaseMessages.getString( PKG, "HBaseRowDecoderDialog.StepName.Label" ) );
    props.setLook( m_stepnameLabel );

    FormData fd = new FormData();
    fd.left = new FormAttachment( 0, 0 );
    fd.right = new FormAttachment( middle, -margin );
    fd.top = new FormAttachment( 0, margin );
    m_stepnameLabel.setLayoutData( fd );
    m_stepnameText = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    m_stepnameText.setText( stepname );
    props.setLook( m_stepnameText );
    m_stepnameText.addModifyListener( lsMod );

    // format the text field
    fd = new FormData();
    fd.left = new FormAttachment( middle, 0 );
    fd.top = new FormAttachment( 0, margin );
    fd.right = new FormAttachment( 100, 0 );
    m_stepnameText.setLayoutData( fd );

    m_wTabFolder = new CTabFolder( shell, SWT.BORDER );
    props.setLook( m_wTabFolder, Props.WIDGET_STYLE_TAB );
    m_wTabFolder.setSimple( false );

    // Start of the config tab
    m_wConfigTab = new CTabItem( m_wTabFolder, SWT.NONE );
    m_wConfigTab.setText( BaseMessages.getString( PKG, "HBaseRowDecoderDialog.ConfigTab.TabTitle" ) );

    Composite wConfigComp = new Composite( m_wTabFolder, SWT.NONE );
    props.setLook( wConfigComp );

    FormLayout configLayout = new FormLayout();
    configLayout.marginWidth = 3;
    configLayout.marginHeight = 3;
    wConfigComp.setLayout( configLayout );

    // incoming key field line
    Label inKeyLab = new Label( wConfigComp, SWT.RIGHT );
    inKeyLab.setText( BaseMessages.getString( PKG, "HBaseRowDecoderDialog.KeyField.Label" ) );
    props.setLook( inKeyLab );
    fd = new FormData();
    fd.left = new FormAttachment( 0, 0 );
    fd.top = new FormAttachment( 0, margin );
    fd.right = new FormAttachment( middle, -margin );
    inKeyLab.setLayoutData( fd );

    m_incomingKeyCombo = new CCombo( wConfigComp, SWT.BORDER );
    props.setLook( m_incomingKeyCombo );
    fd = new FormData();
    fd.left = new FormAttachment( middle, 0 );
    fd.top = new FormAttachment( 0, margin );
    fd.right = new FormAttachment( 100, 0 );
    m_incomingKeyCombo.setLayoutData( fd );

    // Keep the tooltip showing the variable-expanded value of the field.
    m_incomingKeyCombo.addModifyListener( new ModifyListener() {
      public void modifyText( ModifyEvent e ) {
        m_currentMeta.setChanged();
        m_incomingKeyCombo.setToolTipText( transMeta.environmentSubstitute( m_incomingKeyCombo.getText() ) );
      }
    } );

    // incoming result line
    Label inResultLab = new Label( wConfigComp, SWT.RIGHT );
    inResultLab.setText( BaseMessages.getString( PKG, "HBaseRowDecoderDialog.ResultField.Label" ) );
    props.setLook( inResultLab );
    fd = new FormData();
    fd.left = new FormAttachment( 0, 0 );
    fd.top = new FormAttachment( m_incomingKeyCombo, margin );
    fd.right = new FormAttachment( middle, -margin );
    inResultLab.setLayoutData( fd );

    m_incomingResultCombo = new CCombo( wConfigComp, SWT.BORDER );
    props.setLook( m_incomingResultCombo );
    fd = new FormData();
    fd.left = new FormAttachment( middle, 0 );
    fd.top = new FormAttachment( m_incomingKeyCombo, margin );
    fd.right = new FormAttachment( 100, 0 );
    m_incomingResultCombo.setLayoutData( fd );

    m_incomingResultCombo.addModifyListener( new ModifyListener() {
      public void modifyText( ModifyEvent e ) {
        m_currentMeta.setChanged();
        m_incomingResultCombo.setToolTipText( transMeta.environmentSubstitute( m_incomingResultCombo.getText() ) );
      }
    } );

    populateFieldsCombo();

    wConfigComp.layout();
    m_wConfigTab.setControl( wConfigComp );

    // --- mapping editor tab
    m_editorTab = new CTabItem( m_wTabFolder, SWT.NONE );
    m_editorTab.setText( BaseMessages.getString( PKG, "HBaseRowDecoderDialog.MappingEditorTab.TabTitle" ) );

    m_mappingEditor =
        new MappingEditor( shell, m_wTabFolder, null, null, SWT.FULL_SELECTION | SWT.MULTI, false, props, transMeta );

    fd = new FormData();
    fd.top = new FormAttachment( 0, 0 );
    fd.left = new FormAttachment( 0, 0 );
    fd.bottom = new FormAttachment( 100, -margin * 2 );
    fd.right = new FormAttachment( 100, 0 );
    m_mappingEditor.setLayoutData( fd );

    m_mappingEditor.layout();
    m_editorTab.setControl( m_mappingEditor );

    fd = new FormData();
    fd.left = new FormAttachment( 0, 0 );
    fd.top = new FormAttachment( m_stepnameText, margin );
    fd.right = new FormAttachment( 100, 0 );
    fd.bottom = new FormAttachment( 100, -50 );
    m_wTabFolder.setLayoutData( fd );

    // Buttons inherited from BaseStepDialog
    wOK = new Button( shell, SWT.PUSH );
    wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) );

    wCancel = new Button( shell, SWT.PUSH );
    wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) );

    setButtonPositions( new Button[] { wOK, wCancel }, margin, m_wTabFolder );

    // Add listeners
    lsCancel = new Listener() {
      public void handleEvent( Event e ) {
        cancel();
      }
    };

    lsOK = new Listener() {
      public void handleEvent( Event e ) {
        ok();
      }
    };

    wCancel.addListener( SWT.Selection, lsCancel );
    wOK.addListener( SWT.Selection, lsOK );

    lsDef = new SelectionAdapter() {
      @Override
      public void widgetDefaultSelected( SelectionEvent e ) {
        ok();
      }
    };

    m_stepnameText.addSelectionListener( lsDef );

    // Detect X or ALT-F4 or something that kills this window...
    shell.addShellListener( new ShellAdapter() {
      @Override
      public void shellClosed( ShellEvent e ) {
        cancel();
      }
    } );

    m_wTabFolder.setSelection( 0 );
    setSize();

    getData();

    shell.open();
    while ( !shell.isDisposed() ) {
      if ( !display.readAndDispatch() ) {
        display.sleep();
      }
    }

    return stepname;
  }

  /** Discards any edits (restores the changed flag) and closes the dialog. */
  protected void cancel() {
    stepname = null;
    m_currentMeta.setChanged( changed );

    dispose();
  }

  /**
   * Validates the mapping, copies the UI values into the step meta, records
   * whether the meta actually changed, and closes the dialog. A blank step
   * name aborts without closing.
   */
  protected void ok() {
    if ( Const.isEmpty( m_stepnameText.getText() ) ) {
      return;
    }

    stepname = m_stepnameText.getText();

    m_currentMeta.setIncomingKeyField( m_incomingKeyCombo.getText() );
    m_currentMeta.setIncomingResultField( m_incomingResultCombo.getText() );
    List<String> problems = new ArrayList<String>();
    Mapping mapping = m_mappingEditor.getMapping( false, problems );
    if ( problems.size() > 0 ) {
      // Mapping has issues: warn the user and let them choose to proceed or
      // stay in the dialog.
      StringBuffer p = new StringBuffer();
      for ( String s : problems ) {
        p.append( s ).append( "\n" );
      }
      MessageDialog md =
          new MessageDialog( shell, BaseMessages.getString( PKG, "HBaseRowDecoderDialog.Error.IssuesWithMapping.Title" ),
              null, BaseMessages.getString( PKG, "HBaseRowDecoderDialog.Error.IssuesWithMapping" ) + ":\n\n"
                  + p.toString(), MessageDialog.WARNING,
              new String[] { BaseMessages.getString( PKG, "HBaseRowDecoderDialog.Error.IssuesWithMapping.ButtonOK" ),
                BaseMessages.getString( PKG, "HBaseRowDecoderDialog.Error.IssuesWithMapping.ButtonCancel" ) }, 0 );
      MessageDialog.setDefaultImage( GUIResource.getInstance().getImageSpoon() );
      int idx = md.open() & 0xFF;
      if ( idx == 1 || idx == 255 /* 255 = escape pressed */ ) {
        return; // Cancel
      }
    }
    if ( mapping != null ) {
      m_currentMeta.setMapping( mapping );
    }

    if ( !m_originalMeta.equals( m_currentMeta ) ) {
      m_currentMeta.setChanged();
      changed = m_currentMeta.hasChanged();
    }

    dispose();
  }

  /** Populates the dialog widgets from the current step metadata. */
  protected void getData() {

    if ( !Const.isEmpty( m_currentMeta.getIncomingKeyField() ) ) {
      m_incomingKeyCombo.setText( m_currentMeta.getIncomingKeyField() );
    }

    if ( !Const.isEmpty( m_currentMeta.getIncomingResultField() ) ) {
      m_incomingResultCombo.setText( m_currentMeta.getIncomingResultField() );
    }

    if ( m_currentMeta.getMapping() != null ) {
      m_mappingEditor.setMapping( m_currentMeta.getMapping() );
    }
  }

  /**
   * Fills the key/result combos with the fields coming from the previous
   * step, preserving any values already typed in; when nothing was typed,
   * fields literally named "key"/"value" are auto-selected.
   */
  private void populateFieldsCombo() {
    StepMeta stepMeta = transMeta.findStep( stepname );
    String currentKey = m_incomingKeyCombo.getText();
    String currentResult = m_incomingResultCombo.getText();
    int keyIndex = -1;
    int valueIndex = -1;

    if ( stepMeta != null ) {
      try {
        RowMetaInterface rowMeta = transMeta.getPrevStepFields( stepMeta );
        if ( rowMeta != null && rowMeta.size() > 0 ) {
          m_incomingKeyCombo.removeAll();
          m_incomingResultCombo.removeAll();
          for ( int i = 0; i < rowMeta.size(); i++ ) {
            ValueMetaInterface vm = rowMeta.getValueMeta( i );
            String fieldName = vm.getName();
            if ( fieldName.equalsIgnoreCase( "key" ) ) {
              keyIndex = i;
            } else if ( fieldName.equalsIgnoreCase( "value" ) ) {
              valueIndex = i;
            }
            m_incomingKeyCombo.add( fieldName );
            m_incomingResultCombo.add( fieldName );
          }
          if ( !Const.isEmpty( currentKey ) ) {
            m_incomingKeyCombo.setText( currentKey );
          } else if ( keyIndex >= 0 ) {
            // auto set key field
            m_incomingKeyCombo.select( keyIndex );
          }
          if ( !Const.isEmpty( currentResult ) ) {
            m_incomingResultCombo.setText( currentResult );
          } else if ( valueIndex >= 0 ) {
            // auto set value (Result) field
            m_incomingResultCombo.select( valueIndex );
          }
        }
      } catch ( KettleException ex ) {
        if ( log.isError() ) {
          log.logError( "Error populating fields", ex );
        }
      }
    }
  }
}
/* Generated by camel build tools - do NOT edit this file! */ package org.apache.camel.component.paho; import java.util.Map; import org.apache.camel.CamelContext; import org.apache.camel.spi.ConfigurerStrategy; import org.apache.camel.spi.GeneratedPropertyConfigurer; import org.apache.camel.spi.PropertyConfigurerGetter; import org.apache.camel.util.CaseInsensitiveMap; import org.apache.camel.support.component.PropertyConfigurerSupport; /** * Generated by camel build tools - do NOT edit this file! */ @SuppressWarnings("unchecked") public class PahoComponentConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter { private static final Map<String, Object> ALL_OPTIONS; static { Map<String, Object> map = new CaseInsensitiveMap(); map.put("automaticReconnect", boolean.class); map.put("brokerUrl", java.lang.String.class); map.put("cleanSession", boolean.class); map.put("clientId", java.lang.String.class); map.put("configuration", org.apache.camel.component.paho.PahoConfiguration.class); map.put("connectionTimeout", int.class); map.put("filePersistenceDirectory", java.lang.String.class); map.put("keepAliveInterval", int.class); map.put("maxInflight", int.class); map.put("maxReconnectDelay", int.class); map.put("mqttVersion", int.class); map.put("persistence", org.apache.camel.component.paho.PahoPersistence.class); map.put("qos", int.class); map.put("retained", boolean.class); map.put("serverURIs", java.lang.String.class); map.put("willPayload", java.lang.String.class); map.put("willQos", int.class); map.put("willRetained", boolean.class); map.put("willTopic", java.lang.String.class); map.put("bridgeErrorHandler", boolean.class); map.put("lazyStartProducer", boolean.class); map.put("basicPropertyBinding", boolean.class); map.put("client", org.eclipse.paho.client.mqttv3.MqttClient.class); map.put("customWebSocketHeaders", java.util.Properties.class); map.put("executorServiceTimeout", int.class); 
map.put("httpsHostnameVerificationEnabled", boolean.class); map.put("password", java.lang.String.class); map.put("socketFactory", javax.net.SocketFactory.class); map.put("sslClientProps", java.util.Properties.class); map.put("sslHostnameVerifier", javax.net.ssl.HostnameVerifier.class); map.put("userName", java.lang.String.class); ALL_OPTIONS = map; ConfigurerStrategy.addConfigurerClearer(PahoComponentConfigurer::clearConfigurers); } private org.apache.camel.component.paho.PahoConfiguration getOrCreateConfiguration(PahoComponent target) { if (target.getConfiguration() == null) { target.setConfiguration(new org.apache.camel.component.paho.PahoConfiguration()); } return target.getConfiguration(); } @Override public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) { PahoComponent target = (PahoComponent) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "automaticreconnect": case "automaticReconnect": getOrCreateConfiguration(target).setAutomaticReconnect(property(camelContext, boolean.class, value)); return true; case "basicpropertybinding": case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true; case "bridgeerrorhandler": case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true; case "brokerurl": case "brokerUrl": getOrCreateConfiguration(target).setBrokerUrl(property(camelContext, java.lang.String.class, value)); return true; case "cleansession": case "cleanSession": getOrCreateConfiguration(target).setCleanSession(property(camelContext, boolean.class, value)); return true; case "client": target.setClient(property(camelContext, org.eclipse.paho.client.mqttv3.MqttClient.class, value)); return true; case "clientid": case "clientId": getOrCreateConfiguration(target).setClientId(property(camelContext, java.lang.String.class, value)); return true; case "configuration": 
target.setConfiguration(property(camelContext, org.apache.camel.component.paho.PahoConfiguration.class, value)); return true; case "connectiontimeout": case "connectionTimeout": getOrCreateConfiguration(target).setConnectionTimeout(property(camelContext, int.class, value)); return true; case "customwebsocketheaders": case "customWebSocketHeaders": getOrCreateConfiguration(target).setCustomWebSocketHeaders(property(camelContext, java.util.Properties.class, value)); return true; case "executorservicetimeout": case "executorServiceTimeout": getOrCreateConfiguration(target).setExecutorServiceTimeout(property(camelContext, int.class, value)); return true; case "filepersistencedirectory": case "filePersistenceDirectory": getOrCreateConfiguration(target).setFilePersistenceDirectory(property(camelContext, java.lang.String.class, value)); return true; case "httpshostnameverificationenabled": case "httpsHostnameVerificationEnabled": getOrCreateConfiguration(target).setHttpsHostnameVerificationEnabled(property(camelContext, boolean.class, value)); return true; case "keepaliveinterval": case "keepAliveInterval": getOrCreateConfiguration(target).setKeepAliveInterval(property(camelContext, int.class, value)); return true; case "lazystartproducer": case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true; case "maxinflight": case "maxInflight": getOrCreateConfiguration(target).setMaxInflight(property(camelContext, int.class, value)); return true; case "maxreconnectdelay": case "maxReconnectDelay": getOrCreateConfiguration(target).setMaxReconnectDelay(property(camelContext, int.class, value)); return true; case "mqttversion": case "mqttVersion": getOrCreateConfiguration(target).setMqttVersion(property(camelContext, int.class, value)); return true; case "password": getOrCreateConfiguration(target).setPassword(property(camelContext, java.lang.String.class, value)); return true; case "persistence": 
getOrCreateConfiguration(target).setPersistence(property(camelContext, org.apache.camel.component.paho.PahoPersistence.class, value)); return true; case "qos": getOrCreateConfiguration(target).setQos(property(camelContext, int.class, value)); return true; case "retained": getOrCreateConfiguration(target).setRetained(property(camelContext, boolean.class, value)); return true; case "serveruris": case "serverURIs": getOrCreateConfiguration(target).setServerURIs(property(camelContext, java.lang.String.class, value)); return true; case "socketfactory": case "socketFactory": getOrCreateConfiguration(target).setSocketFactory(property(camelContext, javax.net.SocketFactory.class, value)); return true; case "sslclientprops": case "sslClientProps": getOrCreateConfiguration(target).setSslClientProps(property(camelContext, java.util.Properties.class, value)); return true; case "sslhostnameverifier": case "sslHostnameVerifier": getOrCreateConfiguration(target).setSslHostnameVerifier(property(camelContext, javax.net.ssl.HostnameVerifier.class, value)); return true; case "username": case "userName": getOrCreateConfiguration(target).setUserName(property(camelContext, java.lang.String.class, value)); return true; case "willpayload": case "willPayload": getOrCreateConfiguration(target).setWillPayload(property(camelContext, java.lang.String.class, value)); return true; case "willqos": case "willQos": getOrCreateConfiguration(target).setWillQos(property(camelContext, int.class, value)); return true; case "willretained": case "willRetained": getOrCreateConfiguration(target).setWillRetained(property(camelContext, boolean.class, value)); return true; case "willtopic": case "willTopic": getOrCreateConfiguration(target).setWillTopic(property(camelContext, java.lang.String.class, value)); return true; default: return false; } } @Override public Map<String, Object> getAllOptions(Object target) { return ALL_OPTIONS; } public static void clearBootstrapConfigurers() { } public static void 
clearConfigurers() { ALL_OPTIONS.clear(); } @Override public Object getOptionValue(Object obj, String name, boolean ignoreCase) { PahoComponent target = (PahoComponent) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "automaticreconnect": case "automaticReconnect": return getOrCreateConfiguration(target).isAutomaticReconnect(); case "basicpropertybinding": case "basicPropertyBinding": return target.isBasicPropertyBinding(); case "bridgeerrorhandler": case "bridgeErrorHandler": return target.isBridgeErrorHandler(); case "brokerurl": case "brokerUrl": return getOrCreateConfiguration(target).getBrokerUrl(); case "cleansession": case "cleanSession": return getOrCreateConfiguration(target).isCleanSession(); case "client": return target.getClient(); case "clientid": case "clientId": return getOrCreateConfiguration(target).getClientId(); case "configuration": return target.getConfiguration(); case "connectiontimeout": case "connectionTimeout": return getOrCreateConfiguration(target).getConnectionTimeout(); case "customwebsocketheaders": case "customWebSocketHeaders": return getOrCreateConfiguration(target).getCustomWebSocketHeaders(); case "executorservicetimeout": case "executorServiceTimeout": return getOrCreateConfiguration(target).getExecutorServiceTimeout(); case "filepersistencedirectory": case "filePersistenceDirectory": return getOrCreateConfiguration(target).getFilePersistenceDirectory(); case "httpshostnameverificationenabled": case "httpsHostnameVerificationEnabled": return getOrCreateConfiguration(target).isHttpsHostnameVerificationEnabled(); case "keepaliveinterval": case "keepAliveInterval": return getOrCreateConfiguration(target).getKeepAliveInterval(); case "lazystartproducer": case "lazyStartProducer": return target.isLazyStartProducer(); case "maxinflight": case "maxInflight": return getOrCreateConfiguration(target).getMaxInflight(); case "maxreconnectdelay": case "maxReconnectDelay": return 
getOrCreateConfiguration(target).getMaxReconnectDelay(); case "mqttversion": case "mqttVersion": return getOrCreateConfiguration(target).getMqttVersion(); case "password": return getOrCreateConfiguration(target).getPassword(); case "persistence": return getOrCreateConfiguration(target).getPersistence(); case "qos": return getOrCreateConfiguration(target).getQos(); case "retained": return getOrCreateConfiguration(target).isRetained(); case "serveruris": case "serverURIs": return getOrCreateConfiguration(target).getServerURIs(); case "socketfactory": case "socketFactory": return getOrCreateConfiguration(target).getSocketFactory(); case "sslclientprops": case "sslClientProps": return getOrCreateConfiguration(target).getSslClientProps(); case "sslhostnameverifier": case "sslHostnameVerifier": return getOrCreateConfiguration(target).getSslHostnameVerifier(); case "username": case "userName": return getOrCreateConfiguration(target).getUserName(); case "willpayload": case "willPayload": return getOrCreateConfiguration(target).getWillPayload(); case "willqos": case "willQos": return getOrCreateConfiguration(target).getWillQos(); case "willretained": case "willRetained": return getOrCreateConfiguration(target).isWillRetained(); case "willtopic": case "willTopic": return getOrCreateConfiguration(target).getWillTopic(); default: return null; } } }
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ratpack.embed;

import com.google.inject.Injector;
import groovy.lang.Closure;
import groovy.lang.DelegatesTo;
import ratpack.embed.internal.LaunchConfigEmbeddedApp;
import ratpack.func.Action;
import ratpack.groovy.Groovy;
import ratpack.groovy.guice.GroovyBindingsSpec;
import ratpack.groovy.guice.internal.DefaultGroovyBindingsSpec;
import ratpack.groovy.handling.GroovyChain;
import ratpack.groovy.internal.ClosureUtil;
import ratpack.guice.BindingsSpec;
import ratpack.guice.Guice;
import ratpack.launch.LaunchConfig;
import ratpack.launch.LaunchConfigBuilder;

import java.nio.file.Path;
import java.util.function.Supplier;

import static ratpack.groovy.internal.ClosureUtil.configureDelegateFirst;

/**
 * A highly configurable {@link EmbeddedApp} implementation that allows
 * the application to be defined in code at runtime.
 * <p>
 * <pre class="tested">
 * import ratpack.test.embed.BaseDirBuilder
 * import ratpack.session.SessionModule
 * import ratpack.groovy.test.embed.GroovyEmbeddedApp
 *
 * GroovyEmbeddedApp.build {
 *   baseDir {
 *     BaseDirBuilder.tmpDir().build {
 *       it.file "public/foo.txt", "bar"
 *     }
 *   }
 *
 *   launchConfig {
 *     development true
 *     other "some.other.property": "value"
 *   }
 *
 *   // Configure the module registry
 *   bindings {
 *     add new SessionModule()
 *   }
 *
 *   // Use the GroovyChain DSL for defining the application handlers
 *   handlers {
 *     get {
 *       render "root"
 *     }
 *     assets "public"
 *   }
 * }
 * </pre>
 */
public interface GroovyEmbeddedApp extends EmbeddedApp {

  /**
   * The configuration DSL handed to the closure given to {@link GroovyEmbeddedApp#build(Closure)}.
   * <p>
   * Each configuration closure is captured now and executed only when the application starts.
   */
  public interface Spec {

    /**
     * Specifies the handlers of the application.
     * <p>
     * The given closure will not be executed until this application is started.
     * <p>
     * Subsequent calls to this method will <i>replace</i> the previous definition.
     * Calling this method after the application has started has no effect.
     *
     * @param closure The definition of the application handlers
     * @return {@code this}
     */
    Spec handlers(@DelegatesTo(value = GroovyChain.class, strategy = Closure.DELEGATE_FIRST) Closure<?> closure);

    /**
     * Specifies the bindings of the application.
     * <p>
     * The given closure will not be executed until this application is started.
     * <p>
     * Subsequent calls to this method will <i>replace</i> the previous definition.
     * Calling this method after the application has started has no effect.
     *
     * @param closure The definition of the application bindings
     * @return {@code this}
     */
    Spec bindings(@DelegatesTo(value = GroovyBindingsSpec.class, strategy = Closure.DELEGATE_FIRST) Closure<?> closure);

    /**
     * Modifies the launch config of the application.
     * <p>
     * The given closure will not be executed until this application is started.
     * <p>
     * Subsequent calls to this method will <i>replace</i> the previous definition.
     * Calling this method after the application has started has no effect.
     *
     * @param closure The configuration of the launch config builder
     * @return {@code this}
     */
    Spec launchConfig(@DelegatesTo(value = LaunchConfigBuilder.class, strategy = Closure.DELEGATE_FIRST) Closure<?> closure);

    /**
     * Specifies a Guice injector to use as the parent of the application's injector.
     *
     * @param parentInjector the parent injector
     * @return {@code this}
     */
    Spec parentInjector(Injector parentInjector);

    /**
     * Specifies the base dir of the application, resolved lazily when the application starts.
     *
     * @param baseDirSupplier supplies the base dir path
     * @return {@code this}
     */
    Spec baseDir(Supplier<? extends Path> baseDirSupplier);

    /**
     * Specifies the base dir of the application via a builder (built lazily at start).
     *
     * @param baseDirBuilder the builder whose {@code build()} produces the base dir
     * @return {@code this}
     */
    default Spec baseDir(BaseDirBuilder baseDirBuilder) {
      return baseDir(baseDirBuilder::build);
    }

    /**
     * Specifies a fixed base dir for the application.
     *
     * @param baseDir the base dir path
     * @return {@code this}
     */
    default Spec baseDir(Path baseDir) {
      return baseDir(() -> baseDir);
    }
  }

  /**
   * Builds an embedded application from the given definition.
   * <p>
   * The closure delegates to a {@link Spec}; the captured configuration is applied
   * when the returned application is started (i.e. when its launch config is created).
   *
   * @param closure the application definition
   * @return an embedded application, not yet started
   */
  public static EmbeddedApp build(@DelegatesTo(value = Spec.class, strategy = Closure.DELEGATE_FIRST) Closure<?> closure) {
    return new LaunchConfigEmbeddedApp() {
      @Override
      protected LaunchConfig createLaunchConfig() {
        // Capture the user's configuration by running the definition closure against a fresh spec.
        final SpecWrapper spec = new SpecWrapper();
        configureDelegateFirst(spec.getSpec(), closure);

        // Base dir is optional; resolve it only if one was supplied.
        LaunchConfigBuilder launchConfigBuilder;
        if (spec.baseDirSupplier != null) {
          Path baseDirPath = spec.baseDirSupplier.get();
          launchConfigBuilder = LaunchConfigBuilder.baseDir(baseDirPath);
        } else {
          launchConfigBuilder = LaunchConfigBuilder.noBaseDir();
        }

        // port(0) => bind to an ephemeral port, as befits an embedded test application.
        configureDelegateFirst(launchConfigBuilder.port(0), spec.launchConfig);

        final Action<? super BindingsSpec> bindingsAction = bindingsSpec -> configureDelegateFirst(new DefaultGroovyBindingsSpec(bindingsSpec), spec.bindings);

        return launchConfigBuilder.build(launchConfig -> {
          Guice.Builder builder = Guice.builder(launchConfig);
          if (spec.parentInjector != null) {
            builder.parent(spec.parentInjector);
          }
          return builder.bindings(bindingsAction).build(chain -> Groovy.chain(chain, spec.handlers));
        });
      }
    };
  }

  // Mutable holder for the configuration captured via the Spec DSL.
  // Defaults are no-op closures so an unconfigured aspect is simply skipped.
  static class SpecWrapper {
    private Closure<?> handlers = ClosureUtil.noop();
    private Closure<?> bindings = ClosureUtil.noop();
    private Closure<?> launchConfig = ClosureUtil.noop();
    private Injector parentInjector;
    private Supplier<? extends Path> baseDirSupplier;

    // Returns a Spec view whose mutators write into this wrapper's fields.
    Spec getSpec() {
      return new Spec() {
        @Override
        public Spec handlers(Closure<?> closure) {
          handlers = closure;
          return this;
        }

        @Override
        public Spec bindings(Closure<?> closure) {
          bindings = closure;
          return this;
        }

        @Override
        public Spec launchConfig(Closure<?> closure) {
          launchConfig = closure;
          return this;
        }

        @Override
        public Spec parentInjector(Injector injector) {
          parentInjector = injector;
          return this;
        }

        @Override
        public Spec baseDir(Supplier<? extends Path> supplier) {
          baseDirSupplier = supplier;
          return this;
        }
      };
    }
  }
}
/************************************************************************* * * ADOBE CONFIDENTIAL * __________________ * * [2002] - [2007] Adobe Systems Incorporated * All Rights Reserved. * * NOTICE: All information contained herein is, and remains * the property of Adobe Systems Incorporated and its suppliers, * if any. The intellectual and technical concepts contained * herein are proprietary to Adobe Systems Incorporated * and its suppliers and may be covered by U.S. and Foreign Patents, * patents in process, and are protected by trade secret or copyright law. * Dissemination of this information or reproduction of this material * is strictly forbidden unless prior written permission is obtained * from Adobe Systems Incorporated. **************************************************************************/ package flex.messaging.client; /** * A class to hold user agent specific properties. For example, in streaming * endpoints, a certain number of bytes need to be written before the * streaming connection can be used and this value is specific to user agents. * Similarly, the number of simultaneous connections a session can have is user * agent specific. */ public class UserAgentSettings { /** * The prefixes of the version token used by various browsers. */ public static final String USER_AGENT_CHROME = "Chrome"; public static final String USER_AGENT_FIREFOX = "Firefox"; public static final String USER_AGENT_FIREFOX_3 = "Firefox/3"; public static final String USER_AGENT_MSIE = "MSIE"; public static final String USER_AGENT_MSIE_8 = "MSIE 8"; public static final String USER_AGENT_OPERA = "Opera"; public static final String USER_AGENT_OPERA_8 = "Opera 8"; // Opera 10 apparently ships as User Agent Opera/9.8. public static final String USER_AGENT_OPERA_10 = "Opera/9.8"; public static final String USER_AGENT_SAFARI = "Safari"; /** * Bytes needed to kickstart the streaming connections for IE. 
*/ public static final int KICKSTART_BYTES_MSIE = 2048; /** * Bytes needed to kickstart the streaming connections for SAFARI. */ public static final int KICKSTART_BYTES_SAFARI = 512; /** * The default number of persistent connections per session for various browsers. */ public static final int MAX_PERSISTENT_CONNECTIONS_DEFAULT = 1; private static final int MAX_PERSISTENT_CONNECTIONS_CHROME = 5; private static final int MAX_PERSISTENT_CONNECTIONS_FIREFOX_3 = 5; private static final int MAX_PERSISTENT_CONNECTIONS_MSIE_8 = 5; private static final int MAX_PERSISTENT_CONNECTIONS_OPERA = 3; private static final int MAX_PERSISTENT_CONNECTIONS_OPERA_8 = 7; private static final int MAX_PERSISTENT_CONNECTIONS_OPERA_10 = 7; private static final int MAX_PERSISTENT_CONNECTIONS_SAFARI = 3; private String matchOn; private int kickstartBytes; private int maxPersistentConnectionsPerSession = MAX_PERSISTENT_CONNECTIONS_DEFAULT; /** * Static method to retrieve pre-initialized user agents which are as follows: * * In Chrome 0, 1, 2, the limit is 6: * match-on="Chrome" max-persistent-connections-per-session="5" * * In Firefox 1, 2, the limit is 2: * match-on="Firefox" max-persistent-connections-per-session="1" * * In Firefox 3, the limit is 6: * match-on="Firefox/3" max-persistent-connections-per-session="5" * * In MSIE 5, 6, 7, the limit is 2 with kickstart bytes of 2K: * match-on="MSIE" max-persistent-connections-per-session="1" kickstart-bytes="2048" * * In MSIE 8, the limit is 6 with kickstart bytes of 2K: * match-on="MSIE 8" max-persistent-connections-per-session="5" kickstart-bytes="2048" * * In Opera 7, 9, the limit is 4: * match-on="Opera" max-persistent-connections-per-session="3" * * In Opera 8, the limit is 8: * match-on="Opera 8" max-persistent-connections-per-session="7" * * In Opera 10, the limit is 8. * match-on="Opera 10" max-persistent-connections-per-session="7" * * In Safari 3, 4, the limit is 4. 
* match-on="Safari" max-persistent-connections-per-session="3" * * @param matchOn String to use match the agent. */ public static UserAgentSettings getAgent(String matchOn) { UserAgentSettings userAgent = new UserAgentSettings(); userAgent.setMatchOn(matchOn); if (USER_AGENT_CHROME.equals(matchOn)) { userAgent.setMaxPersistentConnectionsPerSession(MAX_PERSISTENT_CONNECTIONS_CHROME); } else if (USER_AGENT_FIREFOX.equals(matchOn)) { userAgent.setMaxPersistentConnectionsPerSession(MAX_PERSISTENT_CONNECTIONS_DEFAULT); } else if (USER_AGENT_FIREFOX_3.equals(matchOn)) { userAgent.setMaxPersistentConnectionsPerSession(MAX_PERSISTENT_CONNECTIONS_FIREFOX_3); } else if (USER_AGENT_MSIE.equals(matchOn)) { userAgent.setKickstartBytes(KICKSTART_BYTES_MSIE); userAgent.setMaxPersistentConnectionsPerSession(MAX_PERSISTENT_CONNECTIONS_DEFAULT); } else if (USER_AGENT_MSIE_8.equals(matchOn)) { userAgent.setKickstartBytes(KICKSTART_BYTES_MSIE); userAgent.setMaxPersistentConnectionsPerSession(MAX_PERSISTENT_CONNECTIONS_MSIE_8); } else if (USER_AGENT_OPERA.equals(matchOn)) { userAgent.setMaxPersistentConnectionsPerSession(MAX_PERSISTENT_CONNECTIONS_OPERA); } else if (USER_AGENT_OPERA_8.equals(matchOn)) { userAgent.setMaxPersistentConnectionsPerSession(MAX_PERSISTENT_CONNECTIONS_OPERA_8); } else if (USER_AGENT_OPERA_10.equals(matchOn)) { userAgent.setMaxPersistentConnectionsPerSession(MAX_PERSISTENT_CONNECTIONS_OPERA_10); } else if (USER_AGENT_SAFARI.equals(matchOn)) { userAgent.setKickstartBytes(KICKSTART_BYTES_SAFARI); userAgent.setMaxPersistentConnectionsPerSession(MAX_PERSISTENT_CONNECTIONS_SAFARI); } return userAgent; } /** * Returns the String to use to match the agent. * * @return The String to use to match the agent. */ public String getMatchOn() { return matchOn; } /** * Sets the String to use to match the agent. * * @param matchOn The String to use to match the agent. 
*/ public void setMatchOn(String matchOn) { this.matchOn = matchOn; } /** * Returns the number of bytes needed to kickstart the streaming connections * for the user agent. * * @return The number of bytes needed to kickstart the streaming connections * for the user agent. */ public int getKickstartBytes() { return kickstartBytes; } /** * Sets the number of bytes needed to kickstart the streaming connections * for the user agent. * * @param kickstartBytes The number of bytes needed to kickstart the streaming * connections for the user agent. */ public void setKickstartBytes(int kickstartBytes) { if (kickstartBytes < 0) kickstartBytes = 0; this.kickstartBytes = kickstartBytes; } /** * @deprecated Use {@link UserAgentSettings#getMaxPersistentConnectionsPerSession()} instead. */ public int getMaxStreamingConnectionsPerSession() { return getMaxPersistentConnectionsPerSession(); } /** * @deprecated Use {@link UserAgentSettings#setMaxPersistentConnectionsPerSession(int)} instead. */ public void setMaxStreamingConnectionsPerSession(int maxStreamingConnectionsPerSession) { setMaxPersistentConnectionsPerSession(maxStreamingConnectionsPerSession); } /** * Returns the number of simultaneous streaming connections per session * the user agent supports. * * @return The number of streaming connections per session the user agent supports. */ public int getMaxPersistentConnectionsPerSession() { return maxPersistentConnectionsPerSession; } /** * Sets the number of simultaneous streaming connections per session * the user agent supports. * * @param maxStreamingConnectionsPerSession The number of simultaneous * streaming connections per session the user agent supports. */ public void setMaxPersistentConnectionsPerSession(int maxStreamingConnectionsPerSession) { this.maxPersistentConnectionsPerSession = maxStreamingConnectionsPerSession; } }
/* * Copyright 2014 The gRPC Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.grpc; import static com.google.common.collect.Iterables.getOnlyElement; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.mockito.AdditionalAnswers.delegatesTo; import static org.mockito.Matchers.same; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.verifyZeroInteractions; import io.grpc.MethodDescriptor.Marshaller; import io.grpc.MethodDescriptor.MethodType; import io.grpc.ServerCall.Listener; import io.grpc.internal.NoopServerCall; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.MockitoAnnotations; /** Unit tests for {@link ServerInterceptors}. 
*/ @RunWith(JUnit4.class) public class ServerInterceptorsTest { @Mock private Marshaller<String> requestMarshaller; @Mock private Marshaller<Integer> responseMarshaller; @Mock private ServerCallHandler<String, Integer> handler; @Mock private ServerCall.Listener<String> listener; private MethodDescriptor<String, Integer> flowMethod; private ServerCall<String, Integer> call = new NoopServerCall<String, Integer>(); private ServerServiceDefinition serviceDefinition; private final Metadata headers = new Metadata(); /** Set up for test. */ @Before public void setUp() { MockitoAnnotations.initMocks(this); flowMethod = MethodDescriptor.<String, Integer>newBuilder() .setType(MethodType.UNKNOWN) .setFullMethodName("basic/flow") .setRequestMarshaller(requestMarshaller) .setResponseMarshaller(responseMarshaller) .build(); Mockito.when(handler.startCall( Mockito.<ServerCall<String, Integer>>any(), Mockito.<Metadata>any())) .thenReturn(listener); serviceDefinition = ServerServiceDefinition.builder(new ServiceDescriptor("basic", flowMethod)) .addMethod(flowMethod, handler).build(); } /** Final checks for all tests. 
*/ @After public void makeSureExpectedMocksUnused() { verifyZeroInteractions(requestMarshaller); verifyZeroInteractions(responseMarshaller); verifyZeroInteractions(listener); } @Test(expected = NullPointerException.class) public void npeForNullServiceDefinition() { ServerServiceDefinition serviceDef = null; ServerInterceptors.intercept(serviceDef, Arrays.<ServerInterceptor>asList()); } @Test(expected = NullPointerException.class) public void npeForNullInterceptorList() { ServerInterceptors.intercept(serviceDefinition, (List<ServerInterceptor>) null); } @Test(expected = NullPointerException.class) public void npeForNullInterceptor() { ServerInterceptors.intercept(serviceDefinition, Arrays.asList((ServerInterceptor) null)); } @Test public void noop() { assertSame(serviceDefinition, ServerInterceptors.intercept(serviceDefinition, Arrays.<ServerInterceptor>asList())); } @Test public void multipleInvocationsOfHandler() { ServerInterceptor interceptor = mock(ServerInterceptor.class, delegatesTo(new NoopInterceptor())); ServerServiceDefinition intercepted = ServerInterceptors.intercept(serviceDefinition, Arrays.asList(interceptor)); assertSame(listener, getSoleMethod(intercepted).getServerCallHandler().startCall(call, headers)); verify(interceptor).interceptCall(same(call), same(headers), anyCallHandler()); verify(handler).startCall(call, headers); verifyNoMoreInteractions(interceptor, handler); assertSame(listener, getSoleMethod(intercepted).getServerCallHandler().startCall(call, headers)); verify(interceptor, times(2)) .interceptCall(same(call), same(headers), anyCallHandler()); verify(handler, times(2)).startCall(call, headers); verifyNoMoreInteractions(interceptor, handler); } @Test public void correctHandlerCalled() { @SuppressWarnings("unchecked") ServerCallHandler<String, Integer> handler2 = mock(ServerCallHandler.class); MethodDescriptor<String, Integer> flowMethod2 = flowMethod.toBuilder().setFullMethodName("basic/flow2").build(); serviceDefinition = 
ServerServiceDefinition.builder( new ServiceDescriptor("basic", flowMethod, flowMethod2)) .addMethod(flowMethod, handler) .addMethod(flowMethod2, handler2).build(); ServerServiceDefinition intercepted = ServerInterceptors.intercept( serviceDefinition, Arrays.<ServerInterceptor>asList(new NoopInterceptor())); getMethod(intercepted, "basic/flow").getServerCallHandler().startCall(call, headers); verify(handler).startCall(call, headers); verifyNoMoreInteractions(handler); verifyNoMoreInteractions(handler2); getMethod(intercepted, "basic/flow2").getServerCallHandler().startCall(call, headers); verify(handler2).startCall(call, headers); verifyNoMoreInteractions(handler); verifyNoMoreInteractions(handler2); } @Test public void callNextTwice() { ServerInterceptor interceptor = new ServerInterceptor() { @Override public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall( ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) { // Calling next twice is permitted, although should only rarely be useful. 
assertSame(listener, next.startCall(call, headers)); return next.startCall(call, headers); } }; ServerServiceDefinition intercepted = ServerInterceptors.intercept(serviceDefinition, interceptor); assertSame(listener, getSoleMethod(intercepted).getServerCallHandler().startCall(call, headers)); verify(handler, times(2)).startCall(same(call), same(headers)); verifyNoMoreInteractions(handler); } @Test public void ordered() { final List<String> order = new ArrayList<String>(); handler = new ServerCallHandler<String, Integer>() { @Override public ServerCall.Listener<String> startCall( ServerCall<String, Integer> call, Metadata headers) { order.add("handler"); return listener; } }; ServerInterceptor interceptor1 = new ServerInterceptor() { @Override public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall( ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) { order.add("i1"); return next.startCall(call, headers); } }; ServerInterceptor interceptor2 = new ServerInterceptor() { @Override public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall( ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) { order.add("i2"); return next.startCall(call, headers); } }; ServerServiceDefinition serviceDefinition = ServerServiceDefinition.builder( new ServiceDescriptor("basic", flowMethod)) .addMethod(flowMethod, handler).build(); ServerServiceDefinition intercepted = ServerInterceptors.intercept( serviceDefinition, Arrays.asList(interceptor1, interceptor2)); assertSame(listener, getSoleMethod(intercepted).getServerCallHandler().startCall(call, headers)); assertEquals(Arrays.asList("i2", "i1", "handler"), order); } @Test public void orderedForward() { final List<String> order = new ArrayList<String>(); handler = new ServerCallHandler<String, Integer>() { @Override public ServerCall.Listener<String> startCall( ServerCall<String, Integer> call, Metadata headers) { order.add("handler"); return listener; } }; 
ServerInterceptor interceptor1 = new ServerInterceptor() { @Override public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall( ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) { order.add("i1"); return next.startCall(call, headers); } }; ServerInterceptor interceptor2 = new ServerInterceptor() { @Override public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall( ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) { order.add("i2"); return next.startCall(call, headers); } }; ServerServiceDefinition serviceDefinition = ServerServiceDefinition.builder( new ServiceDescriptor("basic", flowMethod)) .addMethod(flowMethod, handler).build(); ServerServiceDefinition intercepted = ServerInterceptors.interceptForward( serviceDefinition, interceptor1, interceptor2); assertSame(listener, getSoleMethod(intercepted).getServerCallHandler().startCall(call, headers)); assertEquals(Arrays.asList("i1", "i2", "handler"), order); } @Test public void argumentsPassed() { @SuppressWarnings("unchecked") final ServerCall<String, Integer> call2 = new NoopServerCall<String, Integer>(); @SuppressWarnings("unchecked") final ServerCall.Listener<String> listener2 = mock(ServerCall.Listener.class); ServerInterceptor interceptor = new ServerInterceptor() { @SuppressWarnings("unchecked") // Lot's of casting for no benefit. Not intended use. 
@Override public <R1, R2> ServerCall.Listener<R1> interceptCall( ServerCall<R1, R2> call, Metadata headers, ServerCallHandler<R1, R2> next) { assertSame(call, ServerInterceptorsTest.this.call); assertSame(listener, next.startCall((ServerCall<R1, R2>)call2, headers)); return (ServerCall.Listener<R1>) listener2; } }; ServerServiceDefinition intercepted = ServerInterceptors.intercept( serviceDefinition, Arrays.asList(interceptor)); assertSame(listener2, getSoleMethod(intercepted).getServerCallHandler().startCall(call, headers)); verify(handler).startCall(call2, headers); } @Test @SuppressWarnings("unchecked") public void typedMarshalledMessages() { final List<String> order = new ArrayList<String>(); Marshaller<Holder> marshaller = new Marshaller<Holder>() { @Override public InputStream stream(Holder value) { return value.get(); } @Override public Holder parse(InputStream stream) { return new Holder(stream); } }; ServerCallHandler<Holder, Holder> handler2 = new ServerCallHandler<Holder, Holder>() { @Override public Listener<Holder> startCall(final ServerCall<Holder, Holder> call, final Metadata headers) { return new Listener<Holder>() { @Override public void onMessage(Holder message) { order.add("handler"); call.sendMessage(message); } }; } }; MethodDescriptor<Holder, Holder> wrappedMethod = MethodDescriptor.<Holder, Holder>newBuilder() .setType(MethodType.UNKNOWN) .setFullMethodName("basic/wrapped") .setRequestMarshaller(marshaller) .setResponseMarshaller(marshaller) .build(); ServerServiceDefinition serviceDef = ServerServiceDefinition.builder( new ServiceDescriptor("basic", wrappedMethod)) .addMethod(wrappedMethod, handler2).build(); ServerInterceptor interceptor1 = new ServerInterceptor() { @Override public <ReqT, RespT> Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) { ServerCall<ReqT, RespT> interceptedCall = new ForwardingServerCall .SimpleForwardingServerCall<ReqT, RespT>(call) { @Override public 
void sendMessage(RespT message) { order.add("i1sendMessage"); assertTrue(message instanceof Holder); super.sendMessage(message); } }; ServerCall.Listener<ReqT> originalListener = next .startCall(interceptedCall, headers); return new ForwardingServerCallListener .SimpleForwardingServerCallListener<ReqT>(originalListener) { @Override public void onMessage(ReqT message) { order.add("i1onMessage"); assertTrue(message instanceof Holder); super.onMessage(message); } }; } }; ServerInterceptor interceptor2 = new ServerInterceptor() { @Override public <ReqT, RespT> Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) { ServerCall<ReqT, RespT> interceptedCall = new ForwardingServerCall .SimpleForwardingServerCall<ReqT, RespT>(call) { @Override public void sendMessage(RespT message) { order.add("i2sendMessage"); assertTrue(message instanceof InputStream); super.sendMessage(message); } }; ServerCall.Listener<ReqT> originalListener = next .startCall(interceptedCall, headers); return new ForwardingServerCallListener .SimpleForwardingServerCallListener<ReqT>(originalListener) { @Override public void onMessage(ReqT message) { order.add("i2onMessage"); assertTrue(message instanceof InputStream); super.onMessage(message); } }; } }; ServerServiceDefinition intercepted = ServerInterceptors.intercept(serviceDef, interceptor1); ServerServiceDefinition inputStreamMessageService = ServerInterceptors .useInputStreamMessages(intercepted); ServerServiceDefinition intercepted2 = ServerInterceptors .intercept(inputStreamMessageService, interceptor2); ServerMethodDefinition<InputStream, InputStream> serverMethod = (ServerMethodDefinition<InputStream, InputStream>) intercepted2.getMethod("basic/wrapped"); ServerCall<InputStream, InputStream> call2 = new NoopServerCall<InputStream, InputStream>(); byte[] bytes = {}; serverMethod .getServerCallHandler() .startCall(call2, headers) .onMessage(new ByteArrayInputStream(bytes)); assertEquals( 
Arrays.asList("i2onMessage", "i1onMessage", "handler", "i1sendMessage", "i2sendMessage"), order); } @SuppressWarnings("unchecked") private static ServerMethodDefinition<String, Integer> getSoleMethod( ServerServiceDefinition serviceDef) { if (serviceDef.getMethods().size() != 1) { throw new AssertionError("Not exactly one method present"); } return (ServerMethodDefinition<String, Integer>) getOnlyElement(serviceDef.getMethods()); } @SuppressWarnings("unchecked") private static ServerMethodDefinition<String, Integer> getMethod( ServerServiceDefinition serviceDef, String name) { return (ServerMethodDefinition<String, Integer>) serviceDef.getMethod(name); } private ServerCallHandler<String, Integer> anyCallHandler() { return Mockito.<ServerCallHandler<String, Integer>>any(); } private static class NoopInterceptor implements ServerInterceptor { @Override public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall( ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) { return next.startCall(call, headers); } } private static class Holder { private final InputStream inputStream; Holder(InputStream inputStream) { this.inputStream = inputStream; } public InputStream get() { return inputStream; } } }
/*
 * ***** BEGIN LICENSE BLOCK *****
 * Zimbra Collaboration Suite Server
 * Copyright (C) 2008, 2009, 2010, 2011 Zimbra, Inc.
 *
 * The contents of this file are subject to the Zimbra Public License
 * Version 1.3 ("License"); you may not use this file except in
 * compliance with the License.  You may obtain a copy of the License at
 * http://www.zimbra.com/license.
 *
 * Software distributed under the License is distributed on an "AS IS"
 * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied.
 * ***** END LICENSE BLOCK *****
 */
package com.zimbra.cs.datasource;

import java.io.CharArrayWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import com.zimbra.common.calendar.ICalTimeZone;
import com.zimbra.common.calendar.ZCalendar;
import com.zimbra.common.calendar.ZCalendar.ZComponent;
import com.zimbra.common.calendar.ZCalendar.ZProperty;
import com.zimbra.common.localconfig.LC;
import com.zimbra.common.service.ServiceException;
import com.zimbra.common.util.ZimbraLog;
import com.zimbra.cs.account.DataSource;
import com.zimbra.cs.account.Provisioning;
import com.zimbra.cs.dav.DavElements;
import com.zimbra.cs.dav.DavException;
import com.zimbra.cs.dav.client.CalDavClient;
import com.zimbra.cs.dav.client.CalDavClient.Appointment;
import com.zimbra.cs.dav.client.DavObject;
import com.zimbra.cs.dav.client.DavRequest;
import com.zimbra.cs.db.DbDataSource;
import com.zimbra.cs.db.DbDataSource.DataSourceItem;
import com.zimbra.cs.mailbox.CalendarItem;
import com.zimbra.cs.mailbox.Flag;
import com.zimbra.cs.mailbox.Folder;
import com.zimbra.cs.mailbox.MailItem;
import com.zimbra.cs.mailbox.MailServiceException;
import com.zimbra.cs.mailbox.MailServiceException.NoSuchItemException;
import com.zimbra.cs.mailbox.Mailbox;
import com.zimbra.cs.mailbox.Mailbox.SetCalendarItemData;
import com.zimbra.cs.mailbox.Metadata;
import com.zimbra.cs.mailbox.OperationContext;
import com.zimbra.cs.mailbox.calendar.Invite;
import com.zimbra.soap.type.DataSource.ConnectionType;

/**
 * Two-way synchronization between a remote CalDAV account and local calendar folders.
 *
 * Local state is tracked via {@link DbDataSource} mappings: each mapping row ties a local
 * item id to a remote href and carries a small {@link Metadata} blob (type, etag, ctag).
 * {@link #importData} drives a sync cycle: folders are discovered/reconciled in
 * {@link #syncFolders}, then each appointment folder is synced in {@link #sync}, which
 * pushes local deletes/modifications ({@link #pushDelete}, {@link #pushModify}) and pulls
 * remote changes ({@link #getRemoteItems}, {@link #applyRemoteItem}).
 */
public class CalDavDataImport extends MailItemImport {

    // Keys/values used inside the per-mapping Metadata blob.
    private static final String METADATA_KEY_TYPE = "t";
    private static final String METADATA_TYPE_FOLDER = "f";
    private static final String METADATA_TYPE_APPOINTMENT = "a";
    private static final String METADATA_KEY_ETAG = "e";
    private static final String METADATA_KEY_CTAG = "c";

    // New calendar folders are created with the "checked" flag set.
    private static final int DEFAULT_FOLDER_FLAGS = Flag.BITMASK_CHECKED;

    // Lazily created DAV client; see getClient().
    private CalDavClient mClient;

    /** Pairs a local folder id with its (lazily loaded) Folder and ctag-match state. */
    private static class CalendarFolder {
        public int id;
        public Folder folder;
        // true when the remote collection's ctag is unchanged since last sync,
        // letting sync() skip the pull phase for this folder.
        public boolean ctagMatched;
        public CalendarFolder(int fid) { id = fid; }
    }

    public CalDavDataImport(DataSource ds) throws ServiceException {
        super(ds);
    }

    /**
     * Entry point for a sync cycle. If folderIds is given, only those folders are synced;
     * otherwise the folder list is reconciled against the server via syncFolders().
     * Note: the fullSync flag is not consulted in this implementation.
     */
    @Override
    public void importData(List<Integer> folderIds, boolean fullSync)
    throws ServiceException {
        ArrayList<CalendarFolder> folders = new ArrayList<CalendarFolder>();
        try {
            mbox.beginTrackingSync();
            if (folderIds != null)
                for (int fid : folderIds)
                    folders.add(new CalendarFolder(fid));
            else
                folders = syncFolders();
            OperationContext octxt = new OperationContext(mbox);
            for (CalendarFolder f : folders) {
                f.folder = mbox.getFolderById(octxt, f.id);
                // only appointment-view folders participate in CalDAV sync
                if (f.folder.getDefaultView() == MailItem.Type.APPOINTMENT) {
                    sync(octxt, f);
                }
            }
        } catch (DavException e) {
            throw ServiceException.FAILURE("error importing CalDAV data", e);
        } catch (IOException e) {
            throw ServiceException.FAILURE("error importing CalDAV data", e);
        }
    }

    /**
     * Connectivity test: builds a fresh client (bypassing the cached one) and attempts a
     * login against the principal URL.
     */
    @Override
    public void test() throws ServiceException {
        mClient = new CalDavClient(getTargetUrl());
        mClient.setAppName(getAppName());
        mClient.setCredential(getUsername(), getDecryptedPassword());
        mClient.setDebugEnabled(dataSource.isDebugTraceEnabled());
        try {
            mClient.login(getDefaultPrincipalUrl());
        } catch (Exception x) {
            throw ServiceException.FAILURE(x.getMessage(), x);
        }
    }

    protected String getUsername() {
        return getDataSource().getUsername();
    }

    protected String getDecryptedPassword() throws ServiceException {
        return getDataSource().getDecryptedPassword();
    }

    /** Color used for newly created calendar folders; subclasses may override. */
    protected byte getDefaultColor() {
        return 0;
    }

    /**
     * Principal URL taken from the data source's zimbraDataSourceAttribute values: the
     * first value of the form "p:URL" wins, with "_USERNAME_" substituted by the username.
     * Returns null when no such attribute exists.
     */
    protected String getDefaultPrincipalUrl() {
        DataSource ds = getDataSource();
        String attrs[] = ds.getMultiAttr(Provisioning.A_zimbraDataSourceAttribute);
        for (String a : attrs) {
            if (a.startsWith("p:")) {
                // note: replaceAll treats "_USERNAME_" as a regex; harmless here since
                // the token contains no regex metacharacters
                return a.substring(2).replaceAll("_USERNAME_", getUsername());
            }
        }
        return null;
    }

    /** Builds "http(s)://host:port" from the data source's connection settings. */
    protected String getTargetUrl() {
        DataSource ds = getDataSource();
        ConnectionType ctype = ds.getConnectionType();
        StringBuilder url = new StringBuilder();
        switch (ctype) {
        case ssl:
            url.append("https://");
            break;
        case cleartext:
        default:
            url.append("http://");
            break;
        }
        url.append(ds.getHost()).append(":").append(ds.getPort());
        return url.toString();
    }

    protected String getAppName() {
        return "ZCS";
    }

    /** Returns the cached DAV client, creating and logging it in on first use. */
    private CalDavClient getClient() throws ServiceException, IOException, DavException {
        if (mClient == null) {
            mClient = new CalDavClient(getTargetUrl());
            mClient.setAppName(getAppName());
            mClient.setCredential(getUsername(), getDecryptedPassword());
            mClient.setDebugEnabled(dataSource.isDebugTraceEnabled());
            mClient.login(getDefaultPrincipalUrl());
        }
        return mClient;
    }

    // Change state of an item relative to the last sync.
    private enum Status { created, deleted, modified };

    private static class RemoteItem {
        Status status;
    }

    /**
     * A remote appointment reference: href + etag, plus the local item id when the item
     * is known locally (used for deletions). status is left null for plain etag listings.
     */
    private static class RemoteCalendarItem extends RemoteItem {
        public RemoteCalendarItem(String h, String e) { href = h; etag = e; }
        String href;
        String etag;
        int itemId;
    }

    /**
     * @throws ServiceException subclasses may throw an error
     */
    protected int getRootFolderId(DataSource ds) throws ServiceException {
        return ds.getFolderId();
    }

    /** Loads all folder mappings under the data source root, keyed by remote href. */
    protected HashMap<String,DataSourceItem> getAllFolderMappings(DataSource ds)
    throws ServiceException {
        Collection<DataSourceItem> allFolders = DbDataSource.getAllMappingsInFolder(ds, getRootFolderId(ds));
        HashMap<String,DataSourceItem> folders = new HashMap<String,DataSourceItem>();
        for (DataSourceItem f : allFolders)
            if (f.remoteId != null)
                folders.put(f.remoteId, f);
        return folders;
    }

    /**
     * Reconciles local calendar folders with the remote calendar collections.
     *
     * For each remote collection: reuse/create/rename the matching local folder, record
     * its mapping and ctag, and mark it ctagMatched when the ctag is unchanged. Local
     * folders whose remote collection disappeared are deleted (with their mappings).
     * If the data source's root folder is gone, the data source itself is deleted.
     *
     * @return the list of folders to sync
     */
    private ArrayList<CalendarFolder> syncFolders()
    throws ServiceException, IOException, DavException {
        ArrayList<CalendarFolder> ret = new ArrayList<CalendarFolder>();
        DataSource ds = getDataSource();
        OperationContext octxt = new OperationContext(mbox);
        HashMap<String,DataSourceItem> allFolders = getAllFolderMappings(ds);
        Folder rootFolder = null;
        try {
            rootFolder = mbox.getFolderById(octxt, getRootFolderId(ds));
        } catch (NoSuchItemException e) {
            // folder may be deleted. delete the datasource
            ZimbraLog.datasource.info("Folder %d was deleted.  Deleting data source %s.",
                    getRootFolderId(ds), ds.getName());
            mbox.getAccount().deleteDataSource(ds.getId());
            // return empty array
            return new ArrayList<CalendarFolder>(0);
        }
        List<Integer> deleted = new ArrayList<Integer>();
        // NOTE(review): getLastSyncDate() is narrowed to int; per the comment in sync()
        // this field stores a change-id token rather than a millisecond timestamp, so the
        // cast is presumably safe — confirm getLastSyncDate's actual contract.
        int lastSync = (int)rootFolder.getLastSyncDate();
        if (lastSync > 0) {
            // collect local tombstones so a locally-deleted folder isn't resurrected
            for (int itemId : mbox.getTombstones(lastSync).getAllIds())
                deleted.add(itemId);
        }
        CalDavClient client = getClient();
        Map<String,DavObject> calendars = client.getCalendars();
        for (String name : calendars.keySet()) {
            DavObject obj = calendars.get(name);
            String ctag = obj.getPropertyText(DavElements.E_GETCTAG);
            String url = obj.getHref();
            DataSourceItem f = allFolders.get(url);
            if (f == null)
                f = new DataSourceItem(0, 0, url, null);
            CalendarFolder cf = new CalendarFolder(f.itemId);
            Folder folder = null;
            if (f.itemId != 0) {
                // check if the folder was deleted
                if (deleted.contains(f.itemId)) {
                    // deleted locally: propagate the deletion to the server
                    allFolders.remove(url);
                    DbDataSource.deleteMapping(ds, f.itemId);
                    DbDataSource.deleteAllMappingsInFolder(ds, f.itemId);
                    deleteRemoteFolder(url);
                    continue;
                }
                // check if the folder is valid
                try {
                    folder = mbox.getFolderById(octxt, f.itemId);
                } catch (ServiceException se) {
                    if (se.getCode() != MailServiceException.NO_SUCH_FOLDER) {
                        throw se;
                    }
                    // stale mapping: fall through to the create path below
                    f.itemId = 0;
                }
            }
            if (f.itemId == 0) {
                try {
                    // check if we can use the folder
                    folder = mbox.getFolderByName(octxt, rootFolder.getId(), name);
                    if (folder.getDefaultView() != MailItem.Type.APPOINTMENT) {
                        // name collision with a non-calendar folder; disambiguate
                        name = name + " (" + getDataSource().getName() + ")";
                        folder = null;
                    }
                } catch (MailServiceException.NoSuchItemException e) {
                    // no local folder by that name; will create below
                }
                if (folder == null) {
                    Folder.FolderOptions fopt = new Folder.FolderOptions();
                    fopt.setDefaultView(MailItem.Type.APPOINTMENT).setFlags(DEFAULT_FOLDER_FLAGS).setColor(getDefaultColor());
                    folder = mbox.createFolder(octxt, name, rootFolder.getId(), fopt);
                }
                f.itemId = folder.getId();
                f.folderId = folder.getFolderId();
                f.md = new Metadata();
                f.md.put(METADATA_KEY_TYPE, METADATA_TYPE_FOLDER);
                if (ctag != null) {
                    f.md.put(METADATA_KEY_CTAG, ctag);
                }
                f.remoteId = url;
                cf.id = f.itemId;
                mbox.setSyncDate(octxt, folder.getId(), mbox.getLastChangeID());
                DbDataSource.addMapping(ds, f);
            } else if (f.md == null) {
                // mapping exists but lost its metadata; rebuild it
                ZimbraLog.datasource.warn("syncFolders: empty metadata for item %d", f.itemId);
                f.folderId = folder.getFolderId();
                f.remoteId = url;
                f.md = new Metadata();
                f.md.put(METADATA_KEY_TYPE, METADATA_TYPE_FOLDER);
                if (ctag != null)
                    f.md.put(METADATA_KEY_CTAG, ctag);
                DbDataSource.addMapping(ds, f);
            } else if (ctag != null) {
                // compare ctags to decide whether the pull phase can be skipped
                String oldctag = f.md.get(METADATA_KEY_CTAG, null);
                if (ctag.equals(oldctag)) {
                    cf.ctagMatched = true;
                } else {
                    f.md.put(METADATA_KEY_CTAG, ctag);
                    DbDataSource.updateMapping(ds, f);
                }
            }
            String fname = folder.getName();
            if (!fname.equals(name)) {
                // remote display name changed; follow it locally (best effort)
                ZimbraLog.datasource.warn("renaming folder %s to %s", fname, name);
                try {
                    mbox.rename(octxt, f.itemId, MailItem.Type.FOLDER, name, folder.getFolderId());
                } catch (ServiceException e) {
                    ZimbraLog.datasource.warn("folder rename failed", e);
                }
            }
            allFolders.remove(url);
            ret.add(cf);
        }
        if (!allFolders.isEmpty()) {
            // handle deleted folders: anything left in allFolders no longer exists remotely
            ArrayList<Integer> fids = new ArrayList<Integer>();
            int[] fidArray = new int[allFolders.size()];
            int i = 0;
            for (DataSourceItem f : allFolders.values()) {
                Folder folder = mbox.getFolderById(octxt, f.itemId);
                // only remove calendar/task folders; leave anything else alone
                if (folder != null &&
                        folder.getDefaultView() != MailItem.Type.APPOINTMENT &&
                        folder.getDefaultView() != MailItem.Type.TASK) {
                    continue;
                }
                fids.add(f.itemId);
                fidArray[i++] = f.itemId;
                DbDataSource.deleteAllMappingsInFolder(ds, f.itemId);
            }
            if (!fids.isEmpty()) {
                DbDataSource.deleteMappings(ds, fids);
                try {
                    // NOTE(review): fidArray may have trailing zeros when some folders
                    // were skipped above (i < allFolders.size()) — confirm mbox.delete
                    // tolerates id 0 in the array.
                    mbox.delete(octxt, fidArray, MailItem.Type.FOLDER, null);
                } catch (ServiceException e) {
                    ZimbraLog.datasource.warn("folder delete failed", e);
                }
            }
        }
        mbox.setSyncDate(octxt, rootFolder.getId(), mbox.getLastChangeID());
        return ret;
    }

    /** Issues a DAV DELETE for the given collection URL. */
    private void deleteRemoteFolder(String url)
    throws ServiceException, IOException, DavException {
        ZimbraLog.datasource.debug("deleteRemoteFolder: deleting remote folder %s", url);
        getClient().sendRequest(DavRequest.DELETE(url));
    }

    /**
     * Pushes local deletions to the server and removes the corresponding mappings.
     * Per-item failures are logged and skipped (best effort).
     *
     * @return true if at least one item was deleted
     */
    private boolean pushDelete(Collection<Integer> itemIds) throws ServiceException {
        DataSource ds = getDataSource();
        boolean deleted = false;
        ArrayList<Integer> toDelete = new ArrayList<Integer>();
        for (int itemId : itemIds) {
            try {
                deleteRemoteItem(DbDataSource.getMapping(ds, itemId));
                toDelete.add(itemId);
            } catch (Exception e) {
                ZimbraLog.datasource.warn("pushDelete: can't delete remote item for item "+itemId, e);
            }
        }
        if (toDelete.size() > 0) {
            DbDataSource.deleteMappings(ds, toDelete);
            deleted = true;
        }
        return deleted;
    }

    /**
     * Deletes the remote counterpart of the given mapping, if it is an appointment.
     */
    private void deleteRemoteItem(DataSourceItem item)
    throws ServiceException, IOException, DavException {
        if (item.itemId <= 0 || item.md == null) {
            ZimbraLog.datasource.warn("pushDelete: empty item %d", item.itemId);
            return;
        }
        String type = item.md.get(METADATA_KEY_TYPE, null);
        if (type == null || !type.equals(METADATA_TYPE_APPOINTMENT)) {
            // not a calendar item
            return;
        }
        String uri = item.remoteId;
        if (uri == null) {
            ZimbraLog.datasource.warn("pushDelete: empty uri for item %d", item.itemId);
            return;
        }
        // NOTE(review): the guard above already returned unless type is
        // METADATA_TYPE_APPOINTMENT, so the FOLDER branch and the final else below are
        // unreachable dead code. Presumably the guard was meant to reject only
        // type == null — confirm intent before changing (enabling the folder branch
        // would start deleting remote collections from this path).
        if (METADATA_TYPE_FOLDER.equals(type)) {
            ZimbraLog.datasource.debug("pushDelete: deleting remote folder %s", uri);
            getClient().sendRequest(DavRequest.DELETE(uri));
        } else if (METADATA_TYPE_APPOINTMENT.equals(type)) {
            ZimbraLog.datasource.debug("pushDelete: deleting remote appointment %s", uri);
            getClient().sendRequest(DavRequest.DELETE(uri));
        } else {
            ZimbraLog.datasource.warn("pushDelete: unrecognized item type for %d: %s", item.itemId, type);
        }
    }

    /**
     * Computes the remote URL for a new local item: the parent folder's collection URL
     * plus "UID.ics" for appointments, or the item name/subject otherwise.
     */
    private String createTargetUrl(MailItem mitem) throws ServiceException {
        DataSourceItem folder = DbDataSource.getMapping(getDataSource(), mitem.getFolderId());
        String url = folder.remoteId;
        switch (mitem.getType()) {
        case APPOINTMENT:
            url += ((CalendarItem)mitem).getUid() + ".ics";
            break;
        default:
            String name = mitem.getName();
            if (name != null)
                url += name;
            else
                url += mitem.getSubject();
            break;
        }
        return url;
    }

    /**
     * Pushes a locally created/modified item to the server and records its new etag in
     * the mapping. Folder renames are recognized but not yet pushed.
     */
    private void pushModify(MailItem mitem)
    throws ServiceException, IOException, DavException {
        int itemId = mitem.getId();
        DataSource ds = getDataSource();
        DataSourceItem item = DbDataSource.getMapping(ds, itemId);
        boolean isCreate = false;
        if (item.remoteId == null) {
            // new item
            item.md = new Metadata();
            item.md.put(METADATA_KEY_TYPE, METADATA_TYPE_APPOINTMENT);
            item.remoteId = createTargetUrl(mitem);
            item.folderId = mitem.getFolderId();
            isCreate = true;
        }
        String type = item.md.get(METADATA_KEY_TYPE);
        if (METADATA_TYPE_FOLDER.equals(type)) {
            if (mitem.getType() != MailItem.Type.FOLDER) {
                ZimbraLog.datasource.warn("pushModify: item type doesn't match in metadata for item %d", itemId);
                return;
            }
            // detect and push rename
        } else if (METADATA_TYPE_APPOINTMENT.equals(type)) {
            if (mitem.getType() != MailItem.Type.APPOINTMENT) {
                ZimbraLog.datasource.warn("pushModify: item type doesn't match in metadata for item %d", itemId);
                return;
            }
            // push modified appt
            ZimbraLog.datasource.debug("pushModify: sending appointment %s", item.remoteId);
            String etag = putAppointment((CalendarItem)mitem, item);
            if (etag == null) {
                // server didn't return an etag on PUT; fetch it explicitly.
                // NOTE(review): uses mClient directly — putAppointment has just
                // initialized it via getClient(), so it is non-null here.
                Appointment appt = mClient.getEtag(item.remoteId);
                etag = appt.etag;
            }
            item.md.put(METADATA_KEY_ETAG, etag);
            if (isCreate) {
                DbDataSource.addMapping(ds, item);
            } else {
                DbDataSource.updateMapping(ds, item);
            }
        } else {
            ZimbraLog.datasource.warn("pushModify: unrecognized item type for %d: %s", itemId, type);
            return;
        }
    }

    /**
     * Serializes the calendar item (VTIMEZONEs + all invite VEVENTs, organizer rewritten
     * to this account's username) into an iCalendar stream and PUTs it to the server.
     *
     * @return the etag returned by the server, possibly null
     */
    private String putAppointment(CalendarItem calItem, DataSourceItem dsItem)
    throws ServiceException, IOException, DavException {
        StringBuilder buf = new StringBuilder();
        // NOTE(review): recipients is never populated, so it is always reset to null
        // below — scheduling recipients are apparently not sent from this path.
        ArrayList<String> recipients = new ArrayList<String>();

        buf.append("BEGIN:VCALENDAR\r\n");
        buf.append("VERSION:").append(ZCalendar.sIcalVersion).append("\r\n");
        buf.append("PRODID:").append(ZCalendar.sZimbraProdID).append("\r\n");
        Iterator<ICalTimeZone> iter = calItem.getTimeZoneMap().tzIterator();
        while (iter.hasNext()) {
            ICalTimeZone tz = iter.next();
            CharArrayWriter wr = new CharArrayWriter();
            tz.newToVTimeZone().toICalendar(wr, true);
            wr.flush();
            buf.append(wr.toCharArray());
            wr.close();
        }
        boolean appleICalExdateHack = LC.calendar_apple_ical_compatible_canceled_instances.booleanValue();
        ZComponent[] vcomps = Invite.toVComponents(calItem.getInvites(), true, false, appleICalExdateHack);
        if (vcomps != null) {
            CharArrayWriter wr = new CharArrayWriter();
            for (ZComponent vcomp : vcomps) {
                // present this account as the organizer on the remote copy
                ZProperty organizer = vcomp.getProperty(ZCalendar.ICalTok.ORGANIZER);
                if (organizer != null)
                    organizer.setValue(getUsername());
                vcomp.toICalendar(wr, true);
            }
            wr.flush();
            buf.append(wr.toCharArray());
            wr.close();
        }
        buf.append("END:VCALENDAR\r\n");
        String etag = dsItem.md.get(METADATA_KEY_ETAG, null);
        if (recipients.isEmpty())
            recipients = null;
        Appointment appt = new Appointment(dsItem.remoteId, etag, buf.toString(), recipients);
        return getClient().sendCalendarData(appt);
    }

    /**
     * Lists the folder's remote appointments (href + etag). CalDAV has no delete
     * tombstones, so remote deletions are detected by diffing the server's current
     * listing against our stored mappings: any mapping whose href no longer appears is
     * returned with status=deleted and its mapping row removed.
     */
    private List<RemoteItem> getRemoteItems(Folder folder)
    throws ServiceException, IOException, DavException {
        ZimbraLog.datasource.debug("Refresh folder %s", folder.getPath());
        DataSource ds = getDataSource();
        DataSourceItem item = DbDataSource.getMapping(ds, folder.getId());
        if (item.md == null)
            throw ServiceException.FAILURE("Mapping for folder "+folder.getPath()+" not found", null);

        // Cross-reference the current server listing with what we saw at last sync to
        // spot appointments that disappeared remotely.
        HashMap<String,DataSourceItem> allItems = new HashMap<String,DataSourceItem>();
        for (DataSourceItem localItem : DbDataSource.getAllMappingsInFolder(getDataSource(), folder.getId()))
            allItems.put(localItem.remoteId, localItem);
        ArrayList<RemoteItem> ret = new ArrayList<RemoteItem>();
        CalDavClient client = getClient();
        Collection<Appointment> appts = client.getEtags(item.remoteId);
        for (Appointment a : appts) {
            ret.add(new RemoteCalendarItem(a.href, a.etag));
            allItems.remove(a.href);
        }
        ArrayList<Integer> deletedIds = new ArrayList<Integer>();
        for (DataSourceItem deletedItem : allItems.values()) {
            // what's left in the collection are previous mappings that have disappeared;
            // the locally mapped appointments need to be deleted.
            RemoteCalendarItem rci = new RemoteCalendarItem(deletedItem.remoteId, null);
            rci.status = Status.deleted;
            rci.itemId = deletedItem.itemId;
            ret.add(rci);
            deletedIds.add(deletedItem.itemId);
            ZimbraLog.datasource.debug("deleting: %d (%s) ", deletedItem.itemId, deletedItem.remoteId);
        }
        if (!deletedIds.isEmpty())
            DbDataSource.deleteMappings(ds, deletedIds);
        return ret;
    }

    /**
     * Applies one remote change locally: deletes the local item for status=deleted,
     * downloads and (re)creates the calendar item when the etag is stale or the local
     * copy is missing, or returns the existing item when up to date.
     *
     * @return the affected local item, or null when nothing was applied
     */
    private MailItem applyRemoteItem(RemoteItem remoteItem, Folder where)
    throws ServiceException, IOException {
        if (!(remoteItem instanceof RemoteCalendarItem)) {
            ZimbraLog.datasource.warn("applyRemoteItem: not a calendar item: %s", remoteItem);
            return null;
        }
        RemoteCalendarItem item = (RemoteCalendarItem) remoteItem;
        DataSource ds = getDataSource();
        DataSourceItem dsItem = DbDataSource.getReverseMapping(ds, item.href);
        OperationContext octxt = new OperationContext(mbox);
        MailItem mi = null;
        boolean isStale = false;
        boolean isCreate = false;
        if (dsItem.md == null && item.status != Status.deleted) {
            dsItem.md = new Metadata();
            dsItem.md.put(METADATA_KEY_TYPE, METADATA_TYPE_APPOINTMENT);
        }
        if (dsItem.itemId == 0) {
            // no mapping yet: this is a brand-new remote appointment
            isStale = true;
            isCreate = true;
        } else {
            // NOTE(review): dsItem.md can still be null here when item.status == deleted
            // (the rebuild above is skipped) — this get() would then NPE; confirm
            // getReverseMapping always returns metadata for mapped items.
            String etag = dsItem.md.get(METADATA_KEY_ETAG, null);
            try {
                mi = mbox.getItemById(octxt, dsItem.itemId, MailItem.Type.UNKNOWN);
            } catch (MailServiceException.NoSuchItemException se) {
                ZimbraLog.datasource.warn("applyRemoteItem: calendar item not found: ", remoteItem);
            }
            if (item.etag == null) {
                ZimbraLog.datasource.warn("No Etag returned for item %s", item.href);
                isStale = true;
            } else if (etag == null) {
                ZimbraLog.datasource.warn("Empty etag for item %d", dsItem.itemId);
                isStale = true;
            } else {
                isStale = !item.etag.equals(etag);
            }
            if (mi == null)
                isStale = true;
        }
        if (item.status == Status.deleted) {
            ZimbraLog.datasource.debug("Deleting appointment %s", item.href);
            try {
                mi = mbox.getItemById(octxt, item.itemId, MailItem.Type.UNKNOWN);
            } catch (NoSuchItemException se) {
                mi = null;
            }
            try {
                mbox.delete(octxt, item.itemId, MailItem.Type.UNKNOWN);
            } catch (ServiceException se) {
                ZimbraLog.datasource.warn("Error deleting remotely deleted item %d (%s)", item.itemId, dsItem.remoteId);
            }
        } else if (isStale) {
            ZimbraLog.datasource.debug("Updating stale appointment %s", item.href);
            ZCalendar.ZVCalendar vcalendar;
            SetCalendarItemData main = new SetCalendarItemData();
            SetCalendarItemData exceptions[] = null;
            CalDavClient client = null;
            try {
                client = getClient();
            } catch (DavException e) {
                throw ServiceException.FAILURE("error creating CalDAV client", e);
            }
            Appointment appt = client.getCalendarData(new Appointment(item.href, item.etag));
            if (appt.data == null) {
                ZimbraLog.datasource.warn("No appointment found at "+item.href);
                return null;
            }
            dsItem.md.put(METADATA_KEY_ETAG, appt.etag);
            try {
                vcalendar = ZCalendar.ZCalendarBuilder.build(appt.data);
                List<Invite> invites = Invite.createFromCalendar(mbox.getAccount(), null, vcalendar, true);
                // first invite is the series/main entry; the rest are exceptions
                if (invites.size() > 1)
                    exceptions = new SetCalendarItemData[invites.size() - 1];
                int pos = 0;
                boolean first = true;
                for (Invite i : invites) {
                    if (first) {
                        main.invite = i;
                        first = false;
                    } else {
                        SetCalendarItemData scid = new SetCalendarItemData();
                        scid.invite = i;
                        exceptions[pos++] = scid;
                    }
                }
            } catch (Exception e) {
                // unparseable data from the server: skip this appointment
                ZimbraLog.datasource.warn("Error parsing appointment ", e);
                return null;
            }
            mi = mbox.setCalendarItem(octxt, where.getId(), 0, null, main, exceptions, null, CalendarItem.NEXT_ALARM_KEEP_CURRENT);
            dsItem.itemId = mi.getId();
            dsItem.folderId = mi.getFolderId();
            if (isCreate) {
                DbDataSource.addMapping(ds, dsItem);
            } else {
                DbDataSource.updateMapping(ds, dsItem);
            }
        } else {
            ZimbraLog.datasource.debug("Appointment up to date %s", item.href);
            try {
                mi = mbox.getItemById(octxt, dsItem.itemId, MailItem.Type.UNKNOWN);
            } catch (NoSuchItemException se) {
                // item not found. delete the mapping so it can be downloaded again if needed.
                ArrayList<Integer> deletedIds = new ArrayList<Integer>();
                deletedIds.add(dsItem.itemId);
                DbDataSource.deleteMappings(ds, deletedIds);
            }
        }
        return mi;
    }

    /**
     * Syncs one calendar folder. Loops pushing local deletions/modifications (skipping
     * changes that were themselves just pulled from the server) until no local change
     * remains, then pulls remote changes; finally records the new sync token.
     */
    private void sync(OperationContext octxt, CalendarFolder cf)
    throws ServiceException, IOException, DavException {
        Folder syncFolder = cf.folder;
        int lastSync = (int)syncFolder.getLastSyncDate();  // hack alert: caldav import uses sync date field to store sync token
        int currentSync = lastSync;
        boolean allDone = false;
        // change-ids of items just pulled from the server, so the push phase can tell
        // remote-originated changes from genuine local edits
        HashMap<Integer,Integer> modifiedFromRemote = new HashMap<Integer,Integer>();
        ArrayList<Integer> deletedFromRemote = new ArrayList<Integer>();

        // loop through as long as there are un'synced local changes
        while (!allDone) {
            allDone = true;
            if (lastSync > 0) {  // Don't push local changes during initial sync.
                // push local deletion
                List<Integer> deleted = new ArrayList<Integer>();
                for (int itemId : mbox.getTombstones(lastSync).getAllIds()) {
                    if (deletedFromRemote.contains(itemId)) {
                        continue;  // was just deleted from sync
                    }
                    deleted.add(itemId);
                }
                // move to trash is equivalent to delete
                HashSet<Integer> fid = new HashSet<Integer>();
                fid.add(Mailbox.ID_FOLDER_TRASH);
                List<Integer> trashed = mbox.getModifiedItems(octxt, lastSync, MailItem.Type.UNKNOWN, fid).getFirst();
                deleted.addAll(trashed);
                if (!deleted.isEmpty()) {
                    // pushDelete returns true if one or more items were deleted
                    allDone &= !pushDelete(deleted);
                }
                // push local modification
                fid.clear();
                fid.add(syncFolder.getId());
                List<Integer> modified = mbox.getModifiedItems(octxt, lastSync, MailItem.Type.UNKNOWN, fid).getFirst();
                for (int itemId : modified) {
                    MailItem item = mbox.getItemById(octxt, itemId, MailItem.Type.UNKNOWN);
                    if (modifiedFromRemote.containsKey(itemId) &&
                            modifiedFromRemote.get(itemId).equals(item.getModifiedSequence()))
                        continue;  // was just downloaded from remote
                    try {
                        pushModify(item);
                    } catch (Exception e) {
                        // best effort: keep syncing the remaining items
                        ZimbraLog.datasource.info("Failed to push item "+item.getId(), e);
                    }
                    allDone = false;
                }
            }
            if (cf.ctagMatched) {
                // remote collection unchanged: skip the pull phase entirely
                currentSync = mbox.getLastChangeID();
                break;
            }
            // pull in the changes from the remote server
            List<RemoteItem> remoteItems = getRemoteItems(syncFolder);
            for (RemoteItem item : remoteItems) {
                MailItem localItem = applyRemoteItem(item, syncFolder);
                if (localItem != null) {
                    if (item.status == Status.deleted)
                        deletedFromRemote.add(localItem.getId());
                    else
                        modifiedFromRemote.put(localItem.getId(), localItem.getModifiedSequence());
                }
            }
            currentSync = mbox.getLastChangeID();
            lastSync = currentSync;
        }
        mbox.setSyncDate(octxt, syncFolder.getId(), currentSync);
    }
}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.mvel.integrationtests;

import java.util.Collection;

import org.drools.testcoverage.common.util.KieBaseTestConfiguration;
import org.drools.testcoverage.common.util.KieUtil;
import org.drools.testcoverage.common.util.TestParametersUtil;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.kie.api.KieBase;
import org.kie.api.KieServices;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.ReleaseId;
import org.kie.api.definition.KiePackage;
import org.kie.api.definition.rule.Rule;
import org.kie.api.runtime.KieContainer;

import static org.junit.Assert.assertEquals;

/**
 * Tests the inclusion of a KieBase defined in one KJAR into the KieBase of another KJAR
 * (the {@code includes} attribute of {@code <kbase>}), both when the two KJARs use
 * different rule packages and when they share the same package name.
 */
// DROOLS-1044
@RunWith(Parameterized.class)
public class KieBaseIncludesTest {

    // Fixture content for the "rules-1" KJAR; identical in both test scenarios, so it is
    // shared here instead of being duplicated in each test method.

    /** POM of "rules-1"; declares the dependency on the "rules-2" KJAR. */
    // @formatter:off
    private static final String POM_CONTENT_1 =
            "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n" +
            "<modelVersion>4.0.0</modelVersion>\n" +
            "<groupId>org.kie</groupId>\n" +
            "<artifactId>rules-1</artifactId>\n" +
            "<version>1.0.0</version>\n" +
            "<packaging>jar</packaging>\n" +
            "<dependencies>\n" +
            "<dependency>\n" +
            "<groupId>org.kie</groupId>\n" +
            "<artifactId>rules-2</artifactId>\n" +
            "<version>1.0.0</version>\n" +
            "</dependency>\n" +
            "</dependencies>\n" +
            "</project>\n";

    /** kmodule.xml of "rules-1"; its "kbase1" includes "kbase2" from the other KJAR. */
    private static final String KMODULE_CONTENT_1 =
            "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
            "<kmodule xmlns=\"http://jboss.org/kie/6.0.0/kmodule\">\n" +
            "<kbase name=\"kbase1\" equalsBehavior=\"equality\" default=\"true\" packages=\"rules\" includes=\"kbase2\">\n" +
            "<ksession name=\"ksession1\" default=\"true\" type=\"stateful\"/>\n" +
            "</kbase>\n" +
            "</kmodule>";

    /** The single rule owned by "rules-1" (package "rules"). */
    private static final String DRL_1 =
            "package rules\n" +
            "\n" +
            "rule \"Rule in KieBase 1\"\n" +
            "when\n" +
            "then\n" +
            "System.out.println(\"Rule in KieBase1\");\n" +
            "end";
    // @formatter:on

    private final KieBaseTestConfiguration kieBaseTestConfiguration;

    public KieBaseIncludesTest(final KieBaseTestConfiguration kieBaseTestConfiguration) {
        this.kieBaseTestConfiguration = kieBaseTestConfiguration;
    }

    @Parameterized.Parameters(name = "KieBase type={0}")
    public static Collection<Object[]> getParameters() {
        return TestParametersUtil.getKieBaseCloudConfigurations(true);
    }

    /**
     * Test the inclusion of a KieBase defined in one KJAR into the KieBase of another KJAR.
     * <p/>
     * The 2 KieBases use different package names for the rules (i.e. "rules" and "rules2").
     */
    @Test
    public void testKieBaseIncludesCrossKJarDifferentPackageNames() {
        // @formatter:off
        String kmoduleContent2 =
                "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
                "<kmodule xmlns=\"http://jboss.org/kie/6.0.0/kmodule\">\n" +
                "<kbase name=\"kbase2\" equalsBehavior=\"equality\" default=\"false\" packages=\"rules2\">\n" +
                "<ksession name=\"ksession2\" default=\"false\" type=\"stateful\"/>\n" +
                "</kbase>\n" +
                "</kmodule>";
        String drl2 =
                "package rules2\n" +
                "\n" +
                "rule \"Rule in KieBase 2\"\n" +
                "when\n" +
                "then\n" +
                "System.out.println(\"Rule in KieBase2\");\n" +
                "end";
        // @formatter:on
        runIncludesScenario(kmoduleContent2, drl2, "src/main/resources/rules2/rules.drl");
    }

    /**
     * Test the inclusion of a KieBase defined in one KJAR into the KieBase of another KJAR.
     * <p/>
     * The 2 KieBases use the same package names for the rules (i.e. "rules").
     */
    @Test
    public void testKieBaseIncludesCrossKJarSamePackageNames() {
        // @formatter:off
        String kmoduleContent2 =
                "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
                "<kmodule xmlns=\"http://jboss.org/kie/6.0.0/kmodule\">\n" +
                "<kbase name=\"kbase2\" equalsBehavior=\"equality\" default=\"false\" packages=\"rules\">\n" +
                "<ksession name=\"ksession2\" default=\"false\" type=\"stateful\"/>\n" +
                "</kbase>\n" +
                "</kmodule>";
        String drl2 =
                "package rules\n" +
                "\n" +
                "rule \"Rule in KieBase 2\"\n" +
                "when\n" +
                "then\n" +
                "System.out.println(\"Rule in KieBase2\");\n" +
                "end";
        // @formatter:on
        runIncludesScenario(kmoduleContent2, drl2, "src/main/resources/rules/rules.drl");
    }

    /**
     * Builds and deploys the "rules-2" KJAR with the given content, then builds the
     * "rules-1" KJAR (whose "kbase1" includes "kbase2") and asserts that the resulting
     * default KieBase contains both rules.
     *
     * @param kmoduleContent2 kmodule.xml content of the included (second) KJAR
     * @param drl2            DRL content of the included KJAR
     * @param drl2Path        resource path under which {@code drl2} is written
     */
    private void runIncludesScenario(String kmoduleContent2, String drl2, String drl2Path) {
        KieServices ks = KieServices.Factory.get();
        ReleaseId releaseId1 = ks.newReleaseId("org.kie", "rules-1", "1.0.0");
        ReleaseId releaseId2 = ks.newReleaseId("org.kie", "rules-2", "1.0.0");

        // First deploy the second KJAR on which the first one depends.
        KieFileSystem kfs2 = ks.newKieFileSystem()
                .generateAndWritePomXML(releaseId2)
                .write(drl2Path, drl2)
                .writeKModuleXML(kmoduleContent2);
        KieUtil.getKieBuilderFromKieFileSystem(kieBaseTestConfiguration, kfs2, true);

        // The first KJAR needs an explicit POM (not a generated one) so it can declare
        // the dependency on "rules-2".
        KieFileSystem kfs1 = ks.newKieFileSystem()
                .writePomXML(POM_CONTENT_1)
                .write("src/main/resources/rules/rules.drl", DRL_1)
                .writeKModuleXML(KMODULE_CONTENT_1);
        KieUtil.getKieBuilderFromKieFileSystem(kieBaseTestConfiguration, kfs1, true);

        KieContainer kc = ks.newKieContainer(releaseId1);
        KieBase kieBase = kc.getKieBase();

        // We should have 2 rules in our KieBase: one from our own DRL and one from the
        // DRL in the KieBase we've included.
        assertEquals(2, getNumberOfRules(kieBase));
    }

    /**
     * Helper method which determines the number of rules in the {@link KieBase}.
     *
     * @param kieBase the {@link KieBase}
     * @return the number of rules in the {@link KieBase}
     */
    private static long getNumberOfRules(KieBase kieBase) {
        long nrOfRules = 0;
        for (KiePackage nextKiePackage : kieBase.getKiePackages()) {
            Collection<Rule> rules = nextKiePackage.getRules();
            nrOfRules += rules.size();
        }
        return nrOfRules;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.xquery; import java.net.URL; import java.util.HashMap; import java.util.Map; import java.util.Properties; import net.sf.saxon.Configuration; import net.sf.saxon.lib.ModuleURIResolver; import net.sf.saxon.query.StaticQueryContext; import org.apache.camel.Component; import org.apache.camel.spi.Metadata; import org.apache.camel.spi.UriEndpoint; import org.apache.camel.spi.UriParam; import org.apache.camel.spi.UriPath; import org.apache.camel.support.ProcessorEndpoint; import org.apache.camel.support.ResourceHelper; import org.apache.camel.support.service.ServiceHelper; /** * Transforms the message using a XQuery template using Saxon. 
*/
@UriEndpoint(firstVersion = "1.0.0", scheme = "xquery", title = "XQuery", syntax = "xquery:resourceUri", label = "transformation")
public class XQueryEndpoint extends ProcessorEndpoint {

    // Built lazily in doStart() from the resolved resourceUri and installed as this
    // endpoint's processor; volatile because lifecycle and exchange processing may
    // happen on different threads.
    private volatile XQueryBuilder xquery;

    @UriPath @Metadata(required = true)
    private String resourceUri;
    @UriParam(label = "advanced")
    private Configuration configuration;
    @UriParam(label = "advanced")
    private Map<String, Object> configurationProperties = new HashMap<>();
    @UriParam(label = "advanced")
    private StaticQueryContext staticQueryContext;
    @UriParam(label = "advanced")
    private Map<String, Object> parameters = new HashMap<>();
    @UriParam
    private Map<String, String> namespacePrefixes = new HashMap<>();
    @UriParam(defaultValue = "DOM")
    private ResultFormat resultsFormat = ResultFormat.DOM;
    @UriParam(label = "advanced")
    private Properties properties = new Properties();
    @UriParam
    private Class<?> resultType;
    @UriParam(defaultValue = "true")
    private boolean stripsAllWhiteSpace = true;
    @UriParam(label = "advanced")
    private ModuleURIResolver moduleURIResolver;
    @UriParam
    private boolean allowStAX;
    @UriParam
    private String headerName;

    public XQueryEndpoint(String endpointUri, Component component) {
        super(endpointUri, component);
    }

    public String getResourceUri() {
        return resourceUri;
    }

    /**
     * The name of the template to load from classpath or file system
     */
    public void setResourceUri(String resourceUri) {
        this.resourceUri = resourceUri;
    }

    public Configuration getConfiguration() {
        return configuration;
    }

    /**
     * To use a custom Saxon configuration
     */
    public void setConfiguration(Configuration configuration) {
        this.configuration = configuration;
    }

    public Map<String, Object> getConfigurationProperties() {
        return configurationProperties;
    }

    /**
     * To set custom Saxon configuration properties
     */
    public void setConfigurationProperties(Map<String, Object> configurationProperties) {
        this.configurationProperties = configurationProperties;
    }

    public StaticQueryContext getStaticQueryContext() {
        return staticQueryContext;
    }

    /**
     * To use a custom Saxon StaticQueryContext
     */
    public void setStaticQueryContext(StaticQueryContext staticQueryContext) {
        this.staticQueryContext = staticQueryContext;
    }

    public Map<String, Object> getParameters() {
        return parameters;
    }

    /**
     * Additional parameters
     */
    public void setParameters(Map<String, Object> parameters) {
        this.parameters = parameters;
    }

    public Map<String, String> getNamespacePrefixes() {
        return namespacePrefixes;
    }

    /**
     * Allows to control which namespace prefixes to use for a set of namespace mappings
     */
    public void setNamespacePrefixes(Map<String, String> namespacePrefixes) {
        this.namespacePrefixes = namespacePrefixes;
    }

    public ResultFormat getResultsFormat() {
        return resultsFormat;
    }

    /**
     * What output result to use
     */
    public void setResultsFormat(ResultFormat resultsFormat) {
        this.resultsFormat = resultsFormat;
    }

    public Properties getProperties() {
        return properties;
    }

    /**
     * Properties to configure the serialization parameters
     */
    public void setProperties(Properties properties) {
        this.properties = properties;
    }

    public Class<?> getResultType() {
        return resultType;
    }

    /**
     * What output result to use defined as a class
     */
    public void setResultType(Class<?> resultType) {
        this.resultType = resultType;
    }

    public boolean isStripsAllWhiteSpace() {
        return stripsAllWhiteSpace;
    }

    /**
     * Whether to strip all whitespaces
     */
    public void setStripsAllWhiteSpace(boolean stripsAllWhiteSpace) {
        this.stripsAllWhiteSpace = stripsAllWhiteSpace;
    }

    public ModuleURIResolver getModuleURIResolver() {
        return moduleURIResolver;
    }

    /**
     * To use the custom {@link ModuleURIResolver}
     */
    public void setModuleURIResolver(ModuleURIResolver moduleURIResolver) {
        this.moduleURIResolver = moduleURIResolver;
    }

    public boolean isAllowStAX() {
        return allowStAX;
    }

    /**
     * Whether to allow using StAX mode
     */
    public void setAllowStAX(boolean allowStAX) {
        this.allowStAX = allowStAX;
    }

    public String getHeaderName() {
        return headerName;
    }

    /**
     * To use a Camel Message header as the input source instead of Message body.
     */
    public void setHeaderName(String headerName) {
        this.headerName = headerName;
    }

    @Override
    protected void doStart() throws Exception {
        super.doStart();

        log.debug("{} using schema resource: {}", this, resourceUri);
        // The resource must exist at startup; this throws if it cannot be resolved.
        URL url = ResourceHelper.resolveMandatoryResourceAsUrl(getCamelContext().getClassResolver(), resourceUri);

        // Copy every configured option onto a fresh builder, then install it as the
        // processor backing this endpoint and start it.
        this.xquery = XQueryBuilder.xquery(url);
        this.xquery.setConfiguration(getConfiguration());
        this.xquery.setConfigurationProperties(getConfigurationProperties());
        this.xquery.setStaticQueryContext(getStaticQueryContext());
        this.xquery.setParameters(getParameters());
        this.xquery.setNamespaces(namespacePrefixes);
        this.xquery.setResultsFormat(getResultsFormat());
        this.xquery.setProperties(getProperties());
        this.xquery.setResultType(getResultType());
        this.xquery.setStripsAllWhiteSpace(isStripsAllWhiteSpace());
        this.xquery.setAllowStAX(isAllowStAX());
        this.xquery.setHeaderName(getHeaderName());
        this.xquery.setModuleURIResolver(getModuleURIResolver());

        setProcessor(xquery);

        ServiceHelper.startService(xquery);
    }

    @Override
    protected void doStop() throws Exception {
        super.doStop();

        ServiceHelper.stopService(xquery);
    }
}
// Copyright (C) 2009 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.pgm.init; import static com.google.gerrit.pgm.init.api.InitUtil.hostname; import static java.nio.file.Files.exists; import com.google.gerrit.pgm.init.api.ConsoleUI; import com.google.gerrit.pgm.init.api.InitStep; import com.google.gerrit.pgm.init.api.Section; import com.google.gerrit.server.config.SitePaths; import com.google.gerrit.server.ioutil.HostPlatform; import com.google.gerrit.server.util.SocketUtil; import com.google.inject.Inject; import com.google.inject.Singleton; import java.io.IOException; import java.lang.ProcessBuilder.Redirect; import java.net.InetSocketAddress; /** Initialize the {@code sshd} configuration section. 
*/
@Singleton
public class InitSshd implements InitStep {
  private final ConsoleUI ui;
  private final SitePaths site;
  private final Section sshd;
  private final StaleLibraryRemover remover;

  @Inject
  InitSshd(ConsoleUI ui, SitePaths site, Section.Factory sections, StaleLibraryRemover remover) {
    this.ui = ui;
    this.site = site;
    this.sshd = sections.get("sshd", null);
    this.remover = remover;
  }

  @Override
  public void run() throws Exception {
    ui.header("SSH Daemon");

    // Defaults when no listenAddress is configured yet: listen on all
    // interfaces, standard Gerrit SSH port.
    String hostname = "*";
    int port = 29418;
    String listenAddress = sshd.get("listenAddress");
    if (isOff(listenAddress)) {
      hostname = "off";
    } else if (listenAddress != null && !listenAddress.isEmpty()) {
      // Re-parse the existing value so the prompts show the current settings.
      final InetSocketAddress addr = SocketUtil.parse(listenAddress, port);
      hostname = SocketUtil.hostname(addr);
      port = addr.getPort();
    }
    hostname = ui.readString(hostname, "Listen on address");
    if (isOff(hostname)) {
      // User disabled the SSH daemon; record that and skip key generation.
      sshd.set("listenAddress", "off");
      return;
    }

    port = ui.readInt(port, "Listen on port");
    sshd.set("listenAddress", SocketUtil.format(hostname, port));

    generateSshHostKeys();
    // Drop stale BouncyCastle jars left behind by older installs.
    remover.remove("bc(pg|pkix|prov)-.*[.]jar");
  }

  /** True if the value means the SSH daemon should be disabled. */
  static boolean isOff(String listenHostname) {
    return "off".equalsIgnoreCase(listenHostname)
        || "none".equalsIgnoreCase(listenHostname)
        || "no".equalsIgnoreCase(listenHostname);
  }

  /**
   * Generates any missing SSH host keys (rsa, ed25519, ecdsa 256/384/521) by
   * invoking the external {@code ssh-keygen} tool, writing each key to its
   * configured site path. Skipped entirely when the legacy combined key file
   * exists or all individual key files are already present. Failures for the
   * newer key types are reported and ignored so init still succeeds on hosts
   * whose ssh-keygen cannot produce them.
   */
  private void generateSshHostKeys() throws InterruptedException, IOException {
    if (!exists(site.ssh_key)
        && (!exists(site.ssh_rsa)
            || !exists(site.ssh_ed25519)
            || !exists(site.ssh_ecdsa_256)
            || !exists(site.ssh_ecdsa_384)
            || !exists(site.ssh_ecdsa_521))) {
      System.err.print("Generating SSH host key ...");
      System.err.flush();

      // Generate the SSH daemon host key using ssh-keygen.
      //
      final String comment = "gerrit-code-review@" + hostname();

      // Workaround for JDK-6518827 - zero-length argument ignored on Win32
      String emptyPassphraseArg = HostPlatform.isWin32() ? "\"\"" : "";
      if (!exists(site.ssh_rsa)) {
        System.err.print(" rsa...");
        System.err.flush();
        new ProcessBuilder(
                "ssh-keygen",
                "-q" /* quiet */,
                "-t", "rsa",
                "-N", emptyPassphraseArg,
                "-C", comment,
                "-f", site.ssh_rsa.toAbsolutePath().toString())
            .redirectError(Redirect.INHERIT)
            .redirectOutput(Redirect.INHERIT)
            .start()
            .waitFor();
      }

      if (!exists(site.ssh_ed25519)) {
        System.err.print(" ed25519...");
        System.err.flush();
        try {
          new ProcessBuilder(
                  "ssh-keygen",
                  "-q" /* quiet */,
                  "-t", "ed25519",
                  "-N", emptyPassphraseArg,
                  "-C", comment,
                  "-f", site.ssh_ed25519.toAbsolutePath().toString())
              .redirectError(Redirect.INHERIT)
              .redirectOutput(Redirect.INHERIT)
              .start()
              .waitFor();
        } catch (Exception e) {
          // continue since older hosts won't be able to generate ed25519 keys.
          System.err.print(" Failed to generate ed25519 key, continuing...");
          System.err.flush();
        }
      }

      if (!exists(site.ssh_ecdsa_256)) {
        System.err.print(" ecdsa 256...");
        System.err.flush();
        try {
          new ProcessBuilder(
                  "ssh-keygen",
                  "-q" /* quiet */,
                  "-t", "ecdsa",
                  "-b", "256",
                  "-N", emptyPassphraseArg,
                  "-C", comment,
                  "-f", site.ssh_ecdsa_256.toAbsolutePath().toString())
              .redirectError(Redirect.INHERIT)
              .redirectOutput(Redirect.INHERIT)
              .start()
              .waitFor();
        } catch (Exception e) {
          // continue since older hosts won't be able to generate ecdsa keys.
          System.err.print(" Failed to generate ecdsa 256 key, continuing...");
          System.err.flush();
        }
      }

      if (!exists(site.ssh_ecdsa_384)) {
        System.err.print(" ecdsa 384...");
        System.err.flush();
        try {
          new ProcessBuilder(
                  "ssh-keygen",
                  "-q" /* quiet */,
                  "-t", "ecdsa",
                  "-b", "384",
                  "-N", emptyPassphraseArg,
                  "-C", comment,
                  "-f", site.ssh_ecdsa_384.toAbsolutePath().toString())
              .redirectError(Redirect.INHERIT)
              .redirectOutput(Redirect.INHERIT)
              .start()
              .waitFor();
        } catch (Exception e) {
          // continue since older hosts won't be able to generate ecdsa keys.
          System.err.print(" Failed to generate ecdsa 384 key, continuing...");
          System.err.flush();
        }
      }

      if (!exists(site.ssh_ecdsa_521)) {
        System.err.print(" ecdsa 521...");
        System.err.flush();
        try {
          new ProcessBuilder(
                  "ssh-keygen",
                  "-q" /* quiet */,
                  "-t", "ecdsa",
                  "-b", "521",
                  "-N", emptyPassphraseArg,
                  "-C", comment,
                  "-f", site.ssh_ecdsa_521.toAbsolutePath().toString())
              .redirectError(Redirect.INHERIT)
              .redirectOutput(Redirect.INHERIT)
              .start()
              .waitFor();
        } catch (Exception e) {
          // continue since older hosts won't be able to generate ecdsa keys.
          System.err.print(" Failed to generate ecdsa 521 key, continuing...");
          System.err.flush();
        }
      }
      System.err.println(" done");
    }
  }
}
package io.github.bckfnn.mvel.test; import io.github.bckfnn.mvel.Template; import io.github.bckfnn.mvel.TemplateCompiler; import io.github.bckfnn.mvel.TemplateContext; import io.github.bckfnn.mvel.template.io.ClassPathTemplateLoader; import java.io.Serializable; import java.math.BigDecimal; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import junit.framework.TestCase; import org.junit.Test; import org.mvel2.CompileException; import org.mvel2.MVEL; import org.mvel2.integration.VariableResolverFactory; import org.mvel2.integration.impl.MapVariableResolverFactory; import org.mvel2.templates.TemplateRuntime; public class TemplateTest extends TestCase { private Map<String, Object> map = new HashMap<String, Object>(); private Foo foo = new Foo(); private Base base = new Base(); public TemplateTest() { map.put("_foo_", "Foo"); map.put("_bar_", "Bar"); ArrayList<String> list = new ArrayList<String>(3); list.add("Jane"); list.add("John"); list.add("Foo"); map.put("arrayList", list); foo.setBar(new Bar()); map.put("foo", foo); map.put("a", null); map.put("b", null); map.put("c", "cat"); map.put("BWAH", ""); map.put("pi", "3.14"); map.put("hour", "60"); map.put("zero", 0); //noinspection UnnecessaryBoxing map.put("doubleTen", new Double(10)); map.put("variable_with_underscore", "HELLO"); map.put("testImpl", new TestInterface() { public String getName() { return "FOOBAR!"; } public boolean isFoo() { return true; } }); } public Object test(String template) { return test(template, base, map); } public Object test(String template, Object context, Map<String, Object> model) { TemplateCompiler compiler = new TemplateCompiler(new ClassPathTemplateLoader("", "")); Template t = compiler.compile(template.toCharArray()); VariableResolverFactory vrf = new MapVariableResolverFactory(model); io.github.bckfnn.mvel.TemplateRuntime rt = new io.github.bckfnn.mvel.TemplateRuntime(t, context, vrf); rt.exec(); 
return rt.getOutput(); } @Test public void testPassThru() { String s = "foobar!"; assertEquals("foobar!", test(s)); } @Test public void testBasicParsing() { String s = "foo: @{_foo_}--@{_bar_}!"; assertEquals("foo: Foo--Bar!", test(s)); } @Test public void testIfStatement() { String s = "@if(_foo_=='Foo')Hello@end"; assertEquals("Hello", test(s)); } @Test public void testIfStatement2() { String s = "@if(_foo_=='Bar')Hello@else(_foo_=='Foo')Goodbye@end"; assertEquals("Goodbye", test(s)); } @Test public void testIfStatement3() { String s = "@if(_foo_=='Bar')Hello@else(_foo_=='foo')Goodbye@else()Nope@end"; assertEquals("Nope", test(s)); } @Test public void testIfStatement4() { String s = "@if(_foo_=='Foo')Hello@else(_foo_=='foo')Goodbye@else()Nope@end()End"; assertEquals("HelloEnd", test(s)); } @Test public void testIfStatement5() { String s = "@if(_foo_=='foo')Hello@end()Goodbye"; assertEquals("Goodbye", test(s)); } @Test public void testIfNesting() { String s = "@if(_foo_=='Foo')Hello@if(_bar_=='Bar')Bar@end()@else(_foo_=='foo')Goodbye@else()Nope@end()"; assertEquals("HelloBar", test(s)); } @Test public void testForEach() { String s = "List:@foreach(item = arrayList)@{item}@end"; assertEquals("List:JaneJohnFoo", test(s)); } @Test public void testForEachMulti() { // TODO //String s = "Multi:@foreach(item = arrayList, item2 = arrayList)@{item}-@{item2}@end(','):Multi"; //assertEquals("Multi:Jane-Jane,John-John,Foo-Foo:Multi", test(s)); } @Test public void testComplexTemplate() { String s = "@foreach(item = arrayList)@if(item[0] == 'J')@{item}@end()@end()"; assertEquals("JaneJohn", test(s)); } /* @Test public void testFileBasedEval() { assertEquals("Foo::Bar", TemplateRuntime.eval(new File("src/test/java/org/mvel2/tests/templates/templateTest.mv"), base, new MapVariableResolverFactory(map), null)); } */ @Test public void testInclusionOfTemplateFile() { String s = "<<@include('templateTest.mv')>>"; assertEquals("<<Foo::Bar>>", test(s)); } @Test public void 
testInclusionOfTemplateFile2() { String s = "<<@include('templateError.mv')>>"; try { test(s); } catch (CompileException e) { //System.out.println(e.toString()); return; } assertTrue(false); } @Test public void testForEachException1() { String s = "<<@foreach(arrayList)@{item}@end()>>"; try { test(s); } catch (Exception e) { //System.out.println(e.getMessage()); return; } assertTrue(false); } @Test public void testForEachException2() { String s = "<<@foreach(item:arrayList)@{item}>>"; try { test(s); } catch (Exception e) { //System.out.println(e.toString()); return; } assertTrue(false); } /* @Test public void testTemplateFile() { String s = (String) TemplateRuntime.eval(new File("src/test/java/org/mvel2/tests/templates/templateIfTest.mv"), base, new MapVariableResolverFactory(map), null); System.out.println(s); } */ /* TODO port to mvel-async @Test public void testInclusionOfNamedTemplate() { SimpleTemplateRegistry registry = new SimpleTemplateRegistry(); registry.addNamedTemplate("footemplate", compileTemplate("@{_foo_}@{_bar_}")); registry.addNamedTemplate("bartemplate", compileTemplate("@{_bar_}@{_foo_}")); String s = "@includeNamed('footemplate') :: @includeNamed('bartemplate')"; assertEquals("FooBar :: BarFoo", TemplateRuntime.eval(s, map, registry)); } */ @Test public void testExpressions() { String s = "@(_foo_.length())"; Object r = test(s); assertEquals("3", r); } @Test public void testCode() { String s = "@code(a = 'foo'; b = 'bar')@a@b"; assertEquals("foobar", test(s)); } @Test public void testInlineDeclarations() { String s = "@declare('fudge')Hello @{name}!@end@includeNamed($='fudge'; name='John') -- @includeNamed($='fudge'; name='Mary')"; assertEquals("Hello John! -- Hello Mary!", test(s)); } @Test public void testInlineDeclarations2() { String s = "@declare('fudge')Hello @name!@end@code(toInclude='fudge')@includeNamed($=toInclude; name='John') -- @includeNamed($=toInclude; name='Mary')"; assertEquals("Hello John! 
-- Hello Mary!", test(s)); } @Test public void testPluginNode() { /* TODO port over to mvel-async Map<String, Class<? extends dk.innovasion.mvel.template.Node>> plugins = new HashMap<String, Class<? extends dk.innovasion.mvel.template.Node>>(); plugins.put("testNode", TestPluginNode.class); TemplateCompiler compiler = new TemplateCompiler("Foo:@testNode()!!"); CompiledTemplate compiled = compiler.compile(); assertEquals("Foo:THIS_IS_A_TEST!!", TemplateRuntime.execute(compiled)); */ } @Test public void testComments() { assertEquals("Foo", test("@comment( This section is commented )@{_foo_}")); } /** * Integration of old tests */ @Test public void testPassThru2() { assertEquals("foo@bar.com", TemplateRuntime.eval("foo@bar.com", map)); } @Test public void testMethodOnValue() { assertEquals("DOG", test("@(foo.bar.name.toUpperCase())")); } @Test public void testSimpleProperty() { assertEquals("dog", test("@foo.bar.name")); } @Test public void testBooleanOperator() { assertEquals("true", test("@(foo.bar.woof == true)")); } @Test public void testBooleanOperator2() { assertEquals("false", test("@(foo.bar.woof == false)")); } @Test public void testTextComparison() { assertEquals("true", test("@(foo.bar.name == 'dog')")); } @Test public void testNETextComparison() { assertEquals("true", test("@(foo.bar.name != 'foo')")); } @Test public void testChor() { assertEquals("cat", test("@(a or b or c)")); } @Test public void testChorWithLiteral() { assertEquals("fubar", test("@(a or 'fubar')")); } @Test public void testNullCompare() { assertEquals("true", test("@(c != null)")); } @Test public void testAnd() { assertEquals("true", test("@(c != null && foo.bar.name == 'dog' && foo.bar.woof)")); } @Test public void testMath() { assertEquals("188.4", test("@(pi * hour)")); } @Test public void testTemplating() { assertEquals("dogDOGGIE133.5", test("@(foo.bar.name)DOGGIE@(hour*2.225+1-1)")); } @Test public void testComplexAnd() { assertEquals("true", test("@((pi * hour) > 0 && foo.happy() 
== 'happyBar')")); } @Test public void testModulus() { assertEquals("0", test("@(38392 % 2)")); } @Test public void testLessThan() { assertEquals("true", test("@{pi < 3.15}")); assertEquals("true", test("@{pi <= 3.14}")); assertEquals("false", test("@{pi > 3.14}")); assertEquals("true", test("@{pi >= 3.14}")); } @Test public void testMethodAccess() { assertEquals("happyBar", test("@{foo.happy()}")); } @Test public void testMethodAccess2() { assertEquals("FUBAR", test("@{foo.toUC('fubar')}")); } @Test public void testMethodAccess3() { assertEquals("true", test("@{equalityCheck(c, 'cat')}")); } @Test public void testMethodAccess4() { assertEquals("null", test("@{readBack(null)}")); } @Test public void testMethodAccess5() { assertEquals("nulltest", test("@{appendTwoStrings(null, 'test')}")); } @Test public void testMethodAccess6() { assertEquals("false", test("@{!foo.bar.isWoof()}")); } @Test public void testNegation() { assertEquals("true", test("@{!fun && !fun}")); } @Test public void testNegation2() { assertEquals("false", test("@{fun && !fun}")); } @Test public void testNegation3() { assertEquals("true", test("@{!(fun && fun)}")); } @Test public void testNegation4() { assertEquals("false", test("@{(fun && fun)}")); } @Test public void testMultiStatement() { assertEquals("true", test("@{populate(); barfoo == 'sarah'}")); } @Test public void testAssignment2() { assertEquals("sarah", test("@{populate(); blahfoo = barfoo}")); } @Test public void testOr() { assertEquals("true", test("@{fun || true}")); } @Test public void testLiteralPassThrough() { assertEquals("true", test("@{true}")); } @Test public void testLiteralPassThrough2() { assertEquals("false", test("@{false}")); } @Test public void testLiteralPassThrough3() { assertEquals("null", test("@{null}")); } @Test public void testControlLoopList() { assertEquals("HappyHappy!JoyJoy!", test( "@foreach(item = list)" + "@{item}" + "@end" )); } @Test public void testControlLoopArray() { 
assertEquals("Happy0Happy!1Joy2Joy!3", test( "@code(i=0)@foreach(item = array)" + "@{item}@{i++}" + "@end" )); } @Test public void testMultiCollectionControlLoop() { // TODO /* assertEquals("0=Happy:Happy,1=Happy!:Happy!,2=Joy:Joy,3=Joy!:Joy!", test( "@code{i=0}@foreach{item : list, listItem : array}" + "@{i++}=@{item}:@{listItem}" + "@end{','}" )); */ } @Test public void testControlLoopListMultiple() { for (int i = 0; i < 100; i++) { testControlLoopList(); } } @Test public void testControlLoopArrayMultiple() { for (int i = 0; i < 100; i++) { testControlLoopArray(); } } public static interface TestInterface { public String getName(); public boolean isFoo(); } @Test public void testControlLoop2() { assertEquals("HappyHappy!JoyJoy!", test( "@foreach(item = list)" + "@item" + "@end" )); } @Test public void testControlLoop3() { assertEquals("HappyHappy!JoyJoy!", test( "@foreach(item = list )" + "@item" + "@end" )); } @Test public void testIfStatement6() { assertEquals("sarah", test("@if('fun' == 'fun')sarah@end")); } @Test public void testIfStatement7() { assertEquals("poo", test("@if('fun' == 'bar')sarah@else()poo@end")); } @Test public void testRegEx() { assertEquals("true", test("@{foo.bar.name ~= '[a-z].+'}")); } @Test public void testRegExNegate() { assertEquals("false", test("@{!(foo.bar.name ~= '[a-z].+')}")); } @Test public void testRegEx2() { assertEquals("true", test("@{foo.bar.name ~= '[a-z].+' && foo.bar.name != null}")); } @Test public void testBlank() { assertEquals("true", test("@{'' == empty}")); } @Test public void testBlank2() { assertEquals("true", test("@{BWAH == empty}")); } @Test public void testTernary() { assertEquals("foobie", test("@{zero==0?'foobie':zero}")); } @Test public void testTernary2() { assertEquals("blimpie", test("@{zero==1?'foobie':'blimpie'}")); } @Test public void testTernary3() { assertEquals("foobiebarbie", test("@{zero==1?'foobie':'foobie'+'barbie'}")); } @Test public void testTernary4() { assertEquals("no", test("@{ackbar ? 
'yes' : 'no'}")); } @Test public void testStrAppend() { assertEquals("foobarcar", test("@{'foo' + 'bar' + 'car'}")); } @Test public void testStrAppend2() { assertEquals("foobarcar1", test("@{'foobar' + 'car' + 1}")); } @Test public void testInstanceCheck1() { assertEquals("true", test("@{c is java.lang.String}")); } @Test public void testInstanceCheck2() { assertEquals("false", test("@{pi is java.lang.Integer}")); } @Test public void testBitwiseOr1() { assertEquals("6", test("@{2 | 4}")); } @Test public void testBitwiseOr2() { assertEquals("true", test("@{(2 | 1) > 0}")); } @Test public void testBitwiseOr3() { assertEquals("true", test("@{(2 | 1) == 3}")); } @Test public void testBitwiseAnd1() { assertEquals("2", test("@{2 & 3}")); } @Test public void testShiftLeft() { assertEquals("4", test("@{2 << 1}")); } @Test public void testUnsignedShiftLeft() { assertEquals("2", test("@{-2 <<< 0}")); } @Test public void testShiftRight() { assertEquals("128", test("@{256 >> 1}")); } @Test public void testXOR() { assertEquals("3", test("@{1 ^ 2}")); } @Test public void testContains1() { assertEquals("true", test("@{list contains 'Happy!'}")); } @Test public void testContains2() { assertEquals("false", test("@{list contains 'Foobie'}")); } @Test public void testContains3() { assertEquals("true", test("@{sentence contains 'fox'}")); } @Test public void testContains4() { assertEquals("false", test("@{sentence contains 'mike'}")); } @Test public void testContains5() { assertEquals("true", test("@{!(sentence contains 'mike')}")); } @Test public void testTokenMethodAccess() { assertEquals(String.class.toString(), test("@{a = 'foo'; a.getClass()}")); } @Test public void testArrayCreationWithLength() { assertEquals("2", test("@{Array.getLength({'foo', 'bar'})}")); } @Test public void testMapCreation() { assertEquals("sarah", test("@{map = ['mike':'sarah','tom':'jacquelin']; map['mike']}")); } @Test public void testProjectionSupport() { assertEquals("true", test("@{(name in things) 
contains 'Bob'}")); } @Test public void testProjectionSupport2() { assertEquals("3", test("@{(name in things).size()}")); } @Test public void testObjectInstantiation() { assertEquals("foobie", test("@{new java.lang.String('foobie')}")); } @Test public void testObjectInstantiationWithMethodCall() { assertEquals("foobie", test("@{new String('foobie').toString()}")); } @Test public void testObjectInstantiation2() { test("@{new String() is String}"); } @Test public void testArrayCoercion() { assertEquals("gonk", test("@{funMethod( {'gonk', 'foo'} )}")); } @Test public void testMapAccess() { assertEquals("dog", test("@{funMap['foo'].bar.name}")); } @Test public void testMapAccess2() { assertEquals("dog", test("@{funMap.foo.bar.name}")); } @Test public void testSoundex() { assertEquals("true", test("@{'foobar' soundslike 'fubar'}")); } @Test public void testSoundex2() { assertEquals("false", test("@{'flexbar' soundslike 'fubar'}")); } @Test public void testThisReference() { // TODO //assertEquals("true", test("@{this}") instanceof Base); } @Test public void testIfLoopInTemplate() { assertEquals("ONETWOTHREE", test("@foreach(item = things)@if(item.name=='Bob')ONE@else(item.name=='Smith')TWO@else(item.name=='Cow')THREE@end@end")); } @Test public void testStringEscaping() { assertEquals("\"Mike Brock\"", test("@{\"\\\"Mike Brock\\\"\"}")); } @Test public void testStringEscaping2() { assertEquals("MVEL's Parser is Fast", test("@{'MVEL\\'s Parser is Fast'}")); } @Test public void testNestedAtSymbol() { assertEquals("email:foo@foo.com", test("email:@{'foo@foo.com'}")); } @Test public void testEscape() { assertEquals("foo@foo.com", test("foo@@@{'foo.com'}")); } @Test public void testEvalNodes() { //TODO: support eval assertEquals("foo", test("@eval{\"@{'foo'}\"}")); } @Test public void testIteration1() { List<String> list = new ArrayList<String>(); list.add("a1"); list.add("a2"); list.add("a3"); String template = "@foreach{item : list}a@end{}"; Map<String, Object> map = new 
HashMap<String, Object>(); map.put("list", list); String r = (String) TemplateRuntime.eval(template, map); assertEquals("aaa", r); } @Test public void testIteration2() { Folder f1 = new Folder("f1", null); String template = "@{name} @foreach{item : children}a@end{}"; String r = (String) TemplateRuntime.eval(template, f1); assertEquals("f1 aaa", r); } @Test public void testIteration3() { Folder f = new Folder("a1", null); List<Page> list = f.getChildren(); String template = "@foreach{item : list}a@end{}"; Map<String, Object> map = new HashMap<String, Object>(); map.put("list", list); String r = (String) TemplateRuntime.eval(template, map); assertEquals("aaa", r); } @Test public void testIteration4() { Folder f = new Folder("a1", null); String template = "@foreach{item : f.children}a@end{}"; Map<String, Object> map = new HashMap<String, Object>(); map.put("f", f); String r = (String) TemplateRuntime.eval(template, map); assertEquals("aaa", r); } @Test public void testMVEL197() { Map<String, Object> context = new HashMap<String, Object>(); Object[] args = new Object[1]; TestMVEL197 test = new TestMVEL197(); test.setName1("name1"); test.setName2("name2"); args[0] = test; context.put("args", args); String template = "${(args[0].name1=='name1'&&args[0].name2=='name2')?'a':'b'}"; Object value = TemplateRuntime.eval(template, context); assertEquals("a", value); } @Test public void testEscaping() { String template = "@@{'foo'}ABC"; assertEquals("@{'foo'}ABC", TemplateRuntime.eval(template, new Object())); } public class Page { String name; Folder parent; public Page(String name, Folder parent) { this.name = name; this.parent = parent; } public String getName() { return name; } public Folder getParent() { return parent; } } public class Folder extends Page { public Folder(String name, Folder parent) { super(name, parent); } public List<Page> getChildren() { List<Page> list = new ArrayList<Page>(); list.add(new Page("a1", this)); list.add(new Page("a2", this)); list.add(new 
Page("a3", this)); return list; } } @Test public void testMVEL229() { //final Object context = new Object(); final String template = "@code(sumText = 0)@(sumText)"; assertEquals("0", test(template)); } @Test public void testOutputStream1() { // Note: mvel-async does not support outputstream. String template = "@foreach(['foo','far'])@{$}@end"; assertEquals("foofar", test(template, new Object(), new HashMap<String, Object>())); } private Map<String, Object> setupVarsMVEL219() { Map<String, Object> vars = new LinkedHashMap<String, Object>(); vars.put("bal", new BigDecimal("999.99")); vars.put("word", "ball"); vars.put("object", new Dog()); Map<String, Object> map = new HashMap<String, Object>(); map.put("foo", "bar"); map.put("fu", new Dog()); map.put("trueValue", true); map.put("falseValue", false); map.put("one", 1); map.put("zero", 0); vars.put("map", map); return vars; } private Map<String, Object> setupVarsMVEL220() { Map<String, Object> vars = new LinkedHashMap<String, Object>(); vars.put("word", "ball"); vars.put("object", new Dog()); Map<String, Object> map = new HashMap<String, Object>(); map.put("foo", "bar"); map.put("fu", new Dog()); map.put("trueValue", true); map.put("falseValue", false); map.put("one", 1); map.put("zero", 0); map.put("list", "john,paul,ringo,george"); vars.put("map", map); return vars; } String[] testCasesMVEL220 = { "map[\"foundIt\"] = !(map['list']).contains(\"john\")", "map[\"foundIt\"] = !(map['list'].contains(\"john\"))", }; String[] templateTestCasesMVEL220 = { "@{map[\"foundIt\"] = !(map['list']).contains(\"john\")}", "@{map[\"foundIt\"] = !(map['list'].contains(\"john\"))}" }; @Test public void testEvalMVEL220() { Map<String, Object> vars = setupVarsMVEL220(); //System.out.println("Evaluation====================="); for (String expr : testCasesMVEL220) { //System.out.println("Evaluating '" + expr + "': ......"); Object ret = MVEL.eval(expr, vars); //System.out.println("'" + expr + " ' = " + ret.toString()); assertNotNull(ret); 
} //System.out.println("Evaluation====================="); } @Test public void testCompiledMVEL220() { Map<String, Object> vars = setupVarsMVEL220(); //System.out.println("Compilation====================="); for (String expr : testCasesMVEL220) { //System.out.println("Compiling '" + expr + "': ......"); Serializable compiled = MVEL.compileExpression(expr); Boolean ret = (Boolean) MVEL.executeExpression(compiled, vars); //System.out.println("'" + expr + " ' = " + ret.toString()); assertNotNull(ret); } //System.out.println("Compilation====================="); } @Test public void testTemplateMVEL220() { Map<String, Object> vars = setupVarsMVEL220(); //System.out.println("Templates====================="); for (String expr : templateTestCasesMVEL220) { //System.out.println("Templating '" + expr + "': ......"); Object ret = TemplateRuntime.eval(expr, vars); //System.out.println("'" + expr + " ' = " + ret.toString()); assertNotNull(ret); } //System.out.println("Templates====================="); } String[] testCasesMVEL219 = { "map['foo']==map['foo']", // ok "(map['one'] > 0)", // ok "(map['one'] > 0) && (map['foo'] == map['foo'])", // ok "(map['one'] > 0) && (map['foo']==map['foo'])", // broken }; String[] templateTestCasesMVEL219 = { "@{map['foo']==map['foo']}", // ok "@(map['one'] > 0)}", // ok "@{(map['one'] > 0) && (map['foo'] == map['foo'])}", // ok "@{(map['one'] > 0) && (map['foo']==map['foo'])}" // broken }; @Test public void testEvalMVEL219() { Map<String, Object> vars = setupVarsMVEL219(); for (String expr : testCasesMVEL219) { //System.out.println("Evaluating '" + expr + "': ......"); Object ret = MVEL.eval(expr, vars); //System.out.println("'" + expr + " ' = " + ret.toString()); assertNotNull(ret); } } @Test public void testCompiledMVEL219() { Map<String, Object> vars = setupVarsMVEL219(); for (String expr : testCasesMVEL219) { //System.out.println("Compiling '" + expr + "': ......"); Serializable compiled = MVEL.compileExpression(expr); Boolean ret = 
(Boolean) MVEL.executeExpression(compiled, vars); //System.out.println("'" + expr + " ' = " + ret.toString()); assertNotNull(ret); } } @Test public void testTemplateMVEL219() { Map<String, Object> vars = setupVarsMVEL219(); for (String expr : templateTestCasesMVEL219) { //System.out.println("Templating '" + expr + "': ......"); Object ret = TemplateRuntime.eval(expr, vars); //System.out.println("'" + expr + " ' = " + ret.toString()); assertNotNull(ret); } } @Test public void testTemplateStringCoercion() { String expr = "@code{ buffer = new StringBuilder(); i = 10; buffer.append( i + \"blah\" );}@{buffer.toString()}"; Map<String, Object> vars = setupVarsMVEL219(); //System.out.println("Templating '" + expr + "': ......"); Object ret = TemplateRuntime.eval(expr, vars); //System.out.println("'" + expr + " ' = " + ret.toString()); assertEquals("10blah", ret); } @Test public void testMVEL244() { Foo244 foo = new Foo244("plop"); String template = "@foreach(foo.liste[0].liste) plop @end"; Map<String, Object> model = new HashMap<String, Object>(); model.put("foo", foo); assertEquals(" plop plop plop plop ", test(template, new Object(), model)); } @Test public void testImportsInTemplate() { String template = "@code{import java.util.HashMap; i = 10;}_____________@code{new HashMap().toString() + i}"; Map<String, Object> map = new HashMap<String, Object>(); Object result = TemplateRuntime.eval(template, map); assertNotNull("result cannot be null", result); assertEquals("result did not return string", String.class, result.getClass()); } public static class Foo244 { private List<Foo244> liste = new ArrayList<Foo244>(); private String val = ""; public Foo244() { } public Foo244(String plop) { liste.add(new Foo244()); liste.add(new Foo244()); liste.add(new Foo244()); liste.add(new Foo244()); liste.get(0).getListe().add(new Foo244()); liste.get(0).getListe().add(new Foo244()); liste.get(0).getListe().add(new Foo244()); liste.get(0).getListe().add(new Foo244()); } public 
List<Foo244> getListe() { return liste; } public void setListe(List<Foo244> liste) { this.liste = liste; } public String getVal() { return val; } public void setVal(String val) { this.val = val; } } public static class Node { public Node(int base, List<Node> list) { this.base = base; this.list = list; } public int base; public List<Node> list; } @Test public void testDRLTemplate() { /* * TODO String template = "@declare{\"drl\"}@includeNamed{\"ced\"; node=root }@end{}" + "" + "@declare{\"ced\"}" + "@if{ node.base==1 } @includeNamed{ \"cedX\"; connect=\"AND\"; args=node.list }" + "@elseif{ node.base ==2 }@includeNamed{ \"cedX\"; connect=\"OR\"; args=node.list }" + "@end{}" + "@end{}" + "" + "@declare{\"cedX\"}@{connect}@foreach{child : args}" + "@includeNamed{\"ced\"; node=child; }@end{} @{connect}@end{}"; TemplateRegistry reportRegistry = new SimpleTemplateRegistry(); reportRegistry.addNamedTemplate("drl", TemplateCompiler.compileTemplate(template)); TemplateRuntime.execute(reportRegistry.getNamedTemplate("drl"), null, reportRegistry); Map<String, Object> context = new HashMap<String, Object>(); context.put( "root", new Node( 2, Arrays.asList( new Node( 1, Collections.EMPTY_LIST ) ) ) ); String result = (String) TemplateRuntime.execute( reportRegistry.getNamedTemplate( "drl" ), null, new MapVariableResolverFactory( context ), reportRegistry ); assertEquals("OR AND AND OR", result); */ } public static class Pet { public void run() { } } public static class Dog extends Pet { @Override public void run() { System.out.println("dog is running"); } } public class TestMVEL197 { private String name1; private String name2; public String getName1() { return name1; } public void setName1(String name1) { this.name1 = name1; } public String getName2() { return name2; } public void setName2(String name2) { this.name2 = name2; } } public static class TestPluginNode extends io.github.bckfnn.mvel.template.Node { public boolean eval(io.github.bckfnn.mvel.TemplateRuntime runtime, 
Object ctx, VariableResolverFactory factory) { //appender.append("THIS_IS_A_TEST"); return runtime.continueWith(getNext(), factory); } public boolean demarc(TemplateContext context, Node terminatingNode) { return false; } } }
/**
 * Copyright 2013 multibit.org
 *
 * Licensed under the MIT license (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://opensource.org/licenses/mit-license.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.multibit.network;

import java.io.File;
import java.io.IOException;
import java.math.BigInteger;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;

import junit.framework.TestCase;

import org.junit.Before;
import org.junit.Test;
import org.multibit.ApplicationDataDirectoryLocator;
import org.multibit.Constants;
import org.multibit.Localiser;
import org.multibit.CreateControllers;
import org.multibit.controller.bitcoin.BitcoinController;
import org.multibit.file.FileHandler;
import org.multibit.model.bitcoin.WalletData;
import org.multibit.model.bitcoin.WalletInfoData;
import org.multibit.store.MultiBitWalletVersion;
import org.multibit.viewsystem.simple.SimpleViewSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.bitcoin.core.DumpedPrivateKey;
import com.google.bitcoin.core.ECKey;
import com.google.bitcoin.core.NetworkParameters;
import com.google.bitcoin.core.Wallet;
import com.google.bitcoin.core.Wallet.BalanceType;

/**
 * Functional test for {@link ReplayManager}: replays the production blockchain
 * into a freshly-created wallet and checks that transactions arrive.
 *
 * NOTE(review): this test talks to the live bitcoin network and sleeps for up
 * to two minutes; it only runs when the system property
 * {@code runFunctionalTests=true} is set (see Constants.RUN_FUNCTIONAL_TESTS_PARAMETER).
 */
public class ReplayManagerTest extends TestCase {
    private static final Logger log = LoggerFactory.getLogger(ReplayManagerTest.class);

    private BitcoinController controller;

    // NOTE(review): never assigned in this file — presumably kept for other tests; confirm before removing.
    private Localiser localiser;

    // Temporary directory holding the portable MultiBit runtime built in setUp().
    private File multiBitDirectory;

    // The address for this private key is "1N4qu8a6NwBrxM5PvSoFh4qe6QSWmG6Xds".
    private static final String REPLAY1_PRIVATE_KEY = "5Jsokwg1ypfCPgJXv4vnhW11YWSp4anh9UbHoCZFZdwAnEpU69u";

    // Key creation time / replay start; parsed with the UTC formatter below.
    private static final String START_OF_REPLAY_PERIOD = "2012-09-03T10:00:00Z";

    // The replay wallet is brand new, so its balance before replay must be zero.
    private static final BigInteger BALANCE_AT_START = BigInteger.ZERO;

    private SimpleDateFormat formatter;

    // Headless view system; also used to observe how many blocks were downloaded.
    private SimpleViewSystem simpleViewSystem;

    /**
     * Builds a throwaway MultiBit runtime, wires up controllers and the
     * bitcoin service, then blocks until at least one peer connection is up.
     */
    @Before
    public void setUp() throws Exception {
        multiBitDirectory = createMultiBitRuntime();

        // Set the application data directory to be the one we just created.
        ApplicationDataDirectoryLocator applicationDataDirectoryLocator = new ApplicationDataDirectoryLocator(multiBitDirectory);

        // Create MultiBit controller.
        final CreateControllers.Controllers controllers = CreateControllers.createControllers(applicationDataDirectoryLocator);
        controller = controllers.bitcoinController;

        log.debug("Creating Bitcoin service");
        // Create the MultiBitService that connects to the bitcoin network.
        MultiBitService multiBitService = new MultiBitService(controller);
        controller.setMultiBitService(multiBitService);

        // Add the simple view system (no Swing).
        simpleViewSystem = new SimpleViewSystem();
        controllers.coreController.registerViewSystem(simpleViewSystem);

        log.debug("Waiting for peer connection. . . ");
        // Busy-wait (1s polls) until the view system reports an online peer.
        while (!simpleViewSystem.isOnline()) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        log.debug("Now online.");
    }

    /**
     * Replays the chain from START_OF_REPLAY_PERIOD into a single new wallet
     * containing REPLAY1_PRIVATE_KEY and expects transactions (or at least
     * downloaded blocks) to appear within roughly two minutes.
     */
    @Test
    public void testReplayManagerSyncSingleWallet() throws Exception {
        // Get the system property runFunctionalTest to see if the functional
        // tests need running.
        String runFunctionalTests = System.getProperty(Constants.RUN_FUNCTIONAL_TESTS_PARAMETER);
        if (Boolean.TRUE.toString().equalsIgnoreCase(runFunctionalTests)) {
            // Date format is UTC with century, T time separator and Z for UTC
            // timezone.
            formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.ENGLISH);
            formatter.setTimeZone(TimeZone.getTimeZone("UTC"));

            // Initialise replay manager.
            ReplayManager replayManager = ReplayManager.INSTANCE;
            assertNotNull(replayManager);
            replayManager.initialise(controller, true);

            String replayWalletPath = multiBitDirectory.getAbsolutePath() + File.separator + "replay.wallet";

            // Create a new wallet.
            Wallet replayWallet = new Wallet(NetworkParameters.prodNet());

            // Add in the replay key; its creation time bounds how far back the replay must go.
            DumpedPrivateKey replayDumpedPrivateKey = new DumpedPrivateKey(NetworkParameters.prodNet(), REPLAY1_PRIVATE_KEY);
            ECKey replayKey = replayDumpedPrivateKey.getKey();
            replayKey.setCreationTimeSeconds(formatter.parse(START_OF_REPLAY_PERIOD).getTime() / 1000);
            log.debug("replayPrivateKey getCreationTimeSeconds = " + replayKey.getCreationTimeSeconds());
            replayWallet.addKey(replayKey);

            WalletData perWalletModelData = new WalletData();
            perWalletModelData.setWalletInfo(new WalletInfoData(replayWalletPath, replayWallet, MultiBitWalletVersion.PROTOBUF));
            perWalletModelData.setWallet(replayWallet);
            perWalletModelData.setWalletFilename(replayWalletPath);
            perWalletModelData.setWalletDescription("testReplayManagerSyncSingleWallet test");
            controller.getModel().getPerWalletModelDataList().add(perWalletModelData);

            log.debug("Replay wallet before replay = \n" + replayWallet.toString());

            assertEquals(BALANCE_AT_START, replayWallet.getBalance());

            log.debug("Replaying blockchain");
            // Create a ReplayTask to replay the replay wallet from the
            // START_OF_REPLAY_PERIOD.
            List<WalletData> perWalletModelDataList = new ArrayList<WalletData>();
            perWalletModelDataList.add(perWalletModelData);

            ReplayTask replayTask = new ReplayTask(perWalletModelDataList, formatter.parse(START_OF_REPLAY_PERIOD), ReplayTask.UNKNOWN_START_HEIGHT);
            replayManager.offerReplayTask(replayTask);

            // Run for a while.
            log.debug("Twiddling thumbs for 60 seconds ...");
            Thread.sleep(60000);
            log.debug("... 60 seconds later.");

            // Check the wallet - there should be some transactions in there.
            if (replayWallet.getTransactions(true).size() > 0) {
                // We are done.
            } else {
                // Run for a while longer.
                log.debug("Twiddling thumbs for another 60 seconds ...");
                Thread.sleep(60000);
                log.debug("... 60 seconds later.");
                if (replayWallet.getTransactions(true).size() > 0) {
                    // We are done.
                } else {
                    // No transactions arrived; accept the run anyway if blocks were
                    // at least downloaded (slow connection), otherwise fail.
                    if (simpleViewSystem.getNumberOfBlocksDownloaded() > 0) {
                        // Well it tried but probably got a slow connection -
                        // give it a pass.
                    } else {
                        fail("No blocks were downloaded on replay");
                    }
                }
            }

            // Print out replay wallet after replay.
            log.debug("Replay wallet after replay = \n" + replayWallet);
        } else {
            log.debug("Not running functional test: ReplayManagerTest#testReplayManagerSyncSingleWallet. Add '-DrunFunctionalTests=true' to run");
        }
    }

    /**
     * Create a working, portable runtime of MultiBit in a temporary directory.
     *
     * @return the temporary directory the multibit runtime has been created in
     */
    private File createMultiBitRuntime() throws IOException {
        File multiBitDirectory = FileHandler.createTempDirectory("multibit");
        String multiBitDirectoryPath = multiBitDirectory.getAbsolutePath();

        System.out.println("Building MultiBit runtime in : " + multiBitDirectory.getAbsolutePath());

        // Create an empty multibit.properties.
        File multibitProperties = new File(multiBitDirectoryPath + File.separator + "multibit.properties");
        multibitProperties.createNewFile();
        multibitProperties.deleteOnExit();

        // Copy in the checkpoints stored in git - this is in
        // source/main/resources/.
        File multibitBlockcheckpoints = new File(multiBitDirectoryPath + File.separator + "multibit.checkpoints");
        FileHandler.copyFile(new File("./src/main/resources/multibit.checkpoints"), multibitBlockcheckpoints);
        multibitBlockcheckpoints.deleteOnExit();

        return multiBitDirectory;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.offheap;

import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;

import org.apache.geode.DataSerializer;
import org.apache.geode.Instantiator;
import org.apache.geode.internal.DSCODE;
import org.apache.geode.internal.DSFIDFactory;
import org.apache.geode.internal.InternalDataSerializer;
import org.apache.geode.internal.InternalInstantiator;

/**
 * Determines the data type of the bytes in an off-heap MemoryBlock. This is used by the tests for
 * inspection of the off-heap memory.
 *
 * @since Geode 1.0
 */
public class DataType {

  /**
   * Inspects the first byte(s) of a serialized value and returns a human-readable
   * description of its type (usually a class name, sometimes prefixed with the
   * serialization framework, e.g. "org.apache.geode.DataSerializable:com.Foo").
   *
   * @param bytes the serialized form as stored off-heap
   * @return a description of the encoded type, or "IOException: ..." if the header
   *         byte itself cannot be read, or "Unknown header byte: N" for an
   *         unrecognized DSCODE
   */
  public static String getDataType(byte[] bytes) {
    final DataInput in = getDataInput(bytes);
    byte header = 0;
    try {
      header = in.readByte();
    } catch (IOException e) {
      // Could not even read the header byte; report rather than throw so tests
      // can log the inspection failure.
      return "IOException: " + e.getMessage();
    }
    try {
      // --- DataSerializableFixedID variants (id width differs per DSCODE) ---
      if (header == DSCODE.DS_FIXED_ID_BYTE.toByte()) {
        return "org.apache.geode.internal.DataSerializableFixedID:"
            + DSFIDFactory.create(in.readByte(), in).getClass().getName();
      }
      if (header == DSCODE.DS_FIXED_ID_SHORT.toByte()) {
        return "org.apache.geode.internal.DataSerializableFixedID:"
            + DSFIDFactory.create(in.readShort(), in).getClass().getName();
      }
      if (header == DSCODE.DS_FIXED_ID_INT.toByte()) {
        return "org.apache.geode.internal.DataSerializableFixedID:"
            + DSFIDFactory.create(in.readInt(), in).getClass().getName();
      }
      if (header == DSCODE.DS_NO_FIXED_ID.toByte()) {
        return "org.apache.geode.internal.DataSerializableFixedID:"
            + DataSerializer.readClass(in).getName();
      }
      // --- null and String encodings ---
      if (header == DSCODE.NULL.toByte()) {
        return "null";
      }
      if (header == DSCODE.NULL_STRING.toByte() || header == DSCODE.STRING.toByte()
          || header == DSCODE.HUGE_STRING.toByte() || header == DSCODE.STRING_BYTES.toByte()
          || header == DSCODE.HUGE_STRING_BYTES.toByte()) {
        return "java.lang.String";
      }
      // --- common JDK object types ---
      if (header == DSCODE.CLASS.toByte()) {
        return "java.lang.Class";
      }
      if (header == DSCODE.DATE.toByte()) {
        return "java.util.Date";
      }
      if (header == DSCODE.FILE.toByte()) {
        return "java.io.File";
      }
      if (header == DSCODE.INET_ADDRESS.toByte()) {
        return "java.net.InetAddress";
      }
      // --- primitive wrappers ---
      if (header == DSCODE.BOOLEAN.toByte()) {
        return "java.lang.Boolean";
      }
      if (header == DSCODE.CHARACTER.toByte()) {
        return "java.lang.Character";
      }
      if (header == DSCODE.BYTE.toByte()) {
        return "java.lang.Byte";
      }
      if (header == DSCODE.SHORT.toByte()) {
        return "java.lang.Short";
      }
      if (header == DSCODE.INTEGER.toByte()) {
        return "java.lang.Integer";
      }
      if (header == DSCODE.LONG.toByte()) {
        return "java.lang.Long";
      }
      if (header == DSCODE.FLOAT.toByte()) {
        return "java.lang.Float";
      }
      if (header == DSCODE.DOUBLE.toByte()) {
        return "java.lang.Double";
      }
      // --- arrays ---
      if (header == DSCODE.BYTE_ARRAY.toByte()) {
        return "byte[]";
      }
      if (header == DSCODE.ARRAY_OF_BYTE_ARRAYS.toByte()) {
        return "byte[][]";
      }
      if (header == DSCODE.SHORT_ARRAY.toByte()) {
        return "short[]";
      }
      if (header == DSCODE.STRING_ARRAY.toByte()) {
        return "java.lang.String[]";
      }
      if (header == DSCODE.INT_ARRAY.toByte()) {
        return "int[]";
      }
      if (header == DSCODE.LONG_ARRAY.toByte()) {
        return "long[]";
      }
      if (header == DSCODE.FLOAT_ARRAY.toByte()) {
        return "float[]";
      }
      if (header == DSCODE.DOUBLE_ARRAY.toByte()) {
        return "double[]";
      }
      if (header == DSCODE.BOOLEAN_ARRAY.toByte()) {
        return "boolean[]";
      }
      if (header == DSCODE.CHAR_ARRAY.toByte()) {
        return "char[]";
      }
      if (header == DSCODE.OBJECT_ARRAY.toByte()) {
        return "java.lang.Object[]";
      }
      // --- collections ---
      if (header == DSCODE.ARRAY_LIST.toByte()) {
        return "java.util.ArrayList";
      }
      if (header == DSCODE.LINKED_LIST.toByte()) {
        return "java.util.LinkedList";
      }
      if (header == DSCODE.HASH_SET.toByte()) {
        return "java.util.HashSet";
      }
      if (header == DSCODE.LINKED_HASH_SET.toByte()) {
        return "java.util.LinkedHashSet";
      }
      if (header == DSCODE.HASH_MAP.toByte()) {
        return "java.util.HashMap";
      }
      if (header == DSCODE.IDENTITY_HASH_MAP.toByte()) {
        return "java.util.IdentityHashMap";
      }
      if (header == DSCODE.HASH_TABLE.toByte()) {
        return "java.util.Hashtable";
      }
      // ConcurrentHashMap is written as java.io.serializable
      // if (header == DSCODE.CONCURRENT_HASH_MAP.toByte()) {
      // return "java.util.concurrent.ConcurrentHashMap";
      if (header == DSCODE.PROPERTIES.toByte()) {
        return "java.util.Properties";
      }
      if (header == DSCODE.TIME_UNIT.toByte()) {
        return "java.util.concurrent.TimeUnit";
      }
      // --- user-registered DataSerializers (id width varies) ---
      if (header == DSCODE.USER_CLASS.toByte()) {
        byte userClassDSId = in.readByte();
        return "DataSerializer: with Id:" + userClassDSId;
      }
      if (header == DSCODE.USER_CLASS_2.toByte()) {
        short userClass2DSId = in.readShort();
        return "DataSerializer: with Id:" + userClass2DSId;
      }
      if (header == DSCODE.USER_CLASS_4.toByte()) {
        int userClass4DSId = in.readInt();
        return "DataSerializer: with Id:" + userClass4DSId;
      }
      if (header == DSCODE.VECTOR.toByte()) {
        return "java.util.Vector";
      }
      if (header == DSCODE.STACK.toByte()) {
        return "java.util.Stack";
      }
      if (header == DSCODE.TREE_MAP.toByte()) {
        return "java.util.TreeMap";
      }
      if (header == DSCODE.TREE_SET.toByte()) {
        return "java.util.TreeSet";
      }
      // --- Class literals for primitive wrapper types ---
      if (header == DSCODE.BOOLEAN_TYPE.toByte()) {
        return "java.lang.Boolean.class";
      }
      if (header == DSCODE.CHARACTER_TYPE.toByte()) {
        return "java.lang.Character.class";
      }
      if (header == DSCODE.BYTE_TYPE.toByte()) {
        return "java.lang.Byte.class";
      }
      if (header == DSCODE.SHORT_TYPE.toByte()) {
        return "java.lang.Short.class";
      }
      if (header == DSCODE.INTEGER_TYPE.toByte()) {
        return "java.lang.Integer.class";
      }
      if (header == DSCODE.LONG_TYPE.toByte()) {
        return "java.lang.Long.class";
      }
      if (header == DSCODE.FLOAT_TYPE.toByte()) {
        return "java.lang.Float.class";
      }
      if (header == DSCODE.DOUBLE_TYPE.toByte()) {
        return "java.lang.Double.class";
      }
      if (header == DSCODE.VOID_TYPE.toByte()) {
        return "java.lang.Void.class";
      }
      // --- Instantiator-registered classes (id width varies) ---
      if (header == DSCODE.USER_DATA_SERIALIZABLE.toByte()) {
        Instantiator instantiator = InternalInstantiator.getInstantiator(in.readByte());
        return "org.apache.geode.Instantiator:" + instantiator.getInstantiatedClass().getName();
      }
      if (header == DSCODE.USER_DATA_SERIALIZABLE_2.toByte()) {
        Instantiator instantiator = InternalInstantiator.getInstantiator(in.readShort());
        return "org.apache.geode.Instantiator:" + instantiator.getInstantiatedClass().getName();
      }
      if (header == DSCODE.USER_DATA_SERIALIZABLE_4.toByte()) {
        Instantiator instantiator = InternalInstantiator.getInstantiator(in.readInt());
        return "org.apache.geode.Instantiator:" + instantiator.getInstantiatedClass().getName();
      }
      if (header == DSCODE.DATA_SERIALIZABLE.toByte()) {
        return "org.apache.geode.DataSerializable:" + DataSerializer.readClass(in).getName();
      }
      if (header == DSCODE.SERIALIZABLE.toByte()) {
        // Must deserialize from a fresh stream (the header byte has already
        // been consumed from 'in' by basicReadObject's own framing).
        String name = null;
        try {
          Object obj = InternalDataSerializer.basicReadObject(getDataInput(bytes));
          name = obj.getClass().getName();
        } catch (ClassNotFoundException e) {
          name = e.getMessage();
        }
        return "java.io.Serializable:" + name;
      }
      // --- PDX and enum encodings ---
      if (header == DSCODE.PDX.toByte()) {
        int typeId = in.readInt();
        return "pdxType:" + typeId;
      }
      if (header == DSCODE.PDX_ENUM.toByte()) {
        in.readByte(); // dsId is not needed
        int enumId = InternalDataSerializer.readArrayLength(in);
        return "pdxEnum:" + enumId;
      }
      if (header == DSCODE.GEMFIRE_ENUM.toByte()) {
        String name = DataSerializer.readString(in);
        return "java.lang.Enum:" + name;
      }
      if (header == DSCODE.PDX_INLINE_ENUM.toByte()) {
        String name = DataSerializer.readString(in);
        return "java.lang.Enum:" + name;
      }
      // --- remaining JDK types ---
      if (header == DSCODE.BIG_INTEGER.toByte()) {
        return "java.math.BigInteger";
      }
      if (header == DSCODE.BIG_DECIMAL.toByte()) {
        return "java.math.BigDecimal";
      }
      if (header == DSCODE.UUID.toByte()) {
        return "java.util.UUID";
      }
      if (header == DSCODE.TIMESTAMP.toByte()) {
        return "java.sql.Timestamp";
      }
      return "Unknown header byte: " + header;
    } catch (IOException | ClassNotFoundException e) {
      // The two original catch blocks were identical; collapsed to a multi-catch.
      // This is test-inspection code, so a hard Error on a malformed payload is intentional.
      throw new Error(e);
    }
  }

  /**
   * Wraps the given bytes in a fresh DataInput positioned at the start.
   *
   * @param bytes the serialized form to read
   * @return a DataInput over the full byte array
   */
  public static DataInput getDataInput(byte[] bytes) {
    return new DataInputStream(new ByteArrayInputStream(bytes));
  }
}
package com.transloadit.sdk;

import com.transloadit.sdk.exceptions.LocalOperationException;
import com.transloadit.sdk.exceptions.RequestException;
import okhttp3.MediaType;
import okhttp3.MultipartBody;
import okhttp3.OkHttpClient;
import okhttp3.RequestBody;
import org.apache.commons.codec.binary.Hex;
import org.jetbrains.annotations.Nullable;
import org.joda.time.Instant;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.json.JSONObject;

import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Transloadit tailored Http Request class
 */
public class Request {
    private Transloadit transloadit;
    private OkHttpClient httpClient = new OkHttpClient();
    // "java-sdk:<version>" value sent as the Transloadit-Client header on every request.
    private String version;

    /**
     * Creates a request helper bound to the given client configuration and reads
     * the SDK version from version.properties on the classpath, falling back to
     * "java-sdk:unknown" when the resource or property is missing.
     */
    Request(Transloadit transloadit) {
        this.transloadit = transloadit;
        Properties prop = new Properties();
        InputStream in = getClass().getClassLoader().getResourceAsStream("version.properties");
        if (in == null) {
            // Resource not on classpath; preserve the original NPE fallback behavior.
            version = "java-sdk:unknown";
            return;
        }
        try {
            prop.load(in);
            version = "java-sdk:" + prop.getProperty("versionNumber").replace("'", "");
        } catch (IOException e) {
            throw new RuntimeException(e);
        } catch (NullPointerException npe) {
            // versionNumber property absent from the file.
            version = "java-sdk:unknown";
        } finally {
            // Original code leaked the stream when load() threw; always close it.
            try {
                in.close();
            } catch (IOException ignored) {
                // best-effort close of a classpath resource; nothing useful to do
            }
        }
    }

    /**
     * Makes http GET request.
     * @param url url to makes request to
     * @param params data to add to params field
     * @return {@link okhttp3.Response}
     * @throws RequestException if the http call fails
     * @throws LocalOperationException if the request cannot be built locally
     */
    okhttp3.Response get(String url, Map<String, Object> params)
            throws RequestException, LocalOperationException {
        String fullUrl = getFullUrl(url);
        okhttp3.Request request = new okhttp3.Request.Builder()
                .url(addUrlParams(fullUrl, toPayload(params)))
                .addHeader("Transloadit-Client", version)
                .build();

        try {
            return httpClient.newCall(request).execute();
        } catch (IOException e) {
            throw new RequestException(e);
        }
    }

    /**
     * Makes http GET request with no extra params.
     * @param url url to makes request to
     * @return {@link okhttp3.Response}
     * @throws RequestException if the http call fails
     * @throws LocalOperationException if the request cannot be built locally
     */
    okhttp3.Response get(String url) throws RequestException, LocalOperationException {
        return get(url, new HashMap<String, Object>());
    }

    /**
     * Makes http POST request
     * @param url url to makes request to
     * @param params data to add to params field
     * @param extraData data to send along with request body, outside of params field.
     * @param files files to be uploaded along with the request.
     * @param fileStreams streams to be uploaded along with the request.
     * @return {@link okhttp3.Response}
     * @throws RequestException if the http call fails
     * @throws LocalOperationException if the request body cannot be built locally
     */
    okhttp3.Response post(String url, Map<String, Object> params,
                          @Nullable Map<String, String> extraData,
                          @Nullable Map<String, File> files,
                          @Nullable Map<String, InputStream> fileStreams)
            throws RequestException, LocalOperationException {

        Map<String, String> payload = toPayload(params);
        if (extraData != null) {
            payload.putAll(extraData);
        }

        okhttp3.Request request = new okhttp3.Request.Builder().url(getFullUrl(url))
                .post(getBody(payload, files, fileStreams))
                .addHeader("Transloadit-Client", version)
                .build();

        try {
            return httpClient.newCall(request).execute();
        } catch (IOException e) {
            throw new RequestException(e);
        }
    }

    /**
     * Makes http POST request with params only.
     * @param url url to makes request to
     * @param params data to add to params field
     * @return {@link okhttp3.Response}
     * @throws RequestException if the http call fails
     * @throws LocalOperationException if the request body cannot be built locally
     */
    okhttp3.Response post(String url, Map<String, Object> params)
            throws RequestException, LocalOperationException {
        return post(url, params, null, null, null);
    }

    /**
     * Makes http DELETE request
     * @param url url to makes request to
     * @param params data to add to params field
     * @return {@link okhttp3.Response}
     * @throws RequestException if the http call fails
     * @throws LocalOperationException if the request body cannot be built locally
     */
    okhttp3.Response delete(String url, Map<String, Object> params)
            throws RequestException, LocalOperationException {
        okhttp3.Request request = new okhttp3.Request.Builder()
                .url(getFullUrl(url))
                .delete(getBody(toPayload(params), null))
                .addHeader("Transloadit-Client", version)
                .build();

        try {
            return httpClient.newCall(request).execute();
        } catch (IOException e) {
            throw new RequestException(e);
        }
    }

    /**
     * Makes http PUT request
     * @param url url to makes request to
     * @param data data to add to params field
     * @return {@link okhttp3.Response}
     * @throws RequestException if the http call fails
     * @throws LocalOperationException if the request body cannot be built locally
     */
    okhttp3.Response put(String url, Map<String, Object> data)
            throws RequestException, LocalOperationException {
        okhttp3.Request request = new okhttp3.Request.Builder()
                .url(getFullUrl(url))
                .put(getBody(toPayload(data), null))
                .addHeader("Transloadit-Client", version)
                .build();

        try {
            return httpClient.newCall(request).execute();
        } catch (IOException e) {
            throw new RequestException(e);
        }
    }

    /**
     * Converts url path to the Transloadit full url.
     * Returns the url passed if it is already full.
     *
     * @param url a path (e.g. "/assemblies") or an absolute http(s) url
     * @return the absolute url, prefixed with the configured host when needed
     */
    private String getFullUrl(String url) {
        return url.startsWith("https://") || url.startsWith("http://")
                ? url : transloadit.getHostUrl() + url;
    }

    /**
     * Appends the given entries to the url as a UTF-8 encoded query string.
     *
     * @param url base url (assumed to have no query string yet)
     * @param params entries to encode; values must be Strings
     * @return url + "?" + encoded params
     * @throws LocalOperationException if UTF-8 encoding is unsupported (never on a compliant JVM)
     */
    private String addUrlParams(String url, Map<String, ? extends Object> params)
            throws LocalOperationException {
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<String, ? extends Object> entry : params.entrySet()) {
            if (sb.length() > 0) {
                sb.append('&');
            }

            try {
                sb.append(URLEncoder.encode(entry.getKey(), "UTF-8")).append('=')
                        .append(URLEncoder.encode((String) entry.getValue(), "UTF-8"));
            } catch (UnsupportedEncodingException e) {
                throw new LocalOperationException(e);
            }
        }
        return url + "?" + sb.toString();
    }

    /**
     * Builds okhttp3 compatible request body with the data passed.
     *
     * @param data data to add to request body
     * @param files files to upload
     * @param fileStreams streams to upload (read fully into memory)
     * @return {@link RequestBody}
     * @throws LocalOperationException if a stream cannot be read
     */
    private RequestBody getBody(Map<String, String> data, @Nullable Map<String, File> files,
                                @Nullable Map<String, InputStream> fileStreams)
            throws LocalOperationException {
        MultipartBody.Builder builder = new MultipartBody.Builder().setType(MultipartBody.FORM);

        if (files != null) {
            for (Map.Entry<String, File> entry : files.entrySet()) {
                File file = entry.getValue();
                String mimeType = URLConnection.guessContentTypeFromName(file.getName());
                if (mimeType == null) {
                    mimeType = "application/octet-stream";
                }

                builder.addFormDataPart(entry.getKey(), file.getName(),
                        RequestBody.create(MediaType.parse(mimeType), file));
            }
        }

        if (fileStreams != null) {
            for (Map.Entry<String, InputStream> entry : fileStreams.entrySet()) {
                InputStream stream = entry.getValue();
                byte[] bytes;
                try {
                    // Read the stream fully. The original used available() to size the
                    // buffer and a single read() call, which can drop data: available()
                    // is only an estimate and read(byte[]) may return fewer bytes than
                    // the buffer holds.
                    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
                    byte[] chunk = new byte[8192];
                    int read;
                    while ((read = stream.read(chunk)) != -1) {
                        buffer.write(chunk, 0, read);
                    }
                    bytes = buffer.toByteArray();
                } catch (IOException e) {
                    throw new LocalOperationException(e);
                }
                builder.addFormDataPart(entry.getKey(), null,
                        RequestBody.create(MediaType.parse("application/octet-stream"), bytes));
            }
        }

        for (Map.Entry<String, String> entry : data.entrySet()) {
            builder.addFormDataPart(entry.getKey(), entry.getValue());
        }

        return builder.build();
    }

    private RequestBody getBody(Map<String, String> data, @Nullable Map<String, File> files)
            throws LocalOperationException {
        return getBody(data, files, null);
    }

    /**
     * Returns data tree structured as Transloadit expects it.
     *
     * @param data request data to wrap into the "params" field
     * @return {@link Map} with "params" (and "signature" when signing is enabled)
     * @throws LocalOperationException if the signature cannot be computed
     */
    private Map<String, String> toPayload(Map<String, Object> data) throws LocalOperationException {
        Map<String, Object> dataClone = new HashMap<String, Object>(data);
        dataClone.put("auth", getAuthData());

        Map<String, String> payload = new HashMap<String, String>();
        payload.put("params", jsonifyData(dataClone));

        if (transloadit.shouldSignRequest) {
            payload.put("signature", getSignature(jsonifyData(dataClone)));
        }
        return payload;
    }

    /**
     * converts Map of data to json string
     *
     * @param data map data to converted to json
     * @return {@link String}
     */
    private String jsonifyData(Map<String, ? extends Object> data) {
        JSONObject jsonData = new JSONObject(data);
        return jsonData.toString();
    }

    /**
     * @return Map containing authentication key and the time it expires
     */
    private Map<String, String> getAuthData() {
        Map<String, String> authData = new HashMap<String, String>();
        authData.put("key", transloadit.key);

        // transloadit.duration is in seconds; Instant.plus takes milliseconds.
        Instant expiryTime = Instant.now().plus(transloadit.duration * 1000);
        DateTimeFormatter formatter = DateTimeFormat
                .forPattern("Y/MM/dd HH:mm:ss+00:00")
                .withZoneUTC();
        authData.put("expires", formatter.print(expiryTime));

        return authData;
    }

    /**
     * @param message String data that needs to be encrypted.
     * @return hex-encoded HMAC-SHA1 signature of the message keyed with the transloadit secret
     * @throws LocalOperationException if the JVM lacks HmacSHA1 or the key is invalid
     */
    private String getSignature(String message) throws LocalOperationException {
        byte[] kSecret = transloadit.secret.getBytes(Charset.forName("UTF-8"));
        byte[] rawHmac = hmacSha1(kSecret, message);
        byte[] hexBytes = new Hex().encode(rawHmac);

        return new String(hexBytes, Charset.forName("UTF-8"));
    }

    /**
     * Computes the raw HMAC-SHA1 of the UTF-8 bytes of {@code data} under {@code key}.
     */
    private byte[] hmacSha1(byte[] key, String data) throws LocalOperationException {
        final String ALGORITHM = "HmacSHA1";
        try {
            Mac mac = Mac.getInstance(ALGORITHM);
            mac.init(new SecretKeySpec(key, ALGORITHM));
            return mac.doFinal(data.getBytes(Charset.forName("UTF-8")));
        } catch (NoSuchAlgorithmException | InvalidKeyException e) {
            // Both failures mean the same thing to callers: signing is impossible locally.
            throw new LocalOperationException(e);
        }
    }
}
package de.machmireinebook.epubeditor.epublib.epub;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.zip.ZipFile;
import java.util.zip.ZipInputStream;

import de.machmireinebook.epubeditor.epublib.Constants;
import de.machmireinebook.epubeditor.epublib.bookprocessor.HtmlCleanerBookProcessor;
import de.machmireinebook.epubeditor.epublib.domain.Book;
import de.machmireinebook.epubeditor.epublib.domain.MediaType;
import de.machmireinebook.epubeditor.epublib.domain.Resource;
import de.machmireinebook.epubeditor.epublib.domain.Resources;
import de.machmireinebook.epubeditor.epublib.util.ResourceUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.Namespace;

/**
 * Reads an epub file.
 *
 * @author paul
 */
public class EpubReader
{
    private static final Logger log = Logger.getLogger(EpubReader.class);

    private BookProcessor bookProcessor = new HtmlCleanerBookProcessor();

    /**
     * Reads the epub contained in the given file and remembers the physical
     * location of that file on the resulting book.
     *
     * @param file the epub file on disk
     * @return the parsed Book
     * @throws java.io.IOException on read failure
     */
    public Book readEpub(File file) throws IOException
    {
        Book result;
        try (FileInputStream stream = new FileInputStream(file))
        {
            result = readEpub(stream, Constants.CHARACTER_ENCODING);
            result.setPhysicalFileName(file.toPath());
        }
        return result;
    }

    /**
     * Reads an epub from a plain input stream using the default encoding.
     */
    public Book readEpub(InputStream in) throws IOException
    {
        return readEpub(in, Constants.CHARACTER_ENCODING);
    }

    /**
     * Reads an epub from an already-opened zip stream using the default encoding.
     */
    public Book readEpub(ZipInputStream in) throws IOException
    {
        return readEpub(in, Constants.CHARACTER_ENCODING);
    }

    /**
     * Reads an epub from a {@link ZipFile}, recording its physical location.
     */
    public Book readEpub(ZipFile zipfile) throws IOException
    {
        Book result = readEpub(zipfile, Constants.CHARACTER_ENCODING);
        result.setPhysicalFileName(Paths.get(zipfile.getName()));
        return result;
    }

    /**
     * Read epub from inputstream.
     *
     * @param in the inputstream from which to read the epub
     * @param encoding the encoding to use for the html files within the epub
     * @return the Book as read from the inputstream
     * @throws java.io.IOException on read failure
     */
    public Book readEpub(InputStream in, String encoding) throws IOException
    {
        return readEpub(new ZipInputStream(in), encoding);
    }

    /**
     * Reads this EPUB without loading any resources into memory.
     *
     * @param zipFile the file to load
     * @param encoding the encoding for XHTML files
     * @return this Book without loading all resources into memory.
     * @throws java.io.IOException on read failure
     */
    public Book readEpubLazy(ZipFile zipFile, String encoding) throws IOException
    {
        // Every known media type is treated as lazily loadable here.
        Book result = readEpubLazy(zipFile, encoding, Arrays.asList(MediaType.values()));
        result.setPhysicalFileName(Paths.get(zipFile.getName()));
        return result;
    }

    /**
     * Reads an epub from a zip stream with the given encoding.
     */
    public Book readEpub(ZipInputStream in, String encoding) throws IOException
    {
        return readEpub(ResourcesLoader.loadResources(in, encoding));
    }

    /**
     * Reads an epub from a {@link ZipFile} with the given encoding, recording
     * its physical location.
     */
    public Book readEpub(ZipFile in, String encoding) throws IOException
    {
        Book result = readEpub(ResourcesLoader.loadResources(in, encoding));
        result.setPhysicalFileName(Paths.get(in.getName()));
        return result;
    }

    /**
     * Reads this EPUB without loading all resources into memory.
     *
     * @param zipFile the file to load
     * @param encoding the encoding for XHTML files
     * @param lazyLoadedTypes a list of the MediaType to load lazily
     * @return this Book without loading all resources into memory.
     * @throws java.io.IOException on read failure
     */
    public Book readEpubLazy(ZipFile zipFile, String encoding, List<MediaType> lazyLoadedTypes) throws IOException
    {
        return readEpub(ResourcesLoader.loadResources(zipFile, encoding, lazyLoadedTypes));
    }

    /**
     * Assembles a fresh Book from the given resources.
     */
    public Book readEpub(Resources resources) throws IOException
    {
        return readEpub(resources, new Book());
    }

    /**
     * Assembles the given Book (or a new one when {@code book} is null) from
     * the given resources: strips the mimetype entry, locates and parses the
     * OPF package document, reads the NCX, then runs the book processor.
     */
    public Book readEpub(Resources resources, Book book) throws IOException
    {
        Book target = book == null ? new Book() : book;
        handleMimeType(target, resources);
        String opfHref = getPackageResourceHref(resources);
        target.setOpfResource(processPackageResource(opfHref, target, resources));
        target.setNcxResource(processNcxResource(target));
        return postProcessBook(target);
    }

    /**
     * Runs the configured {@link BookProcessor}, if any.
     */
    private Book postProcessBook(Book book)
    {
        return bookProcessor == null ? book : bookProcessor.processBook(book);
    }

    /**
     * Reads the NCX (table of contents) for the book.
     */
    private Resource processNcxResource(Book book)
    {
        return NCXDocument.read(book);
    }

    /**
     * Removes the OPF resource from the resource set, parses it into the book
     * and returns it. Parse failures are logged; the resource is returned
     * regardless so it can still be stored on the book.
     */
    private Resource processPackageResource(String packageResourceHref, Book book, Resources resources)
    {
        Resource opfResource = resources.remove(packageResourceHref);
        try
        {
            PackageDocumentReader.read(opfResource, book, resources);
        }
        catch (Exception e)
        {
            log.error(e.getMessage(), e);
        }
        return opfResource;
    }

    /**
     * Determines the location of the OPF package document from
     * META-INF/container.xml, falling back to "OEBPS/content.opf" when the
     * container entry is missing or unreadable.
     */
    private String getPackageResourceHref(Resources resources)
    {
        String defaultResult = "OEBPS/content.opf";

        Resource containerResource = resources.remove("META-INF/container.xml");
        if (containerResource == null)
        {
            return defaultResult;
        }

        String href = defaultResult;
        try
        {
            Namespace ns = Namespace.getNamespace("urn:oasis:names:tc:opendocument:xmlns:container");
            Document document = ResourceUtil.getAsDocument(containerResource);
            Element rootFileElement = document.getRootElement().getChild("rootfiles", ns).getChild("rootfile", ns);
            href = rootFileElement.getAttributeValue("full-path");
        }
        catch (Exception e)
        {
            log.error(e.getMessage(), e);
        }

        return StringUtils.isBlank(href) ? defaultResult : href;
    }

    /**
     * Drops the "mimetype" entry; it is not a content resource.
     */
    private void handleMimeType(Book result, Resources resources)
    {
        resources.remove("mimetype");
    }
}
/* $Id: InvocationRecord.java 15353 2015-01-28 10:07:31Z ceriel $ */

package ibis.satin.impl.spawnSync;

import ibis.ipl.IbisIdentifier;
import ibis.satin.impl.Config;
import ibis.satin.impl.aborts.LocalRecord;
import ibis.satin.impl.sharedObjects.SOReferenceSourceCrashedException;

/**
 * An invocation record describes a spawned invocation, including the parameters
 * of the invocation. The Satin frontend generates a subclass of this class for
 * each spawnable method. Of all fields, only the owner, the stamp and the
 * parent info must be sent over the network.
 */
public abstract class InvocationRecord implements java.io.Serializable, Config {

    /**
     * Serialization version. Only the non-transient fields (owner, stamp,
     * parent info, reDone, orphan) are serialized.
     */
    private static final long serialVersionUID = 1L;

    /**
     * Must be public, it is used from the generated code (in another package)
     */
    public transient Throwable eek;

    /**
     * The machine that spawned this job. Used by my subclasses.
     */
    protected IbisIdentifier owner;

    /**
     * Used to locate this invocation record, when a remote job result comes in.
     */
    private Stamp stamp;

    /**
     * The machine that spawned my parent (can be null for root jobs).
     */
    private IbisIdentifier parentOwner;

    /**
     * The stamp of my parent (can be null for root jobs).
     */
    private Stamp parentStamp;

    /**
     * The invocation record of my parent (can be null for root and stolen
     * jobs).
     */
    protected transient InvocationRecord parent;

    // Counter that tracks outstanding spawns; incremented on spawn,
    // decremented when the result arrives (see decrSpawnCounter).
    private transient SpawnCounter spawnCounter;

    /**
     * Must be public, is accessed from generated code.
     */
    public transient boolean aborted;

    /**
     * An id for the store where the result of the spawn must go. Must be
     * public, used by generated code.
     */
    public transient int storeId;

    /**
     * These are used to link the records in the JobQueue.
     */
    private transient InvocationRecord qprev;

    private transient InvocationRecord qnext;

    /**
     * Used to link the records in the cache. Used by generated code.
     */
    public transient InvocationRecord cacheNext;

    /**
     * An id for the spawn in the code. Needed to run the correct inlet.
     */
    protected transient int spawnId;

    // Local variables of the spawning method, needed to run inlets.
    protected transient LocalRecord parentLocals;

    // Identity of the machine that stole this job, if any.
    private transient IbisIdentifier stealer;

    private transient boolean alreadySentExceptionResult;

    // Used by my subclasses
    protected transient boolean inletExecuted;

    public transient boolean checkpointed = false;

    /**
     * List of finished children; used for fault tolerance.
     */
    private transient InvocationRecord finishedChild;

    /**
     * List of finished children; used for fault tolerance.
     */
    private transient InvocationRecord finishedSibling;

    /**
     * List of children which need to be restarted; used for fault tolerance.
     */
    private transient InvocationRecord toBeRestartedChild;

    /**
     * List of children which need to be restarted; used for fault tolerance.
     */
    private transient InvocationRecord toBeRestartedSibling;

    /**
     * Used for fault tolerance. True means that the job is being redone after a
     * crash.
     */
    private boolean reDone;

    /**
     * Used for fault tolerance. True means that the job is an orphan
     **/
    private boolean orphan;

    protected InvocationRecord(SpawnCounter spawnCounter,
        InvocationRecord cacheNext, int storeId, int spawnId,
        LocalRecord parentLocals) {
        init(spawnCounter, cacheNext, storeId, spawnId, parentLocals);
    }

    /** Used for the invocation record cache. */
    final protected void init(SpawnCounter spawnCounter,
        InvocationRecord cacheNext, int storeId, int spawnId,
        LocalRecord parentLocals) {
        this.storeId = storeId;
        this.cacheNext = cacheNext;
        this.spawnCounter = spawnCounter;
        this.spawnId = spawnId;
        this.parentLocals = parentLocals;
    }

    /**
     * Used for the invocation record cache: resets the record to a blank state
     * before it is reused.
     *
     * NOTE(review): orphan and checkpointed are not reset here — confirm that a
     * reused record cannot carry these flags over from its previous life.
     */
    final protected void clear() {
        owner = null;
        Stamp.deleteStamp(stamp);
        stamp = null;
        spawnCounter = null;

        qprev = null;
        setQnext(null);

        storeId = -2;

        stealer = null;
        eek = null;

        parentOwner = null;
        parentStamp = null;
        parent = null;
        aborted = false;

        spawnId = -2;
        parentLocals = null;

        alreadySentExceptionResult = false;
        inletExecuted = false;
        reDone = false;

        finishedChild = null;
        finishedSibling = null;

        toBeRestartedChild = null;
        toBeRestartedSibling = null;
    }

    /**
     * Compares this invocation record with another invocation record. Returns
     * <code>true</code> if equal.
     *
     * @param other
     *            the invocation record to compare with.
     * @return <code>true</code> if equal, <code>false</code> if not.
     */
    public final boolean equals(InvocationRecord other) {
        if (other == this) {
            return true;
        }
        // Identity is (stamp, owner); these are the fields sent over the wire.
        return stamp.stampEquals(other.stamp) && owner.equals(other.owner);
    }

    /**
     * Compares this invocation record with another object. Returns
     * <code>true</code> if equal.
     *
     * @param o
     *            the object to compare with.
     * @return <code>true</code> if equal, <code>false</code> if not.
     */
    public final boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o instanceof InvocationRecord) {
            InvocationRecord other = (InvocationRecord) o;
            return stamp.stampEquals(other.stamp) && owner.equals(other.owner);
        }
        if (Config.ASSERTS) {
            System.out.println("warning: weird equals in Invocationrecord");
        }
        return false;
    }

    /**
     * Returns a hashcode that conforms with the <code>equals</code> method.
     *
     * @return a hashcode.
     */
    final public int hashCode() {
        return stamp.hashCode();
    }

    /**
     * Returns a string representation of this invocation record.
     *
     * @return a string representation of this invocation record.
     */
    public String toString() {
        String result = "(Invocation record: stamp = " + stamp;
        result += ", owner = " + (owner == null ? "NULL" : "" + owner);
        result += ", spawnCounter = "
            + (spawnCounter == null ? "NULL" : "" + spawnCounter.getValue());
        result += ", stealer = " + stealer;
        result += ", parentStamp = " + parentStamp;
        result += ", parentOwner = "
            + (parentOwner == null ? "NULL" : "" + parentOwner);
        result += ", aborted = " + aborted;
        result += ", parent = " + (parent == null ? "NULL" : "" + parent);
        // recursive :-)
        result += ", parentLocals = "
            + (parentLocals == null ? "NULL" : "" + parentLocals) + ")";

        return result;
    }

    // Returns the record describing the result of this spawn; implemented by
    // generated subclasses.
    public abstract ReturnRecord getReturnRecord();

    /**
     * initializes the references to shared objects inside this invocation
     * record after stealing the job
     */
    public abstract void setSOReferences()
        throws SOReferenceSourceCrashedException;

    /**
     * Returns a list of objectIds of the shared objects this record holds
     * references of.
     */
    public abstract java.util.Vector<String> getSOReferences();

    /** Executes the guard function, used for shared objects consistency. */
    public boolean guard() {
        // Default: no guard; subclasses may override.
        return true;
    }

    // Runs the spawned method locally; implemented by generated subclasses.
    public abstract void runLocal() throws Throwable;

    // Runs the spawned method on a remote (stealing) machine; implemented by
    // generated subclasses.
    public abstract ReturnRecord runRemote();

    // Clears the invocation parameters; implemented by generated subclasses.
    public abstract void clearParams();

    public final IbisIdentifier getOwner() {
        return owner;
    }

    public final void setOwner(IbisIdentifier owner) {
        this.owner = owner;
    }

    public final Stamp getStamp() {
        return stamp;
    }

    public final InvocationRecord getParent() {
        return parent;
    }

    public final IbisIdentifier getParentOwner() {
        return parentOwner;
    }

    public final Stamp getParentStamp() {
        return parentStamp;
    }

    public final void decrSpawnCounter() {
        if (spawnCounter != null) {
            // Can be null in case of stolen job.
            spawnCounter.decr(this);
        }
    }

    public final void incrSpawnCounter() {
        spawnCounter.incr(this);
    }

    public final void setStealer(IbisIdentifier stealer) {
        this.stealer = stealer;
    }

    public final IbisIdentifier getStealer() {
        return stealer;
    }

    public final void setFinishedChild(InvocationRecord finishedChild) {
        this.finishedChild = finishedChild;
    }

    public final InvocationRecord getFinishedChild() {
        return finishedChild;
    }

    public final void setFinishedSibling(InvocationRecord finishedSibling) {
        this.finishedSibling = finishedSibling;
    }

    public final InvocationRecord getFinishedSibling() {
        return finishedSibling;
    }

    public final void setToBeRestartedChild(InvocationRecord toBeRestartedChild) {
        this.toBeRestartedChild = toBeRestartedChild;
    }

    public final InvocationRecord getToBeRestartedChild() {
        return toBeRestartedChild;
    }

    public final void setToBeRestartedSibling(
        InvocationRecord toBeRestartedSibling) {
        this.toBeRestartedSibling = toBeRestartedSibling;
    }

    public final InvocationRecord getToBeRestartedSibling() {
        return toBeRestartedSibling;
    }

    public final void setReDone(boolean reDone) {
        this.reDone = reDone;
    }

    public final boolean isReDone() {
        return reDone;
    }

    public final void setOrphan(boolean orphan) {
        this.orphan = orphan;
    }

    public final boolean isOrphan() {
        return orphan;
    }

    public final int getSpawnId() {
        return spawnId;
    }

    protected final void setParentLocals(LocalRecord parentLocals) {
        this.parentLocals = parentLocals;
    }

    public final LocalRecord getParentLocals() {
        return parentLocals;
    }

    public final void setAlreadySentExceptionResult(
        boolean alreadySentExceptionResult) {
        this.alreadySentExceptionResult = alreadySentExceptionResult;
    }

    public final boolean alreadySentExceptionResult() {
        return alreadySentExceptionResult;
    }

    public final void setInletExecuted(boolean inletExecuted) {
        this.inletExecuted = inletExecuted;
    }

    public final boolean isInletExecuted() {
        return inletExecuted;
    }

    /**
     * Determines if the specified invocation record is a descendent of the job
     * indicated by the specified stamp.
     */
    public final boolean isDescendentOf(Stamp targetStamp) {
        if (parentStamp == null) {
            // A root job is a descendent only of the null (root) stamp.
            if (targetStamp == null) {
                return true;
            }
            return false;
        }
        return parentStamp.isDescendentOf(targetStamp);
    }

    // Walks the parent chain to see whether any ancestor was spawned by
    // targetOwner.
    public final boolean isDescendentOf(IbisIdentifier targetOwner) {
        if (parent == null) {
            return false;
        }
        if (parentOwner.equals(targetOwner)) {
            return true;
        }

        return parent.isDescendentOf(targetOwner);
    }

    /**
     * Attach a child to its parent's finished children list.
     */
    public final void jobFinished() {
        if (!FT_NAIVE) {
            // Only needed if the GRT is used.
            if (parent != null) {
                // Push this record onto the parent's finished-children list.
                finishedSibling = parent.finishedChild;
                parent.finishedChild = this;
            }
            // remove the job's children list
            finishedChild = null;
        }
    }

    // Records the spawn: links this record to its parent, derives a fresh
    // stamp from the parent's stamp and bumps the spawn counter.
    public final void spawn(IbisIdentifier ident, InvocationRecord parent) {
        owner = ident;
        this.parent = parent;
        if (parent == null) {
            parentStamp = null;
            parentOwner = null;
        } else {
            parentStamp = parent.stamp;
            parentOwner = parent.owner;
        }
        stamp = Stamp.createStamp(parentStamp);
        spawnCounter.incr(this);
    }

    protected final void setQprev(InvocationRecord qprev) {
        this.qprev = qprev;
    }

    protected final InvocationRecord getQprev() {
        return qprev;
    }

    protected final void setQnext(InvocationRecord qnext) {
        this.qnext = qnext;
    }

    protected final InvocationRecord getQnext() {
        return qnext;
    }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python.psi.types;

import com.intellij.openapi.util.Pair;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.ResolveResult;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.codeInsight.dataflow.scope.ScopeUtil;
import com.jetbrains.python.codeInsight.stdlib.PyNamedTupleType;
import com.jetbrains.python.codeInsight.typing.PyProtocolsKt;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.PyBuiltinCache;
import com.jetbrains.python.psi.impl.PyTypeProvider;
import com.jetbrains.python.psi.resolve.PyResolveContext;
import com.jetbrains.python.psi.resolve.RatedResolveResult;
import com.jetbrains.python.pyi.PyiFile;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;
import java.util.function.Function;

import static com.jetbrains.python.codeInsight.typing.PyProtocolsKt.inspectProtocolSubclass;
import static com.jetbrains.python.psi.PyUtil.as;
import static com.jetbrains.python.psi.impl.PyCallExpressionHelper.*;

/**
 * @author vlan
 */
public class PyTypeChecker {
  private PyTypeChecker() {
  }

  /**
   * See {@link PyTypeChecker#match(PyType, PyType, TypeEvalContext, Map)} for description.
   */
  public static boolean match(@Nullable PyType expected, @Nullable PyType actual, @NotNull TypeEvalContext context) {
    return match(expected, actual, new MatchContext(context, new HashMap<>())).orElse(true);
  }

  /**
   * Checks whether a type {@code actual} can be placed where {@code expected} is expected.
   *
   * For example {@code int} matches {@code object}, while {@code str} doesn't match {@code int}.
   * Works for builtin types, classes, tuples etc.
   *
   * When it's unknown whether {@code actual} matches {@code expected} the method returns {@code true}.
   *
   * @implNote This behavior may be changed in future by replacing {@code boolean} with {@code Optional<Boolean>} and updating the clients.
   *
   * @param expected expected type
   * @param actual type to be matched against expected
   * @param context type evaluation context
   * @param substitutions map of substitutions for {@code expected} type
   * @return {@code false} if {@code expected} and {@code actual} don't match, true otherwise
   */
  public static boolean match(@Nullable PyType expected, @Nullable PyType actual,
                              @NotNull TypeEvalContext context, @NotNull Map<PyGenericType, PyType> substitutions) {
    return match(expected, actual, new MatchContext(context, substitutions)).orElse(true);
  }

  /**
   * Recursion guard around {@link #matchImpl}: a (expected, actual) pair that
   * is already being matched is optimistically assumed to match, which makes
   * matching of self-referential types terminate.
   */
  @NotNull
  private static Optional<Boolean> match(@Nullable PyType expected, @Nullable PyType actual, @NotNull MatchContext context) {
    final Pair<PyType, PyType> types = Pair.create(expected, actual);
    if (context.matching.contains(types)) return Optional.of(true);

    context.matching.add(types);
    final Optional<Boolean> result = matchImpl(expected, actual, context);
    context.matching.remove(types);

    return result;
  }

  /**
   * Perform type matching.
   *
   * Implementation details:
   * <ul>
   *  <li>The method mutates {@code context.substitutions} map adding new entries into it
   *  <li>The order of match subroutine calls is important
   *  <li>The method may recursively call itself
   * </ul>
   */
  @NotNull
  private static Optional<Boolean> matchImpl(@Nullable PyType expected, @Nullable PyType actual, @NotNull MatchContext context) {
    if (expected instanceof PyClassType) {
      Optional<Boolean> match = matchObject((PyClassType)expected, actual);
      if (match.isPresent()) {
        return match;
      }
    }

    if (expected instanceof PyInstantiableType && actual instanceof PyInstantiableType) {
      Optional<Boolean> match = match((PyInstantiableType)expected, (PyInstantiableType)actual, context);
      if (match.isPresent()) {
        return match;
      }
    }

    if (expected instanceof PyGenericType) {
      return Optional.of(match((PyGenericType)expected, actual, context));
    }

    // Unknown participants: optimistically report a match.
    if (expected == null || actual == null || isUnknown(actual, context.context)) {
      return Optional.of(true);
    }

    if (actual instanceof PyUnionType) {
      return Optional.of(match(expected, (PyUnionType)actual, context));
    }

    if (expected instanceof PyUnionType) {
      return Optional.of(match((PyUnionType)expected, actual, context));
    }

    if (expected instanceof PyClassType && actual instanceof PyClassType) {
      Optional<Boolean> match = match((PyClassType)expected, (PyClassType)actual, context);
      if (match.isPresent()) {
        return match;
      }
    }

    if (actual instanceof PyStructuralType && ((PyStructuralType)actual).isInferredFromUsages()) {
      return Optional.of(true);
    }

    if (expected instanceof PyStructuralType) {
      return Optional.of(match((PyStructuralType)expected, actual, context.context));
    }

    if (actual instanceof PyStructuralType && expected instanceof PyClassType) {
      final Set<String> expectedAttributes = ((PyClassType)expected).getMemberNames(true, context.context);
      return Optional.of(expectedAttributes.containsAll(((PyStructuralType)actual).getAttributeNames()));
    }

    if (actual instanceof PyCallableType && expected instanceof PyCallableType) {
      final PyCallableType expectedCallable = (PyCallableType)expected;
      final PyCallableType actualCallable = (PyCallableType)actual;
      final Optional<Boolean> match = match(expectedCallable, actualCallable, context);
      if (match.isPresent()) {
        return match;
      }
    }

    return Optional.of(matchNumericTypes(expected, actual));
  }

  /**
   * Check whether {@code expected} is Python *object* or *type*.
   *
   * {@see PyTypeChecker#match(PyType, PyType, TypeEvalContext, Map)}
   */
  @NotNull
  private static Optional<Boolean> matchObject(@NotNull PyClassType expected, @Nullable PyType actual) {
    if (ArrayUtil.contains(expected.getName(), PyNames.OBJECT, PyNames.TYPE)) {
      final PyBuiltinCache builtinCache = PyBuiltinCache.getInstance(expected.getPyClass());
      // Everything matches builtin `object`.
      if (expected.equals(builtinCache.getObjectType())) {
        return Optional.of(true);
      }
      // Any class object (definition) matches builtin `type`.
      if (expected.equals(builtinCache.getTypeType()) &&
          actual instanceof PyInstantiableType && ((PyInstantiableType)actual).isDefinition()) {
        return Optional.of(true);
      }
    }
    return Optional.empty();
  }

  /**
   * Rejects a class-object vs instance mismatch (definition flags differ),
   * unless the actual class object's metaclass matches the expected type.
   * Empty result means "no decision here".
   */
  @NotNull
  private static Optional<Boolean> match(@NotNull PyInstantiableType expected, @NotNull PyInstantiableType actual, @NotNull MatchContext context) {
    if (expected instanceof PyGenericType && typeVarAcceptsBothClassAndInstanceTypes((PyGenericType)expected)) {
      return Optional.empty();
    }

    if (expected.isDefinition() ^ actual.isDefinition()) {
      if (actual.isDefinition() && actual instanceof PyClassLikeType && matchClassObjectAndMetaclass(expected, (PyClassLikeType)actual, context)) {
        return Optional.of(true);
      }
      return Optional.of(false);
    }

    return Optional.empty();
  }

  /**
   * Match {@code actual} versus {@code PyGenericType expected}.
   *
   * The method mutates {@code context.substitutions} map adding new entries into it
   */
  private static boolean match(@NotNull PyGenericType expected, @Nullable PyType actual, @NotNull MatchContext context) {
    final PyType substitution = context.substitutions.get(expected);
    PyType bound = expected.getBound();

    // Promote int in Type[TypeVar('T', int)] to Type[int] before checking that bounds match
    if (expected.isDefinition()) {
      final Function<PyType, PyType> toDefinition = t -> t instanceof PyInstantiableType ? ((PyInstantiableType)t).toClass() : t;
      bound = PyUnionType.union(PyTypeUtil.toStream(bound).map(toDefinition).toList());
    }

    Optional<Boolean> match = match(bound, actual, context);
    if (match.isPresent() && !match.get()) {
      return false;
    }

    if (substitution != null) {
      if (expected.equals(actual) || substitution.equals(expected)) {
        return true;
      }

      // Compare against the previously recorded substitution, but only one
      // level deep to avoid infinite regress.
      if (context.recursive) {
        Optional<Boolean> recursiveMatch = match(substitution, actual, context.notRecursive());
        if (recursiveMatch.isPresent()) {
          return recursiveMatch.get();
        }
      }

      return false;
    }

    // First time this TypeVar is seen: record the substitution (the actual
    // type, or the bound when actual is unknown).
    if (actual != null) {
      context.substitutions.put(expected, actual);
    }
    else if (bound != null) {
      context.substitutions.put(expected, bound);
    }

    return true;
  }

  /**
   * Matches a non-union expected type against a union actual: succeeds when at
   * least one member definitely matches. Heterogeneous expected tuples get a
   * special union-of-tuples treatment first.
   */
  private static boolean match(@NotNull PyType expected, @NotNull PyUnionType actual, @NotNull MatchContext context) {
    if (expected instanceof PyTupleType) {
      Optional<Boolean> match = match((PyTupleType)expected, actual, context);
      if (match.isPresent()) {
        return match.get();
      }
    }

    return StreamEx.of(actual.getMembers()).anyMatch(type -> match(expected, type, context).orElse(false));
  }

  /**
   * Special case: a heterogeneous expected tuple versus a union consisting
   * solely of heterogeneous tuples of the same arity. Each expected element is
   * then matched against the union of the members' corresponding elements.
   */
  @NotNull
  private static Optional<Boolean> match(@NotNull PyTupleType expected, @NotNull PyUnionType actual, @NotNull MatchContext context) {
    final int elementCount = expected.getElementCount();

    if (!expected.isHomogeneous() && consistsOfSameElementNumberTuples(actual, elementCount)) {
      return Optional.of(substituteExpectedElementsWithUnions(expected, elementCount, actual, context));
    }

    return Optional.empty();
  }

  /**
   * An expected union matches when any of its members matches (unknown
   * member results count as a match).
   */
  private static boolean match(@NotNull PyUnionType expected, @NotNull PyType actual, @NotNull MatchContext context) {
    return StreamEx.of(expected.getMembers()).anyMatch(type -> match(type, actual, context).orElse(true));
  }

  /**
   * Class-vs-class matching: nominal subclassing, protocol (structural)
   * conformance, and generic parameter matching.
   */
  @NotNull
  private static Optional<Boolean> match(@NotNull PyClassType expected, @NotNull PyClassType actual, @NotNull MatchContext context) {
    if (expected.equals(actual)) {
      return Optional.of(true);
    }

    if (expected instanceof PyTupleType && actual instanceof PyTupleType) {
      return match((PyTupleType)expected, (PyTupleType)actual, context);
    }

    final PyClass superClass = expected.getPyClass();
    final PyClass subClass = actual.getPyClass();

    final boolean matchClasses = matchClasses(superClass, subClass, context.context);

    if (PyProtocolsKt.isProtocol(expected, context.context) && !matchClasses) {
      if (expected instanceof PyCollectionType && !matchGenerics((PyCollectionType)expected, actual, context)) {
        return Optional.of(false);
      }

      // Structural check: every protocol member must be resolvable on the
      // actual type with a matching type.
      for (kotlin.Pair<PyTypedElement, List<RatedResolveResult>> pair : inspectProtocolSubclass(expected, actual, context.context)) {
        final List<RatedResolveResult> subclassElements = pair.getSecond();
        if (ContainerUtil.isEmpty(subclassElements)) {
          return Optional.of(false);
        }

        final PyType protocolElementType = context.context.getType(pair.getFirst());

        final boolean elementResult = StreamEx
          .of(subclassElements)
          .map(ResolveResult::getElement)
          .select(PyTypedElement.class)
          .map(context.context::getType)
          .anyMatch(subclassElementType -> match(protocolElementType, subclassElementType, context).orElse(true));

        if (!elementResult) {
          return Optional.of(false);
        }
      }

      final PyType originalProtocolGenericType = StreamEx
        .of(PyTypeProvider.EP_NAME.getExtensionList())
        .map(provider -> provider.getGenericType(superClass, context.context))
        .findFirst(Objects::nonNull)
        .orElse(null);

      // actual was matched against protocol definition above
      // and here protocol usage is matched against its definition to update substitutions
      match(expected, originalProtocolGenericType, context);

      return Optional.of(true);
    }

    if (expected instanceof PyCollectionType) {
      return Optional.of(match((PyCollectionType)expected, actual, context));
    }

    if (matchClasses) {
      // NewType over the same class: only ancestors of the actual NewType chain match.
      if (expected instanceof PyTypingNewType && !expected.equals(actual) && superClass.equals(subClass)) {
        return Optional.of(actual.getAncestorTypes(context.context).contains(expected));
      }
      return Optional.of(true);
    }
    if (expected.equals(actual)) {
      return Optional.of(true);
    }
    return Optional.empty();
  }

  /**
   * Tuple-vs-tuple matching, covering all four homogeneous/heterogeneous
   * combinations.
   */
  @NotNull
  private static Optional<Boolean> match(@NotNull PyTupleType expected, @NotNull PyTupleType actual, @NotNull MatchContext context) {
    if (!expected.isHomogeneous() && !actual.isHomogeneous()) {
      if (expected.getElementCount() != actual.getElementCount()) {
        return Optional.of(false);
      }

      for (int i = 0; i < expected.getElementCount(); i++) {
        if (!match(expected.getElementType(i), actual.getElementType(i), context).orElse(true)) {
          return Optional.of(false);
        }
      }

      return Optional.of(true);
    }

    if (expected.isHomogeneous() && !actual.isHomogeneous()) {
      final PyType expectedElementType = expected.getIteratedItemType();
      for (int i = 0; i < actual.getElementCount(); i++) {
        if (!match(expectedElementType, actual.getElementType(i), context).orElse(true)) {
          return Optional.of(false);
        }
      }
      return Optional.of(true);
    }

    // A homogeneous actual cannot satisfy a fixed-arity expected tuple.
    if (!expected.isHomogeneous() && actual.isHomogeneous()) {
      return Optional.of(false);
    }

    return match(expected.getIteratedItemType(), actual.getIteratedItemType(), context);
  }

  /**
   * Generic (parameterized) expected class versus a plain class type:
   * classes must match nominally and the generic parameters must match.
   */
  private static boolean match(@NotNull PyCollectionType expected, @NotNull PyClassType actual, @NotNull MatchContext context) {
    if (actual instanceof PyTupleType) {
      return match(expected, (PyTupleType)actual, context);
    }

    final PyClass superClass = expected.getPyClass();
    final PyClass subClass = actual.getPyClass();

    return matchClasses(superClass, subClass, context.context) && matchGenerics(expected, actual, context);
  }

  /**
   * Generic expected class versus a tuple: class match plus element-type match
   * against the tuple's iterated item type.
   */
  private static boolean match(@NotNull PyCollectionType expected, @NotNull PyTupleType actual, @NotNull MatchContext context) {
    if (!matchClasses(expected.getPyClass(), actual.getPyClass(), context.context)) {
      return false;
    }

    final PyType superElementType = expected.getIteratedItemType();
    final PyType subElementType = actual.getIteratedItemType();

    if (!match(superElementType, subElementType, context).orElse(true)) {
      return false;
    }

    return true;
  }

  /**
   * Structural expected type versus an arbitrary actual: dispatches to the
   * structural/class overloads, special-cases modules with module-level
   * __getattr__ (PEP 562, Python 3.7+), and otherwise requires every expected
   * attribute to resolve on the actual type.
   */
  private static boolean match(@NotNull PyStructuralType expected, @NotNull PyType actual, @NotNull TypeEvalContext context) {
    if (actual instanceof PyStructuralType) {
      return match(expected, (PyStructuralType)actual);
    }
    if (actual instanceof PyClassType) {
      return match(expected, (PyClassType)actual, context);
    }
    if (actual instanceof PyModuleType) {
      final PyFile module = ((PyModuleType)actual).getModule();
      if (module.getLanguageLevel().isAtLeast(LanguageLevel.PYTHON37) && definesGetAttr(module, context)) {
        return true;
      }
    }

    final PyResolveContext resolveContext = PyResolveContext.noImplicits().withTypeEvalContext(context);

    return StreamEx
      .of(expected.getAttributeNames())
      .noneMatch(attribute -> ContainerUtil.isEmpty(actual.resolveMember(attribute, null, AccessDirection.READ, resolveContext)));
  }

  /**
   * Structural-vs-structural: the expected attribute set must cover the
   * actual's (usage-inferred expected types accept anything).
   */
  private static boolean match(@NotNull PyStructuralType expected, @NotNull PyStructuralType actual) {
    if (expected.isInferredFromUsages()) {
      return true;
    }
    return expected.getAttributeNames().containsAll(actual.getAttributeNames());
  }

  /**
   * Structural expected type versus a class: the class (or a __getattr__
   * override) must provide all expected attributes.
   */
  private static boolean match(@NotNull PyStructuralType expected, @NotNull PyClassType actual, @NotNull TypeEvalContext context) {
    if (overridesGetAttr(actual.getPyClass(), context)) {
      return true;
    }
    final Set<String> actualAttributes = actual.getMemberNames(true, context);
    return actualAttributes.containsAll(expected.getAttributeNames());
  }

  /**
   * Callable-vs-callable: positionally matches parameter types (up to the
   * shorter list) and the return types. Empty result when either side is not
   * actually callable.
   */
  @NotNull
  private static Optional<Boolean> match(@NotNull PyCallableType expected, @NotNull PyCallableType actual, @NotNull MatchContext context) {
    if (expected.isCallable() && actual.isCallable()) {
      final List<PyCallableParameter> expectedParameters = expected.getParameters(context.context);
      final List<PyCallableParameter> actualParameters = actual.getParameters(context.context);
      if (expectedParameters != null && actualParameters != null) {
        final int size = Math.min(expectedParameters.size(), actualParameters.size());
        for (int i = 0; i < size; i++) {
          final PyCallableParameter expectedParam = expectedParameters.get(i);
          final PyCallableParameter actualParam = actualParameters.get(i);
          // TODO: Check named and star params, not only positional ones
          if (!match(expectedParam.getType(context.context), actualParam.getType(context.context), context).orElse(true)) {
            return Optional.of(false);
          }
        }
      }
      if (!match(expected.getReturnType(context.context), actual.getReturnType(context.context), context).orElse(true)) {
        return Optional.of(false);
      }
      return Optional.of(true);
    }
    return Optional.empty();
  }

  /**
   * A class object matches when its metaclass matches the expected type.
   */
  private static boolean matchClassObjectAndMetaclass(@NotNull PyType expected,
                                                      @NotNull PyClassLikeType actual,
                                                      @NotNull MatchContext context) {
    if (!actual.isDefinition()) {
      return false;
    }

    final PyClassLikeType metaClass = actual.getMetaClassType(context.context, true);
    return metaClass != null && match(expected, metaClass, context).orElse(true);
  }

  // An unbound, non-definition TypeVar accepts both classes and instances.
  private static boolean typeVarAcceptsBothClassAndInstanceTypes(@NotNull PyGenericType typeVar) {
    return !typeVar.isDefinition() && typeVar.getBound() == null;
  }

  /**
   * True when every union member is a heterogeneous tuple with exactly
   * {@code elementCount} elements.
   */
  private static boolean consistsOfSameElementNumberTuples(@NotNull PyUnionType unionType, int elementCount) {
    for (PyType type : unionType.getMembers()) {
      if (type instanceof PyTupleType) {
        final PyTupleType tupleType = (PyTupleType)type;
        if (!tupleType.isHomogeneous() && elementCount != tupleType.getElementCount()) {
          return false;
        }
      }
      else {
        return false;
      }
    }

    return true;
  }

  /**
   * Matches each expected tuple element against the union of the corresponding
   * elements of the member tuples of {@code actual}.
   */
  private static boolean substituteExpectedElementsWithUnions(@NotNull PyTupleType expected,
                                                              int elementCount,
                                                              @NotNull PyUnionType actual,
                                                              @NotNull MatchContext context) {
    for (int i = 0; i < elementCount; i++) {
      final int currentIndex = i;
final PyType elementType = PyUnionType.union( StreamEx .of(actual.getMembers()) .select(PyTupleType.class) .map(type -> type.getElementType(currentIndex)) .toList() ); if (!match(expected.getElementType(i), elementType, context).orElse(true)) { return false; } } return true; } private static boolean matchGenerics(@NotNull PyCollectionType expected, @NotNull PyType actual, @NotNull MatchContext context) { // TODO: Match generic parameters based on the correspondence between the generic parameters of subClass and its base classes final List<PyType> superElementTypes = expected.getElementTypes(); final PyCollectionType actualCollectionType = as(actual, PyCollectionType.class); final List<PyType> subElementTypes = actualCollectionType != null ? actualCollectionType.getElementTypes() : Collections.emptyList(); for (int i = 0; i < superElementTypes.size(); i++) { final PyType subElementType = i < subElementTypes.size() ? subElementTypes.get(i) : null; if (!match(superElementTypes.get(i), subElementType, context).orElse(true)) { return false; } } return true; } private static boolean matchNumericTypes(PyType expected, PyType actual) { final String superName = expected.getName(); final String subName = actual.getName(); final boolean subIsBool = "bool".equals(subName); final boolean subIsInt = PyNames.TYPE_INT.equals(subName); final boolean subIsLong = PyNames.TYPE_LONG.equals(subName); final boolean subIsFloat = "float".equals(subName); final boolean subIsComplex = "complex".equals(subName); if (superName == null || subName == null || superName.equals(subName) || (PyNames.TYPE_INT.equals(superName) && subIsBool) || ((PyNames.TYPE_LONG.equals(superName) || PyNames.ABC_INTEGRAL.equals(superName)) && (subIsBool || subIsInt)) || (("float".equals(superName) || PyNames.ABC_REAL.equals(superName)) && (subIsBool || subIsInt || subIsLong)) || (("complex".equals(superName) || PyNames.ABC_COMPLEX.equals(superName)) && (subIsBool || subIsInt || subIsLong || subIsFloat)) || 
(PyNames.ABC_NUMBER.equals(superName) && (subIsBool || subIsInt || subIsLong || subIsFloat || subIsComplex))) { return true; } return false; } public static boolean isUnknown(@Nullable PyType type, @NotNull TypeEvalContext context) { return isUnknown(type, true, context); } public static boolean isUnknown(@Nullable PyType type, boolean genericsAreUnknown, @NotNull TypeEvalContext context) { if (type == null || (genericsAreUnknown && type instanceof PyGenericType)) { return true; } if (type instanceof PyFunctionType) { final PyCallable callable = ((PyFunctionType)type).getCallable(); if (callable instanceof PyDecoratable && PyKnownDecoratorUtil.hasUnknownOrChangingReturnTypeDecorator((PyDecoratable)callable, context)) { return true; } } if (type instanceof PyUnionType) { final PyUnionType union = (PyUnionType)type; for (PyType t : union.getMembers()) { if (isUnknown(t, genericsAreUnknown, context)) { return true; } } } return false; } @Nullable public static PyType toNonWeakType(@Nullable PyType type, @NotNull TypeEvalContext context) { if (type instanceof PyUnionType) { final PyUnionType unionType = (PyUnionType)type; if (unionType.isWeak()) { return unionType.excludeNull(context); } } return type; } public static boolean hasGenerics(@Nullable PyType type, @NotNull TypeEvalContext context) { final Set<PyGenericType> collected = new HashSet<>(); collectGenerics(type, context, collected, new HashSet<>()); return !collected.isEmpty(); } private static void collectGenerics(@Nullable PyType type, @NotNull TypeEvalContext context, @NotNull Set<PyGenericType> collected, @NotNull Set<PyType> visited) { if (visited.contains(type)) { return; } visited.add(type); if (type instanceof PyGenericType) { collected.add((PyGenericType)type); } else if (type instanceof PyUnionType) { final PyUnionType union = (PyUnionType)type; for (PyType t : union.getMembers()) { collectGenerics(t, context, collected, visited); } } else if (type instanceof PyTupleType) { final PyTupleType tuple = 
(PyTupleType)type; final int n = tuple.isHomogeneous() ? 1 : tuple.getElementCount(); for (int i = 0; i < n; i++) { collectGenerics(tuple.getElementType(i), context, collected, visited); } } else if (type instanceof PyCollectionType) { final PyCollectionType collection = (PyCollectionType)type; for (PyType elementType : collection.getElementTypes()) { collectGenerics(elementType, context, collected, visited); } } else if (type instanceof PyCallableType) { final PyCallableType callable = (PyCallableType)type; final List<PyCallableParameter> parameters = callable.getParameters(context); if (parameters != null) { for (PyCallableParameter parameter : parameters) { if (parameter != null) { collectGenerics(parameter.getType(context), context, collected, visited); } } } collectGenerics(callable.getReturnType(context), context, collected, visited); } } @Nullable public static PyType substitute(@Nullable PyType type, @NotNull Map<PyGenericType, PyType> substitutions, @NotNull TypeEvalContext context) { if (hasGenerics(type, context)) { if (type instanceof PyGenericType) { final PyGenericType typeVar = (PyGenericType)type; PyType substitution = substitutions.get(typeVar); if (substitution == null) { if (!typeVar.isDefinition()) { final PyInstantiableType<?> classType = as(substitutions.get(typeVar.toClass()), PyInstantiableType.class); if (classType != null) { substitution = classType.toInstance(); } } else { final PyInstantiableType<?> instanceType = as(substitutions.get(typeVar.toInstance()), PyInstantiableType.class); if (instanceType != null) { substitution = instanceType.toClass(); } } } if (substitution instanceof PyGenericType && !typeVar.equals(substitution) && substitutions.containsKey(substitution)) { return substitute(substitution, substitutions, context); } return substitution; } else if (type instanceof PyUnionType) { final PyUnionType union = (PyUnionType)type; final List<PyType> results = new ArrayList<>(); for (PyType t : union.getMembers()) { final PyType 
subst = substitute(t, substitutions, context); results.add(subst); } return PyUnionType.union(results); } else if (type instanceof PyCollectionTypeImpl) { final PyCollectionTypeImpl collection = (PyCollectionTypeImpl)type; final List<PyType> elementTypes = collection.getElementTypes(); final List<PyType> substitutes = new ArrayList<>(); for (PyType elementType : elementTypes) { substitutes.add(substitute(elementType, substitutions, context)); } return new PyCollectionTypeImpl(collection.getPyClass(), collection.isDefinition(), substitutes); } else if (type instanceof PyTupleType) { final PyTupleType tupleType = (PyTupleType)type; final PyClass tupleClass = tupleType.getPyClass(); final List<PyType> oldElementTypes = tupleType.isHomogeneous() ? Collections.singletonList(tupleType.getIteratedItemType()) : tupleType.getElementTypes(); final List<PyType> newElementTypes = ContainerUtil.map(oldElementTypes, elementType -> substitute(elementType, substitutions, context)); return new PyTupleType(tupleClass, newElementTypes, tupleType.isHomogeneous()); } else if (type instanceof PyCallableType) { final PyCallableType callable = (PyCallableType)type; List<PyCallableParameter> substParams = null; final List<PyCallableParameter> parameters = callable.getParameters(context); if (parameters != null) { substParams = new ArrayList<>(); for (PyCallableParameter parameter : parameters) { final PyType substType = substitute(parameter.getType(context), substitutions, context); final PyParameter psi = parameter.getParameter(); final PyCallableParameter subst = psi != null ? 
PyCallableParameterImpl.psi(psi, substType) : PyCallableParameterImpl.nonPsi(parameter.getName(), substType, parameter.getDefaultValue()); substParams.add(subst); } } final PyType substResult = substitute(callable.getReturnType(context), substitutions, context); return new PyCallableTypeImpl(substParams, substResult); } } return type; } @Nullable public static Map<PyGenericType, PyType> unifyGenericCall(@Nullable PyExpression receiver, @NotNull Map<PyExpression, PyCallableParameter> arguments, @NotNull TypeEvalContext context) { final Map<PyGenericType, PyType> substitutions = unifyReceiver(receiver, context); for (Map.Entry<PyExpression, PyCallableParameter> entry : getRegularMappedParameters(arguments).entrySet()) { final PyCallableParameter paramWrapper = entry.getValue(); PyType actualType = context.getType(entry.getKey()); if (paramWrapper.isSelf()) { // TODO find out a better way to pass the corresponding function inside final PyParameter param = paramWrapper.getParameter(); final PyFunction function = as(ScopeUtil.getScopeOwner(param), PyFunction.class); if (function != null && function.getModifier() == PyFunction.Modifier.CLASSMETHOD) { final StreamEx<PyType> types; if (actualType instanceof PyUnionType) { types = StreamEx.of(((PyUnionType)actualType).getMembers()); } else { types = StreamEx.of(actualType); } actualType = types .select(PyClassLikeType.class) .map(PyClassLikeType::toClass) .select(PyType.class) .foldLeft(PyUnionType::union) .orElse(actualType); } } final PyType expectedType = paramWrapper.getArgumentType(context); if (!match(expectedType, actualType, context, substitutions)) { return null; } } if (!matchContainer(getMappedPositionalContainer(arguments), getArgumentsMappedToPositionalContainer(arguments), substitutions, context)) { return null; } if (!matchContainer(getMappedKeywordContainer(arguments), getArgumentsMappedToKeywordContainer(arguments), substitutions, context)) { return null; } return substitutions; } private static boolean 
matchContainer(@Nullable PyCallableParameter container, @NotNull List<PyExpression> arguments, @NotNull Map<PyGenericType, PyType> substitutions, @NotNull TypeEvalContext context) { if (container == null) { return true; } final List<PyType> types = ContainerUtil.map(arguments, context::getType); return match(container.getArgumentType(context), PyUnionType.union(types), context, substitutions); } @NotNull public static Map<PyGenericType, PyType> unifyReceiver(@Nullable PyExpression receiver, @NotNull TypeEvalContext context) { final Map<PyGenericType, PyType> substitutions = new LinkedHashMap<>(); // Collect generic params of object type final Set<PyGenericType> generics = new LinkedHashSet<>(); final PyType qualifierType = receiver != null ? context.getType(receiver) : null; collectGenerics(qualifierType, context, generics, new HashSet<>()); for (PyGenericType t : generics) { substitutions.put(t, t); } if (qualifierType != null) { for (PyClassType type : toPossibleClassTypes(qualifierType)) { for (PyTypeProvider provider : PyTypeProvider.EP_NAME.getExtensionList()) { final PyType genericType = provider.getGenericType(type.getPyClass(), context); final Set<PyGenericType> providedTypeGenerics = new LinkedHashSet<>(); if (genericType != null) { match(genericType, type, context, substitutions); collectGenerics(genericType, context, providedTypeGenerics, new HashSet<>()); } for (Map.Entry<PyType, PyType> entry : provider.getGenericSubstitutions(type.getPyClass(), context).entrySet()) { final PyGenericType genericKey = as(entry.getKey(), PyGenericType.class); final PyType value = entry.getValue(); if (genericKey != null && value != null && !substitutions.containsKey(genericKey) && !providedTypeGenerics.contains(genericKey)) { substitutions.put(genericKey, value); } } } } } replaceUnresolvedGenericsWithAny(substitutions); return substitutions; } @NotNull private static List<PyClassType> toPossibleClassTypes(@NotNull PyType type) { final PyClassType classType = as(type, 
PyClassType.class); if (classType != null) { return Collections.singletonList(classType); } final PyUnionType unionType = as(type, PyUnionType.class); if (unionType != null) { return StreamEx.of(unionType.getMembers()).nonNull().flatMap(t -> toPossibleClassTypes(t).stream()).toList(); } return Collections.emptyList(); } private static void replaceUnresolvedGenericsWithAny(@NotNull Map<PyGenericType, PyType> substitutions) { final List<PyType> unresolvedGenerics = ContainerUtil.filter(substitutions.values(), type -> type instanceof PyGenericType && !substitutions.containsKey(type)); for (PyType unresolvedGeneric : unresolvedGenerics) { substitutions.put((PyGenericType)unresolvedGeneric, null); } } private static boolean matchClasses(@Nullable PyClass superClass, @Nullable PyClass subClass, @NotNull TypeEvalContext context) { if (superClass == null || subClass == null || subClass.isSubclass(superClass, context) || PyABCUtil.isSubclass(subClass, superClass, context) || isStrUnicodeMatch(subClass, superClass) || isBytearrayBytesStringMatch(subClass, superClass) || PyUtil.hasUnresolvedAncestors(subClass, context)) { return true; } else { final String superName = superClass.getName(); return superName != null && superName.equals(subClass.getName()); } } private static boolean isStrUnicodeMatch(@NotNull PyClass subClass, @NotNull PyClass superClass) { // TODO: Check for subclasses as well return PyNames.TYPE_STR.equals(subClass.getName()) && PyNames.TYPE_UNICODE.equals(superClass.getName()); } private static boolean isBytearrayBytesStringMatch(@NotNull PyClass subClass, @NotNull PyClass superClass) { if (!PyNames.TYPE_BYTEARRAY.equals(subClass.getName())) return false; final PsiFile subClassFile = subClass.getContainingFile(); final boolean isPy2 = subClassFile instanceof PyiFile ? 
PyBuiltinCache.getInstance(subClass).getObjectType(PyNames.TYPE_UNICODE) != null : LanguageLevel.forElement(subClass).isPython2(); final String superClassName = superClass.getName(); return isPy2 && PyNames.TYPE_STR.equals(superClassName) || !isPy2 && PyNames.TYPE_BYTES.equals(superClassName); } @Nullable public static Boolean isCallable(@Nullable PyType type) { if (type == null) { return null; } if (type instanceof PyUnionType) { return isUnionCallable((PyUnionType)type); } if (type instanceof PyCallableType) { return ((PyCallableType)type).isCallable(); } if (type instanceof PyStructuralType && ((PyStructuralType)type).isInferredFromUsages()) { return true; } if (type instanceof PyGenericType) { if (((PyGenericType)type).isDefinition()) { return true; } return isCallable(((PyGenericType)type).getBound()); } return false; } /** * If at least one is callable -- it is callable. * If at least one is unknown -- it is unknown. * It is false otherwise. */ @Nullable private static Boolean isUnionCallable(@NotNull final PyUnionType type) { for (final PyType member : type.getMembers()) { final Boolean callable = isCallable(member); if (callable == null) { return null; } if (callable) { return true; } } return false; } public static boolean definesGetAttr(@NotNull PyFile file, @NotNull TypeEvalContext context) { if (file instanceof PyTypedElement) { final PyType type = context.getType((PyTypedElement)file); if (type != null) { return resolveTypeMember(type, PyNames.GETATTR, context) != null; } } return false; } public static boolean overridesGetAttr(@NotNull PyClass cls, @NotNull TypeEvalContext context) { final PyType type = context.getType(cls); if (type != null) { if (resolveTypeMember(type, PyNames.GETATTR, context) != null) { return true; } final PsiElement method = resolveTypeMember(type, PyNames.GETATTRIBUTE, context); if (method != null && !PyBuiltinCache.getInstance(cls).isBuiltin(method)) { return true; } } return false; } @Nullable private static PsiElement 
resolveTypeMember(@NotNull PyType type, @NotNull String name, @NotNull TypeEvalContext context) { final PyResolveContext resolveContext = PyResolveContext.noImplicits().withTypeEvalContext(context); final List<? extends RatedResolveResult> results = type.resolveMember(name, null, AccessDirection.READ, resolveContext); return !ContainerUtil.isEmpty(results) ? results.get(0).getElement() : null; } @Nullable public static PyType getTargetTypeFromTupleAssignment(@NotNull PyTargetExpression target, @NotNull PyTupleExpression parentTuple, @NotNull PyType assignedType, @NotNull TypeEvalContext context) { if (assignedType instanceof PyTupleType) { return getTargetTypeFromTupleAssignment(target, parentTuple, (PyTupleType)assignedType); } else if (assignedType instanceof PyClassLikeType) { return StreamEx .of(((PyClassLikeType)assignedType).getAncestorTypes(context)) .select(PyNamedTupleType.class) .findFirst() .map(t -> getTargetTypeFromTupleAssignment(target, parentTuple, t)) .orElse(null); } return null; } @Nullable public static PyType getTargetTypeFromTupleAssignment(@NotNull PyTargetExpression target, @NotNull PyTupleExpression parentTuple, @NotNull PyTupleType assignedTupleType) { final int count = assignedTupleType.getElementCount(); final PyExpression[] elements = parentTuple.getElements(); if (elements.length == count || assignedTupleType.isHomogeneous()) { final int index = ArrayUtil.indexOf(elements, target); if (index >= 0) { return assignedTupleType.getElementType(index); } for (int i = 0; i < count; i++) { PyExpression element = elements[i]; while (element instanceof PyParenthesizedExpression) { element = ((PyParenthesizedExpression)element).getContainedExpression(); } if (element instanceof PyTupleExpression) { final PyType elementType = assignedTupleType.getElementType(i); if (elementType instanceof PyTupleType) { final PyType result = getTargetTypeFromTupleAssignment(target, (PyTupleExpression)element, (PyTupleType)elementType); if (result != null) { return 
result; } } } } } return null; } private static class MatchContext { @NotNull private final TypeEvalContext context; @NotNull private final Map<PyGenericType, PyType> substitutions; // mutable private final boolean recursive; @NotNull private final Set<Pair<PyType, PyType>> matching; // mutable MatchContext(@NotNull TypeEvalContext context, @NotNull Map<PyGenericType, PyType> substitutions) { this(context, substitutions, true, new HashSet<>()); } private MatchContext(@NotNull TypeEvalContext context, @NotNull Map<PyGenericType, PyType> substitutions, boolean recursive, @NotNull Set<Pair<PyType, PyType>> matching) { this.context = context; this.substitutions = substitutions; this.recursive = recursive; this.matching = matching; } @NotNull public MatchContext notRecursive() { return new MatchContext(context, substitutions, false, matching); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.kylin.storage.hbase;

import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.persistence.StorageException;
import org.apache.kylin.engine.mr.HadoopUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Caches shared {@link HConnection} instances (one per storage URL) and offers
 * small HTable administration helpers. Cached connections are closed by a JVM
 * shutdown hook.
 *
 * @author yangli9
 */
public class HBaseConnection {

    private static final Logger logger = LoggerFactory.getLogger(HBaseConnection.class);

    private static final Map<String, Configuration> ConfigCache = new ConcurrentHashMap<String, Configuration>();
    private static final Map<String, HConnection> ConnPool = new ConcurrentHashMap<String, HConnection>();

    static {
        // Best-effort close of pooled connections at JVM exit; failures are
        // logged (not printStackTrace'd) so they reach the configured log sink.
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                for (HConnection conn : ConnPool.values()) {
                    try {
                        conn.close();
                    } catch (IOException e) {
                        logger.error("Failed to close HBase connection on shutdown", e);
                    }
                }
            }
        });
    }

    /**
     * Drops all pooled connections. NOTE(review): the dropped connections are
     * not closed here — only the shutdown hook closes them; confirm callers
     * rely on that.
     */
    public static void clearConnCache() {
        ConnPool.clear();
    }

    private static final ThreadLocal<Configuration> hbaseConfig = new ThreadLocal<>();

    /**
     * Returns a thread-local HBase {@link Configuration}, lazily built from the
     * storage URL configured for this Kylin instance.
     */
    public static Configuration getCurrentHBaseConfiguration() {
        if (hbaseConfig.get() == null) {
            String storageUrl = KylinConfig.getInstanceFromEnv().getStorageUrl();
            hbaseConfig.set(newHBaseConfiguration(storageUrl));
        }
        return hbaseConfig.get();
    }

    /**
     * Builds an HBase configuration on top of the current Hadoop configuration.
     *
     * @param url storage URL; must be empty or exactly "hbase"
     * @throws IllegalArgumentException for any other URL (hbase:xxx URLs are deprecated)
     */
    private static Configuration newHBaseConfiguration(String url) {
        Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());

        // using a hbase:xxx URL is deprecated, instead hbase config is always loaded from hbase-site.xml in classpath
        if (!(StringUtils.isEmpty(url) || "hbase".equals(url)))
            throw new IllegalArgumentException("to use hbase storage, pls set 'kylin.storage.url=hbase' in kylin.properties");

        // support hbase using a different FS
        String hbaseClusterFs = KylinConfig.getInstanceFromEnv().getHBaseClusterFs();
        if (StringUtils.isNotEmpty(hbaseClusterFs)) {
            conf.set(FileSystem.FS_DEFAULT_NAME_KEY, hbaseClusterFs);
        }

        // https://issues.apache.org/jira/browse/KYLIN-953
        if (StringUtils.isBlank(conf.get("hadoop.tmp.dir"))) {
            conf.set("hadoop.tmp.dir", "/tmp");
        }
        if (StringUtils.isBlank(conf.get("hbase.fs.tmp.dir"))) {
            conf.set("hbase.fs.tmp.dir", "/tmp");
        }

        // reduce rpc retry
        conf.set(HConstants.HBASE_CLIENT_PAUSE, "3000");
        conf.set(HConstants.HBASE_CLIENT_RETRIES_NUMBER, "5");
        conf.set(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, "60000");
        // conf.set(ScannerCallable.LOG_SCANNER_ACTIVITY, "true");

        return conf;
    }

    /** Qualifies a path against the file system of the HBase cluster. */
    public static String makeQualifiedPathInHBaseCluster(String path) {
        try {
            FileSystem fs = FileSystem.get(getCurrentHBaseConfiguration());
            return fs.makeQualified(new Path(path)).toString();
        } catch (IOException e) {
            throw new IllegalArgumentException("Cannot create FileSystem from current hbase cluster conf", e);
        }
    }

    // ============================================================================

    // returned HConnection can be shared by multiple threads and does not require close()
    @SuppressWarnings("resource")
    public static HConnection get(String url) {
        // find configuration; a benign race may compute the same conf twice,
        // which is harmless since Configuration instances are equivalent
        Configuration conf = ConfigCache.get(url);
        if (conf == null) {
            conf = newHBaseConfiguration(url);
            ConfigCache.put(url, conf);
        }

        HConnection connection = ConnPool.get(url);

        try {
            while (true) {
                // I don't use DCL since recreate a connection is not a big issue.
                if (connection == null || connection.isClosed()) {
                    logger.info("connection is null or closed, creating a new one");
                    connection = HConnectionManager.createConnection(conf);
                    ConnPool.put(url, connection);
                }

                if (connection == null || connection.isClosed()) {
                    Thread.sleep(10000);// wait a while and retry
                } else {
                    break;
                }
            }
        } catch (InterruptedException e) {
            // restore the interrupt status before surfacing the failure,
            // so callers/executors can observe the interruption
            Thread.currentThread().interrupt();
            logger.error("Error when open connection " + url, e);
            throw new StorageException("Error when open connection " + url, e);
        } catch (Throwable t) {
            logger.error("Error when open connection " + url, t);
            throw new StorageException("Error when open connection " + url, t);
        }

        return connection;
    }

    /** Checks table existence; the temporary HBaseAdmin is always closed. */
    public static boolean tableExists(HConnection conn, String tableName) throws IOException {
        HBaseAdmin hbase = new HBaseAdmin(conn);
        try {
            return hbase.tableExists(TableName.valueOf(tableName));
        } finally {
            hbase.close();
        }
    }

    public static boolean tableExists(String hbaseUrl, String tableName) throws IOException {
        return tableExists(HBaseConnection.get(hbaseUrl), tableName);
    }

    public static void createHTableIfNeeded(String hbaseUrl, String tableName, String... families) throws IOException {
        createHTableIfNeeded(HBaseConnection.get(hbaseUrl), tableName, families);
    }

    public static void deleteTable(String hbaseUrl, String tableName) throws IOException {
        deleteTable(HBaseConnection.get(hbaseUrl), tableName);
    }

    /**
     * Creates the table with the given column families if it does not already
     * exist. Families are flagged in-memory (metadata tables are best in memory).
     */
    public static void createHTableIfNeeded(HConnection conn, String tableName, String... families) throws IOException {
        HBaseAdmin hbase = new HBaseAdmin(conn);

        try {
            if (tableExists(conn, tableName)) {
                logger.debug("HTable '" + tableName + "' already exists");
                return;
            }

            logger.debug("Creating HTable '" + tableName + "'");

            HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));

            if (null != families && families.length > 0) {
                for (String family : families) {
                    HColumnDescriptor fd = new HColumnDescriptor(family);
                    fd.setInMemory(true); // metadata tables are best in memory
                    desc.addFamily(fd);
                }
            }
            hbase.createTable(desc);

            logger.debug("HTable '" + tableName + "' created");
        } finally {
            hbase.close();
        }
    }

    /** Disables (if needed) and deletes the table; a no-op when it is absent. */
    public static void deleteTable(HConnection conn, String tableName) throws IOException {
        HBaseAdmin hbase = new HBaseAdmin(conn);

        try {
            if (!tableExists(conn, tableName)) {
                logger.debug("HTable '" + tableName + "' does not exists");
                return;
            }

            logger.debug("delete HTable '" + tableName + "'");

            if (hbase.isTableEnabled(tableName)) {
                hbase.disableTable(tableName);
            }
            hbase.deleteTable(tableName);

            logger.debug("HTable '" + tableName + "' deleted");
        } finally {
            hbase.close();
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.configuration2.builder.combined;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;

import org.apache.commons.configuration2.ConfigurationUtils;
import org.apache.commons.configuration2.HierarchicalConfiguration;
import org.apache.commons.configuration2.builder.BasicBuilderParameters;
import org.apache.commons.configuration2.builder.BuilderParameters;
import org.apache.commons.configuration2.builder.ConfigurationBuilder;
import org.apache.commons.configuration2.builder.DefaultParametersHandler;
import org.apache.commons.configuration2.builder.DefaultParametersManager;

/**
 * <p>
 * A specialized parameters object for a {@link CombinedConfigurationBuilder}.
 * </p>
 * <p>
 * This class defines methods for setting properties for customizing a builder for combined configurations. Note that
 * some of these properties can also be set in the configuration definition file. If this is the case, the settings in
 * the definition file override the content of this object.
 * </p>
 * <p>
 * This class is not thread-safe. It is intended that an instance is constructed and initialized by a single thread
 * during configuration of a {@code ConfigurationBuilder}.
 * </p>
 *
 * @since 2.0
 */
public class CombinedBuilderParametersImpl extends BasicBuilderParameters implements CombinedBuilderProperties<CombinedBuilderParametersImpl> {
    /** Constant for the key in the parameters map used by this class. */
    private static final String PARAM_KEY = RESERVED_PARAMETER_PREFIX + CombinedBuilderParametersImpl.class.getName();

    /** The definition configuration builder. */
    private ConfigurationBuilder<? extends HierarchicalConfiguration<?>> definitionBuilder;

    /** A parameters object for the definition configuration builder. */
    private BuilderParameters definitionBuilderParameters;

    /** A map with registered configuration builder providers. */
    private final Map<String, ConfigurationBuilderProvider> providers;

    /** A list with default parameters for child configuration sources. */
    private final Collection<BuilderParameters> childParameters;

    /** The manager for default handlers. */
    private DefaultParametersManager childDefaultParametersManager;

    /** The base path for configuration sources to be loaded. */
    private String basePath;

    /** A flag whether settings should be inherited by child builders. */
    private boolean inheritSettings;

    /**
     * Creates a new instance of {@code CombinedBuilderParametersImpl}.
     */
    public CombinedBuilderParametersImpl() {
        providers = new HashMap<>();
        childParameters = new LinkedList<>();
        inheritSettings = true;
    }

    /**
     * Looks up an instance of this class in the specified parameters map. This is equivalent to
     * {@code fromParameters(params, false)}.
     *
     * @param params the map with parameters (must not be <b>null</b>)
     * @return the instance obtained from the map or <b>null</b>
     * @throws NullPointerException if the map is <b>null</b>
     */
    public static CombinedBuilderParametersImpl fromParameters(final Map<String, ?> params) {
        return fromParameters(params, false);
    }

    /**
     * Looks up an instance of this class in the specified parameters map and optionally creates a new one if none is found.
     * This method can be used to obtain an instance of this class which has been stored in a parameters map. It is
     * compatible with the {@code getParameters()} method.
     *
     * @param params the map with parameters (must not be <b>null</b>)
     * @param createIfMissing determines the behavior if no instance is found in the map; if <b>true</b>, a new instance
     *        with default settings is created; if <b>false</b>, <b>null</b> is returned
     * @return the instance obtained from the map or <b>null</b>
     * @throws NullPointerException if the map is <b>null</b>
     */
    public static CombinedBuilderParametersImpl fromParameters(final Map<String, ?> params, final boolean createIfMissing) {
        CombinedBuilderParametersImpl result = (CombinedBuilderParametersImpl) params.get(PARAM_KEY);
        if (result == null && createIfMissing) {
            result = new CombinedBuilderParametersImpl();
        }
        return result;
    }

    /**
     * {@inheritDoc} This implementation additionally copies some properties defined by this class.
     */
    @Override
    public void inheritFrom(final Map<String, ?> source) {
        super.inheritFrom(source);

        final CombinedBuilderParametersImpl srcParams = fromParameters(source);
        if (srcParams != null) {
            setChildDefaultParametersManager(srcParams.getChildDefaultParametersManager());
            setInheritSettings(srcParams.isInheritSettings());
        }
    }

    /**
     * Returns the current value of the flag that controls whether the settings of the parent combined configuration builder
     * should be inherited by its child configurations.
     *
     * @return the flag whether settings should be inherited by child configurations
     */
    public boolean isInheritSettings() {
        return inheritSettings;
    }

    @Override
    public CombinedBuilderParametersImpl setInheritSettings(final boolean inheritSettings) {
        this.inheritSettings = inheritSettings;
        return this;
    }

    /**
     * Returns the {@code ConfigurationBuilder} object for obtaining the definition configuration.
     *
     * @return the definition {@code ConfigurationBuilder}
     */
    public ConfigurationBuilder<? extends HierarchicalConfiguration<?>> getDefinitionBuilder() {
        return definitionBuilder;
    }

    /**
     * Sets the {@code ConfigurationBuilder} for the definition configuration. This is the configuration which contains the
     * configuration sources that form the combined configuration.
     *
     * @param builder the definition {@code ConfigurationBuilder}
     * @return a reference to this object for method chaining
     */
    @Override
    public CombinedBuilderParametersImpl setDefinitionBuilder(final ConfigurationBuilder<? extends HierarchicalConfiguration<?>> builder) {
        definitionBuilder = builder;
        return this;
    }

    /**
     * Registers the given {@code ConfigurationBuilderProvider} for the specified tag name. This means that whenever this
     * tag is encountered in a configuration definition file, the corresponding builder provider is invoked.
     *
     * @param tagName the name of the tag (must not be <b>null</b>)
     * @param provider the {@code ConfigurationBuilderProvider} (must not be <b>null</b>)
     * @return a reference to this object for method chaining
     * @throws IllegalArgumentException if a required parameter is missing
     */
    @Override
    public CombinedBuilderParametersImpl registerProvider(final String tagName, final ConfigurationBuilderProvider provider) {
        if (tagName == null) {
            throw new IllegalArgumentException("Tag name must not be null!");
        }
        if (provider == null) {
            throw new IllegalArgumentException("Provider must not be null!");
        }

        providers.put(tagName, provider);
        return this;
    }

    /**
     * Registers all {@code ConfigurationBuilderProvider}s in the given map to this object which have not yet been
     * registered. This method is mainly used for internal purposes: a {@code CombinedConfigurationBuilder} takes the
     * providers contained in a parameters object and adds all standard providers. This way it is possible to override a
     * standard provider by registering a provider object for the same tag name at the parameters object.
     *
     * @param providers a map with tag names and corresponding providers (must not be <b>null</b> or contain <b>null</b>
     *        entries)
     * @return a reference to this object for method chaining
     * @throws IllegalArgumentException if the map with providers is <b>null</b> or contains <b>null</b> entries
     */
    public CombinedBuilderParametersImpl registerMissingProviders(final Map<String, ConfigurationBuilderProvider> providers) {
        if (providers == null) {
            throw new IllegalArgumentException("Map with providers must not be null!");
        }
        // Delegate to registerProvider() so that null keys/values in the map are rejected.
        for (final Map.Entry<String, ConfigurationBuilderProvider> e : providers.entrySet()) {
            if (!this.providers.containsKey(e.getKey())) {
                registerProvider(e.getKey(), e.getValue());
            }
        }
        return this;
    }

    /**
     * Registers all {@code ConfigurationBuilderProvider}s in the given parameters object which have not yet been
     * registered. This method works like the method with the same name, but the map with providers is obtained from the
     * passed in parameters object.
     *
     * @param params the parameters object from which to copy providers (must not be <b>null</b>)
     * @return a reference to this object for method chaining
     * @throws IllegalArgumentException if the source parameters object is <b>null</b>
     */
    public CombinedBuilderParametersImpl registerMissingProviders(final CombinedBuilderParametersImpl params) {
        if (params == null) {
            throw new IllegalArgumentException("Source parameters must not be null!");
        }
        return registerMissingProviders(params.getProviders());
    }

    /**
     * Returns an (unmodifiable) map with the currently registered {@code ConfigurationBuilderProvider} objects.
     *
     * @return the map with {@code ConfigurationBuilderProvider} objects (the keys are the tag names)
     */
    public Map<String, ConfigurationBuilderProvider> getProviders() {
        return Collections.unmodifiableMap(providers);
    }

    /**
     * Returns the {@code ConfigurationBuilderProvider} which is registered for the specified tag name or <b>null</b> if
     * there is no registration for this tag.
     *
     * @param tagName the tag name
     * @return the provider registered for this tag or <b>null</b>
     */
    public ConfigurationBuilderProvider providerForTag(final String tagName) {
        return providers.get(tagName);
    }

    /**
     * Returns the base path for relative names of configuration sources. Result may be <b>null</b> if no base path has been
     * set.
     *
     * @return the base path for resolving relative file names
     */
    public String getBasePath() {
        return basePath;
    }

    /**
     * Sets the base path for this combined configuration builder. Normally it is not necessary to set the base path
     * explicitly. Per default, relative file names of configuration sources are resolved based on the location of the
     * definition file. If this is not desired or if the definition configuration is loaded by a different means, the base
     * path for relative file names can be specified using this method.
     *
     * @param path the base path for resolving relative file names
     * @return a reference to this object for method chaining
     */
    @Override
    public CombinedBuilderParametersImpl setBasePath(final String path) {
        basePath = path;
        return this;
    }

    /**
     * Returns the parameters object for the definition configuration builder if present.
     *
     * @return the parameters object for the definition configuration builder or <b>null</b>
     */
    public BuilderParameters getDefinitionBuilderParameters() {
        return definitionBuilderParameters;
    }

    /**
     * Sets the parameters object for the definition configuration builder. This property is evaluated only if the
     * definition configuration builder is not set explicitly (using the {@link #setDefinitionBuilder(ConfigurationBuilder)}
     * method). In this case, a builder for an XML configuration is created and configured with this parameters object.
     *
     * @param params the parameters object for the definition configuration builder
     * @return a reference to this object for method chaining
     */
    @Override
    public CombinedBuilderParametersImpl setDefinitionBuilderParameters(final BuilderParameters params) {
        definitionBuilderParameters = params;
        return this;
    }

    /**
     * Returns a collection with default parameter objects for child configuration sources. This collection contains the
     * same objects (in the same order) that were passed to {@code addChildParameters()}. The returned collection is a
     * defensive copy; it can be modified, but this has no effect on the parameters stored in this object.
     *
     * @return a map with default parameters for child sources
     */
    public Collection<? extends BuilderParameters> getDefaultChildParameters() {
        return new ArrayList<>(childParameters);
    }

    /**
     * Returns the {@code DefaultParametersManager} object for initializing parameter objects for child configuration
     * sources. This method never returns <b>null</b>. If no manager was set, a new instance is created right now.
     *
     * @return the {@code DefaultParametersManager} for child configuration sources
     */
    public DefaultParametersManager getChildDefaultParametersManager() {
        if (childDefaultParametersManager == null) {
            childDefaultParametersManager = new DefaultParametersManager();
        }
        return childDefaultParametersManager;
    }

    /**
     * {@inheritDoc} This implementation stores the passed in manager object. An already existing manager object (either
     * explicitly set or created on demand) is overridden. This also removes all default handlers registered before!
     */
    @Override
    public CombinedBuilderParametersImpl setChildDefaultParametersManager(final DefaultParametersManager manager) {
        childDefaultParametersManager = manager;
        return this;
    }

    /**
     * {@inheritDoc} This implementation registers the passed in handler at an internal {@link DefaultParametersManager}
     * instance. If none was set, a new instance is created now.
     */
    @Override
    public <D> CombinedBuilderParametersImpl registerChildDefaultsHandler(final Class<D> paramClass, final DefaultParametersHandler<? super D> handler) {
        getChildDefaultParametersManager().registerDefaultsHandler(paramClass, handler);
        return this;
    }

    /**
     * {@inheritDoc} This implementation registers the passed in handler at an internal {@link DefaultParametersManager}
     * instance. If none was set, a new instance is created now.
     */
    @Override
    public <D> CombinedBuilderParametersImpl registerChildDefaultsHandler(final Class<D> paramClass, final DefaultParametersHandler<? super D> handler,
        final Class<?> startClass) {
        getChildDefaultParametersManager().registerDefaultsHandler(paramClass, handler, startClass);
        return this;
    }

    /**
     * {@inheritDoc} This implementation returns a map which contains this object itself under a specific key. The static
     * {@code fromParameters()} method can be used to extract an instance from a parameters map.
     */
    @Override
    public Map<String, Object> getParameters() {
        final Map<String, Object> params = super.getParameters();
        params.put(PARAM_KEY, this);
        return params;
    }

    /**
     * {@inheritDoc} This implementation also clones the parameters object for the definition builder if possible.
     */
    @Override
    public CombinedBuilderParametersImpl clone() {
        final CombinedBuilderParametersImpl copy = (CombinedBuilderParametersImpl) super.clone();
        copy.setDefinitionBuilderParameters((BuilderParameters) ConfigurationUtils.cloneIfPossible(getDefinitionBuilderParameters()));
        return copy;
    }
}
/*
 * Copyright 2018 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.stunner.bpmn.backend.converters.tostunner;

import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.kie.workbench.common.stunner.bpmn.backend.converters.TypedFactoryManager;
import org.kie.workbench.common.stunner.bpmn.backend.converters.VoidMatch;
import org.kie.workbench.common.stunner.core.api.DefinitionManager;
import org.kie.workbench.common.stunner.core.command.Command;
import org.kie.workbench.common.stunner.core.command.CommandResult;
import org.kie.workbench.common.stunner.core.command.impl.DeferredCompositeCommand;
import org.kie.workbench.common.stunner.core.graph.Edge;
import org.kie.workbench.common.stunner.core.graph.Graph;
import org.kie.workbench.common.stunner.core.graph.Node;
import org.kie.workbench.common.stunner.core.graph.command.EmptyRulesCommandExecutionContext;
import org.kie.workbench.common.stunner.core.graph.command.GraphCommandExecutionContext;
import org.kie.workbench.common.stunner.core.graph.command.GraphCommandManager;
import org.kie.workbench.common.stunner.core.graph.command.impl.AddChildNodeCommand;
import org.kie.workbench.common.stunner.core.graph.command.impl.AddDockedNodeCommand;
import org.kie.workbench.common.stunner.core.graph.command.impl.AddNodeCommand;
import org.kie.workbench.common.stunner.core.graph.command.impl.GraphCommandFactory;
import org.kie.workbench.common.stunner.core.graph.command.impl.UpdateElementPositionCommand;
import org.kie.workbench.common.stunner.core.graph.content.Bound;
import org.kie.workbench.common.stunner.core.graph.content.Bounds;
import org.kie.workbench.common.stunner.core.graph.content.definition.DefinitionSet;
import org.kie.workbench.common.stunner.core.graph.content.view.Connection;
import org.kie.workbench.common.stunner.core.graph.content.view.ControlPoint;
import org.kie.workbench.common.stunner.core.graph.content.view.Point2D;
import org.kie.workbench.common.stunner.core.graph.content.view.View;
import org.kie.workbench.common.stunner.core.graph.processing.index.map.MapIndexBuilder;
import org.kie.workbench.common.stunner.core.rule.RuleManager;
import org.kie.workbench.common.stunner.core.rule.RuleViolation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A wrapper for graph command execution,
 * exposing a simple, method-based API.
 * <p>
 * A `GraphBuilder` object issues commands to the canvas while building the graph.
 * It is a wrapper around
 * <ul>
 * <li>GraphCommandExecutionContext</li>
 * <li>GraphCommandFactory</li>
 * <li>GraphCommandManager</li>
 * </ul>
 * <p>
 * `GraphBuilder` is used for convenience, to avoid explicitly creating command instances.
 * It also implements custom logic for some actions. For example, in the case of adding child nodes,
 * it translates the coordinates of a child node into the new reference system (the parent boundaries).
 * <p>
 * `GraphBuilder` builds the entire graph {@link GraphBuilder#buildGraph(BpmnNode)}
 * once all the conversions have took place: it traverses the entire directed graph described by the `BPMNNode`s
 * starting from the "root node", which represents the root of the diagram, and visiting
 * the parent/child relations in each BPMNNode and the `BPMNEdge` they may contain.
 */
public class GraphBuilder {

    private static final Logger logger = LoggerFactory.getLogger(GraphBuilder.class);

    private final GraphCommandExecutionContext executionContext;
    private final GraphCommandFactory commandFactory;
    private final GraphCommandManager commandManager;
    private final Graph<DefinitionSet, Node> graph;

    public GraphBuilder(
            Graph<DefinitionSet, Node> graph,
            DefinitionManager definitionManager,
            TypedFactoryManager typedFactoryManager,
            RuleManager ruleManager,
            GraphCommandFactory commandFactory,
            GraphCommandManager commandManager) {
        this.graph = graph;
        this.executionContext = new EmptyRulesCommandExecutionContext(
                definitionManager,
                typedFactoryManager.untyped(),
                ruleManager,
                new MapIndexBuilder().build(graph));
        this.commandFactory = commandFactory;
        this.commandManager = commandManager;
    }

    /**
     * Clears the context and then walks the graph root
     * to draw it on the canvas
     */
    public void render(BpmnNode root) {
        clearGraph();
        buildGraph(root);
    }

    /**
     * Starting from the given root node,
     * it walks the graph breadth-first and issues
     * all the required commands to draw it on the canvas
     */
    public void buildGraph(BpmnNode rootNode) {
        this.addNode(rootNode.value());
        rootNode.getEdges().forEach(this::addEdge);
        List<BpmnNode> nodes = rootNode.getChildren();
        Deque<BpmnNode> workingSet = new ArrayDeque<>(prioritized(nodes));
        Set<BpmnNode> workedOff = new HashSet<>();
        while (!workingSet.isEmpty()) {
            BpmnNode current = workingSet.pop();
            // Set.add returns false when the node was already visited,
            // so each node is processed exactly once (single lookup).
            if (!workedOff.add(current)) {
                continue;
            }
            workingSet.addAll(
                    prioritized(current.getChildren()));
            logger.debug("{} :: {}",
                         current.getParent().value().getUUID(),
                         current.value().getUUID());

            this.addChildNode(current);
            current.getEdges().forEach(this::addEdge);
        }
    }

    // make sure that docked nodes are processed *after* its siblings
    // for compat with drawing routines
    private Collection<BpmnNode> prioritized(List<BpmnNode> children) {
        ArrayDeque<BpmnNode> prioritized = new ArrayDeque<>();
        for (BpmnNode node : children) {
            if (node.isDocked()) {
                // docked nodes go to the tail of the deque
                prioritized.add(node);
            } else {
                // non-docked nodes go to the head (their relative order is reversed)
                prioritized.push(node);
            }
        }
        return prioritized;
    }

    private void addDockedNode(Node parent, Node candidate) {
        AddDockedNodeCommand addNodeCommand = commandFactory.addDockedNode(parent, candidate);
        execute(addNodeCommand);
    }

    private void addChildNode(BpmnNode current) {
        addChildNode(current.getParent().value(), current.value());
        // docked nodes keep their coordinates; others are re-based on the parent's origin
        if (!current.isDocked()) {
            translate(
                    current.value(),
                    current.getParent().value().getContent().getBounds().getUpperLeft());
        }
    }

    private void addChildNode(Node<? extends View, ?> parent, Node<? extends View, ?> child) {
        AddChildNodeCommand addChildNodeCommand = commandFactory.addChildNode(parent, child);
        execute(addChildNodeCommand);
    }

    /**
     * Move node into a new coordinate system with origin in newOrigin.
     * <p>
     * E.g., assume origin is currently (0,0), and consider node at (10,11).
     * If we move node into a new coordinate system where the origin is in (3, 4)
     * then the new coordinates for node are: (10-3, 11-4) = (7,7)
     */
    private void translate(Node<? extends View, ?> node, Bound newOrigin) {
        logger.debug("Translating {} from {} into constraints {}",
                     node.getUUID(), node.getContent().getBounds(), newOrigin);

        Bounds childBounds = node.getContent().getBounds();
        double constrainedX = childBounds.getUpperLeft().getX() - newOrigin.getX();
        double constrainedY = childBounds.getUpperLeft().getY() - newOrigin.getY();

        Point2D coords = Point2D.create(constrainedX, constrainedY);
        updatePosition(node, coords);
    }

    private void updatePosition(Node node, Point2D position) {
        UpdateElementPositionCommand updateElementPositionCommand = commandFactory.updatePosition(node, position);
        execute(updateElementPositionCommand);
    }

    private void addNode(Node node) {
        AddNodeCommand addNodeCommand = commandFactory.addNode(node);
        execute(addNodeCommand);
    }

    @SuppressWarnings("unchecked")
    private void addEdge(
            Edge<? extends View<?>, Node> edge,
            Node source,
            Connection sourceConnection,
            List<Point2D> controlPoints,
            Node target,
            Connection targetConnection) {
        // Build one deferred composite command: connector, control points, target node.
        final DeferredCompositeCommand.Builder<GraphCommandExecutionContext, RuleViolation> commandBuilder =
                new DeferredCompositeCommand.Builder<>();
        addConnector(commandBuilder, source, edge, sourceConnection);
        final ControlPoint[] cps = new ControlPoint[controlPoints.size()];
        for (int i = 0; i < cps.length; i++) {
            final ControlPoint cp = ControlPoint.build(controlPoints.get(i));
            addControlPoint(commandBuilder, edge, cp, i);
        }
        setTargetNode(commandBuilder, target, edge, targetConnection);
        execute(commandBuilder.build());
    }

    private void addConnector(final DeferredCompositeCommand.Builder<GraphCommandExecutionContext, RuleViolation> commandBuilder,
                              final Node<? extends View<?>, Edge> sourceNode,
                              final Edge<? extends View<?>, Node> edge,
                              final Connection connection) {
        commandBuilder.deferCommand(() -> commandFactory.addConnector(sourceNode, edge, connection));
    }

    private void setTargetNode(final DeferredCompositeCommand.Builder<GraphCommandExecutionContext, RuleViolation> commandBuilder,
                               final Node<? extends View<?>, Edge> targetNode,
                               final Edge<? extends View<?>, Node> edge,
                               final Connection connection) {
        commandBuilder.deferCommand(() -> commandFactory.setTargetNode(targetNode, edge, connection));
    }

    private void addControlPoint(final DeferredCompositeCommand.Builder<GraphCommandExecutionContext, RuleViolation> commandBuilder,
                                 final Edge edge,
                                 final ControlPoint controlPoint,
                                 final int index) {
        commandBuilder.deferCommand(() -> commandFactory.addControlPoint(edge, controlPoint, index));
    }

    private CommandResult<RuleViolation> execute(Command<GraphCommandExecutionContext, RuleViolation> command) {
        return commandManager.execute(executionContext, command);
    }

    private CommandResult<RuleViolation> clearGraph() {
        return commandManager.execute(executionContext, commandFactory.clearGraph());
    }

    private void addEdge(BpmnEdge edge) {
        VoidMatch.of(BpmnEdge.class)
                .when(BpmnEdge.Simple.class, e ->
                        addEdge(e.getEdge(),
                                e.getSource().value(),
                                e.getSourceConnection(),
                                e.getControlPoints(),
                                e.getTarget().value(),
                                e.getTargetConnection())
                )
                .when(BpmnEdge.Docked.class, e ->
                        addDockedNode(e.getSource().value(),
                                      e.getTarget().value())
                ).apply(edge);
    }
}
/* * Copyright (C) 2012 Jake Wharton * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.viewpagerindicator; import android.content.Context; import android.content.res.Resources; import android.content.res.TypedArray; import android.graphics.Canvas; import android.graphics.Paint; import android.graphics.drawable.Drawable; import android.os.Parcel; import android.os.Parcelable; import android.support.v4.view.MotionEventCompat; import android.support.v4.view.ViewConfigurationCompat; import android.support.v4.view.ViewPager; import android.util.AttributeSet; import android.view.MotionEvent; import android.view.View; import android.view.ViewConfiguration; import net.ib.ota.R; /** * Draws a line for each page. The current page line is colored differently * than the unselected page lines. 
*/ public class UnderlinePageIndicator extends View implements PageIndicator { private static final int INVALID_POINTER = -1; private static final int FADE_FRAME_MS = 30; private final Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG); private boolean mFades; private int mFadeDelay; private int mFadeLength; private int mFadeBy; private ViewPager mViewPager; private ViewPager.OnPageChangeListener mListener; private int mScrollState; private int mCurrentPage; private float mPositionOffset; private int mTouchSlop; private float mLastMotionX = -1; private int mActivePointerId = INVALID_POINTER; private boolean mIsDragging; private final Runnable mFadeRunnable = new Runnable() { @Override public void run() { if (!mFades) return; final int alpha = Math.max(mPaint.getAlpha() - mFadeBy, 0); mPaint.setAlpha(alpha); invalidate(); if (alpha > 0) { postDelayed(this, FADE_FRAME_MS); } } }; public UnderlinePageIndicator(Context context) { this(context, null); } public UnderlinePageIndicator(Context context, AttributeSet attrs) { this(context, attrs, R.attr.vpiUnderlinePageIndicatorStyle); } public UnderlinePageIndicator(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); if (isInEditMode()) return; final Resources res = getResources(); //Load defaults from resources final boolean defaultFades = res.getBoolean(R.bool.default_underline_indicator_fades); final int defaultFadeDelay = res.getInteger(R.integer.default_underline_indicator_fade_delay); final int defaultFadeLength = res.getInteger(R.integer.default_underline_indicator_fade_length); final int defaultSelectedColor = res.getColor(R.color.default_underline_indicator_selected_color); //Retrieve styles attributes TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.UnderlinePageIndicator, defStyle, 0); setFades(a.getBoolean(R.styleable.UnderlinePageIndicator_fades, defaultFades)); setSelectedColor(a.getColor(R.styleable.UnderlinePageIndicator_selectedColor, defaultSelectedColor)); 
setFadeDelay(a.getInteger(R.styleable.UnderlinePageIndicator_fadeDelay, defaultFadeDelay)); setFadeLength(a.getInteger(R.styleable.UnderlinePageIndicator_fadeLength, defaultFadeLength)); Drawable background = a.getDrawable(R.styleable.UnderlinePageIndicator_android_background); if (background != null) { setBackgroundDrawable(background); } a.recycle(); final ViewConfiguration configuration = ViewConfiguration.get(context); mTouchSlop = ViewConfigurationCompat.getScaledPagingTouchSlop(configuration); } public boolean getFades() { return mFades; } public void setFades(boolean fades) { if (fades != mFades) { mFades = fades; if (fades) { post(mFadeRunnable); } else { removeCallbacks(mFadeRunnable); mPaint.setAlpha(0xFF); invalidate(); } } } public int getFadeDelay() { return mFadeDelay; } public void setFadeDelay(int fadeDelay) { mFadeDelay = fadeDelay; } public int getFadeLength() { return mFadeLength; } public void setFadeLength(int fadeLength) { mFadeLength = fadeLength; mFadeBy = 0xFF / (mFadeLength / FADE_FRAME_MS); } public int getSelectedColor() { return mPaint.getColor(); } public void setSelectedColor(int selectedColor) { mPaint.setColor(selectedColor); invalidate(); } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); if (mViewPager == null) { return; } final int count = mViewPager.getAdapter().getCount(); if (count == 0) { return; } if (mCurrentPage >= count) { setCurrentItem(count - 1); return; } final int paddingLeft = getPaddingLeft(); final float pageWidth = (getWidth() - paddingLeft - getPaddingRight()) / (1f * count); final float left = paddingLeft + pageWidth * (mCurrentPage + mPositionOffset); final float right = left + pageWidth; final float top = getPaddingTop(); final float bottom = getHeight() - getPaddingBottom(); canvas.drawRect(left, top, right, bottom, mPaint); } public boolean onTouchEvent(MotionEvent ev) { if (super.onTouchEvent(ev)) { return true; } if ((mViewPager == null) || (mViewPager.getAdapter().getCount() == 0)) 
{ return false; } final int action = ev.getAction() & MotionEventCompat.ACTION_MASK; switch (action) { case MotionEvent.ACTION_DOWN: mActivePointerId = MotionEventCompat.getPointerId(ev, 0); mLastMotionX = ev.getX(); break; case MotionEvent.ACTION_MOVE: { final int activePointerIndex = MotionEventCompat.findPointerIndex(ev, mActivePointerId); final float x = MotionEventCompat.getX(ev, activePointerIndex); final float deltaX = x - mLastMotionX; if (!mIsDragging) { if (Math.abs(deltaX) > mTouchSlop) { mIsDragging = true; } } if (mIsDragging) { mLastMotionX = x; if (mViewPager.isFakeDragging() || mViewPager.beginFakeDrag()) { mViewPager.fakeDragBy(deltaX); } } break; } case MotionEvent.ACTION_CANCEL: case MotionEvent.ACTION_UP: if (!mIsDragging) { final int count = mViewPager.getAdapter().getCount(); final int width = getWidth(); final float halfWidth = width / 2f; final float sixthWidth = width / 6f; if ((mCurrentPage > 0) && (ev.getX() < halfWidth - sixthWidth)) { if (action != MotionEvent.ACTION_CANCEL) { mViewPager.setCurrentItem(mCurrentPage - 1); } return true; } else if ((mCurrentPage < count - 1) && (ev.getX() > halfWidth + sixthWidth)) { if (action != MotionEvent.ACTION_CANCEL) { mViewPager.setCurrentItem(mCurrentPage + 1); } return true; } } mIsDragging = false; mActivePointerId = INVALID_POINTER; if (mViewPager.isFakeDragging()) mViewPager.endFakeDrag(); break; case MotionEventCompat.ACTION_POINTER_DOWN: { final int index = MotionEventCompat.getActionIndex(ev); mLastMotionX = MotionEventCompat.getX(ev, index); mActivePointerId = MotionEventCompat.getPointerId(ev, index); break; } case MotionEventCompat.ACTION_POINTER_UP: final int pointerIndex = MotionEventCompat.getActionIndex(ev); final int pointerId = MotionEventCompat.getPointerId(ev, pointerIndex); if (pointerId == mActivePointerId) { final int newPointerIndex = pointerIndex == 0 ? 
1 : 0; mActivePointerId = MotionEventCompat.getPointerId(ev, newPointerIndex); } mLastMotionX = MotionEventCompat.getX(ev, MotionEventCompat.findPointerIndex(ev, mActivePointerId)); break; } return true; } @Override public void setViewPager(ViewPager viewPager) { if (mViewPager == viewPager) { return; } if (mViewPager != null) { //Clear us from the old pager. mViewPager.setOnPageChangeListener(null); } if (viewPager.getAdapter() == null) { throw new IllegalStateException("ViewPager does not have adapter instance."); } mViewPager = viewPager; mViewPager.setOnPageChangeListener(this); invalidate(); post(new Runnable() { @Override public void run() { if (mFades) { post(mFadeRunnable); } } }); } @Override public void setViewPager(ViewPager view, int initialPosition) { setViewPager(view); setCurrentItem(initialPosition); } @Override public void setCurrentItem(int item) { if (mViewPager == null) { throw new IllegalStateException("ViewPager has not been bound."); } mViewPager.setCurrentItem(item); mCurrentPage = item; invalidate(); } @Override public void notifyDataSetChanged() { invalidate(); } @Override public void onPageScrollStateChanged(int state) { mScrollState = state; if (mListener != null) { mListener.onPageScrollStateChanged(state); } } @Override public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) { mCurrentPage = position; mPositionOffset = positionOffset; if (mFades) { if (positionOffsetPixels > 0) { removeCallbacks(mFadeRunnable); mPaint.setAlpha(0xFF); } else if (mScrollState != ViewPager.SCROLL_STATE_DRAGGING) { postDelayed(mFadeRunnable, mFadeDelay); } } invalidate(); if (mListener != null) { mListener.onPageScrolled(position, positionOffset, positionOffsetPixels); } } @Override public void onPageSelected(int position) { if (mScrollState == ViewPager.SCROLL_STATE_IDLE) { mCurrentPage = position; mPositionOffset = 0; invalidate(); mFadeRunnable.run(); } if (mListener != null) { mListener.onPageSelected(position); } } 
    /** Registers a downstream listener; this view relays every ViewPager callback to it. */
    @Override
    public void setOnPageChangeListener(ViewPager.OnPageChangeListener listener) {
        mListener = listener;
    }

    /**
     * Restores the page index captured by {@link #onSaveInstanceState()}.
     * requestLayout() (rather than invalidate()) is used so the indicator is
     * re-measured as well as redrawn after restore.
     */
    @Override
    public void onRestoreInstanceState(Parcelable state) {
        SavedState savedState = (SavedState)state;
        super.onRestoreInstanceState(savedState.getSuperState());
        mCurrentPage = savedState.currentPage;
        requestLayout();
    }

    /** Saves the current page index on top of the superclass view state. */
    @Override
    public Parcelable onSaveInstanceState() {
        Parcelable superState = super.onSaveInstanceState();
        SavedState savedState = new SavedState(superState);
        savedState.currentPage = mCurrentPage;
        return savedState;
    }

    /**
     * Parcelable wrapper persisting only {@code currentPage} across
     * configuration changes / process death, following the standard
     * BaseSavedState pattern (write order must match read order).
     */
    static class SavedState extends BaseSavedState {
        // The page index to restore; the only piece of state this view persists.
        int currentPage;

        public SavedState(Parcelable superState) {
            super(superState);
        }

        private SavedState(Parcel in) {
            super(in);
            currentPage = in.readInt();
        }

        @Override
        public void writeToParcel(Parcel dest, int flags) {
            super.writeToParcel(dest, flags);
            dest.writeInt(currentPage);
        }

        // Required by the Parcelable contract; looked up reflectively by the framework.
        @SuppressWarnings("UnusedDeclaration")
        public static final Creator<SavedState> CREATOR = new Creator<SavedState>() {
            @Override
            public SavedState createFromParcel(Parcel in) {
                return new SavedState(in);
            }

            @Override
            public SavedState[] newArray(int size) {
                return new SavedState[size];
            }
        };
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.compiler.backwardRefs;

import com.intellij.ProjectTopics;
import com.intellij.compiler.CompilerConfiguration;
import com.intellij.compiler.CompilerReferenceService;
import com.intellij.compiler.backwardRefs.view.DirtyScopeTestInfo;
import com.intellij.compiler.server.CustomBuilderMessageHandler;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.compiler.options.ExcludeEntryDescription;
import com.intellij.openapi.compiler.options.ExcludedEntriesListener;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootEvent;
import com.intellij.openapi.roots.ModuleRootListener;
import com.intellij.openapi.util.UserDataHolderBase;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.psi.util.PsiModificationTracker;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBusConnection;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.TestOnly;
import org.jetbrains.jps.backwardRefs.BackwardReferenceIndexBuilder;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Tracks which modules/files are "dirty" (changed since the last successful
 * compilation) so the compiler-reference index can be trusted only outside the
 * dirty scope. State transitions are driven by VFS events, project-root changes
 * and compiler lifecycle callbacks ({@link #compilerActivityStarted()} /
 * {@link #compilerActivityFinished()}).
 *
 * Thread-safety: mutable module sets and the compilation-phase flag are guarded
 * by {@code myLock}; {@code myExcludedFilesScope} is published via volatile and
 * computed outside the lock; {@code myCompilationAffectedModules} is a
 * concurrent set fed from build-process messages.
 */
public class DirtyScopeHolder extends UserDataHolderBase {
  private final CompilerReferenceServiceImpl myService;
  private final FileDocumentManager myFileDocManager;
  private final PsiDocumentManager myPsiDocManager;
  private final Object myLock = new Object();

  private final Set<Module> myVFSChangedModules = ContainerUtil.newHashSet(); // guarded by myLock
  private final Set<Module> myChangedModulesDuringCompilation = ContainerUtil.newHashSet(); // guarded by myLock
  private final List<ExcludeEntryDescription> myExcludedDescriptions = new SmartList<>(); // guarded by myLock
  private boolean myCompilationPhase; // guarded by myLock
  private volatile GlobalSearchScope myExcludedFilesScope; // calculated outside myLock
  private final Set<String> myCompilationAffectedModules = ContainerUtil.newConcurrentSet(); // used outside myLock

  /**
   * Subscribes (only when the reference service is enabled) to:
   * compiler-exclusion changes, custom builder messages from the backward-reference
   * index builder (module names affected by compilation), and project-root changes
   * (which conservatively mark ALL modules dirty).
   */
  public DirtyScopeHolder(@NotNull CompilerReferenceServiceImpl service,
                          FileDocumentManager fileDocumentManager,
                          PsiDocumentManager psiDocumentManager){
    myService = service;
    myFileDocManager = fileDocumentManager;
    myPsiDocManager = psiDocumentManager;

    if (CompilerReferenceService.isEnabled()) {
      final MessageBusConnection connect = service.getProject().getMessageBus().connect();
      connect.subscribe(ExcludedEntriesListener.TOPIC, new ExcludedEntriesListener() {
        @Override
        public void onEntryAdded(@NotNull ExcludeEntryDescription description) {
          synchronized (myLock) {
            // Only entries added mid-compilation matter here; the full list is
            // re-read in compilerActivityStarted().
            if (myCompilationPhase) {
              myExcludedDescriptions.add(description);
            }
          }
        }
      });

      connect.subscribe(CustomBuilderMessageHandler.TOPIC, (builderId, messageType, messageText) -> {
        // messageText is a module name reported by the backward-reference index builder.
        if (BackwardReferenceIndexBuilder.BUILDER_ID.equals(builderId)) {
          myCompilationAffectedModules.add(messageText);
        }
      });

      connect.subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootListener() {
        @Override
        public void beforeRootsChange(ModuleRootEvent event) {
          // Roots changed: we cannot tell what is affected, so mark every module dirty.
          final Module[] modules = ModuleManager.getInstance(myService.getProject()).getModules();
          synchronized (myLock) {
            ContainerUtil.addAll(myVFSChangedModules, modules);
          }
        }
      });
    }
  }

  /**
   * Enters the compilation phase: snapshots current compiler exclusions,
   * invalidates the excluded-files scope and clears builder feedback.
   * While in this phase getDirtyScope() returns the whole project.
   */
  void compilerActivityStarted() {
    final ExcludeEntryDescription[] excludeEntryDescriptions =
      CompilerConfiguration.getInstance(myService.getProject()).getExcludedEntriesConfiguration().getExcludeEntryDescriptions();
    synchronized (myLock) {
      myCompilationPhase = true;
      Collections.addAll(myExcludedDescriptions, excludeEntryDescriptions);
      myExcludedFilesScope = null;
      myCompilationAffectedModules.clear();
    }
  }

  /**
   * Called after an up-to-date check: if the project was NOT up to date,
   * conservatively marks all modules dirty while finishing the compilation phase.
   */
  void upToDateChecked(boolean isUpToDate) {
    final Module[] modules = ReadAction.compute(() -> {
      final Project project = myService.getProject();
      if (project.isDisposed()) {
        return null;
      }
      return ModuleManager.getInstance(project).getModules();
    });
    if (modules == null) return;
    compilationFinished(() -> {
      if (!isUpToDate) {
        ContainerUtil.addAll(myVFSChangedModules, modules);
      }
    });
  }

  /**
   * Ends the compilation phase: modules the builder reported as compiled are
   * removed from the dirty set (they are now indexed and clean).
   */
  void compilerActivityFinished() {
    final List<Module> compiledModules = ReadAction.compute(() -> {
      final Project project = myService.getProject();
      if (project.isDisposed()) {
        return null;
      }
      final ModuleManager moduleManager = ModuleManager.getInstance(myService.getProject());
      return myCompilationAffectedModules.stream().map(moduleManager::findModuleByName).collect(Collectors.toList());
    });
    compilationFinished(() -> {
      if (compiledModules == null) return;
      myVFSChangedModules.removeAll(compiledModules);
    });
  }

  /**
   * Common tail of the two finish paths: runs {@code action} under the lock,
   * folds in changes that arrived during compilation, then (outside the lock)
   * rebuilds the excluded-files scope from the snapshotted exclusion entries.
   */
  private void compilationFinished(Runnable action) {
    ExcludeEntryDescription[] descriptions;
    synchronized (myLock) {
      myCompilationPhase = false;
      action.run();
      myVFSChangedModules.addAll(myChangedModulesDuringCompilation);
      myChangedModulesDuringCompilation.clear();
      descriptions = myExcludedDescriptions.toArray(new ExcludeEntryDescription[myExcludedDescriptions.size()]);
      myExcludedDescriptions.clear();
    }
    myCompilationAffectedModules.clear();
    // Deliberately computed outside myLock: scope construction may be expensive.
    myExcludedFilesScope = ExcludedFromCompileFilesUtil.getExcludedFilesScope(descriptions, myService.getFileTypes(),
                                                                             myService.getProject(), myService.getFileIndex());
  }

  /**
   * Returns the scope the reference index must NOT be trusted for.
   * During compilation this is the whole project; otherwise a cached value
   * invalidated by PSI modifications, VFS changes and the owning service.
   */
  GlobalSearchScope getDirtyScope() {
    final Project project = myService.getProject();
    return ReadAction.compute(() -> {
      synchronized (myLock) {
        if (myCompilationPhase) {
          return GlobalSearchScope.allScope(project);
        }
        if (project.isDisposed()) throw new ProcessCanceledException();
        return CachedValuesManager.getManager(project).getCachedValue(this, () -> CachedValueProvider.Result
          .create(calculateDirtyScope(), PsiModificationTracker.MODIFICATION_COUNT, VirtualFileManager.getInstance(), myService));
      }
    });
  }

  // Union of "module with dependents" scopes for every dirty module plus the
  // excluded-files scope; just the excluded scope when nothing is dirty.
  private GlobalSearchScope calculateDirtyScope() {
    final Set<Module> dirtyModules = getAllDirtyModules();
    if (dirtyModules.isEmpty()) return myExcludedFilesScope;
    GlobalSearchScope dirtyModuleScope = GlobalSearchScope.union(dirtyModules
                                                                   .stream()
                                                                   .map(Module::getModuleWithDependentsScope)
                                                                   .toArray(GlobalSearchScope[]::new));
    return dirtyModuleScope.union(myExcludedFilesScope);
  }

  /**
   * Dirty modules = VFS-changed modules + modules owning unsaved documents +
   * modules owning uncommitted PSI documents.
   */
  @NotNull
  Set<Module> getAllDirtyModules() {
    final Set<Module> dirtyModules = new THashSet<>(myVFSChangedModules);
    for (Document document : myFileDocManager.getUnsavedDocuments()) {
      final VirtualFile file = myFileDocManager.getFile(document);
      if (file == null) continue;
      final Module m = getModuleForSourceContentFile(file);
      if (m != null) dirtyModules.add(m);
    }
    for (Document document : myPsiDocManager.getUncommittedDocuments()) {
      final PsiFile psiFile = myPsiDocManager.getPsiFile(document);
      if (psiFile == null) continue;
      final VirtualFile file = psiFile.getVirtualFile();
      if (file == null) continue;
      final Module m = getModuleForSourceContentFile(file);
      if (m != null) dirtyModules.add(m);
    }
    return dirtyModules;
  }

  /** True if {@code file} lies inside the current dirty scope. */
  boolean contains(VirtualFile file) {
    return getDirtyScope().contains(file);
  }

  /**
   * Installs a VFS listener that marks the owning module dirty on any relevant
   * file mutation. During a compilation phase the change is parked in
   * {@code myChangedModulesDuringCompilation} and merged in afterwards.
   */
  void installVFSListener() {
    VirtualFileManager.getInstance().addVirtualFileListener(new VirtualFileListener() {
      @Override
      public void fileCreated(@NotNull VirtualFileEvent event) {
        fileChanged(event.getFile());
      }

      @Override
      public void fileCopied(@NotNull VirtualFileCopyEvent event) {
        fileChanged(event.getFile());
      }

      @Override
      public void fileMoved(@NotNull VirtualFileMoveEvent event) {
        fileChanged(event.getFile());
      }

      @Override
      public void beforePropertyChange(@NotNull VirtualFilePropertyEvent event) {
        // A directory rename can relocate .iml files: mark every module whose
        // module file lives under the renamed directory.
        if (VirtualFile.PROP_NAME.equals(event.getPropertyName()) &&
            event.getFile().isDirectory() && event.getFile().isInLocalFileSystem()) {
          final String path = event.getFile().getPath();
          for (Module module : ModuleManager.getInstance(myService.getProject()).getModules()) {
            if (FileUtil.isAncestor(path, module.getModuleFilePath(), true)) {
              addToDirtyModules(module);
            }
          }
        }
      }

      @Override
      public void propertyChanged(@NotNull VirtualFilePropertyEvent event) {
        if (VirtualFile.PROP_NAME.equals(event.getPropertyName()) ||
            VirtualFile.PROP_SYMLINK_TARGET.equals(event.getPropertyName())) {
          fileChanged(event.getFile());
        }
      }

      @Override
      public void beforeContentsChange(@NotNull VirtualFileEvent event) {
        fileChanged(event.getFile());
      }

      @Override
      public void beforeFileDeletion(@NotNull VirtualFileEvent event) {
        fileChanged(event.getFile());
      }

      @Override
      public void beforeFileMovement(@NotNull VirtualFileMoveEvent event) {
        fileChanged(event.getFile());
      }

      private void fileChanged(VirtualFile file) {
        final Module module = getModuleForSourceContentFile(file);
        if (module != null) {
          addToDirtyModules(module);
        }
      }

      private void addToDirtyModules(Module module) {
        synchronized (myLock) {
          if (myCompilationPhase) {
            myChangedModulesDuringCompilation.add(module);
          }
          else {
            myVFSChangedModules.add(module);
          }
        }
      }
    }, myService.getProject());
  }

  // Only files of the service's supported types that live in source content count.
  private Module getModuleForSourceContentFile(@NotNull VirtualFile file) {
    if (myService.getFileTypes().contains(file.getFileType()) && myService.getFileIndex().isInSourceContent(file)) {
      return myService.getFileIndex().getModuleForFile(file);
    }
    return null;
  }

  @TestOnly
  @NotNull
  public Set<Module> getAllDirtyModulesForTest() {
    synchronized (myLock) {
      return getAllDirtyModules();
    }
  }

  /**
   * Diagnostic snapshot for the internal "dirty scope" view: VFS-dirty modules,
   * modules dirty only via unsaved/uncommitted documents, excluded files, and
   * the resulting scope.
   */
  @SuppressWarnings("unchecked")
  @NotNull
  DirtyScopeTestInfo getState() {
    synchronized (myLock) {
      final Module[] vfsChangedModules = myVFSChangedModules.toArray(Module.EMPTY_ARRAY);
      final List<Module> unsavedChangedModuleList = new ArrayList<>(getAllDirtyModules());
      ContainerUtil.removeAll(unsavedChangedModuleList, vfsChangedModules);
      final Module[] unsavedChangedModules = unsavedChangedModuleList.toArray(Module.EMPTY_ARRAY);
      final List<VirtualFile> excludedFiles = myExcludedFilesScope instanceof Iterable
                                              ? ContainerUtil.newArrayList((Iterable<VirtualFile>)myExcludedFilesScope)
                                              : Collections.emptyList();
      return new DirtyScopeTestInfo(vfsChangedModules, unsavedChangedModules, excludedFiles.toArray(VirtualFile.EMPTY_ARRAY), getDirtyScope());
    }
  }
}
/* JAT: Java Astrodynamics Toolkit * * Copyright (c) 2003 The JAT Project. All rights reserved. * This file is part of JAT. JAT is free software; you can * redistribute it and/or modify it under the terms of the * NASA Open Source Agreement, version 1.3 or later. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * NASA Open Source Agreement for more details. * * You should have received a copy of the NASA Open Source Agreement * along with this program; if not, write to the NASA Goddard * Space Flight Center at opensource@gsfc.nasa.gov. * */ package jat.attitude.eom; import jat.matvec.data.*; import jat.plot.*; import jat.alg.integrators.*; import jat.attitude.QuatToDeg; /** * This class contains the equations of motion for the simulation * of a rigid spacecraft subjected to gravity gradient torque * while it orbit around the earth.The spacecraft has a spherical * damper for stabilization. * * The class implements jat.alg.integrators.Derivatives and * jat.alg.integrateors.Printble, so an outside application code * can perform a numerical simulation of equations of motion * defined in this class and get the output from the simulation. 
* * @author Noriko Takada * Modification since the last version * Switched to the interface EquationsOfMotion * */ public class RSphericalDamper implements EquationsOfMotion { // time_step: Time step of numerical integration // quat_values[][] Two dimensional array that contains quarternions from simulation // I1,I2,I3 Principal moments of inertia about 1,2, and 3 axes // c Damping coefficient // j Spherical damper inertia // rotation_plot Plots of angular velocities // damper_rotation_plot Plots of angular velocities of spherical damper // angle_plot Plots of euler angles // quarternion_check Plot of e1^2 + e2^2 + e3^2 +e4^2 double time_step; private float quat_values[][]; // Create variables for the necessary plots private ThreePlots angular_velocity_plot = new ThreePlots(); private ThreePlots damper_rotation_plot = new ThreePlots(); private ThreePlots euler_angle_plot = new ThreePlots(); private SinglePlot quarternion_check = new SinglePlot(); private SinglePlot angular_momentum = new SinglePlot(); private FourPlots quaternion_plot = new FourPlots(); private double I1 = 2000;//10.42; // spacecraft inertia private double I2 = 1500;//35.42; private double I3 = 1000;//41.67; private double c = 30; // damping coefficient private double j = 18; // spherical damper inertia public static final int GG_YES = 1; public static final int GG_NO = 0; private int gravity_gradient = 0; /** * Constructor 1 * @param time_step: Time step of numerical integration * @param quat_values[][] Two dimensional array that contains quarternions from simulation * @param I1 Principle moment of inertia about 1 axis * @param I2 Principle moment of inertia about 2 axis * @param I3 Principle moment of inertia about 3 axis * @param c Damping coefficient * @param j Spherical damper inertia */ public RSphericalDamper(double time_step, double I1, double I2, double I3, double c, double j, float quat_values[][]) { setupPlots(); this.time_step = time_step; this.quat_values = quat_values; this.I1 = I1; 
this.I2 = I2; this.I3 = I3; this.c = c; this.j = j; } /** * Constructor 2 * @param time_step: Time step of numerical integration * @param quat_values[][] Two dimensional array that contains quarternions from simulation */ public RSphericalDamper(double time_step, float quat_values[][]) { setupPlots(); this.time_step = time_step; this.quat_values = quat_values; } ///** // * Apply or Remove Gracity Gradient on a spacecraft // * @param a (int) GG_YES = 1: Applies gravity gradient torque assming a circular orbit // * GG_NO = 0: No gravity gradient, simulation in seconds // */ //public void setGravityGradient(int a) //{ // gravity_gradient = a; //} /** * setupPlots() sets up Plots */ void setupPlots() { // Setup plots angular_velocity_plot.setTitle("Angular Velocities"); angular_velocity_plot.topPlot.setXLabel("t(sec)"); angular_velocity_plot.topPlot.setYLabel("w1"); angular_velocity_plot.middlePlot.setXLabel("t(sec)"); angular_velocity_plot.middlePlot.setYLabel("w2"); angular_velocity_plot.bottomPlot.setXLabel("t(sec)"); angular_velocity_plot.bottomPlot.setYLabel("w3"); damper_rotation_plot.setTitle("Damper/Spacecraft relative rates"); damper_rotation_plot.topPlot.setXLabel("t (sec)"); damper_rotation_plot.topPlot.setYLabel("Sigma1"); damper_rotation_plot.middlePlot.setXLabel("t (sec)"); damper_rotation_plot.middlePlot.setYLabel("Sigma2 "); damper_rotation_plot.bottomPlot.setXLabel("t (sec)"); damper_rotation_plot.bottomPlot.setYLabel("Sigma3"); euler_angle_plot.setTitle("Euler Angles"); euler_angle_plot.topPlot.setXLabel("t(sec)"); euler_angle_plot.topPlot.setYLabel("Theta(degrees)"); euler_angle_plot.middlePlot.setXLabel("t(sec)"); euler_angle_plot.middlePlot.setYLabel("Psi(degrees)"); euler_angle_plot.bottomPlot.setXLabel("t(sec)"); euler_angle_plot.bottomPlot.setYLabel("Phi(degrees)"); quarternion_check.setTitle("Quarternion Check"); quarternion_check.plot.setXLabel("t(sec)"); quarternion_check.plot.setYLabel("e1^2 + e2^2 + e3^2 + e4^2"); 
angular_momentum.setTitle("Angular Momentum"); angular_momentum.plot.setXLabel("t (sec)"); angular_momentum.plot.setYLabel("Angular Momentum"); quaternion_plot.setTitle("Quarternions"); quaternion_plot.firstPlot.setXLabel("t (sec)"); quaternion_plot.firstPlot.setYLabel("q1"); quaternion_plot.secondPlot.setXLabel("t (sec)"); quaternion_plot.secondPlot.setYLabel("q2"); quaternion_plot.thirdPlot.setXLabel("t (sec) "); quaternion_plot.thirdPlot.setYLabel("q3"); quaternion_plot.fourthPlot.setXLabel("t (sec)"); quaternion_plot.fourthPlot.setYLabel("q4"); } /** Compute the derivatives. * Equations of Motion * @params t double containing time or the independent variable. * @params x VectorN containing the required data. * @return double [] containing the derivatives. */ public double[] derivs(double t, double[] x) { double c11 = 1- 2*( x[4]*x[4] + x[5]*x[5]); double c21 = 2* (x[3]*x[4]-x[5]*x[6]); double c31 = 2* (x[3]*x[5]+x[4]*x[6]); // definition double w1 = x[0]; //nondimensionalized double w2 = x[1]; //nondimensionalized double w3 = x[2]; //nondimensionalized double q1 = x[3]; //nondimensionalized double q2 = x[4]; //nondimensionalized double q3 = x[5]; //nondimensionalized double q4 = x[6]; //nondimensionalized double sigma1 = x[7]; //nondimensionalized double sigma2 = x[8]; //nondimensionalized double sigma3 = x[9]; //nondimensionalized double [] out = new double[10]; if (gravity_gradient == 1) { //out[0] = 2*Math.PI*((I2-I3)/I1)* (x[1]*x[2] - 3*c21*c31) - 2*Math.PI*(c/I1)*(x[0]-x[7]); //out[1] = 2*Math.PI*((I3-I1)/I2)* (x[0]*x[2] - 3*c31*c11) - 2*Math.PI*(c/I2)*(x[1]-x[8]); //out[2] = 2*Math.PI*((I1-I2)/I3)* (x[0]*x[1] - 3*c11*c21) - 2*Math.PI*(c/I3)*(x[2]-x[9]); //out[3] = -Math.PI* (-(x[2]+1)*x[4] + x[1]*x[5] - x[0]*x[6]); //out[4] = -Math.PI* ((x[2]+1)*x[3] - x[0]*x[5] - x[1]*x[6]); //out[5] = -Math.PI* (-(x[2]-1)*x[6] + x[0]*x[4] - x[1]*x[3]); //out[6] = -Math.PI* ((x[2]-1)*x[5] + x[1]*x[4] + x[0]*x[3]); //out[7] = 2*Math.PI*(c/j)*(x[0]-x[7]) 
-2*Math.PI*(x[1]*x[9] - x[2]*x[8]); //out[8] = 2*Math.PI*(c/j)*(x[1]-x[8]) -2*Math.PI*(x[2]*x[7] - x[0]*x[9]); //out[9] = 2*Math.PI*(c/j)*(x[2]-x[9]) -2*Math.PI*(x[0]*x[8] - x[1]*x[7]); } else if (gravity_gradient==0) { out[0] = ((I2-I3)/(I1-j))*w2*w3 +(c/(I1-j))*sigma1; //+ (c/(I1-j))*(sigma1); out[1] = ((I3-I1)/(I2-j))*w1*w3 +(c/(I2-j))*sigma2; //- (c/(I2-j))*(sigma2); out[2] = ((I1-I2)/(I3-j))*w1*w2 +(c/(I3-j))*sigma3; //- (c/(I3-j))*(sigma3); out[3] = -0.5* (-x[2]*x[4] + x[1]*x[5] - x[0]*x[6]); out[4] = -0.5* (x[2]*x[3] - x[0]*x[5] - x[1]*x[6]); out[5] = -0.5* (-x[2]*x[6] + x[0]*x[4] - x[1]*x[3]); out[6] = -0.5* (x[2]*x[5] + x[1]*x[4] + x[0]*x[3]); out[7] = -out[0] - (c/j)*(sigma1)- w2*sigma3 + w3*sigma2; out[8] = -out[1] - (c/j)*(sigma2)- w3*sigma1 + w1*sigma3; out[9] = -out[2] - (c/j)*(sigma3)- w1*sigma2 + w2*sigma1; } return out; }// End of derivs /** Implements the Printable interface to get the data out of the propagator and pass it to the plot. * This method is executed by the propagator at each integration step. * @param t Time. * @param y Data array. 
*/ public void print(double t, double [] y) { boolean first = true; if (t == 0.0) first = false; int currentPts = (int)(t/time_step); // This is the array index System.out.println(t+" "+y[0]+" "+y[1]+" "+first); // Define state variables double w1 = y[0]; double w2 = y[1]; double w3 = y[2]; double q1 = y[3]; double q2 = y[4]; double q3 = y[5]; double q4 = y[6]; double sigma1 = y[7]; double sigma2 = y[8]; double sigma3 = y[9]; QuatToDeg tester = new QuatToDeg(q1, q2, q3, q4); double[] angle = new double[3]; angle = tester.calculateAngle(); double Theta = angle[0]; double Psi = angle[1]; double Phi = angle[2]; double quat_check = q1*q1+q2*q2+q3*q3+q4*q4; double angMomentum = Math.sqrt((I1*w1+ j*(sigma1))*(I1*w1+ j*(sigma1))+ (I2*w2 + j*(sigma2))*(I2*w2 + j*(sigma2))+ (I3*w3 + j*(sigma3))*(I3*w3 + j*(sigma3))); //double angMomentum = Math.sqrt((I1*w1+j*Alpha)*(I1*w1+j*Alpha)+(I2*w2+j*Beta)*(I2*w2+j*Beta)+(I3*w3 + j*Gamma)*(I3*w3 + j*Gamma)); // Calculate Transformation matrix elements // Transform from A to B see Bong Wie (p.318) double c11 = 1- 2*(q2*q2 + q3*q3); double c12 = 2* (q1*q2 + q3*q4); double c13 = 2* (q1*q3 - q2*q4); double c21 = 2* (q2*q1 - q3*q4); double c22 = 1- 2*(q3*q3 + q1*q1); double c23 = 2* (q2*q3 + q1*q4); double c31 = 2* (q3*q1 + q2*q4); double c32 = 2* (q3*q2 - q1*q4); double c33 = 1- 2*(q1*q1 + q2*q2); // Build Transformation Matrix double[][] array = new double[3][3]; // Row1 array[0][0] = c11; array[0][1] = c12; array[0][2] = c13; // Row2 array[1][0] = c21; array[1][1] = c22; array[1][2] = c23; // Row3 array[2][0] = c31; array[2][1] = c32; array[2][2] = c33; //Construct Transformation Matrix Matrix T = new Matrix(array,3,3); Matrix T_transpose = new Matrix(T.transpose().A, 3,3); // add data point to the plot angular_velocity_plot.topPlot.addPoint(0, t,y[0], first); angular_velocity_plot.middlePlot.addPoint(0, t, y[1], first); angular_velocity_plot.bottomPlot.addPoint(0, t, y[2], first); damper_rotation_plot.topPlot.addPoint(0, t, y[7], 
first); damper_rotation_plot.middlePlot.addPoint(0, t, y[8], first); damper_rotation_plot.bottomPlot.addPoint(0, t, y[9], first); euler_angle_plot.topPlot.addPoint(0, t, Theta, first); euler_angle_plot.middlePlot.addPoint(0, t, Psi, first); euler_angle_plot.bottomPlot.addPoint(0, t, Phi, first); quarternion_check.plot.addPoint(0, t, quat_check, first); angular_momentum.plot.addPoint(0, t, angMomentum, first); quaternion_plot.firstPlot.addPoint(0, t, q1, first); quaternion_plot.secondPlot.addPoint(0, t, q2, first); quaternion_plot.thirdPlot.addPoint(0, t, q3, first); quaternion_plot.fourthPlot.addPoint(0, t, q4, first); // Store quarternion values for use in animation quat_values[0][currentPts] = (float)t; // time value quat_values[1][currentPts] = (float)q1; // quaternion 1 quat_values[2][currentPts] = (float)q2; // quarternion 2 quat_values[3][currentPts] = (float)q3; // quarternion 3 quat_values[4][currentPts] = (float)q4; // quarternion 4 } /** * Return the quarternion values after simulation * @author Noriko Takada */ public float[][] getQuaternion() { return quat_values; } /** * Make the plots visible after simulation * @author Noriko Takada */ public void makePlotsVisible() { angular_velocity_plot.setVisible(true); damper_rotation_plot.setVisible(true); euler_angle_plot.setVisible(true); quarternion_check.setVisible(true); angular_momentum.setVisible(true); quaternion_plot.setVisible(true); } /** Runs the example. * @param args Arguments. 
*/ public static void main(String[] args) { double time_step=0.1; double timeDuration=10; double tf = 20; double t0 = 0.0; RungeKutta8 rk8 = new RungeKutta8(time_step); timeDuration=tf; // Duration of the simulation time is the same as the final time int numberOfPts = (int)(timeDuration/time_step) +1 ; float quat_values[][]= new float[5][numberOfPts+1];// +1 is for AnimationWindow // create an instance RSphericalDamper si = new RSphericalDamper(time_step, quat_values); // initialize the variables double [] x0 = new double[10]; x0[0] = 0.1224; x0[1] = 0.0; x0[2] = 2.99; x0[3] = 0.0; x0[4] = 0.0; x0[5] = 0.0; x0[6] = 1.0; x0[7] = 0.0; x0[8] = 0.0; x0[9] = 0.0; // integrate the equations rk8.integrate(t0, x0, tf, si, true); // make the plot visible si.makePlotsVisible(); } }// End of File
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.service;

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.JdkSSLOptions;
import com.datastax.driver.core.Metadata;
import com.datastax.driver.core.ProtocolOptions;
import com.datastax.driver.core.Session;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnDisabled;
import org.apache.nifi.annotation.lifecycle.OnEnabled;
import org.apache.nifi.annotation.lifecycle.OnStopped;
import org.apache.nifi.authentication.exception.ProviderCreationException;
import org.apache.nifi.cassandra.CassandraSessionProviderService;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.controller.ControllerServiceInitializationContext;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.security.util.SslContextFactory;
import org.apache.nifi.ssl.SSLContextService;

import javax.net.ssl.SSLContext;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Controller service providing a shared Cassandra {@link Cluster}/{@link Session}
 * to Cassandra processors. The connection is created on enable and torn down on
 * disable/stop.
 */
@Tags({"cassandra", "dbcp", "database", "connection", "pooling"})
@CapabilityDescription("Provides connection session for Cassandra processors to work with Apache Cassandra.")
public class CassandraSessionProvider extends AbstractControllerService implements CassandraSessionProviderService {

    public static final int DEFAULT_CASSANDRA_PORT = 9042;

    // Common descriptors
    public static final PropertyDescriptor CONTACT_POINTS = new PropertyDescriptor.Builder()
            .name("Cassandra Contact Points")
            .description("Contact points are addresses of Cassandra nodes. The list of contact points should be " +
                    "comma-separated and in hostname:port format. Example node1:port,node2:port,...." +
                    " The default client port for Cassandra is 9042, but the port(s) must be explicitly specified.")
            .required(true)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .addValidator(StandardValidators.HOSTNAME_PORT_LIST_VALIDATOR)
            .build();

    public static final PropertyDescriptor KEYSPACE = new PropertyDescriptor.Builder()
            .name("Keyspace")
            .description("The Cassandra Keyspace to connect to. If no keyspace is specified, the query will need to " +
                    "include the keyspace name before any table reference, in case of 'query' native processors or " +
                    "if the processor supports the 'Table' property, the keyspace name has to be provided with the " +
                    "table name in the form of <KEYSPACE>.<TABLE>")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor PROP_SSL_CONTEXT_SERVICE = new PropertyDescriptor.Builder()
            .name("SSL Context Service")
            .description("The SSL Context Service used to provide client certificate information for TLS/SSL " +
                    "connections.")
            .required(false)
            .identifiesControllerService(SSLContextService.class)
            .build();

    public static final PropertyDescriptor CLIENT_AUTH = new PropertyDescriptor.Builder()
            .name("Client Auth")
            .description("Client authentication policy when connecting to secure (TLS/SSL) cluster. " +
                    "Possible values are REQUIRED, WANT, NONE. This property is only used when an SSL Context " +
                    "has been defined and enabled.")
            .required(false)
            .allowableValues(SSLContextService.ClientAuth.values())
            .defaultValue("REQUIRED")
            .build();

    public static final PropertyDescriptor USERNAME = new PropertyDescriptor.Builder()
            .name("Username")
            .description("Username to access the Cassandra cluster")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor PASSWORD = new PropertyDescriptor.Builder()
            .name("Password")
            .description("Password to access the Cassandra cluster")
            .required(false)
            .sensitive(true)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor CONSISTENCY_LEVEL = new PropertyDescriptor.Builder()
            .name("Consistency Level")
            .description("The strategy for how many replicas must respond before results are returned.")
            .required(true)
            .allowableValues(ConsistencyLevel.values())
            .defaultValue("ONE")
            .build();

    static final PropertyDescriptor COMPRESSION_TYPE = new PropertyDescriptor.Builder()
            .name("Compression Type")
            .description("Enable compression at transport-level requests and responses")
            .required(false)
            .allowableValues(ProtocolOptions.Compression.values())
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .defaultValue("NONE")
            .build();

    private List<PropertyDescriptor> properties;
    private Cluster cluster;
    private Session cassandraSession;

    @Override
    public void init(final ControllerServiceInitializationContext context) {
        List<PropertyDescriptor> props = new ArrayList<>();

        props.add(CONTACT_POINTS);
        props.add(CLIENT_AUTH);
        props.add(CONSISTENCY_LEVEL);
        props.add(COMPRESSION_TYPE);
        props.add(KEYSPACE);
        props.add(USERNAME);
        props.add(PASSWORD);
        props.add(PROP_SSL_CONTEXT_SERVICE);

        properties = props;
    }

    @Override
    public List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return properties;
    }

    @OnEnabled
    public void onEnabled(final ConfigurationContext context) {
        connectToCassandra(context);
    }

    @OnDisabled
    public void onDisabled(){
        closeSessionAndCluster();
    }

    @OnStopped
    public void onStopped() {
        closeSessionAndCluster();
    }

    /**
     * Closes the session and cluster and nulls the references.
     * FIX: the original closed but never nulled them, so connectToCassandra()
     * (which guards on cluster == null) would silently skip reconnecting after
     * the service was disabled and re-enabled, leaving callers with closed handles.
     */
    private void closeSessionAndCluster() {
        if (cassandraSession != null) {
            cassandraSession.close();
            cassandraSession = null;
        }
        if (cluster != null) {
            cluster.close();
            cluster = null;
        }
    }

    @Override
    public Cluster getCluster() {
        if (cluster != null) {
            return cluster;
        } else {
            throw new ProcessException("Unable to get the Cassandra cluster detail.");
        }
    }

    @Override
    public Session getCassandraSession() {
        if (cassandraSession != null) {
            return cassandraSession;
        } else {
            throw new ProcessException("Unable to get the Cassandra session.");
        }
    }

    /**
     * Builds the Cluster and connects a Session from the configured properties.
     * No-op if a cluster is already held.
     */
    private void connectToCassandra(ConfigurationContext context) {
        if (cluster == null) {
            ComponentLog log = getLogger();
            final String contactPointList = context.getProperty(CONTACT_POINTS).evaluateAttributeExpressions().getValue();
            final String consistencyLevel = context.getProperty(CONSISTENCY_LEVEL).getValue();
            final String compressionType = context.getProperty(COMPRESSION_TYPE).getValue();

            List<InetSocketAddress> contactPoints = getContactPoints(contactPointList);

            // Set up the client for secure (SSL/TLS communications) if configured to do so
            final SSLContextService sslService =
                    context.getProperty(PROP_SSL_CONTEXT_SERVICE).asControllerService(SSLContextService.class);
            final String rawClientAuth = context.getProperty(CLIENT_AUTH).getValue();
            final SSLContext sslContext;

            if (sslService != null) {
                final SSLContextService.ClientAuth clientAuth;
                if (StringUtils.isBlank(rawClientAuth)) {
                    clientAuth = SSLContextService.ClientAuth.REQUIRED;
                } else {
                    try {
                        clientAuth = SSLContextService.ClientAuth.valueOf(rawClientAuth);
                    } catch (final IllegalArgumentException iae) {
                        // FIX: list the values of the enum actually being parsed
                        // (SSLContextService.ClientAuth), not SslContextFactory.ClientAuth.
                        throw new ProviderCreationException(String.format("Unrecognized client auth '%s'. Possible values are [%s]",
                                rawClientAuth, StringUtils.join(SSLContextService.ClientAuth.values(), ", ")));
                    }
                }
                sslContext = sslService.createSSLContext(clientAuth);
            } else {
                sslContext = null;
            }

            final String username, password;
            PropertyValue usernameProperty = context.getProperty(USERNAME).evaluateAttributeExpressions();
            PropertyValue passwordProperty = context.getProperty(PASSWORD).evaluateAttributeExpressions();
            if (usernameProperty != null && passwordProperty != null) {
                username = usernameProperty.getValue();
                password = passwordProperty.getValue();
            } else {
                username = null;
                password = null;
            }

            // Create the cluster and connect to it
            Cluster newCluster = createCluster(contactPoints, sslContext, username, password, compressionType);
            PropertyValue keyspaceProperty = context.getProperty(KEYSPACE).evaluateAttributeExpressions();
            final Session newSession;
            if (keyspaceProperty != null) {
                newSession = newCluster.connect(keyspaceProperty.getValue());
            } else {
                newSession = newCluster.connect();
            }
            newCluster.getConfiguration().getQueryOptions().setConsistencyLevel(ConsistencyLevel.valueOf(consistencyLevel));
            Metadata metadata = newCluster.getMetadata();

            log.info("Connected to Cassandra cluster: {}", new Object[]{metadata.getClusterName()});

            cluster = newCluster;
            cassandraSession = newSession;
        }
    }

    /**
     * Parses "host[:port],host[:port],..." into socket addresses, defaulting the
     * port to {@link #DEFAULT_CASSANDRA_PORT} when omitted. Returns null for null input.
     */
    private List<InetSocketAddress> getContactPoints(String contactPointList) {
        if (contactPointList == null) {
            return null;
        }

        final List<String> contactPointStringList = Arrays.asList(contactPointList.split(","));
        List<InetSocketAddress> contactPoints = new ArrayList<>();

        for (String contactPointEntry : contactPointStringList) {
            String[] addresses = contactPointEntry.split(":");
            final String hostName = addresses[0].trim();
            final int port = (addresses.length > 1) ? Integer.parseInt(addresses[1].trim()) : DEFAULT_CASSANDRA_PORT;

            contactPoints.add(new InetSocketAddress(hostName, port));
        }

        return contactPoints;
    }

    private Cluster createCluster(List<InetSocketAddress> contactPoints, SSLContext sslContext,
                                  String username, String password, String compressionType) {
        Cluster.Builder builder = Cluster.builder().addContactPointsWithPorts(contactPoints);

        if (sslContext != null) {
            JdkSSLOptions sslOptions = JdkSSLOptions.builder()
                    .withSSLContext(sslContext)
                    .build();
            builder = builder.withSSL(sslOptions);
        }

        if (username != null && password != null) {
            builder = builder.withCredentials(username, password);
        }

        // BUG FIX: the original compared an enum constant to the String property value
        // (ProtocolOptions.Compression.SNAPPY.equals(compressionType)), which is always
        // false since an enum never equals a String — so compression was never enabled.
        // Compare against the enum constant's name instead.
        if (ProtocolOptions.Compression.SNAPPY.name().equals(compressionType)) {
            builder = builder.withCompression(ProtocolOptions.Compression.SNAPPY);
        } else if (ProtocolOptions.Compression.LZ4.name().equals(compressionType)) {
            builder = builder.withCompression(ProtocolOptions.Compression.LZ4);
        }

        return builder.build();
    }
}
/**
 * Generated with Acceleo
 */
package org.wso2.developerstudio.eclipse.gmf.esb.components;

// Start of user code for imports
import java.util.Iterator;
import java.util.List;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.BasicDiagnostic;
import org.eclipse.emf.common.util.BasicEList;
import org.eclipse.emf.common.util.Diagnostic;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.WrappedException;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.EcorePackage;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.util.Diagnostician;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.emf.eef.runtime.api.notify.EStructuralFeatureNotificationFilter;
import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.api.notify.NotificationFilter;
import org.eclipse.emf.eef.runtime.context.PropertiesEditingContext;
import org.eclipse.emf.eef.runtime.impl.components.SinglePartPropertiesEditingComponent;
import org.eclipse.emf.eef.runtime.impl.filters.EObjectFilter;
import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.impl.utils.EEFConverterUtil;
import org.eclipse.emf.eef.runtime.impl.utils.EEFUtils;
import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings;
import org.wso2.developerstudio.eclipse.gmf.esb.CallMediator;
import org.wso2.developerstudio.eclipse.gmf.esb.CallMediatorEndpointType;
import org.wso2.developerstudio.eclipse.gmf.esb.EndPoint;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.NamespacedProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.RegistryKeyProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.impl.EsbFactoryImpl;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.CallMediatorPropertiesEditionPart;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository;
import org.wso2.developerstudio.eclipse.gmf.esb.presentation.EEFPropertyViewUtil;
// End of user code

/**
 * EEF properties-editing component for the Call mediator. Bridges the
 * CallMediator EMF model object and its properties form: initializes the
 * form widgets from the model, pushes edits back into the model, refreshes
 * the form on model notifications, and validates edited values.
 *
 * NOTE: this class is generated (Acceleo); hand edits belong only between the
 * "Start of user code" / "End of user code" markers, which the generator preserves.
 */
public class CallMediatorPropertiesEditionComponent extends SinglePartPropertiesEditingComponent {

	// Identifier of the single form part managed by this component.
	public static String BASE_PART = "Base"; //$NON-NLS-1$

	/**
	 * Settings for endpoint ReferencesTable
	 */
	private ReferencesTableSettings endpointSettings;

	/**
	 * Default constructor
	 *
	 */
	public CallMediatorPropertiesEditionComponent(PropertiesEditingContext editingContext, EObject callMediator, String editing_mode) {
		super(editingContext, callMediator, editing_mode);
		parts = new String[] { BASE_PART };
		repositoryKey = EsbViewsRepository.class;
		partKey = EsbViewsRepository.CallMediator.class;
	}

	/**
	 * Populates the form part with the current CallMediator values and installs
	 * the endpoint filter. Each widget is only touched when it is accessible
	 * (i.e. present in the current view configuration).
	 *
	 * {@inheritDoc}
	 *
	 * @see org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent#initPart(java.lang.Object, int, org.eclipse.emf.ecore.EObject,
	 *      org.eclipse.emf.ecore.resource.ResourceSet)
	 *
	 */
	public void initPart(Object key, int kind, EObject elt, ResourceSet allResource) {
		// Guard against notification feedback loops while the form is being filled.
		setInitializing(true);
		if (editingPart != null && key == partKey) {
			editingPart.setContext(elt, allResource);

			final CallMediator callMediator = (CallMediator)elt;
			final CallMediatorPropertiesEditionPart basePart = (CallMediatorPropertiesEditionPart)editingPart;
			// init values
			if (isAccessible(EsbViewsRepository.CallMediator.Properties.description))
				basePart.setDescription(EEFConverterUtil.convertToString(EcorePackage.Literals.ESTRING, callMediator.getDescription()));

			if (isAccessible(EsbViewsRepository.CallMediator.Properties.commentsList))
				basePart.setCommentsList(callMediator.getCommentsList());

			if (isAccessible(EsbViewsRepository.CallMediator.Properties.reverse)) {
				basePart.setReverse(callMediator.isReverse());
			}
			if (isAccessible(EsbViewsRepository.CallMediator.Properties.endpoint)) {
				// Wrap the endpoint containment reference so the table widget can add/remove/move entries.
				endpointSettings = new ReferencesTableSettings(callMediator, EsbPackage.eINSTANCE.getCallMediator_Endpoint());
				basePart.initEndpoint(endpointSettings);
			}
			if (isAccessible(EsbViewsRepository.CallMediator.Properties.endpointType)) {
				basePart.initEndpointType(EEFUtils.choiceOfValues(callMediator, EsbPackage.eINSTANCE.getCallMediator_EndpointType()), callMediator.getEndpointType());
			}
			if (isAccessible(EsbViewsRepository.CallMediator.Properties.enableBlockingCalls)) {
				basePart.setEnableBlockingCalls(callMediator.isEnableBlockingCalls());
			}
			// Start of user code for endpointXPath command update
			if (isAccessible(EsbViewsRepository.CallMediator.Properties.endpointXpath)) {
				basePart.setEndpointXPath(callMediator.getEndpointXpath());
			}
			// End of user code

			// Start of user code for endpointRegistryKey command update
			if (isAccessible(EsbViewsRepository.CallMediator.Properties.endpointRegistryKey)) {
				basePart.setEndpointRegistryKey(callMediator.getEndpointRegistrykey());
			}
			// End of user code

			// init filters
			if (isAccessible(EsbViewsRepository.CallMediator.Properties.endpoint)) {
				// Only EndPoint instances may be added to the endpoint table.
				basePart.addFilterToEndpoint(new EObjectFilter(EsbPackage.Literals.END_POINT));
				// Start of user code for additional businessfilters for endpoint
				// End of user code
			}
			// Start of user code for endpointXPath filter update
			// End of user code

			// Start of user code for endpointRegistryKey filter update
			// End of user code

			// init values for referenced views

			// init filters for referenced views

		}
		setInitializing(false);
	}

	/**
	 * Maps an editor key from the views repository to the EMF structural
	 * feature it edits. Identity (==) comparison is intentional: editor keys
	 * are singletons.
	 *
	 * {@inheritDoc}
	 * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#associatedFeature(java.lang.Object)
	 */
	public EStructuralFeature associatedFeature(Object editorKey) {
		if (editorKey == EsbViewsRepository.CallMediator.Properties.description) {
			return EsbPackage.eINSTANCE.getEsbElement_Description();
		}
		if (editorKey == EsbViewsRepository.CallMediator.Properties.commentsList) {
			return EsbPackage.eINSTANCE.getEsbElement_CommentsList();
		}
		if (editorKey == EsbViewsRepository.CallMediator.Properties.reverse) {
			return EsbPackage.eINSTANCE.getMediator_Reverse();
		}
		if (editorKey == EsbViewsRepository.CallMediator.Properties.endpoint) {
			return EsbPackage.eINSTANCE.getCallMediator_Endpoint();
		}
		if (editorKey == EsbViewsRepository.CallMediator.Properties.endpointType) {
			return EsbPackage.eINSTANCE.getCallMediator_EndpointType();
		}
		if (editorKey == EsbViewsRepository.CallMediator.Properties.enableBlockingCalls) {
			return EsbPackage.eINSTANCE.getCallMediator_EnableBlockingCalls();
		}
		if (editorKey == EsbViewsRepository.CallMediator.Properties.endpointXpath) {
			return EsbPackage.eINSTANCE.getCallMediator_EndpointXpath();
		}
		if (editorKey == EsbViewsRepository.CallMediator.Properties.endpointRegistryKey) {
			return EsbPackage.eINSTANCE.getCallMediator_EndpointRegistrykey();
		}
		return super.associatedFeature(editorKey);
	}

	/**
	 * Applies a form edit event back onto the CallMediator model object.
	 *
	 * {@inheritDoc}
	 * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#updateSemanticModel(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
	 *
	 */
	public void updateSemanticModel(final IPropertiesEditionEvent event) {
		CallMediator callMediator = (CallMediator)semanticObject;
		if (EsbViewsRepository.CallMediator.Properties.description == event.getAffectedEditor()) {
			callMediator.setDescription((java.lang.String)EEFConverterUtil.createFromString(EcorePackage.Literals.ESTRING, (String)event.getNewValue()));
		}
		if (EsbViewsRepository.CallMediator.Properties.commentsList == event.getAffectedEditor()) {
			if (event.getKind() == PropertiesEditionEvent.SET) {
				// Replace the whole comments list with the new value.
				callMediator.getCommentsList().clear();
				callMediator.getCommentsList().addAll(((EList) event.getNewValue()));
			}
		}
		if (EsbViewsRepository.CallMediator.Properties.reverse == event.getAffectedEditor()) {
			callMediator.setReverse((Boolean)event.getNewValue());
		}
		if (EsbViewsRepository.CallMediator.Properties.endpoint == event.getAffectedEditor()) {
			// Multi-valued reference: dispatch on the event kind (add/remove/move).
			if (event.getKind() == PropertiesEditionEvent.ADD) {
				if (event.getNewValue() instanceof EndPoint) {
					endpointSettings.addToReference((EObject) event.getNewValue());
				}
			} else if (event.getKind() == PropertiesEditionEvent.REMOVE) {
				endpointSettings.removeFromReference((EObject) event.getNewValue());
			} else if (event.getKind() == PropertiesEditionEvent.MOVE) {
				endpointSettings.move(event.getNewIndex(), (EndPoint) event.getNewValue());
			}
		}
		if (EsbViewsRepository.CallMediator.Properties.endpointType == event.getAffectedEditor()) {
			callMediator.setEndpointType((CallMediatorEndpointType)event.getNewValue());
		}
		if (EsbViewsRepository.CallMediator.Properties.enableBlockingCalls == event.getAffectedEditor()) {
			callMediator.setEnableBlockingCalls((Boolean)event.getNewValue());
		}
		if (EsbViewsRepository.CallMediator.Properties.endpointXpath == event.getAffectedEditor()) {
			// Start of user code for updateEndpointXPath method body
			// A null new value resets the XPath to a fresh empty NamespacedProperty.
			if (event.getNewValue() != null) {
				NamespacedProperty nsp = (NamespacedProperty) event.getNewValue();
				callMediator.setEndpointXpath(nsp);
			} else {
				callMediator.setEndpointXpath(EsbFactoryImpl.eINSTANCE.createNamespacedProperty());
			}
			// End of user code
		}
		if (EsbViewsRepository.CallMediator.Properties.endpointRegistryKey == event.getAffectedEditor()) {
			// Start of user code for updateEndpointRegistryKey method body
			// A null new value resets the registry key to a fresh empty RegistryKeyProperty.
			if (event.getNewValue() != null) {
				RegistryKeyProperty rkp = (RegistryKeyProperty) event.getNewValue();
				callMediator.setEndpointRegistrykey(rkp);
			} else {
				callMediator.setEndpointRegistrykey(EsbFactoryImpl.eINSTANCE.createRegistryKeyProperty());
			}
			// End of user code
		}
	}

	/**
	 * Refreshes the form widgets when the underlying model changes (the
	 * reverse direction of updateSemanticModel).
	 *
	 * {@inheritDoc}
	 * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#updatePart(org.eclipse.emf.common.notify.Notification)
	 */
	public void updatePart(Notification msg) {
		super.updatePart(msg);
		if (editingPart.isVisible()) {
			CallMediatorPropertiesEditionPart basePart = (CallMediatorPropertiesEditionPart)editingPart;
			if (EsbPackage.eINSTANCE.getEsbElement_Description().equals(msg.getFeature()) && msg.getNotifier().equals(semanticObject) && basePart != null && isAccessible(EsbViewsRepository.CallMediator.Properties.description)) {
				if (msg.getNewValue() != null) {
					basePart.setDescription(EcoreUtil.convertToString(EcorePackage.Literals.ESTRING, msg.getNewValue()));
				} else {
					basePart.setDescription("");
				}
			}
			if (EsbPackage.eINSTANCE.getEsbElement_CommentsList().equals(msg.getFeature()) && msg.getNotifier().equals(semanticObject) && basePart != null && isAccessible(EsbViewsRepository.CallMediator.Properties.commentsList)) {
				// Normalize the notification payload to a list for the widget.
				if (msg.getNewValue() instanceof EList<?>) {
					basePart.setCommentsList((EList<?>)msg.getNewValue());
				} else if (msg.getNewValue() == null) {
					basePart.setCommentsList(new BasicEList<Object>());
				} else {
					BasicEList<Object> newValueAsList = new BasicEList<Object>();
					newValueAsList.add(msg.getNewValue());
					basePart.setCommentsList(newValueAsList);
				}
			}
			if (EsbPackage.eINSTANCE.getMediator_Reverse().equals(msg.getFeature()) && msg.getNotifier().equals(semanticObject) && basePart != null && isAccessible(EsbViewsRepository.CallMediator.Properties.reverse))
				basePart.setReverse((Boolean)msg.getNewValue());

			if (EsbPackage.eINSTANCE.getCallMediator_Endpoint().equals(msg.getFeature()) && isAccessible(EsbViewsRepository.CallMediator.Properties.endpoint))
				basePart.updateEndpoint();
			if (EsbPackage.eINSTANCE.getCallMediator_EndpointType().equals(msg.getFeature()) && msg.getNotifier().equals(semanticObject) && isAccessible(EsbViewsRepository.CallMediator.Properties.endpointType))
				basePart.setEndpointType((CallMediatorEndpointType)msg.getNewValue());

			if (EsbPackage.eINSTANCE.getCallMediator_EnableBlockingCalls().equals(msg.getFeature()) && msg.getNotifier().equals(semanticObject) && basePart != null && isAccessible(EsbViewsRepository.CallMediator.Properties.enableBlockingCalls))
				basePart.setEnableBlockingCalls((Boolean)msg.getNewValue());

			// Start of user code for endpointXPath live update
			if (EsbPackage.eINSTANCE.getCallMediator_EndpointXpath().equals(msg.getFeature()) && msg.getNotifier().equals(semanticObject) && basePart != null && isAccessible(EsbViewsRepository.CallMediator.Properties.endpointXpath)) {
				if (msg.getNewValue() != null) {
					basePart.setEndpointXPath((NamespacedProperty)msg.getNewValue());
				} else {
					basePart.setEndpointXPath(EsbFactoryImpl.eINSTANCE.createNamespacedProperty());
				}
			}
			// End of user code

			// Start of user code for endpointRegistryKey live update
			if (EsbPackage.eINSTANCE.getCallMediator_EndpointRegistrykey().equals(msg.getFeature()) && msg.getNotifier().equals(semanticObject) && basePart != null && isAccessible(EsbViewsRepository.CallMediator.Properties.endpointRegistryKey)) {
				if (msg.getNewValue() != null) {
					basePart.setEndpointRegistryKey((RegistryKeyProperty)msg.getNewValue());
				} else {
					basePart.setEndpointRegistryKey(EsbFactoryImpl.eINSTANCE.createRegistryKeyProperty());
				}
			}
			// End of user code

		}
	}

	/**
	 * Restricts model notifications to the features this component edits.
	 *
	 * {@inheritDoc}
	 *
	 * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#getNotificationFilters()
	 */
	@Override
	protected NotificationFilter[] getNotificationFilters() {
		NotificationFilter filter = new EStructuralFeatureNotificationFilter(
			EsbPackage.eINSTANCE.getEsbElement_Description(),
			EsbPackage.eINSTANCE.getEsbElement_CommentsList(),
			EsbPackage.eINSTANCE.getMediator_Reverse(),
			EsbPackage.eINSTANCE.getCallMediator_Endpoint(),
			EsbPackage.eINSTANCE.getCallMediator_EndpointType(),
			EsbPackage.eINSTANCE.getCallMediator_EnableBlockingCalls(),
			EsbPackage.eINSTANCE.getCallMediator_EndpointXpath(),
			EsbPackage.eINSTANCE.getCallMediator_EndpointRegistrykey()		);
		return new NotificationFilter[] {filter,};
	}

	/**
	 * Validates a pending edit against the EMF attribute type before it is
	 * applied; String inputs are first converted to the attribute's type.
	 *
	 * {@inheritDoc}
	 *
	 * @see org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent#validateValue(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
	 *
	 */
	public Diagnostic validateValue(IPropertiesEditionEvent event) {
		Diagnostic ret = Diagnostic.OK_INSTANCE;
		if (event.getNewValue() != null) {
			try {
				if (EsbViewsRepository.CallMediator.Properties.description == event.getAffectedEditor()) {
					Object newValue = event.getNewValue();
					if (newValue instanceof String) {
						newValue = EEFConverterUtil.createFromString(EsbPackage.eINSTANCE.getEsbElement_Description().getEAttributeType(), (String)newValue);
					}
					ret = Diagnostician.INSTANCE.validate(EsbPackage.eINSTANCE.getEsbElement_Description().getEAttributeType(), newValue);
				}
				if (EsbViewsRepository.CallMediator.Properties.commentsList == event.getAffectedEditor()) {
					// Validate each element of the list and merge the diagnostics.
					BasicDiagnostic chain = new BasicDiagnostic();
					for (Iterator iterator = ((List)event.getNewValue()).iterator(); iterator.hasNext();) {
						chain.add(Diagnostician.INSTANCE.validate(EsbPackage.eINSTANCE.getEsbElement_CommentsList().getEAttributeType(), iterator.next()));
					}
					ret = chain;
				}
				if (EsbViewsRepository.CallMediator.Properties.reverse == event.getAffectedEditor()) {
					Object newValue = event.getNewValue();
					if (newValue instanceof String) {
						newValue = EEFConverterUtil.createFromString(EsbPackage.eINSTANCE.getMediator_Reverse().getEAttributeType(), (String)newValue);
					}
					ret = Diagnostician.INSTANCE.validate(EsbPackage.eINSTANCE.getMediator_Reverse().getEAttributeType(), newValue);
				}
				if (EsbViewsRepository.CallMediator.Properties.endpointType == event.getAffectedEditor()) {
					Object newValue = event.getNewValue();
					if (newValue instanceof String) {
						newValue = EEFConverterUtil.createFromString(EsbPackage.eINSTANCE.getCallMediator_EndpointType().getEAttributeType(), (String)newValue);
					}
					ret = Diagnostician.INSTANCE.validate(EsbPackage.eINSTANCE.getCallMediator_EndpointType().getEAttributeType(), newValue);
				}
				if (EsbViewsRepository.CallMediator.Properties.enableBlockingCalls == event.getAffectedEditor()) {
					Object newValue = event.getNewValue();
					if (newValue instanceof String) {
						newValue = EEFConverterUtil.createFromString(EsbPackage.eINSTANCE.getCallMediator_EnableBlockingCalls().getEAttributeType(), (String)newValue);
					}
					ret = Diagnostician.INSTANCE.validate(EsbPackage.eINSTANCE.getCallMediator_EnableBlockingCalls().getEAttributeType(), newValue);
				}
			} catch (IllegalArgumentException iae) {
				ret = BasicDiagnostic.toDiagnostic(iae);
			} catch (WrappedException we) {
				ret = BasicDiagnostic.toDiagnostic(we);
			}
		}
		return ret;
	}


	// Start of user code for help compatibility
	/**
	 * Delegates context help lookup to the shared ESB property-view utility.
	 *
	 * @generated NOT
	 */
	@Override
	public String getHelpContent(Object key, int kind) {
		return EEFPropertyViewUtil.getHelpContent(key);
	}
	// End of user code

}
/*
 * Copyright 2014 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.navercorp.pinpoint.web.service;

import com.navercorp.pinpoint.common.trace.ServiceType;
import com.navercorp.pinpoint.web.applicationmap.histogram.TimeHistogram;
import com.navercorp.pinpoint.web.applicationmap.rawdata.*;
import com.navercorp.pinpoint.web.dao.HostApplicationMapDao;
import com.navercorp.pinpoint.web.dao.MapStatisticsCalleeDao;
import com.navercorp.pinpoint.web.dao.MapStatisticsCallerDao;
import com.navercorp.pinpoint.web.service.map.AcceptApplication;
import com.navercorp.pinpoint.web.service.map.AcceptApplicationLocalCache;
import com.navercorp.pinpoint.web.service.map.RpcApplication;
import com.navercorp.pinpoint.web.vo.Application;
import com.navercorp.pinpoint.web.vo.LinkKey;
import com.navercorp.pinpoint.web.vo.Range;
import com.navercorp.pinpoint.web.vo.SearchOption;
import org.apache.commons.collections.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;

/**
 * Breadth-first link search
 * not thread safe
 * @author emeroad
 */
public class BFSLinkSelector implements LinkSelector {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    // Tracks which applications have already been searched as caller/callee,
    // so each node is expanded at most once per direction.
    private final LinkVisitChecker linkVisitChecker = new LinkVisitChecker();

    private final MapStatisticsCalleeDao mapStatisticsCalleeDao;

    private final MapStatisticsCallerDao mapStatisticsCallerDao;

    private final HostApplicationMapDao hostApplicationMapDao;

    // Per-select() cache of host -> accept-application lookups to avoid repeated DAO calls.
    private final AcceptApplicationLocalCache acceptApplicationLocalCache = new AcceptApplicationLocalCache();

    // Links created by one-to-N virtual-link emulation; post-processed in fillEmulationLink().
    private final Set<LinkData> emulationLinkMarker = new HashSet<>();

    // Frontier of the breadth-first search (project-local Queue type, not java.util.Queue).
    private final Queue nextQueue = new Queue();

    public BFSLinkSelector(MapStatisticsCallerDao mapStatisticsCallerDao, MapStatisticsCalleeDao mapStatisticsCalleeDao, HostApplicationMapDao hostApplicationMapDao) {
        if (mapStatisticsCalleeDao == null) {
            throw new NullPointerException("mapStatisticsCalleeDao must not be null");
        }
        if (mapStatisticsCallerDao == null) {
            throw new NullPointerException("mapStatisticsCallerDao must not be null");
        }
        if (hostApplicationMapDao == null) {
            throw new NullPointerException("hostApplicationMapDao must not be null");
        }
        this.mapStatisticsCalleeDao = mapStatisticsCalleeDao;
        this.mapStatisticsCallerDao = mapStatisticsCallerDao;
        this.hostApplicationMapDao = hostApplicationMapDao;
    }

    /**
     * Queries for all applications(caller&callee) called by the targetApplicationList
     * One BFS level: expands every target application in both directions (subject
     * to the per-direction depth limits) and enqueues newly discovered neighbors.
     *
     * @param targetApplicationList applications at the current BFS depth
     * @param range                 time range of the query
     * @return link data discovered at this level
     */
    private LinkDataDuplexMap selectLink(List<Application> targetApplicationList, Range range, SearchDepth callerDepth, SearchDepth calleeDepth) {

        final LinkDataDuplexMap searchResult = new LinkDataDuplexMap();
        for (Application targetApplication : targetApplicationList) {

            final boolean searchCallerNode = checkNextCaller(targetApplication, callerDepth);
            if (searchCallerNode) {
                final LinkDataMap caller = mapStatisticsCallerDao.selectCaller(targetApplication, range);
                if (logger.isDebugEnabled()) {
                    logger.debug("Found Caller. count={}, caller={}, depth={}", caller.size(), targetApplication, callerDepth.getDepth());
                }

                // Resolve RPC-client destinations to real applications where possible.
                final LinkDataMap replaceRpcCaller = replaceRpcCaller(caller, range);

                for (LinkData link : replaceRpcCaller.getLinkDataList()) {
                    searchResult.addSourceLinkData(link);

                    final Application toApplication = link.getToApplication();
                    // skip if nextApplication is a terminal or an unknown cloud
                    if (toApplication.getServiceType().isTerminal() || toApplication.getServiceType().isUnknown()) {
                        continue;
                    }

                    addNextNode(toApplication);
                }
            }

            final boolean searchCalleeNode = checkNextCallee(targetApplication, calleeDepth);
            if (searchCalleeNode) {
                final LinkDataMap callee = mapStatisticsCalleeDao.selectCallee(targetApplication, range);
                // NOTE(review): guard checks isInfoEnabled() but logs at debug — likely
                // should be isDebugEnabled(); left as-is (doc-only change).
                if (logger.isInfoEnabled()) {
                    logger.debug("Found Callee. count={}, callee={}, depth={}", callee.size(), targetApplication, calleeDepth.getDepth());
                }
                for (LinkData stat : callee.getLinkDataList()) {
                    searchResult.addTargetLinkData(stat);

                    final Application fromApplication = stat.getFromApplication();
                    addNextNode(fromApplication);
                }
            }
        }
        logger.debug("{} depth search end", callerDepth.getDepth());
        return searchResult;
    }

    // Enqueues the application for the next BFS level unless already visited.
    private void addNextNode(Application sourceApplication) {
        final boolean add = this.nextQueue.addNextNode(sourceApplication);
        if (!add) {
            logger.debug("already visited. nextNode:{}", sourceApplication);
        }
    }

    // Returns true when the caller direction should still be expanded for this node
    // (depth not exceeded and node not yet visited as caller).
    private boolean checkNextCaller(Application targetApplication, SearchDepth depth) {
        if (depth.isDepthOverflow()) {
            logger.debug("caller depth overflow application:{} depth:{}", targetApplication, depth.getDepth());
            return false;
        }

        if (linkVisitChecker.visitCaller(targetApplication)) {
            logger.debug("already visited caller:{}", targetApplication);
            return false;
        }

        return true;
    }

    // Returns true when the callee direction should still be expanded for this node
    // (depth not exceeded and node not yet visited as callee).
    private boolean checkNextCallee(Application targetApplication, SearchDepth depth) {
        if (depth.isDepthOverflow()) {
            logger.debug("callee depth overflow application:{} depth:{}", targetApplication, depth.getDepth());
            return false;
        }

        if (linkVisitChecker.visitCallee(targetApplication)) {
            logger.debug("already visited callee:{}", targetApplication);
            return false;
        }

        return true;
    }

    // For an RPC-client (or queue) destination, resolves the destination host to the
    // accepting application(s). Returns one link per resolved destination; falls back
    // to an UNKNOWN-typed node when no accept application exists.
    private List<LinkData> checkRpcCallAccepted(LinkData linkData, Range range) {
        // replace if the rpc client's destination has an agent installed and thus has an application name
        final Application toApplication = linkData.getToApplication();
        if (!toApplication.getServiceType().isRpcClient() && !toApplication.getServiceType().isQueue()) {
            return Collections.singletonList(linkData);
        }

        logger.debug("checkRpcCallAccepted(). Find applicationName:{} {}", toApplication, range);

        final Set<AcceptApplication> acceptApplicationList = findAcceptApplication(linkData.getFromApplication(), toApplication.getName(), range);
        logger.debug("find accept application:{}", acceptApplicationList);
        if (CollectionUtils.isNotEmpty(acceptApplicationList)) {
            if (acceptApplicationList.size() == 1) {
                logger.debug("Application info replaced. {} => {}", linkData, acceptApplicationList);

                AcceptApplication first = acceptApplicationList.iterator().next();
                final LinkData acceptedLinkData = new LinkData(linkData.getFromApplication(), first.getApplication());
                acceptedLinkData.setLinkCallDataMap(linkData.getLinkCallDataMap());
                return Collections.singletonList(acceptedLinkData);
            } else {
                // special case - there are more than 2 nodes grouped by a single url
                return createVirtualLinkData(linkData, toApplication, acceptApplicationList);
            }
        } else {
            // for queues, accept application may not exist if no consumers have an agent installed
            if (toApplication.getServiceType().isQueue()) {
                return Collections.singletonList(linkData);
            } else {
                final Application unknown = new Application(toApplication.getName(), ServiceType.UNKNOWN);
                final LinkData unknownLinkData = new LinkData(linkData.getFromApplication(), unknown);
                unknownLinkData.setLinkCallDataMap(linkData.getLinkCallDataMap());
                return Collections.singletonList(unknownLinkData);
            }
        }

    }

    // One host resolved to multiple applications: fan the single link out into one
    // link per accept application and mark each for later emulation fix-up.
    private List<LinkData> createVirtualLinkData(LinkData linkData, Application toApplication, Set<AcceptApplication> acceptApplicationList) {
        logger.warn("one to N replaced. node:{}->host:{} accept:{}", linkData.getFromApplication(), toApplication.getName(), acceptApplicationList);

        List<LinkData> emulationLink = new ArrayList<>();
        for (AcceptApplication acceptApplication : acceptApplicationList) {
            // linkCallData needs to be modified - remove callHistogram on purpose
            final LinkData acceptedLinkData = new LinkData(linkData.getFromApplication(), acceptApplication.getApplication());
            acceptedLinkData.setLinkCallDataMap(linkData.getLinkCallDataMap());
            emulationLink.add(acceptedLinkData);
            traceEmulationLink(acceptedLinkData);
        }
        return emulationLink;
    }

    // Remembers an emulated link so fillEmulationLink() can rebuild its call data later.
    private void traceEmulationLink(LinkData acceptApplication) {
        final boolean add = emulationLinkMarker.add(acceptApplication);
        if (!add) {
            logger.warn("emulationLink add error. {}", acceptApplication);
        }
    }


    // Looks up which applications accept calls addressed to the given host,
    // caching the DAO result per (host, fromApplication) pair.
    private Set<AcceptApplication> findAcceptApplication(Application fromApplication, String host, Range range) {
        logger.debug("findAcceptApplication {} {}", fromApplication, host);
        final RpcApplication rpcApplication = new RpcApplication(host, fromApplication);
        final Set<AcceptApplication> hit = this.acceptApplicationLocalCache.get(rpcApplication);
        if (CollectionUtils.isNotEmpty(hit)) {
            logger.debug("acceptApplicationLocalCache hit {}", rpcApplication);
            return hit;
        }
        final Set<AcceptApplication> acceptApplicationSet = hostApplicationMapDao.findAcceptApplicationName(fromApplication, range);
        this.acceptApplicationLocalCache.put(rpcApplication, acceptApplicationSet);

        Set<AcceptApplication> acceptApplication = this.acceptApplicationLocalCache.get(rpcApplication);
        logger.debug("findAcceptApplication {}->{} result:{}", fromApplication, host, acceptApplication);
        return acceptApplication;
    }

    // Rebuilds call data for links created by one-to-N emulation: the emulated link's
    // (copied) call data is cleared and re-derived from the target side's accepted data.
    private void fillEmulationLink(LinkDataDuplexMap linkDataDuplexMap, Range range) {
        // TODO need to be reimplemented - virtual node creation logic needs an overhaul.
        // Currently, only the reversed relationship node is displayed. We need to create a virtual node and convert the rpc data appropriately.
        logger.debug("this.emulationLinkMarker:{}", this.emulationLinkMarker);
        List<LinkData> emulationLinkDataList = findEmulationLinkData(linkDataDuplexMap);

        for (LinkData emulationLinkData : emulationLinkDataList) {
            LinkCallDataMap beforeImage = emulationLinkData.getLinkCallDataMap();
            logger.debug("beforeImage:{}", beforeImage);
            emulationLinkData.resetLinkData();

            LinkKey findLinkKey = new LinkKey(emulationLinkData.getFromApplication(), emulationLinkData.getToApplication());
            LinkData targetLinkData = linkDataDuplexMap.getTargetLinkData(findLinkKey);
            if (targetLinkData == null) {
                // This is a case where the emulation target node has been only "partially" visited, (ie. does not have a target link data)
                // Most likely due to the limit imposed by inbound search depth. Must go fetch the target link data here.
                final Application targetApplication = emulationLinkData.getToApplication();
                final LinkDataMap callee = mapStatisticsCalleeDao.selectCallee(targetApplication, range);
                targetLinkData = callee.getLinkData(findLinkKey);
                if (targetLinkData == null) {
                    // There has been a case where targetLinkData was null, but exact event could not be captured for analysis.
                    // Logging the case for further analysis should it happen again in the future.
                    logger.error("targetLinkData not found findLinkKey:{}", findLinkKey);
                    continue;
                }
            }

            // create reversed link data - convert data accepted by the target to target's call data
            LinkCallDataMap targetList = targetLinkData.getLinkCallDataMap();
            Collection<LinkCallData> beforeLinkDataList = beforeImage.getLinkDataList();

            LinkCallData beforeLinkCallData = beforeLinkDataList.iterator().next();
            for (LinkCallData agentHistogram : targetList.getLinkDataList()) {
                Collection<TimeHistogram> timeHistogramList = agentHistogram.getTimeHistogram();
                LinkCallDataMap linkCallDataMap = emulationLinkData.getLinkCallDataMap();

                if (logger.isDebugEnabled()) {
                    logger.debug("emulationLink BEFORE:{}", beforeLinkCallData);
                    logger.debug("emulationLink agent:{}", agentHistogram);
                    logger.debug("emulationLink link:{}/{} -> {}/{}", agentHistogram.getTarget(), agentHistogram.getTargetServiceType(),
                            beforeLinkCallData.getTarget(), beforeLinkCallData.getTargetServiceType().getCode());
                }

                linkCallDataMap.addCallData(agentHistogram.getTarget(), agentHistogram.getTargetServiceType(),
                        beforeLinkCallData.getTarget(), beforeLinkCallData.getTargetServiceType(), timeHistogramList);
            }

        }
    }


    // Finds the LinkData instances inside linkDataDuplexMap that correspond to the
    // marked emulation links (matched by from/to key, not by identity).
    private List<LinkData> findEmulationLinkData(LinkDataDuplexMap linkDataDuplexMap) {
        // LinkDataDuplexMap already has a copy of the data - modifying emulationLinkMarker's data has no effect.
        // We must get the data from LinkDataDuplexMap again.
        List<LinkData> searchList = new ArrayList<>();
        for (LinkData emulationLinkData : this.emulationLinkMarker) {
            LinkKey search = getLinkKey(emulationLinkData);
            for (LinkData linkData : linkDataDuplexMap.getSourceLinkDataList()) {
                LinkKey linkKey = getLinkKey(linkData);
                if (linkKey.equals(search)) {
                    searchList.add(linkData);
                }
            }
        }
        return searchList;
    }

    // Builds the (from, to) key used to match links.
    private LinkKey getLinkKey(LinkData emulationLinkData) {
        Application fromApplication = emulationLinkData.getFromApplication();
        Application toApplication = emulationLinkData.getToApplication();
        return new LinkKey(fromApplication, toApplication);
    }

    /**
     * Entry point: runs the breadth-first search from sourceApplication within
     * the given range, bounded by the caller/callee depths in searchOption,
     * then post-processes any emulated one-to-N links.
     */
    public LinkDataDuplexMap select(Application sourceApplication, Range range, SearchOption searchOption) {
        if (searchOption == null) {
            throw new NullPointerException("searchOption must not be null");
        }

        SearchDepth callerDepth = new SearchDepth(searchOption.getCallerSearchDepth());
        SearchDepth calleeDepth = new SearchDepth(searchOption.getCalleeSearchDepth());

        logger.debug("ApplicationMap select {}", sourceApplication);

        addNextNode(sourceApplication);

        LinkDataDuplexMap linkDataDuplexMap = new LinkDataDuplexMap();
        while (!this.nextQueue.isEmpty()) {
            final List<Application> currentNode = this.nextQueue.copyAndClear();

            logger.debug("size:{} depth caller:{} callee:{} node:{}", currentNode.size(), callerDepth.getDepth(), calleeDepth.getDepth(), currentNode);
            LinkDataDuplexMap levelData = selectLink(currentNode, range, callerDepth, calleeDepth);

            linkDataDuplexMap.addLinkDataDuplexMap(levelData);

            callerDepth = callerDepth.nextDepth();
            calleeDepth = calleeDepth.nextDepth();
        }

        if (!emulationLinkMarker.isEmpty()) {
            logger.debug("Link emulation size:{}", emulationLinkMarker.size());
            // special case
            checkUnsearchEmulationCalleeNode(linkDataDuplexMap, range);
            fillEmulationLink(linkDataDuplexMap, range);
        }
        return linkDataDuplexMap;
    }

    private void checkUnsearchEmulationCalleeNode(LinkDataDuplexMap searchResult, Range range) {
        List<Application> unvisitedList = getUnvisitedEmulationNode();
        if
(unvisitedList.isEmpty()) { logger.debug("unvisited callee node not found"); return; } logger.info("unvisited callee node {}", unvisitedList); final LinkDataMap calleeLinkData = new LinkDataMap(); for (Application application : unvisitedList) { LinkDataMap callee = mapStatisticsCalleeDao.selectCallee(application, range); logger.debug("calleeNode:{}", callee); calleeLinkData.addLinkDataMap(callee); } LinkDataMap unvisitedNodeFilter = new LinkDataMap(); for (LinkData linkData : calleeLinkData.getLinkDataList()) { Application fromApplication = linkData.getFromApplication(); if (!fromApplication.getServiceType().isWas()) { continue; } Application emulatedApplication = linkData.getToApplication(); boolean unvisitedNode = isUnVisitedNode(unvisitedList, emulatedApplication, fromApplication); if (unvisitedNode) { logger.debug("EmulationCalleeNode:{}", linkData); unvisitedNodeFilter.addLinkData(linkData); } } logger.debug("UnVisitedNode:{}", unvisitedNodeFilter); for (LinkData linkData : unvisitedNodeFilter.getLinkDataList()) { searchResult.addTargetLinkData(linkData); } } private boolean isUnVisitedNode(List<Application> unvisitedList, Application toApplication, Application fromApplication) { for (Application unvisitedApplication : unvisitedList) { if (toApplication.equals(unvisitedApplication) && linkVisitChecker.isVisitedCaller(fromApplication)) { return true; } } return false; } private List<Application> getUnvisitedEmulationNode() { Set<Application> unvisitedList = new HashSet<>(); for (LinkData linkData : this.emulationLinkMarker) { Application toApplication = linkData.getToApplication(); boolean isVisited = this.linkVisitChecker.isVisitedCaller(toApplication); if (!isVisited) { unvisitedList.add(toApplication); } } return new ArrayList<>(unvisitedList); } private LinkDataMap replaceRpcCaller(LinkDataMap caller, Range range) { final LinkDataMap replaceRpcCaller = new LinkDataMap(); for (LinkData callerLink : caller.getLinkDataList()) { final List<LinkData> checkedLink 
= checkRpcCallAccepted(callerLink, range); for (LinkData linkData : checkedLink) { replaceRpcCaller.addLinkData(linkData); } } return replaceRpcCaller; } static class Queue { private final Set<Application> nextNode = new HashSet<>(); public boolean addNextNode(Application application) { return this.nextNode.add(application); } public List<Application> copyAndClear() { List<Application> copyList = new ArrayList<>(this.nextNode); this.nextNode.clear(); return copyList; } public boolean isEmpty() { return this.nextNode.isEmpty(); } } }
/******************************************************************************* * Open Behavioral Health Information Technology Architecture (OBHITA.org) * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the <organization> nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
******************************************************************************/ package gov.samhsa.acs.brms; import gov.samhsa.acs.brms.domain.ClinicalFact; import gov.samhsa.acs.brms.domain.FactModel; import gov.samhsa.acs.brms.domain.RuleExecutionContainer; import gov.samhsa.acs.brms.guvnor.GuvnorService; import gov.samhsa.acs.common.tool.SimpleMarshaller; import gov.samhsa.consent2share.schema.ruleexecutionservice.AssertAndExecuteClinicalFactsResponse; import java.util.HashSet; import java.util.Set; import javax.xml.bind.JAXBException; import org.drools.KnowledgeBase; import org.drools.KnowledgeBaseFactory; import org.drools.builder.KnowledgeBuilder; import org.drools.builder.KnowledgeBuilderError; import org.drools.builder.KnowledgeBuilderErrors; import org.drools.builder.KnowledgeBuilderFactory; import org.drools.builder.ResourceType; import org.drools.definition.rule.Rule; import org.drools.event.rule.AfterActivationFiredEvent; import org.drools.event.rule.DefaultAgendaEventListener; import org.drools.io.ResourceFactory; import org.drools.runtime.StatefulKnowledgeSession; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * The Class RuleExecutionServiceImpl. */ public class RuleExecutionServiceImpl implements RuleExecutionService { /** The logger. */ private final Logger logger = LoggerFactory.getLogger(this.getClass()); /** The guvnor service. */ private final GuvnorService guvnorService; /** The marshaller. */ private final SimpleMarshaller marshaller; /** * Instantiates a new rule execution service impl. 
* * @param guvnorService * the guvnor service * @param marshaller * the marshaller */ public RuleExecutionServiceImpl(GuvnorService guvnorService, SimpleMarshaller marshaller) { super(); this.guvnorService = guvnorService; this.marshaller = marshaller; } /* * (non-Javadoc) * * @see * gov.samhsa.acs.brms.RuleExecutionService#assertAndExecuteClinicalFacts * (gov.samhsa.acs.brms.domain.FactModel) */ @Override public AssertAndExecuteClinicalFactsResponse assertAndExecuteClinicalFacts( FactModel factModel) { RuleExecutionContainer executionResponseContainer = null; final AssertAndExecuteClinicalFactsResponse assertAndExecuteResponse = new AssertAndExecuteClinicalFactsResponse(); String executionResponseContainerXMLString = null; final Set<String> firedRuleNames = new HashSet<String>(); final StatefulKnowledgeSession session = createStatefulKnowledgeSession(); try { session.insert(factModel.getXacmlResult()); for (final ClinicalFact clinicalFact : factModel .getClinicalFactList()) { session.insert(clinicalFact); } session.addEventListener(new DefaultAgendaEventListener() { @Override public void afterActivationFired(AfterActivationFiredEvent event) { super.afterActivationFired(event); final Rule rule = event.getActivation().getRule(); firedRuleNames.add(rule.getName()); } }); session.fireAllRules(); logger.debug("Fired rules: {}...", firedRuleNames); logger.debug("Fired rules: " + firedRuleNames); executionResponseContainer = (RuleExecutionContainer) session .getGlobal("ruleExecutionContainer"); // Marshal rule execution response executionResponseContainerXMLString = marshaller .marshal(executionResponseContainer); if (firedRuleNames.size() > 0) { assertAndExecuteResponse.setRulesFired(firedRuleNames .toString()); } } catch (final Throwable e) { logger.error(e.getMessage(), e); } finally { firedRuleNames.clear(); if (session != null) { session.dispose(); } } assertAndExecuteResponse .setRuleExecutionResponseContainer(executionResponseContainerXMLString); return 
assertAndExecuteResponse; } /* * (non-Javadoc) * * @see * gov.samhsa.acs.brms.RuleExecutionService#assertAndExecuteClinicalFacts * (java.lang.String) */ @Override public AssertAndExecuteClinicalFactsResponse assertAndExecuteClinicalFacts( String factModelXmlString) { FactModel factModel = null; try { factModel = marshaller.unmarshalFromXml(FactModel.class, factModelXmlString); } catch (final JAXBException e) { logger.error(e.getMessage(), e); } return assertAndExecuteClinicalFacts(factModel); } /** * Creates the stateful knowledge session. * * @return the stateful knowledge session */ StatefulKnowledgeSession createStatefulKnowledgeSession() { StatefulKnowledgeSession session = null; try { final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory .newKnowledgeBuilder(); final String casRules = guvnorService .getVersionedRulesFromPackage(); kbuilder.add( ResourceFactory.newByteArrayResource(casRules.getBytes()), ResourceType.DRL); final KnowledgeBuilderErrors errors = kbuilder.getErrors(); if (errors.size() > 0) { for (final KnowledgeBuilderError error : errors) { logger.error(error.toString()); } } final KnowledgeBase knowledgeBase = KnowledgeBaseFactory .newKnowledgeBase(); knowledgeBase.addKnowledgePackages(kbuilder.getKnowledgePackages()); session = knowledgeBase.newStatefulKnowledgeSession(); session.setGlobal("ruleExecutionContainer", new RuleExecutionContainer()); } catch (final Exception e) { logger.error(e.toString(), e); } return session; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package datafu.test.pig.stats.entropy;

import static org.testng.Assert.*;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.adrianwalker.multilinestring.Multiline;
import org.apache.pig.data.Tuple;
import org.apache.pig.pigunit.PigTest;
import org.testng.annotations.Test;

import datafu.test.pig.PigTests;

/*
 * R's entropy library: http://cran.r-project.org/web/packages/entropy/entropy.pdf
 * used as our test benchmark
 */
// NOTE: the javadoc comments annotated with @Multiline below ARE the Pig
// scripts executed at runtime (the Multiline processor turns the comment text
// into the field's String value) - do not treat them as documentation.
public class ChaoShenEntropyTests extends AbstractEntropyTests
{
  /**
  define Entropy datafu.pig.stats.entropy.Entropy('$type','$base');

  data = load 'input' as (val:double);
  --describe data;
  data_grouped = GROUP data ALL;
  data_out = FOREACH data_grouped {
    data_ordered = ORDER data BY val;
    GENERATE Entropy(data_ordered);
  };

  store data_out into 'output';
  */
  @Multiline private String entropy;

  @Test
  public void uniqValChaoShenEntropoyTest() throws Exception
  {
    PigTest test = createPigTestFromString(entropy, "type=chaosh", "base=log");

    writeLinesToFile("input",
                     "98.94791",
                     "38.61010",
                     "97.10575",
                     "62.28313",
                     "38.83960",
                     "32.05370",
                     "96.10962",
                     "28.72388",
                     "96.65888",
                     "20.41135");

    test.runScript();

    /* Add expected values, computed using R:
     *
     * e.g.
     *
     * > v=c(98.94791,38.61010,97.10575,62.28313,38.83960,32.05370,96.10962,28.72388,96.65888,20.41135)
     * > table(v)
     * v
     * 20.41135 28.72388  32.0537  38.6101  38.8396 62.28313 96.10962 96.65888 97.10575 98.94791
     *        1        1        1        1        1        1        1        1        1        1
     * > count=c(1,1,1,1,1,1,1,1,1,1)
     * > library(entropy)
     * > entropy(count,count/sum(count),c("CS"),c("log"))
     * [1] 4.816221
     *
     */
    List<Double> expectedOutput = new ArrayList<Double>();
    expectedOutput.add(4.816221);

    List<Tuple> output = this.getLinesForAlias(test, "data_out");
    verifyEqualEntropyOutput(expectedOutput, output, 5);
  }

  @Test
  public void singleValChaoShenEntropoyTest() throws Exception
  {
    PigTest test = createPigTestFromString(entropy, "type=chaosh", "base=log");

    writeLinesToFile("input",
                     "98.94791",
                     "98.94791",
                     "98.94791",
                     "98.94791",
                     "98.94791",
                     "98.94791",
                     "98.94791",
                     "98.94791",
                     "98.94791",
                     "98.94791");

    test.runScript();

    /* Add expected values, computed using R:
     *
     * e.g.
     *
     * > v=c(98.94791,98.94791,98.94791,98.94791,98.94791,98.94791,98.94791,98.94791,98.94791,98.94791)
     * > table(v)
     * v
     * 98.94791
     *       10
     * > count=(10)
     * > entropy(count,count/sum(count),c("CS"),c("log"))
     * [1] 0
     *
     */
    List<Double> expectedOutput = new ArrayList<Double>();
    expectedOutput.add(0.0);

    List<Tuple> output = this.getLinesForAlias(test, "data_out");
    verifyEqualEntropyOutput(expectedOutput, output, 5);
  }

  @Test
  public void dupValChaoShenEntropoyTest() throws Exception
  {
    PigTest test = createPigTestFromString(entropy,"type=chaosh", "base=log");

    writeLinesToFile("input",
                     "98.94791",
                     "38.61010",
                     "97.10575",
                     "62.28313",
                     "38.61010",
                     "32.05370",
                     "96.10962",
                     "38.61010",
                     "96.10962",
                     "20.41135");

    test.runScript();

    /* Add expected values, computed using R:
     *
     * e.g.
     *
     * > v=c(98.94791,38.61010,97.10575,62.28313,38.61010,32.05370,96.10962,38.61010,96.10962,20.41135)
     * > table(v)
     * v
     * 20.41135 28.72388  32.0537  38.6101  38.8396 62.28313 96.10962 96.65888 97.10575 98.94791
     *        1        1        3        1        2        1        1
     * > count=c(1,1,3,1,2,1,1)
     * > library(entropy)
     * > entropy(count,count/sum(count),c("CS"),c("log"))
     * [1] 2.57429
     *
     */
    List<Double> expectedOutput = new ArrayList<Double>();
    expectedOutput.add(2.57429);

    List<Tuple> output = this.getLinesForAlias(test, "data_out");
    verifyEqualEntropyOutput(expectedOutput, output, 5);
  }

  @Test
  public void emptyInputBagChaoShenEntropoyTest() throws Exception
  {
    PigTest test = createPigTestFromString(entropy, "type=chaosh", "base=log");

    writeLinesToFile("input"
                     );

    test.runScript();

    /* Add expected values, computed using R:
     *
     * e.g.
     *
     * > v=c()
     * > table(v)
     * < table of extent 0 >
     * > count=c()
     * > library(entropy)
     * > entropy(count,count/sum(count),c("CS"),c("log"))
     * [1] 0
     *
     */
    List<Double> expectedOutput = new ArrayList<Double>();

    List<Tuple> output = this.getLinesForAlias(test, "data_out");
    verifyEqualEntropyOutput(expectedOutput, output, 5);
  }

  @Test
  public void singleElemInputBagChaoShenEntropoyTest() throws Exception
  {
    PigTest test = createPigTestFromString(entropy, "type=chaosh", "base=log");

    writeLinesToFile("input",
                     "98.94791");

    test.runScript();

    /* Add expected values, computed using R:
     *
     * e.g.
     *
     * > count=c(1)
     * > library(entropy)
     * > entropy(count,count/sum(count),c("CS"),c("log"))
     * [1] 0
     *
     */
    List<Double> expectedOutput = new ArrayList<Double>();
    expectedOutput.add(0.0);

    List<Tuple> output = this.getLinesForAlias(test, "data_out");
    verifyEqualEntropyOutput(expectedOutput, output, 5);
  }

  /**
  define Entropy datafu.pig.stats.entropy.Entropy('$type','$base');

  data = load 'input' as (x:chararray, y:double);
  --describe data;
  data_grouped = GROUP data ALL;
  data_out = FOREACH data_grouped {
    data_ordered = ORDER data BY x,y;
    GENERATE Entropy(data_ordered);
  };

  store data_out into 'output';
  */
  @Multiline private String pairEntropy;

  @Test
  public void dupPairValChaoShenEntropoyTest() throws Exception
  {
    PigTest test = createPigTestFromString(pairEntropy, "type=chaosh", "base=log");

    writeLinesToFile("input",
                     "hadoop	98.94791",
                     "bigdata	38.61010",
                     "hadoop	97.10575",
                     "datafu	32.05370",
                     "bigdata	38.61010",
                     "datafu	32.05370",
                     "datafu	32.05370",
                     "hadoop	38.61010",
                     "pig	96.10962",
                     "pig	20.41135");

    test.runScript();

    /* Add expected values, computed using R:
     *
     * e.g.
     * > t <- data.table(x=c("hadoop","bigdata","hadoop","datafu","bigdata","datafu","datafu","hadoop","pig","pig"),y=c(98.94791,38.61010,97.10575,32.05370,38.61010,32.05370,32.05370,38.61010,96.10962,20.41135))
     * > t <- t[order(x,y)]
     * > count<-c(2,3,1,1,1,1,1)
     * > library(entropy)
     * > entropy(count,count/sum(count),c("CS"),c("log"))
     * [1] 2.57429
     *
     */
    List<Double> expectedOutput = new ArrayList<Double>();
    expectedOutput.add(2.57429);

    List<Tuple> output = this.getLinesForAlias(test, "data_out");
    verifyEqualEntropyOutput(expectedOutput, output, 5);
  }

  @Test
  public void dupValChaoShenEntropoyLog2Test() throws Exception
  {
    PigTest test = createPigTestFromString(entropy,"type=chaosh", "base=log2");

    writeLinesToFile("input",
                     "98.94791",
                     "38.61010",
                     "97.10575",
                     "62.28313",
                     "38.61010",
                     "32.05370",
                     "96.10962",
                     "38.61010",
                     "96.10962",
                     "20.41135");

    test.runScript();

    /* Add expected values, computed using R:
     *
     * e.g.
     *
     * > v=c(98.94791,38.61010,97.10575,62.28313,38.61010,32.05370,96.10962,38.61010,96.10962,20.41135)
     * > table(v)
     * v
     * 20.41135 28.72388  32.0537  38.6101  38.8396 62.28313 96.10962 96.65888 97.10575 98.94791
     *        1        1        3        1        2        1        1
     * > count=c(1,1,3,1,2,1,1)
     * > freqs=count/sum(count)
     * > library(entropy)
     * > entropy(count,count/sum(count),c("CS"),c("log2"))
     * [1] 3.713915
     *
     */
    List<Double> expectedOutput = new ArrayList<Double>();
    expectedOutput.add(3.713915);

    List<Tuple> output = this.getLinesForAlias(test, "data_out");
    verifyEqualEntropyOutput(expectedOutput, output, 5);
  }

  @Test
  public void dupValChaoShenEntropoyLog10Test() throws Exception
  {
    PigTest test = createPigTestFromString(entropy, "type=chaosh", "base=log10");

    writeLinesToFile("input",
                     "98.94791",
                     "38.61010",
                     "97.10575",
                     "62.28313",
                     "38.61010",
                     "32.05370",
                     "96.10962",
                     "38.61010",
                     "96.10962",
                     "20.41135");

    test.runScript();

    /* Add expected values, computed using R:
     *
     * e.g.
     *
     * > v=c(98.94791,38.61010,97.10575,62.28313,38.61010,32.05370,96.10962,38.61010,96.10962,20.41135)
     * > table(v)
     * v
     * 20.41135 28.72388  32.0537  38.6101  38.8396 62.28313 96.10962 96.65888 97.10575 98.94791
     *        1        1        3        1        2        1        1
     * > count=c(1,1,3,1,2,1,1)
     * > library(entropy)
     * > entropy(count,count/sum(count),c("CS"),c("log10"))
     * [1] 1.118
     *
     */
    List<Double> expectedOutput = new ArrayList<Double>();
    expectedOutput.add(1.118);

    List<Tuple> output = this.getLinesForAlias(test, "data_out");
    verifyEqualEntropyOutput(expectedOutput, output, 5);
  }
}
package it.unibz.krdb.obda.reformulation.tests; /* * #%L * ontop-quest-owlapi3 * %% * Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import it.unibz.krdb.obda.io.ModelIOManager; import it.unibz.krdb.obda.model.OBDADataFactory; import it.unibz.krdb.obda.model.OBDAModel; import it.unibz.krdb.obda.model.impl.OBDADataFactoryImpl; import it.unibz.krdb.obda.owlrefplatform.core.QuestConstants; import it.unibz.krdb.obda.owlrefplatform.core.QuestPreferences; import it.unibz.krdb.obda.owlrefplatform.owlapi3.QuestOWL; import it.unibz.krdb.obda.owlrefplatform.owlapi3.QuestOWLConnection; import it.unibz.krdb.obda.owlrefplatform.owlapi3.QuestOWLFactory; import it.unibz.krdb.obda.owlrefplatform.owlapi3.QuestOWLResultSet; import it.unibz.krdb.obda.owlrefplatform.owlapi3.QuestOWLStatement; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import java.sql.Statement; import java.util.Properties; import junit.framework.TestCase; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.OWLIndividual; import org.semanticweb.owlapi.model.OWLLiteral; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyManager; import org.semanticweb.owlapi.reasoner.SimpleConfiguration; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; /*** * A simple test that check if the system is able to handle Mappings for * classes/roles and attributes even if there are no URI templates. i.e., the * database stores URI's directly. * * We are going to create an H2 DB, the .sql file is fixed. We will map directly * there and then query on top. */ public class SimpleMappingVirtualABoxTest extends TestCase { // TODO We need to extend this test to import the contents of the mappings // into OWL and repeat everything taking form OWL private OBDADataFactory fac; private Connection conn; Logger log = LoggerFactory.getLogger(this.getClass()); private OBDAModel obdaModel; private OWLOntology ontology; final String owlfile = "src/test/resources/test/simplemapping.owl"; final String obdafile = "src/test/resources/test/simplemapping.obda"; @Override public void setUp() throws Exception { /* * Initializing and H2 database with the stock exchange data */ // String driver = "org.h2.Driver"; String url = "jdbc:h2:mem:questjunitdb"; String username = "sa"; String password = ""; fac = OBDADataFactoryImpl.getInstance(); conn = DriverManager.getConnection(url, username, password); Statement st = conn.createStatement(); FileReader reader = new FileReader("src/test/resources/test/simplemapping-create-h2.sql"); BufferedReader in = new BufferedReader(reader); StringBuilder bf = new StringBuilder(); String line = in.readLine(); while (line != null) { bf.append(line); line = in.readLine(); } in.close(); st.executeUpdate(bf.toString()); conn.commit(); // Loading the OWL file OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); ontology = manager.loadOntologyFromOntologyDocument((new File(owlfile))); // Loading the OBDA data obdaModel = fac.getOBDAModel(); ModelIOManager ioManager = new ModelIOManager(obdaModel); ioManager.load(obdafile); } @Override public void tearDown() throws Exception { dropTables(); conn.close(); } private void dropTables() throws SQLException, IOException { Statement st = 
conn.createStatement(); FileReader reader = new FileReader("src/test/resources/test/simplemapping-drop-h2.sql"); BufferedReader in = new BufferedReader(reader); StringBuilder bf = new StringBuilder(); String line = in.readLine(); while (line != null) { bf.append(line); line = in.readLine(); } in.close(); st.executeUpdate(bf.toString()); st.close(); conn.commit(); } private void runTests(Properties p) throws Exception { // Creating a new instance of the reasoner QuestOWLFactory factory = new QuestOWLFactory(); factory.setOBDAController(obdaModel); factory.setPreferenceHolder(p); QuestOWL reasoner = factory.createReasoner(ontology, new SimpleConfiguration()); // Now we are ready for querying QuestOWLConnection conn = reasoner.getConnection(); QuestOWLStatement st = conn.createStatement(); String query = "PREFIX : <http://it.unibz.krdb/obda/test/simple#> SELECT * WHERE { ?x a :A; :P ?y; :U ?z; :P ?y; :U ?z; :P ?y ; :U ?z; :P ?y; :U ?z; :P ?y ; :U ?z; :P ?y; :U ?z; :P ?y ; :U ?z; :P ?y; :U ?z; :P ?y ; :U ?z; :P ?y; :U ?z; :P ?y ; :U ?z; :P ?y; :U ?z; :P ?y ; :U ?z; :P ?y; :U ?z; :P ?y ; :U ?z }"; try { /* * Enable this if you want to test performance, it will run several cycles */ // long start = System.currentTimeMillis(); // for (int i = 0; i < 3000; i++) { // QuestQuestOWLStatement sto = (QuestQuestOWLStatement)st; // String q = sto.getUnfolding(bf.insert(7, ' ').toString()); // } // long end = System.currentTimeMillis(); // long elapsed = end-start; // log.info("Elapsed time: {}", elapsed); QuestOWLResultSet rs = st.executeTuple(query); assertTrue(rs.nextRow()); OWLIndividual ind1 = rs.getOWLIndividual("x"); OWLIndividual ind2 = rs.getOWLIndividual("y"); OWLLiteral val = rs.getOWLLiteral("z"); assertEquals("<uri1>", ind1.toString()); assertEquals("<uri1>", ind2.toString()); assertEquals("\"value1\"", val.toString()); } catch (Exception e) { throw e; } finally { try { st.close(); } catch (Exception e) { throw e; } finally { conn.close(); reasoner.dispose(); } } } 
public void testViEqSig() throws Exception { QuestPreferences p = new QuestPreferences(); p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL); p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true"); p.setCurrentValueOf(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true"); runTests(p); } public void testClassicEqSig() throws Exception { QuestPreferences p = new QuestPreferences(); p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.CLASSIC); p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true"); p.setCurrentValueOf(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true"); p.setCurrentValueOf(QuestPreferences.OBTAIN_FROM_MAPPINGS, "true"); runTests(p); } }
package cz.muni.sci.astro.fhm.gui; import cz.muni.sci.astro.fhm.core.MultipleOperationsRunner; import cz.muni.sci.astro.fhm.core.Operation; import cz.muni.sci.astro.fhm.core.OperationShift; import cz.muni.sci.astro.fits.FitsCard; import cz.muni.sci.astro.fits.FitsCardDateValue; import cz.muni.sci.astro.fits.FitsCardDateValueUnknownFormatException; import javafx.application.Platform; import javafx.collections.FXCollections; import javafx.concurrent.Task; import javafx.concurrent.WorkerStateEvent; import javafx.event.EventHandler; import javafx.fxml.FXML; import javafx.scene.control.Alert.AlertType; import javafx.scene.control.*; import javafx.scene.input.KeyCode; import javafx.scene.input.KeyEvent; import javafx.stage.Stage; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.util.ArrayList; import java.util.List; import java.util.StringJoiner; import java.util.stream.Collectors; /** * Controller for multiple editing cards * * @author Jan Hlava, 395986 */ public class MultipleEditController { private static final String LOG_FILE = "multiple_operations.log"; private static final String LOGGER_FAILED_TEXT = "Logger failed. 
No log records will be available."; private static final long MAX_DISPLAYED_CHARS = 100000; private MainViewController mainViewController; private RandomAccessFile logFile; private MultipleOperationsRunner operationsRunner; private boolean allOK; @FXML private Button buttonExecute; @FXML private Button buttonExecuteQuit; @FXML private Button buttonDeselectAll; @FXML private Button buttonSelectAll; @FXML private Button buttonInvertSelection; @FXML private Button buttonAddCard; @FXML private Button buttonRemoveCard; @FXML private Button buttonChangeCard; @FXML private Button buttonConcatenate; @FXML private Button buttonShift; @FXML private Button buttonAddJD; @FXML private ListView<String> listViewFiles; @FXML private TextArea textAreaLog; @FXML private ListView<Operation> listViewOperationQueue; @FXML private Button buttonOperationQueueMoveUp; @FXML private Button buttonOperationQueueMoveDown; @FXML private Button buttonOperationQueueRemove; @FXML private Button buttonOperationQueueShow; @FXML private TextField textFieldAddCardKeyword; @FXML private TextField textFieldAddCardRValue; @FXML private TextField textFieldAddCardIValue; @FXML private TextField textFieldAddCardComment; @FXML private TextField textFieldAddCardIndex; @FXML private TextField textFieldAddCardAfterKeyword; @FXML private CheckBox checkBoxAddCardRValue; @FXML private CheckBox checkBoxAddCardIValue; @FXML private CheckBox checkBoxAddCardComment; @FXML private CheckBox checkBoxAddCardPosition; @FXML private CheckBox checkBoxAddCardUpdate; @FXML private Toggle toggleAddCardIndex; @FXML private Toggle toggleAddCardAfterCard; @FXML private ToggleGroup toggleGroupAddCardPosition; @FXML private TextField textFieldRemoveCardKeyword; @FXML private TextField textFieldRemoveCardIndex; @FXML private Toggle toggleRemoveCardKeyword; @FXML private Toggle toggleRemoveCardIndex; @FXML private ToggleGroup toggleGroupRemoveCard; @FXML private TextField textFieldChangeCardKeyword; @FXML private TextField 
textFieldChangeCardNewKeyword; @FXML private TextField textFieldChangeCardRValue; @FXML private TextField textFieldChangeCardIValue; @FXML private TextField textFieldChangeCardComment; @FXML private TextField textFieldChangeCardIndex; @FXML private CheckBox checkBoxChangeCardNewKeyword; @FXML private CheckBox checkBoxChangeCardRValue; @FXML private CheckBox checkBoxChangeCardIValue; @FXML private CheckBox checkBoxChangeCardComment; @FXML private CheckBox checkBoxChangeCardIndex; @FXML private CheckBox checkBoxChangeCardDelete; @FXML private TextField textFieldConcatenateKeyword; @FXML private TextField textFieldConcatenateKeywordValue; @FXML private TextField textFieldConcatenateString; @FXML private ListView<ConcatenateValue> listViewConcatenationValues; @FXML private CheckBox checkBoxConcatenateUpdate; @FXML private Button buttonConcatenateAddKeywordValue; @FXML private Button buttonConcatenateAddString; @FXML private Button buttonConcatenateRemove; @FXML private Button buttonConcatenateMoveUp; @FXML private Button buttonConcatenateMoveDown; @FXML private TextField textFieldShiftKeyword; @FXML private TextField textFieldShiftYears; @FXML private TextField textFieldShiftMonths; @FXML private TextField textFieldShiftDays; @FXML private TextField textFieldShiftHours; @FXML private TextField textFieldShiftMinutes; @FXML private TextField textFieldShiftSeconds; @FXML private TextField textFieldShiftMilliseconds; @FXML private TextField textFieldShiftMicroseconds; @FXML private TextField textFieldJDKeyword; @FXML private TextField textFieldJDSourceKeyword; @FXML private TextField textFieldJDSourceDatetime; @FXML private CheckBox checkBoxJDUpdate; @FXML private Toggle toggleJDSourceKeyword; @FXML private Toggle toggleJDSourceDatetime; @FXML private ToggleGroup toggleGroupJD; /** * Prepares this form * * @param files file list to working with * @param mainViewController MainViewController controller */ public void prepareWindow(List<String> files, MainViewController 
mainViewController) { this.mainViewController = mainViewController; try { logFile = new RandomAccessFile(new File(LOG_FILE), "rw"); logFile.setLength(0); } catch (IOException exc) { textAreaLog.setText(LOGGER_FAILED_TEXT); closeLogs(); } operationsRunner = new MultipleOperationsRunner(this::printOK, this::printError); GUIHelpers.modifyMenuItem(mainViewController, MenuBarController.MENU_FILE, MenuBarController.MENU_ITEM_FILE_SAVE, false, e -> handleClickButtonExecute()); GUIHelpers.modifyMenuItem(mainViewController, MenuBarController.MENU_FILE, MenuBarController.MENU_ITEM_FILE_EXIT, false, e -> quitWithConfirmation()); GUIHelpers.modifyMenuItem(mainViewController, MenuBarController.MENU_MODES, MenuBarController.MENU_ITEM_MODES_SINGLE, false, e -> switchMode()); GUIHelpers.modifyMenuItem(mainViewController, MenuBarController.MENU_MODES, MenuBarController.MENU_ITEM_MODES_MULTIPLE, true, null); buttonExecute.getScene().getWindow().setOnCloseRequest(e -> { e.consume(); quitWithConfirmation(); }); if (files.isEmpty()) { buttonSelectAll.setDisable(true); buttonExecute.setDisable(true); buttonExecuteQuit.setDisable(true); } listViewFiles.setCellFactory(listView -> new ListCell<String>() { @Override protected void updateItem(String item, boolean empty) { super.updateItem(item, empty); if (item != null) { setText(new File(item).getName()); } } }); listViewFiles.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE); listViewFiles.getSelectionModel().selectedItemProperty().addListener((ov, before, after) -> { buttonDeselectAll.setDisable(after == null); buttonInvertSelection.setDisable(after == null); buttonAddCard.setDisable(after == null); buttonRemoveCard.setDisable(after == null); buttonChangeCard.setDisable(after == null); buttonConcatenate.setDisable(after == null); buttonShift.setDisable(after == null); buttonAddJD.setDisable(after == null); }); listViewFiles.setPlaceholder(new Label("No files")); listViewFiles.setItems(FXCollections.observableArrayList(files)); 
listViewFiles.getSelectionModel().selectAll(); listViewOperationQueue.setPlaceholder(new Label("No queued operations")); listViewOperationQueue.getSelectionModel().selectedItemProperty().addListener((ov, before, after) -> { buttonOperationQueueRemove.setDisable(after == null); buttonOperationQueueShow.setDisable(after == null); }); listViewOperationQueue.getSelectionModel().selectedIndexProperty().addListener((ov, before, after) -> { GUIHelpers.checkMoveButtons(listViewOperationQueue.getSelectionModel().getSelectedIndex(), listViewOperationQueue.getItems().size(), buttonOperationQueueMoveUp, buttonOperationQueueMoveDown); }); listViewOperationQueue.setItems(FXCollections.observableList(operationsRunner.getOperations())); listViewConcatenationValues.setPlaceholder(new Label("No values")); listViewConcatenationValues.getSelectionModel().selectedItemProperty().addListener((ov, before, after) -> { buttonConcatenateRemove.setDisable(after == null); }); listViewConcatenationValues.getSelectionModel().selectedIndexProperty().addListener((ov, before, after) -> { GUIHelpers.checkMoveButtons(listViewConcatenationValues.getSelectionModel().getSelectedIndex(), listViewConcatenationValues.getItems().size(), buttonConcatenateMoveUp, buttonConcatenateMoveDown); }); textFieldConcatenateKeywordValue.setOnKeyPressed(keyEvent -> { if (keyEvent.getCode() == KeyCode.ENTER) { buttonConcatenateAddKeywordValue.fire(); keyEvent.consume(); } }); textFieldConcatenateString.setOnKeyPressed(keyEvent -> { if (keyEvent.getCode() == KeyCode.ENTER) { buttonConcatenateAddString.fire(); keyEvent.consume(); } }); EventHandler<KeyEvent> ignoreEnter = keyEvent -> { if (keyEvent.getCode() == KeyCode.ENTER) { keyEvent.consume(); } }; textFieldAddCardKeyword.setOnKeyPressed(ignoreEnter); textFieldAddCardRValue.setOnKeyPressed(ignoreEnter); textFieldAddCardIValue.setOnKeyPressed(ignoreEnter); textFieldAddCardComment.setOnKeyPressed(ignoreEnter); textFieldAddCardIndex.setOnKeyPressed(ignoreEnter); 
textFieldAddCardAfterKeyword.setOnKeyPressed(ignoreEnter); textFieldRemoveCardKeyword.setOnKeyPressed(ignoreEnter); textFieldRemoveCardIndex.setOnKeyPressed(ignoreEnter); textFieldChangeCardKeyword.setOnKeyPressed(ignoreEnter); textFieldChangeCardNewKeyword.setOnKeyPressed(ignoreEnter); textFieldChangeCardRValue.setOnKeyPressed(ignoreEnter); textFieldChangeCardIValue.setOnKeyPressed(ignoreEnter); textFieldChangeCardComment.setOnKeyPressed(ignoreEnter); textFieldChangeCardIndex.setOnKeyPressed(ignoreEnter); textFieldConcatenateKeyword.setOnKeyPressed(ignoreEnter); textFieldShiftKeyword.setOnKeyPressed(ignoreEnter); textFieldShiftYears.setOnKeyPressed(ignoreEnter); textFieldShiftMonths.setOnKeyPressed(ignoreEnter); textFieldShiftDays.setOnKeyPressed(ignoreEnter); textFieldShiftHours.setOnKeyPressed(ignoreEnter); textFieldShiftMinutes.setOnKeyPressed(ignoreEnter); textFieldShiftSeconds.setOnKeyPressed(ignoreEnter); textFieldShiftMilliseconds.setOnKeyPressed(ignoreEnter); textFieldShiftMicroseconds.setOnKeyPressed(ignoreEnter); textFieldJDKeyword.setOnKeyPressed(ignoreEnter); textFieldJDSourceKeyword.setOnKeyPressed(ignoreEnter); textFieldJDSourceDatetime.setOnKeyPressed(ignoreEnter); textFieldAddCardRValue.textProperty().addListener((observable, oldValue, newValue) -> checkBoxAddCardRValue.setSelected(true)); textFieldAddCardIValue.textProperty().addListener((observable, oldValue, newValue) -> checkBoxAddCardIValue.setSelected(true)); textFieldAddCardComment.textProperty().addListener((observable, oldValue, newValue) -> checkBoxAddCardComment.setSelected(true)); textFieldAddCardIndex.textProperty().addListener((observable, oldValue, newValue) -> { checkBoxAddCardPosition.setSelected(true); toggleAddCardIndex.setSelected(true); }); textFieldAddCardAfterKeyword.textProperty().addListener((observable, oldValue, newValue) -> { checkBoxAddCardPosition.setSelected(true); toggleAddCardAfterCard.setSelected(true); }); 
textFieldRemoveCardKeyword.textProperty().addListener((observable, oldValue, newValue) -> toggleRemoveCardKeyword.setSelected(true)); textFieldRemoveCardIndex.textProperty().addListener((observable, oldValue, newValue) -> toggleRemoveCardIndex.setSelected(true)); textFieldChangeCardNewKeyword.textProperty().addListener((observable, oldValue, newValue) -> checkBoxChangeCardNewKeyword.setSelected(true)); textFieldChangeCardRValue.textProperty().addListener((observable, oldValue, newValue) -> checkBoxChangeCardRValue.setSelected(true)); textFieldChangeCardIValue.textProperty().addListener((observable, oldValue, newValue) -> checkBoxChangeCardIValue.setSelected(true)); textFieldChangeCardComment.textProperty().addListener((observable, oldValue, newValue) -> checkBoxChangeCardComment.setSelected(true)); textFieldChangeCardIndex.textProperty().addListener((observable, oldValue, newValue) -> checkBoxChangeCardIndex.setSelected(true)); textFieldJDSourceKeyword.textProperty().addListener((observable, oldValue, newValue) -> toggleJDSourceKeyword.setSelected(true)); textFieldJDSourceDatetime.textProperty().addListener((observable, oldValue, newValue) -> toggleJDSourceDatetime.setSelected(true)); } /** * Handles click on button Execute - tries to execute operations, if there are some problems, shows them */ @FXML private void handleClickButtonExecute() { executeOperations(false); } /** * Handles click on button Execute and quit - tries to execute operations, if there are some problems, shows them, in case of no problems quits */ @FXML private void handleClickButtonExecuteQuit() { executeOperations(true); } /** * Handles click on button Deselect all - deselects all files */ @FXML private void handleClickButtonDeselectAll() { listViewFiles.getSelectionModel().clearSelection(); listViewFiles.requestFocus(); } /** * Handles click on button Select all - selects all files */ @FXML private void handleClickButtonSelectAll() { listViewFiles.getSelectionModel().selectAll(); 
listViewFiles.requestFocus(); } /** * Handles click on button Invert selection - inverts selection */ @FXML private void handleClickButtonInvertSelection() { for (int i = 0; i < listViewFiles.getItems().size(); i++) { if (listViewFiles.getSelectionModel().isSelected(i)) { listViewFiles.getSelectionModel().clearSelection(i); } else { listViewFiles.getSelectionModel().select(i); } } listViewFiles.requestFocus(); } /** * Handles click on add card - add cards with given values */ @FXML private void handleClickButtonAddCard() { String keyword = textFieldAddCardKeyword.getText(); String rValue; if (checkBoxAddCardRValue.isSelected()) { rValue = textFieldAddCardRValue.getText(); } else { rValue = null; } String iValue; if (checkBoxAddCardIValue.isSelected()) { iValue = textFieldAddCardIValue.getText(); } else { iValue = null; } String comment; if (checkBoxAddCardComment.isSelected()) { comment = textFieldAddCardComment.getText(); } else { comment = null; } Integer index = null; String afterKeyword = null; if (checkBoxAddCardPosition.isSelected()) { if (toggleGroupAddCardPosition.getSelectedToggle() == toggleAddCardIndex) { index = parseInt(textFieldAddCardIndex.getText(), "Index"); if (index == null) { return; } } else { afterKeyword = textFieldAddCardAfterKeyword.getText(); } } boolean update = checkBoxAddCardUpdate.isSelected(); printOperationAddQueueStartInfo("Preparing operation of adding new card:"); operationsRunner.addCard(listViewFiles.getSelectionModel().getSelectedItems(), keyword, rValue, iValue, comment, index, afterKeyword, update); printOperationAddQueueEndInfo(); } /** * Handles click on remove card - remove cards with given values */ @FXML private void handleClickButtonRemoveCard() { String keyword; Integer index; if (toggleGroupRemoveCard.getSelectedToggle() == toggleRemoveCardKeyword) { keyword = textFieldRemoveCardKeyword.getText(); index = null; } else { keyword = null; index = parseInt(textFieldRemoveCardIndex.getText(), "Index"); if (index == null) { 
return; } } printOperationAddQueueStartInfo("Preparing operation of deleting card:"); operationsRunner.removeCard(listViewFiles.getSelectionModel().getSelectedItems(), keyword, index); printOperationAddQueueEndInfo(); } /** * Handles click on change card - change cards with given values */ @FXML private void handleClickButtonChangeCard() { String keyword = textFieldChangeCardKeyword.getText(); String newKeyword; if (checkBoxChangeCardNewKeyword.isSelected()) { newKeyword = textFieldChangeCardNewKeyword.getText(); } else { newKeyword = null; } String rValue; if (checkBoxChangeCardRValue.isSelected()) { rValue = textFieldChangeCardRValue.getText(); } else { rValue = null; } String iValue; if (checkBoxChangeCardIValue.isSelected()) { iValue = textFieldChangeCardIValue.getText(); } else { iValue = null; } String comment; if (checkBoxChangeCardComment.isSelected()) { comment = textFieldChangeCardComment.getText(); } else { comment = null; } Integer index = null; if (checkBoxChangeCardIndex.isSelected()) { index = parseInt(textFieldChangeCardIndex.getText(), "Index"); if (index == null) { return; } } boolean delete = checkBoxChangeCardDelete.isSelected(); printOperationAddQueueStartInfo("Preparing operation of changing new card:"); if (checkBoxChangeCardIndex.isSelected()) { operationsRunner.changeIndexCard(listViewFiles.getSelectionModel().getSelectedItems(), keyword, index); } if (checkBoxChangeCardNewKeyword.isSelected() || checkBoxChangeCardRValue.isSelected() || checkBoxChangeCardIValue.isSelected() || checkBoxChangeCardComment.isSelected()) { operationsRunner.changeCard(listViewFiles.getSelectionModel().getSelectedItems(), keyword, newKeyword, rValue, iValue, comment, delete); } printOperationAddQueueEndInfo(); } /** * Handles click on concatenate - concatenate values */ @FXML private void handleClickButtonConcatenate() { String keyword = textFieldConcatenateKeyword.getText(); List<String> values = new ArrayList<>(listViewConcatenationValues.getItems().size()); 
values.addAll(listViewConcatenationValues.getItems().stream().map(ConcatenateValue::toExport).collect(Collectors.toList())); boolean update = checkBoxConcatenateUpdate.isSelected(); if (values.isEmpty()) { GUIHelpers.showAlert(AlertType.ERROR, "Concatenating", "At least one concatenation value must be inserted.", ""); return; } printOperationAddQueueStartInfo("Preparing operation of concatenating values:"); operationsRunner.concatenate(listViewFiles.getSelectionModel().getSelectedItems(), keyword, values, "", update); printOperationAddQueueEndInfo(); } /** * Handles click on button add keyword values - add keyword value to concatenation */ @FXML private void handleClickButtonConcatenateAddKeywordValue() { ConcatenateValue value = new ConcatenateValue(ConcatenateType.KEYWORD_VALUE, textFieldConcatenateKeywordValue.getText()); listViewConcatenationValues.getItems().add(value); } /** * Handles click on button add string values - add string value to concatenation */ @FXML private void handleClickButtonConcatenateAddString() { FitsCard card = new FitsCard(); card.setKeyword("CONCAT"); card.setRValue(textFieldConcatenateString.getText()); if (!card.validate().isEmpty()) { GUIHelpers.showAlert(AlertType.ERROR, "Concatenating", "Value " + textFieldConcatenateString.getText() + " contains characters forbidden by FITS standard.", ""); return; } ConcatenateValue value = new ConcatenateValue(ConcatenateType.STRING, textFieldConcatenateString.getText()); listViewConcatenationValues.getItems().add(value); } /** * Handles click on button move down - moves down concatenated values */ @FXML private void handleClickButtonConcatenateMoveDown() { moveSelectedConcatenationValue(1); listViewConcatenationValues.getSelectionModel().selectNext(); } /** * Handles click on button move up - moves up concatenated values */ @FXML private void handleClickButtonConcatenateMoveUp() { moveSelectedConcatenationValue(-1); listViewConcatenationValues.getSelectionModel().selectPrevious(); } /** * Moves 
selected value in list view with given offset * * @param offset how many lines values should be moved */ private void moveSelectedConcatenationValue(int offset) { int selectedIndex = listViewConcatenationValues.getSelectionModel().getSelectedIndex(); ConcatenateValue movedValue = listViewConcatenationValues.getItems().set(selectedIndex + offset, listViewConcatenationValues.getSelectionModel().getSelectedItem()); listViewConcatenationValues.getItems().set(selectedIndex, movedValue); } /** * Handles click on button remove - removes concatenated value */ @FXML private void handleClickButtonConcatenateRemove() { listViewConcatenationValues.getItems().remove(listViewConcatenationValues.getSelectionModel().getSelectedIndex()); } /** * Handles click on add JD - add cards with julian day */ @FXML private void handleClickButtonShift() { String keyword = textFieldShiftKeyword.getText(); List<String> intervals = new ArrayList<>(); Integer interval; String value = textFieldShiftYears.getText().trim(); if (!value.isEmpty()) { interval = parseInt(value, "Years"); if (interval == null) { return; } else { intervals.add(OperationShift.PREFIX_YEAR + interval); } } value = textFieldShiftMonths.getText().trim(); if (!value.isEmpty()) { interval = parseInt(value, "Months"); if (interval == null) { return; } else { intervals.add(OperationShift.PREFIX_MONTH + interval); } } value = textFieldShiftDays.getText().trim(); if (!value.isEmpty()) { interval = parseInt(value, "Days"); if (interval == null) { return; } else { intervals.add(OperationShift.PREFIX_DAY + interval); } } value = textFieldShiftHours.getText().trim(); if (!value.isEmpty()) { interval = parseInt(value, "Hours"); if (interval == null) { return; } else { intervals.add(OperationShift.PREFIX_HOUR + interval); } } value = textFieldShiftMinutes.getText().trim(); if (!value.isEmpty()) { interval = parseInt(value, "Minutes"); if (interval == null) { return; } else { intervals.add(OperationShift.PREFIX_MINUTE + interval); } } 
value = textFieldShiftSeconds.getText().trim(); if (!value.isEmpty()) { interval = parseInt(value, "Seconds"); if (interval == null) { return; } else { intervals.add(OperationShift.PREFIX_SECOND + interval); } } value = textFieldShiftMilliseconds.getText().trim(); if (!value.isEmpty()) { interval = parseInt(value, "Milliseconds"); if (interval == null) { return; } else { intervals.add(OperationShift.PREFIX_MILLISECOND + interval); } } value = textFieldShiftMicroseconds.getText().trim(); if (!value.isEmpty()) { interval = parseInt(value, "Microseconds"); if (interval == null) { return; } else { intervals.add(OperationShift.PREFIX_MICROSECOND + interval); } } if (intervals.isEmpty()) { GUIHelpers.showAlert(AlertType.ERROR, "Shifting", "At least one time parameter must be specified", ""); return; } printOperationAddQueueStartInfo("Preparing operation of shifting values:"); operationsRunner.shift(listViewFiles.getSelectionModel().getSelectedItems(), keyword, intervals); printOperationAddQueueEndInfo(); } /** * Handles click on add JD - add cards with julian day */ @FXML private void handleClickButtonAddJD() { String keyword = textFieldJDKeyword.getText(); String sourceKeyword; String dateTime; if (toggleGroupJD.getSelectedToggle() == toggleJDSourceKeyword) { sourceKeyword = textFieldJDSourceKeyword.getText(); dateTime = null; } else { sourceKeyword = null; dateTime = textFieldJDSourceDatetime.getText(); try { FitsCardDateValue.createFromDateString(dateTime); } catch (FitsCardDateValueUnknownFormatException exc) { GUIHelpers.showAlert(AlertType.ERROR, "Invalid input", "Date time has unknown date time format", ""); return; } } boolean update = checkBoxJDUpdate.isSelected(); printOperationAddQueueStartInfo("Preparing operation of adding card with julian day:"); operationsRunner.jd(listViewFiles.getSelectionModel().getSelectedItems(), keyword, sourceKeyword, dateTime, update); printOperationAddQueueEndInfo(); } /** * Handles click on button move down - moves down operation 
*/ @FXML private void handleClickButtonOperationQueueMoveDown() { moveSelectedOperation(1); } /** * Handles click on button move up - moves up operation */ @FXML private void handleClickButtonOperationQueueMoveUp() { moveSelectedOperation(-1); } /** * Moves selected operation in list view with given offset * * @param offset how many lines operation should be moved */ private void moveSelectedOperation(int offset) { int selectedIndex = listViewOperationQueue.getSelectionModel().getSelectedIndex(); operationsRunner.swapOperations(listViewOperationQueue.getItems().get(selectedIndex), listViewOperationQueue.getItems().get(selectedIndex + offset)); listViewOperationQueue.setItems(FXCollections.observableList(operationsRunner.getOperations())); } /** * Handles click on button remove - removes operation */ @FXML private void handleClickButtonOperationQueueRemove() { Operation operation = listViewOperationQueue.getSelectionModel().getSelectedItem(); operationsRunner.removeOperation(operation); listViewOperationQueue.setItems(FXCollections.observableList(operationsRunner.getOperations())); } /** * Handles click on button show - show files affected by selected operation */ @FXML private void handleClickButtonOperationQueueShowFiles() { StringJoiner joiner = new StringJoiner(", "); List<String> files = operationsRunner.getFilesForOperation(listViewOperationQueue.getSelectionModel().getSelectedItem()); for (String file : files) { joiner.add(new File(file).getName()); } GUIHelpers.showAlert(AlertType.INFORMATION, "Affected files by operation", "These files will be affected by operation", joiner.toString()); } /** * Executes operations in queue * * @param quitAfter if after executing app should quit (in case of no problems during executing operations) */ private void executeOperations(boolean quitAfter) { printOK(); printOK("Executing operations on files."); Task<Boolean> task = new TaskOperationRunner<Boolean>() { @Override protected Boolean call() throws Exception { allOK = 
true; operationsRunner.executeOperations(this::updateProgress, this::updateTitle, this::isCancelled); return allOK; } }; Stage progressInfo = GUIHelpers.createProgressInfo("Executing operations. Please wait.", task); task.setOnSucceeded(event -> { progressInfo.close(); listViewOperationQueue.setItems(null); refreshLogs(); if (task.getValue()) { GUIHelpers.showAlert(AlertType.INFORMATION, "Executing operations", "Operations on all files were executed successfully.", null); if (quitAfter) { quit(); } } else { GUIHelpers.showAlert(AlertType.ERROR, "Executing operations", "Problems with executing operations.", "Details are in log."); } }); EventHandler<WorkerStateEvent> failAction = event -> { progressInfo.close(); listViewOperationQueue.setItems(FXCollections.observableList(operationsRunner.getOperations())); refreshLogs(); GUIHelpers.showAlert(AlertType.ERROR, "Executing operations", "Problems with executing operations.", "Not all operations were executed. You can rerun operations."); }; task.setOnFailed(failAction); task.setOnCancelled(failAction); progressInfo.show(); Thread thread = new Thread(task); thread.start(); } /** * Refreshes log viewer */ private void refreshLogs() { try { byte[] bytes = new byte[(int) MAX_DISPLAYED_CHARS]; logFile.seek(Math.max(logFile.length() - MAX_DISPLAYED_CHARS, 0l)); int readChars = logFile.read(bytes); if (readChars != -1) { textAreaLog.setText((new String(bytes)).trim()); } else { textAreaLog.setText("(Empty)"); } textAreaLog.end(); } catch (IOException exc) { textAreaLog.setText(LOGGER_FAILED_TEXT); } } /** * Closes logs */ public void closeLogs() { if (logFile != null) { try { logFile.close(); } catch (IOException ignored) { } // I want log read to close, but if not, so what i can do with that - nothing } } /** * Closes entire app */ private void quit() { closeLogs(); Platform.exit(); System.exit(0); } /** * Shows confirmation of exit (if it is necessary) and then maybe closes entire app */ @FXML private void 
quitWithConfirmation() { if (operationsRunner.getOperations().isEmpty()) { quit(); return; } GUIHelpers.showAlert(AlertType.CONFIRMATION, "Confirm exit", "Exiting application", "Are you sure you want to exit application? You lost all not executed operations.").ifPresent(response -> { if (response == ButtonType.OK) { quit(); } }); } /** * Prints message to output * * @param partsMessage parts of message which should be printed */ private void printOK(String... partsMessage) { StringBuilder stringBuilder = new StringBuilder(); for (String part : partsMessage) { stringBuilder.append(part); } stringBuilder.append(System.lineSeparator()); try { logFile.write(stringBuilder.toString().getBytes()); } catch (IOException ignored) // Truncate logs { } } /** * Prints error message to output * * @param partsMessage parts of message which should be printed */ private void printError(String... partsMessage) { allOK = false; StringBuilder stringBuilder = new StringBuilder("ERROR: "); for (String part : partsMessage) { stringBuilder.append(part); } stringBuilder.append(System.lineSeparator()); try { logFile.write(stringBuilder.toString().getBytes()); } catch (IOException ignored) // Truncate logs { } } /** * Try parse number from textField * * @param number number to parse * @param textFieldName name of textfield used for error alert * @return parsed number or null in case of parsing failure */ private Integer parseInt(String number, String textFieldName) { Integer result = null; try { result = Integer.parseInt(number); } catch (NumberFormatException exc) { GUIHelpers.showAlert(AlertType.ERROR, "Invalid input", textFieldName + " is not valid integer", ""); } return result; } /** * Prints info about start of adding operation into queue * * @param text info */ private void printOperationAddQueueStartInfo(String text) { allOK = true; printOK(text); } /** * Prints operation info about end of adding operation into queue - show success and refreshes logs */ private void 
printOperationAddQueueEndInfo() { printOK(); refreshLogs(); listViewOperationQueue.setItems(FXCollections.observableList(operationsRunner.getOperations())); if (allOK) { GUIHelpers.showAlert(AlertType.INFORMATION, "Operation summary", "Adding operation into queue was completed without errors.", ""); } else { GUIHelpers.showAlert(AlertType.ERROR, "Operation summary", "Some errors occur during adding last operation into queue. Check logs.", ""); } } /** * Switches mode to single operation */ private void switchMode() { if (listViewFiles.getItems().size() > 50) { GUIHelpers.showAlert(AlertType.INFORMATION, "Switch to single operation mode", "Switching cannot be done", "Single operation mode is not for too much open files, it was intended only for few files, where you want exact editing and WYSIWYG editor."); return; } if (operationsRunner.getOperations().isEmpty()) { openEditFrom(); return; } GUIHelpers.showAlert(AlertType.CONFIRMATION, "Confirm switching mode", "Switching modes", "Are you sure you want to switch modes? You lost all not executed operations.").ifPresent(response -> { if (response == ButtonType.OK) { openEditFrom(); } }); } /** * Opens single edit form */ private void openEditFrom() { EditController editController = (EditController) mainViewController.setContent("fxml/Edit.fxml"); closeLogs(); editController.prepareWindow(listViewFiles.getItems(), mainViewController); } }
package tutorial.tree_icon_demo;

/*
 * Copyright (c) 1995, 2008, Oracle and/or its affiliates. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *   - Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *
 *   - Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 *
 *   - Neither the name of Oracle or the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
 * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/**
 * A 1.4 application that requires the following additional files:
 *   TreeDemoHelp.html
 *   arnold.html
 *   bloch.html
 *   chan.html
 *   jls.html
 *   swingtutorial.html
 *   tutorial.html
 *   tutorialcont.html
 *   vm.html
 */

import javax.swing.JEditorPane;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTree;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreeSelectionModel;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultTreeCellRenderer;
import javax.swing.ImageIcon;
import java.net.URL;
import java.io.IOException;
import java.awt.Dimension;
import java.awt.GridLayout;

/**
 * Demo panel showing a {@link JTree} of book titles with a custom leaf icon;
 * selecting a leaf displays the corresponding HTML page in an editor pane.
 */
public class TreeIconDemo extends JPanel implements TreeSelectionListener {
    /** Pane that renders the HTML page for the selected book (read-only). */
    private JEditorPane htmlPane;
    /** Tree of book categories and titles (single selection). */
    private JTree tree;
    /** URL of the default help page shown for non-leaf selections. */
    private URL helpURL;
    /** Enables console diagnostics when true. */
    private static final boolean DEBUG = false;

    /**
     * Builds the tree and HTML pane and lays them out in a vertical split pane.
     */
    public TreeIconDemo() {
        super(new GridLayout(1, 0));

        //Create the nodes.
        DefaultMutableTreeNode top =
            new DefaultMutableTreeNode("The Java Series");
        createNodes(top);

        //Create a tree that allows one selection at a time.
        tree = new JTree(top);
        tree.getSelectionModel().setSelectionMode
                (TreeSelectionModel.SINGLE_TREE_SELECTION);

        //Set the icon for leaf nodes.
        ImageIcon leafIcon = createImageIcon("images/middle.gif");
        if (leafIcon != null) {
            DefaultTreeCellRenderer renderer =
                new DefaultTreeCellRenderer();
            renderer.setLeafIcon(leafIcon);
            tree.setCellRenderer(renderer);
        } else {
            System.err.println("Leaf icon missing; using default.");
        }

        //Listen for when the selection changes.
        tree.addTreeSelectionListener(this);

        //Create the scroll pane and add the tree to it.
        JScrollPane treeView = new JScrollPane(tree);

        //Create the HTML viewing pane.
        htmlPane = new JEditorPane();
        htmlPane.setEditable(false);
        initHelp();
        JScrollPane htmlView = new JScrollPane(htmlPane);

        //Add the scroll panes to a split pane.
        JSplitPane splitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT);
        splitPane.setTopComponent(treeView);
        splitPane.setBottomComponent(htmlView);

        Dimension minimumSize = new Dimension(100, 50);
        htmlView.setMinimumSize(minimumSize);
        treeView.setMinimumSize(minimumSize);
        splitPane.setDividerLocation(100); //XXX: ignored in some releases
                                           //of Swing. bug 4101306
        //workaround for bug 4101306:
        //treeView.setPreferredSize(new Dimension(100, 100));

        splitPane.setPreferredSize(new Dimension(500, 300));

        //Add the split pane to this panel.
        add(splitPane);
    }

    /**
     * Required by TreeSelectionListener interface.
     * Shows the selected book's page, or the help page for category nodes.
     */
    public void valueChanged(TreeSelectionEvent e) {
        DefaultMutableTreeNode node = (DefaultMutableTreeNode)
                tree.getLastSelectedPathComponent();

        if (node == null) return;

        Object nodeInfo = node.getUserObject();
        if (node.isLeaf()) {
            BookInfo book = (BookInfo) nodeInfo;
            displayURL(book.bookURL);
            if (DEBUG) {
                System.out.print(book.bookURL + ": \n    ");
            }
        } else {
            displayURL(helpURL);
        }
        if (DEBUG) {
            System.out.println(nodeInfo.toString());
        }
    }

    /** User object for leaf nodes: a book title plus the URL of its page. */
    private class BookInfo {
        public String bookName;
        public URL bookURL;

        public BookInfo(String book, String filename) {
            bookName = book;
            bookURL = TreeIconDemo.class.getResource(filename);
            if (bookURL == null) {
                System.err.println("Couldn't find file: " + filename);
            }
        }

        public String toString() {
            return bookName;
        }
    }

    /** Resolves the help page resource and displays it initially. */
    private void initHelp() {
        String s = "TreeDemoHelp.html";
        helpURL = TreeIconDemo.class.getResource(s);
        if (helpURL == null) {
            System.err.println("Couldn't open help file: " + s);
        } else if (DEBUG) {
            System.out.println("Help URL is " + helpURL);
        }
        displayURL(helpURL);
    }

    /**
     * Loads the given URL into the HTML pane; shows "File Not Found" when the
     * URL is null and logs an error when the page cannot be read.
     */
    private void displayURL(URL url) {
        try {
            if (url != null) {
                htmlPane.setPage(url);
            } else { //null url
                htmlPane.setText("File Not Found");
                if (DEBUG) {
                    System.out.println("Attempted to display a null URL.");
                }
            }
        } catch (IOException e) {
            System.err.println("Attempted to read a bad URL: " + url);
        }
    }

    /** Populates the root node with the two book categories and their books. */
    private void createNodes(DefaultMutableTreeNode top) {
        DefaultMutableTreeNode category = null;
        DefaultMutableTreeNode book = null;

        category = new DefaultMutableTreeNode("Books for Java Programmers");
        top.add(category);

        //original Tutorial
        book = new DefaultMutableTreeNode(new BookInfo
            ("The Java Tutorial: A Short Course on the Basics",
            "tutorial.html"));
        category.add(book);

        //Tutorial Continued
        book = new DefaultMutableTreeNode(new BookInfo
            ("The Java Tutorial Continued: The Rest of the JDK",
            "tutorialcont.html"));
        category.add(book);

        //JFC Swing Tutorial
        book = new DefaultMutableTreeNode(new BookInfo
            ("The JFC Swing Tutorial: A Guide to Constructing GUIs",
            "swingtutorial.html"));
        category.add(book);

        //Bloch
        book = new DefaultMutableTreeNode(new BookInfo
            ("Effective Java Programming Language Guide",
            "bloch.html"));
        category.add(book);

        //Arnold/Gosling
        book = new DefaultMutableTreeNode(new BookInfo
            ("The Java Programming Language", "arnold.html"));
        category.add(book);

        //Chan
        book = new DefaultMutableTreeNode(new BookInfo
            ("The Java Developers Almanac", "chan.html"));
        category.add(book);

        category = new DefaultMutableTreeNode("Books for Java Implementers");
        top.add(category);

        //VM
        book = new DefaultMutableTreeNode(new BookInfo
            ("The Java Virtual Machine Specification", "vm.html"));
        category.add(book);

        //Language Spec
        book = new DefaultMutableTreeNode(new BookInfo
            ("The Java Language Specification", "jls.html"));
        category.add(book);
    }

    /** Returns an ImageIcon, or null if the path was invalid. */
    protected static ImageIcon createImageIcon(String path) {
        java.net.URL imgURL = TreeIconDemo.class.getResource(path);
        if (imgURL != null) {
            return new ImageIcon(imgURL);
        } else {
            System.err.println("Couldn't find file: " + path);
            return null;
        }
    }

    /**
     * Create the GUI and show it.  For thread safety,
     * this method should be invoked from the
     * event-dispatching thread.
     */
    private static void createAndShowGUI() {
        //Create and set up the window.
        JFrame frame = new JFrame("TreeIconDemo");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);

        //Create and set up the content pane.
        TreeIconDemo newContentPane = new TreeIconDemo();
        newContentPane.setOpaque(true); //content panes must be opaque
        frame.setContentPane(newContentPane);

        //Display the window.
        frame.pack();
        frame.setVisible(true);
    }

    /**
     * Application entry point.
     * Fixed: the original declared {@code main()} with no parameter, which is
     * not a valid JVM entry point, so the application could not be launched.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        //Schedule a job for the event-dispatching thread:
        //creating and showing this application's GUI.
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                createAndShowGUI();
            }
        });
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.directory.model; /** * Public API: Resources.buildings * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Admin SDK API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class BuildingAddress extends com.google.api.client.json.GenericJson { /** * Unstructured address lines describing the lower levels of an address. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> addressLines; /** * Optional. Highest administrative subdivision which is used for postal addresses of a country or * region. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String administrativeArea; /** * Optional. BCP-47 language code of the contents of this address (if known). * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String languageCode; /** * Optional. Generally refers to the city/town portion of the address. 
Examples: US city, IT * comune, UK post town. In regions of the world where localities are not well defined or do not * fit into this structure well, leave locality empty and use addressLines. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String locality; /** * Optional. Postal code of the address. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String postalCode; /** * Required. CLDR region code of the country/region of the address. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String regionCode; /** * Optional. Sublocality of the address. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String sublocality; /** * Unstructured address lines describing the lower levels of an address. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getAddressLines() { return addressLines; } /** * Unstructured address lines describing the lower levels of an address. * @param addressLines addressLines or {@code null} for none */ public BuildingAddress setAddressLines(java.util.List<java.lang.String> addressLines) { this.addressLines = addressLines; return this; } /** * Optional. Highest administrative subdivision which is used for postal addresses of a country or * region. * @return value or {@code null} for none */ public java.lang.String getAdministrativeArea() { return administrativeArea; } /** * Optional. Highest administrative subdivision which is used for postal addresses of a country or * region. * @param administrativeArea administrativeArea or {@code null} for none */ public BuildingAddress setAdministrativeArea(java.lang.String administrativeArea) { this.administrativeArea = administrativeArea; return this; } /** * Optional. BCP-47 language code of the contents of this address (if known). 
* @return value or {@code null} for none */ public java.lang.String getLanguageCode() { return languageCode; } /** * Optional. BCP-47 language code of the contents of this address (if known). * @param languageCode languageCode or {@code null} for none */ public BuildingAddress setLanguageCode(java.lang.String languageCode) { this.languageCode = languageCode; return this; } /** * Optional. Generally refers to the city/town portion of the address. Examples: US city, IT * comune, UK post town. In regions of the world where localities are not well defined or do not * fit into this structure well, leave locality empty and use addressLines. * @return value or {@code null} for none */ public java.lang.String getLocality() { return locality; } /** * Optional. Generally refers to the city/town portion of the address. Examples: US city, IT * comune, UK post town. In regions of the world where localities are not well defined or do not * fit into this structure well, leave locality empty and use addressLines. * @param locality locality or {@code null} for none */ public BuildingAddress setLocality(java.lang.String locality) { this.locality = locality; return this; } /** * Optional. Postal code of the address. * @return value or {@code null} for none */ public java.lang.String getPostalCode() { return postalCode; } /** * Optional. Postal code of the address. * @param postalCode postalCode or {@code null} for none */ public BuildingAddress setPostalCode(java.lang.String postalCode) { this.postalCode = postalCode; return this; } /** * Required. CLDR region code of the country/region of the address. * @return value or {@code null} for none */ public java.lang.String getRegionCode() { return regionCode; } /** * Required. CLDR region code of the country/region of the address. * @param regionCode regionCode or {@code null} for none */ public BuildingAddress setRegionCode(java.lang.String regionCode) { this.regionCode = regionCode; return this; } /** * Optional. 
Sublocality of the address. * @return value or {@code null} for none */ public java.lang.String getSublocality() { return sublocality; } /** * Optional. Sublocality of the address. * @param sublocality sublocality or {@code null} for none */ public BuildingAddress setSublocality(java.lang.String sublocality) { this.sublocality = sublocality; return this; } @Override public BuildingAddress set(String fieldName, Object value) { return (BuildingAddress) super.set(fieldName, value); } @Override public BuildingAddress clone() { return (BuildingAddress) super.clone(); } }
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts; import java.io.File; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Scanner; import org.apache.commons.lang.RandomStringUtils; import org.apache.maven.project.MavenProject; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IResource; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.IPath; import org.eclipse.core.runtime.NullProgressMonitor; import org.eclipse.core.runtime.Path; import org.eclipse.core.runtime.Status; import org.eclipse.draw2d.Figure; import org.eclipse.draw2d.GridData; import org.eclipse.draw2d.IFigure; import org.eclipse.draw2d.PositionConstants; import org.eclipse.draw2d.Shape; import org.eclipse.draw2d.StackLayout; import org.eclipse.draw2d.ToolbarLayout; import org.eclipse.draw2d.geometry.Dimension; import org.eclipse.emf.ecore.EAttribute; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EReference; import org.eclipse.emf.ecore.EStructuralFeature; import org.eclipse.emf.transaction.TransactionalEditingDomain; import org.eclipse.gef.EditPart; import org.eclipse.gef.EditPolicy; import org.eclipse.gef.Request; import org.eclipse.gef.Tool; import org.eclipse.gef.commands.Command; import org.eclipse.gef.editpolicies.LayoutEditPolicy; import org.eclipse.gef.editpolicies.NonResizableEditPolicy; import org.eclipse.gef.palette.PaletteContainer; import org.eclipse.gef.palette.PaletteDrawer; import org.eclipse.gef.palette.ToolEntry; import org.eclipse.gef.requests.CreateRequest; import org.eclipse.gmf.runtime.diagram.ui.commands.ICommandProxy; import org.eclipse.gmf.runtime.diagram.ui.editparts.IBorderItemEditPart; import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.BorderItemSelectionEditPolicy; import 
org.eclipse.gmf.runtime.diagram.ui.editpolicies.CreationEditPolicy; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.DragDropEditPolicy; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles; import org.eclipse.gmf.runtime.diagram.ui.figures.BorderItemLocator; import org.eclipse.gmf.runtime.diagram.ui.internal.services.palette.PaletteToolEntry; import org.eclipse.gmf.runtime.diagram.ui.tools.UnspecifiedTypeCreationTool; import org.eclipse.gmf.runtime.draw2d.ui.figures.ConstrainedToolbarLayout; import org.eclipse.gmf.runtime.draw2d.ui.figures.WrappingLabel; import org.eclipse.gmf.runtime.emf.type.core.IElementType; import org.eclipse.gmf.runtime.emf.type.core.commands.SetValueCommand; import org.eclipse.gmf.runtime.emf.type.core.requests.SetRequest; import org.eclipse.gmf.runtime.gef.ui.figures.DefaultSizeNodeFigure; import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure; import org.eclipse.gmf.runtime.notation.Node; import org.eclipse.gmf.runtime.notation.View; import org.eclipse.jface.dialogs.Dialog; import org.eclipse.jface.dialogs.ErrorDialog; import org.eclipse.jface.dialogs.IInputValidator; import org.eclipse.jface.dialogs.InputDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.SelectionListener; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.Font; import org.eclipse.swt.graphics.FontData; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Combo; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Shell; import org.eclipse.ui.IEditorDescriptor; import org.eclipse.ui.IEditorPart; import org.eclipse.ui.IEditorReference; import org.eclipse.ui.IFileEditorInput; import org.eclipse.ui.IWorkbenchPage; import org.eclipse.ui.PartInitException; import org.eclipse.ui.PlatformUI; import 
org.eclipse.ui.ide.IDE; import org.eclipse.ui.part.FileEditorInput; import org.wso2.developerstudio.eclipse.esb.project.artifact.ESBArtifact; import org.wso2.developerstudio.eclipse.esb.project.artifact.ESBProjectArtifact; import org.wso2.developerstudio.eclipse.esb.project.utils.ESBProjectUtils; import org.wso2.developerstudio.eclipse.gmf.esb.EsbDiagram; import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage; import org.wso2.developerstudio.eclipse.gmf.esb.KeyType; import org.wso2.developerstudio.eclipse.gmf.esb.ProxyService; import org.wso2.developerstudio.eclipse.gmf.esb.RegistryKeyProperty; import org.wso2.developerstudio.eclipse.gmf.esb.Sequence; import org.wso2.developerstudio.eclipse.gmf.esb.Sequences; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractMediatorInputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractMediatorOutputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EditorUtils; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EsbGraphicalShape; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EsbGraphicalShapeWithLabel; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.FixedBorderItemLocator; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.FixedSizedAbstractMediator; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.OpenSeparatelyEditPolicy; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.SequenceStorage; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.ToolPalleteDetails; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.deserializer.AbstractEsbNodeDeserializer; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.deserializer.Deserializer; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.deserializer.EsbDeserializerRegistry; import 
org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.editpolicy.FeedbackIndicateDragDropEditPolicy; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.utils.OpenEditorUtils; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.SequenceCanonicalEditPolicy; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.SequenceItemSemanticEditPolicy; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbDiagramEditor; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbDiagramEditorUtil; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbMultiPageEditor; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbPaletteFactory; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.Messages; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes; import org.wso2.developerstudio.eclipse.logging.core.IDeveloperStudioLog; import org.wso2.developerstudio.eclipse.logging.core.Logger; import org.wso2.developerstudio.eclipse.maven.util.MavenUtils; import org.wso2.developerstudio.eclipse.platform.ui.editor.Openable; import org.wso2.developerstudio.eclipse.platform.ui.startup.ESBGraphicalEditor; import org.wso2.developerstudio.eclipse.utils.file.FileUtils; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.eclipse.emf.ecore.resource.Resource; import org.eclipse.emf.edit.command.SetCommand; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.util.URI; import static org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EditorUtils.*; import static org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage.Literals.*; /** * @generated NOT */ public class SequenceEditPart extends FixedSizedAbstractMediator { private static IDeveloperStudioLog log = Logger .getLog("org.wso2.developerstudio.eclipse.gmf.esb.diagram"); private EsbDiagramEditor 
mainDiagramEditorRef; /** * @generated */ public static final int VISUAL_ID = 3503; /** * @generated */ protected IFigure contentPane; public static SequenceStorage sequenceStorage = new SequenceStorage(); private static Integer sequenceCount = 0; private SequenceEditPart currentSequence; private String value; public static ArrayList<String> definedSequenceNames = new ArrayList<String>(); private float inputCount = 0, outputCount = 0; private Button recieveCheckBox; //private boolean checked; private SequenceEditPart sequenceEditPart; private SequencesInfo info; /** * @generated NOT */ public SequenceEditPart(View view) { super(view); sequenceEditPart = this; info = new SequencesInfo(); } /** * @generated NOT */ protected void createDefaultEditPolicies() { installEditPolicy(EditPolicyRoles.CREATION_ROLE, new CreationEditPolicy()); super.createDefaultEditPolicies(); installEditPolicy(EditPolicyRoles.SEMANTIC_ROLE, new SequenceItemSemanticEditPolicy()); installEditPolicy(EditPolicyRoles.DRAG_DROP_ROLE, new DragDropEditPolicy()); installEditPolicy(EditPolicyRoles.DRAG_DROP_ROLE, new FeedbackIndicateDragDropEditPolicy()); installEditPolicy(EditPolicyRoles.CANONICAL_ROLE, new SequenceCanonicalEditPolicy()); installEditPolicy(EditPolicy.LAYOUT_ROLE, createLayoutEditPolicy()); // For handle Double click Event. 
installEditPolicy(EditPolicyRoles.OPEN_ROLE, new OpenSeparatelyEditPolicy()); // XXX need an SCR to runtime to have another abstract superclass that // would let children add reasonable editpolicies // removeEditPolicy(org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles.CONNECTION_HANDLES_ROLE); } /** * @generated */ protected LayoutEditPolicy createLayoutEditPolicy() { org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy lep = new org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy() { protected EditPolicy createChildEditPolicy(EditPart child) { View childView = (View) child.getModel(); switch (EsbVisualIDRegistry.getVisualID(childView)) { case SequenceInputConnectorEditPart.VISUAL_ID: case SequenceOutputConnectorEditPart.VISUAL_ID: return new BorderItemSelectionEditPolicy(); } EditPolicy result = child.getEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE); if (result == null) { result = new NonResizableEditPolicy(); } return result; } protected Command getMoveChildrenCommand(Request request) { return null; } protected Command getCreateCommand(CreateRequest request) { return null; } }; return lep; } public void notifyChanged(Notification notification) { super.notifyChanged(notification); Object notifier = ((ENotificationImpl) notification).getNotifier(); if (notifier instanceof Sequence) { if (notification.getFeature() instanceof EReference) { if ("staticReferenceKey".equals(((EReference) notification.getFeature()).getName())) { String keyValue = ((RegistryKeyProperty) notification.getNewValue()) .getKeyValue(); setValue((Sequence) notifier, SEQUENCE__NAME, keyValue); } } else if (notification.getFeature() instanceof EAttribute) { if ("name".equals(((EAttribute) notification.getFeature()).getName())) { String name = (String) notification.getNewValue(); if ("{XPath}".equals(name)) { setValue((Sequence) notifier, SEQUENCE__REFERRING_SEQUENCE_TYPE, KeyType.DYNAMIC); } else { setValue((Sequence) notifier, SEQUENCE__REFERRING_SEQUENCE_TYPE, 
KeyType.STATIC); RegistryKeyProperty registryKeyProperty = ((Sequence) notifier) .getStaticReferenceKey(); setValue(registryKeyProperty, REGISTRY_KEY_PROPERTY__KEY_VALUE, name); } } else if ("referringSequenceType".equals(((EAttribute) notification.getFeature()) .getName())) { KeyType type = (KeyType) notification.getNewValue(); if (KeyType.DYNAMIC == type) { setValue((Sequence) notifier, SEQUENCE__NAME, "{XPath}"); } else { if ("{XPath}".equals(((Sequence) notifier).getName())) { setValue((Sequence) notifier, SEQUENCE__NAME, ""); } } } } } } private void setValue(EObject owner, EStructuralFeature feature, Object value) { SetCommand setCommand = new SetCommand(getEditingDomain(), owner, feature, value); if (setCommand.canExecute()) { getEditingDomain().getCommandStack().execute(setCommand); } } /** * @generated NOT */ protected IFigure createNodeShape() { return primaryShape = new SequenceFigure() { public void setBounds(org.eclipse.draw2d.geometry.Rectangle rect) { super.setBounds(rect); if (this.getBounds().getLocation().x != 0 && this.getBounds().getLocation().y != 0) { connectToMostSuitableElement(); reAllocate(rect); } }; }; } /** * @generated */ public SequenceFigure getPrimaryShape() { return (SequenceFigure) primaryShape; } /** * @generated NOT */ protected boolean addFixedChild(EditPart childEditPart) { if (childEditPart instanceof SequenceNameEditPart) { ((SequenceNameEditPart) childEditPart).setLabel(getPrimaryShape() .getSequenceLabelFigure()); return true; } if (childEditPart instanceof SequenceInputConnectorEditPart) { IFigure borderItemFigure = ((SequenceInputConnectorEditPart) childEditPart).getFigure(); BorderItemLocator locator = new FixedBorderItemLocator(getMainFigure(), borderItemFigure, PositionConstants.WEST, 0.5); getBorderedFigure().getBorderItemContainer().add(borderItemFigure, locator); return true; } if (childEditPart instanceof SequenceOutputConnectorEditPart) { IFigure borderItemFigure = ((SequenceOutputConnectorEditPart) 
childEditPart) .getFigure(); BorderItemLocator locator = new FixedBorderItemLocator(getMainFigure(), borderItemFigure, PositionConstants.EAST, 0.5); getBorderedFigure().getBorderItemContainer().add(borderItemFigure, locator); return true; } return false; } /*public IFigure getFigure() { IFigure figure = super.getFigure(); figure.setMaximumSize(new Dimension(75, 75)); return figure; }*/ /*protected NodeFigure createNodePlate() { DefaultSizeNodeFigure result = new DefaultSizeNodeFigure(75, 75); result.setMinimumSize(new Dimension(75, 75)); return result; }*/ /** * @generated */ protected boolean removeFixedChild(EditPart childEditPart) { if (childEditPart instanceof SequenceNameEditPart) { return true; } if (childEditPart instanceof SequenceInputConnectorEditPart) { getBorderedFigure().getBorderItemContainer().remove( ((SequenceInputConnectorEditPart) childEditPart).getFigure()); return true; } if (childEditPart instanceof SequenceOutputConnectorEditPart) { getBorderedFigure().getBorderItemContainer().remove( ((SequenceOutputConnectorEditPart) childEditPart).getFigure()); return true; } return false; } /** * @generated */ protected void addChildVisual(EditPart childEditPart, int index) { if (addFixedChild(childEditPart)) { return; } super.addChildVisual(childEditPart, -1); } /** * @generated */ protected void removeChildVisual(EditPart childEditPart) { if (removeFixedChild(childEditPart)) { return; } super.removeChildVisual(childEditPart); } /** * @generated */ protected IFigure getContentPaneFor(IGraphicalEditPart editPart) { if (editPart instanceof IBorderItemEditPart) { return getBorderedFigure().getBorderItemContainer(); } return getContentPane(); } /** * Creates figure for this edit part. * * Body of this method does not depend on settings in generation model so * you may safely remove <i>generated</i> tag and modify it. 
* * @generated NOT */ protected NodeFigure createMainFigure() { NodeFigure figure = createNodePlate(); figure.setLayoutManager(new ToolbarLayout(true)); IFigure shape = createNodeShape(); figure.add(shape); contentPane = setupContentPane(shape); return figure; } /** * Default implementation treats passed figure as content pane. Respects * layout one may have set for generated figure. * * @param nodeShape * instance of generated figure class * @generated */ protected IFigure setupContentPane(IFigure nodeShape) { if (nodeShape.getLayoutManager() == null) { ConstrainedToolbarLayout layout = new ConstrainedToolbarLayout(); layout.setSpacing(5); nodeShape.setLayoutManager(layout); } return nodeShape; // use nodeShape itself as contentPane } /** * @generated */ public IFigure getContentPane() { if (contentPane != null) { return contentPane; } return super.getContentPane(); } /** * {@inheritDoc} */ protected void addBorderItem(IFigure borderItemContainer, IBorderItemEditPart borderItemEditPart) { IFigure borderItemFigure = borderItemEditPart.getFigure(); if (borderItemEditPart instanceof SequenceInputConnectorEditPart) { borderItemContainer.add(borderItemFigure, new FixedBorderItemLocator(getMainFigure(), borderItemFigure, PositionConstants.WEST, 0.5)); } else if (borderItemEditPart instanceof SequenceOutputConnectorEditPart) { borderItemContainer.add(borderItemFigure, new FixedBorderItemLocator(getMainFigure(), borderItemFigure, PositionConstants.EAST, 0.5)); } else { super.addBorderItem(borderItemContainer, borderItemEditPart); } } /** * @generated */ protected void setForegroundColor(Color color) { if (primaryShape != null) { primaryShape.setForegroundColor(color); } } /** * @generated */ protected void setBackgroundColor(Color color) { if (primaryShape != null) { primaryShape.setBackgroundColor(color); } } /** * @generated */ protected void setLineWidth(int width) { if (primaryShape instanceof Shape) { ((Shape) primaryShape).setLineWidth(width); } } /** * @generated 
*/ protected void setLineType(int style) { if (primaryShape instanceof Shape) { ((Shape) primaryShape).setLineStyle(style); } } /** * @generated */ public EditPart getPrimaryChildEditPart() { return getChildBySemanticHint(EsbVisualIDRegistry.getType(SequenceNameEditPart.VISUAL_ID)); } public IProject getActiveProject() { IEditorPart editorPart = null; IProject activeProject = null; IEditorReference editorReferences[] = PlatformUI.getWorkbench().getActiveWorkbenchWindow() .getActivePage().getEditorReferences(); for (int i = 0; i < editorReferences.length; i++) { IEditorPart editor = editorReferences[i].getEditor(false); if (editor != null) { editorPart = editor.getSite().getWorkbenchWindow().getActivePage() .getActiveEditor(); } if (editorPart != null) { IFileEditorInput input = (IFileEditorInput) editorPart.getEditorInput(); IFile file = input.getFile(); activeProject = file.getProject(); } } return activeProject; } private String getMavenGroupID(IProject project) { String groupID = "com.example"; try { MavenProject mavenProject = MavenUtils.getMavenProject(project.getFile("pom.xml") .getLocation().toFile()); groupID = mavenProject.getGroupId(); } catch (Exception e) { //ignore. Then group id would be default. } return groupID; } private ESBArtifact createArtifact(String name, String groupId, String version, String path, String type) { ESBArtifact artifact = new ESBArtifact(); artifact.setName(name); artifact.setVersion(version); artifact.setType(type); artifact.setServerRole("EnterpriseServiceBus"); artifact.setGroupId(groupId); artifact.setFile(path); return artifact; } public void openWithSeparateEditor() { IProject activeProject = getActiveProject(); String name = ((Sequence) ((org.eclipse.gmf.runtime.notation.impl.NodeImpl) getModel()) .getElement()).getName(); //if (sequenceStorage.sequences.get(name) == null) { if (!name.equals("")) { /* * Tool group creations in the Tool pallete. 
         */
        // (Commented-out palette-group registration code removed; recover it
        // from version control if the palette integration is revived.)
    }
    /*
     * File creations.
     */
    createFiles(name, "sequence_" + name + ".esb_diagram", "sequence_" + name + ".esb",
            activeProject);
    EditorUtils.updateToolpalette();
}

/**
 * Creates (or re-opens) the diagram resources backing this sequence.
 *
 * @param fileURI1 diagram resource name (".esb_diagram")
 * @param fileURI2 model resource name (".esb")
 * @return true when a diagram/editor was opened, false on failure
 */
public boolean createFiles(String name, String fileURI1, String fileURI2,
        IProject currentProject) {
    Resource diagram;
    String basePath = "platform:/resource/" + currentProject.getName() + "/"
            + SEQUENCE_RESOURCE_DIR + "/";
    IFile file = currentProject.getFile(SEQUENCE_RESOURCE_DIR + "/" + fileURI1);
    // Receive sequences carry their owning proxy's name into the SequencesInfo
    // so the created diagram knows its association.
    if (((Sequence) ((Node) sequenceEditPart.getModel()).getElement()).isReceiveSequence()) {
        info.setRecieveSequence(true);
        info.setAssociatedProxy(((ProxyService) ((Node) EditorUtils.getProxy(
                sequenceEditPart.getParent()).getModel()).getElement()).getName());
    }
    if (!file.exists()) {
        IFile fileTobeOpened = currentProject.getFile(SYNAPSE_CONFIG_DIR + "/sequences/"
                + name + ".xml");
        try {
            diagram = EsbDiagramEditorUtil.createDiagram(URI.createURI(basePath + fileURI1),
                    URI.createURI(basePath + fileURI2), new NullProgressMonitor(),
                    "sequence", name, info);
            if (fileTobeOpened.exists()) {
                // Synapse XML already present: open it side-by-side with the diagram.
                String diagramPath = diagram.getURI().toPlatformString(true);
                OpenEditorUtils oeUtils = new OpenEditorUtils();
                oeUtils.openSeparateEditor(fileTobeOpened, diagramPath);
            } else {
                // Fresh sequence: register it in artifact.xml, then open the diagram.
                addSequenceToArtifactXML(name);
                EsbDiagramEditorUtil.openDiagram(diagram);
            }
        } catch (Exception e) {
            log.error("Cannot open file " + fileTobeOpened, e);
            return false;
        }
        return true;
    } else {
        // Diagram resource already exists: just open it with its default editor.
        IWorkbenchPage page = PlatformUI.getWorkbench().getActiveWorkbenchWindow()
                .getActivePage();
        IEditorDescriptor desc =
                PlatformUI.getWorkbench().getEditorRegistry()
                        .getDefaultEditor(file.getName());
        try {
            page.openEditor(new FileEditorInput(file), desc.getId());
        } catch (PartInitException e) {
            log.error("Cannot init editor", e);
        }
        return true;
    }
}

/**
 * Creates the (currently empty) "Sequences" drawer for the tool palette.
 */
private PaletteContainer createSequenceGroup() {
    PaletteDrawer paletteContainer = new PaletteDrawer("Sequences");
    paletteContainer.setId("Sequences"); //$NON-NLS-1$
    return paletteContainer;
}

/**
 * Creates a palette tool entry that instantiates a Sequence node
 * (element type Sequence_3503).
 */
private ToolEntry createSequence4CreationTool(String name) {
    ArrayList<IElementType> types = new ArrayList<IElementType>(2);
    types.add(EsbElementTypes.Sequence_3503);
    // (Commented-out alternative Sequence element types removed.)
    NodeToolEntry entry = new NodeToolEntry(name, Messages.Sequence4CreationTool_desc, types);
    entry.setId("createSequence4CreationTool"); //$NON-NLS-1$
    entry.setSmallIcon(EsbElementTypes.getImageDescriptor(EsbElementTypes.Sequence_3503));
    entry.setLargeIcon(entry.getSmallIcon());
    return entry;
}

/**
 * Prompts the user for a sequence name when the model element has none yet,
 * then opens the sequence in a separate editor. If a name is already set the
 * editor is opened directly.
 */
public void createDialogBox() {
    final EObject diagram = ((org.eclipse.gmf.runtime.notation.impl.NodeImpl) ((org.eclipse.gmf.runtime.notation.impl.NodeImpl) getModel())
            .getDiagram().getChildren().get(0)).getElement().eContainer();
    final EObject sequence = (Sequence) ((org.eclipse.gmf.runtime.notation.impl.NodeImpl) getModel())
            .getElement();
    // For validation: user should not enter "" value for name.
    if (((Sequence) sequence).getName().trim().equals("")) {
        // Rejects blank names and names containing a space (0x20).
        IInputValidator validator = new IInputValidator() {
            public String isValid(String str) {
                if (str.trim().isEmpty()) {
                    return "Sequence name cannot be empty";
                } else if (str.indexOf(0x20) != -1) {
                    return "Sequence name cannot contain spaces";
                }
                return null;
            }
        };
        String defaultName = calculateDefaultName();
        final InputDialog sequenceNameInput = new InputDialog(new Shell(),
                "Enter Sequence Name", "Sequence Name", defaultName, validator) {
            protected Control createDialogArea(Composite parent) {
                Composite composite = (Composite) super.createDialogArea(parent);
                // (Commented-out "receiving sequence" checkbox code removed.)
                return composite;
            }
        };
        int open = sequenceNameInput.open();
        if (open == Dialog.OK) {
            // Apply the chosen name on the UI thread via an undoable EMF command.
            Display.getDefault().asyncExec(new Runnable() {
                public void run() {
                    String sequenceName = sequenceNameInput.getValue();
                    TransactionalEditingDomain editingDomain = getEditingDomain();
                    // (Commented-out sequence-counter command removed.)
                    SetRequest setRequest = new SetRequest(editingDomain, sequence,
                            EsbPackage.eINSTANCE.getSequence_Name(), sequenceName);
                    SetValueCommand operation = new
                    SetValueCommand(setRequest) {
                        // Allow undo/redo of the rename on the command stack.
                        public boolean canUndo() {
                            return true;
                        }

                        public boolean canRedo() {
                            return true;
                        }
                    };
                    getEditDomain().getCommandStack().execute(new ICommandProxy(operation));
                    // (Commented-out receive-sequence and artifact.xml blocks removed;
                    // artifact registration now happens in addSequenceToArtifactXML.)
                    openWithSeparateEditor();
                }
            });
        }
    } else {
        // Name already set: no dialog needed.
        openWithSeparateEditor();
    }
}

/**
 * Computes the first unused default name of the form "Sequence_&lt;n&gt;".
 * On lookup failure a random suffix is appended to avoid collisions.
 */
public String calculateDefaultName() {
    IProject activeProject = getActiveProject();
    String finalName = "Sequence_1";
    int i = 1;
    try {
        while (ESBProjectUtils.artifactExists(activeProject, finalName)) {
            // Strip the trailing counter, then append the incremented one.
            finalName = finalName.replaceAll("\\d+$", "");
            i++;
            finalName = finalName.concat(i + "");
        }
    } catch (Exception e) {
        finalName = finalName.concat("_").concat(RandomStringUtils.randomAlphabetic(5))
                .concat("_" + i);
    }
    return finalName;
}

/**
 * Registers the sequence in the project's artifact.xml so the packaging
 * tooling picks it up.
 */
private void addSequenceToArtifactXML(String sequenceName) {
    IProject activeProject = getActiveProject();
    ESBProjectArtifact esbProjectArtifact = new ESBProjectArtifact();
    try {
        esbProjectArtifact.fromFile(activeProject.getFile("artifact.xml").getLocation()
                .toFile());
        esbProjectArtifact.addESBArtifact(createArtifact(sequenceName,
                getMavenGroupID(activeProject), "1.0.0", "src/main/synapse-config/sequences/"
                        + sequenceName + ".xml", "synapse/sequence"));
        esbProjectArtifact.toFile();
    } catch (Exception e) {
        // NOTE(review): exception cause is dropped here — consider logging `e` too.
        log.error("Error while updating Artifact.xml");
    }
}

/**
 * @generated NOT
 */
public class
        SequenceFigure extends EsbGraphicalShapeWithLabel {

    /**
     * @generated
     */
    private WrappingLabel fSequenceLabelFigure;

    /**
     * Sets the figure's white background and wires the name label.
     *
     * @generated NOT
     */
    public SequenceFigure() {
        // (Large commented-out layout and mouse-listener experiments removed;
        // see version control history if that behavior is ever revived.)
        this.setBackgroundColor(THIS_BACK);
        createContents();
    }

    // NOTE(review): duplicates the outer-class helper of the same name; kept
    // for compatibility, but appears unused within this figure.
    private PaletteContainer createSequenceGroup() {
        PaletteDrawer paletteContainer = new PaletteDrawer("Sequences");
        paletteContainer.setId("Sequences"); //$NON-NLS-1$
        return paletteContainer;
    }

    // NOTE(review): near-duplicate of the outer-class helper (this one uses a
    // singleton type list); appears unused within this figure.
    private ToolEntry createSequence4CreationTool(String name) {
        NodeToolEntry entry = new NodeToolEntry(name, Messages.Sequence4CreationTool_desc,
                Collections.singletonList(EsbElementTypes.Sequence_3503));
        entry.setId("createSequence4CreationTool"); //$NON-NLS-1$
        entry.setSmallIcon(EsbElementTypes.getImageDescriptor(EsbElementTypes.Sequence_3503));
        entry.setLargeIcon(entry.getSmallIcon());
        return entry;
    }

    /**
     * Uses the inherited property-name label as the sequence label.
     *
     * @generated NOT
     */
    private void createContents() {
        // (Commented-out manual WrappingLabel construction removed.)
        fSequenceLabelFigure = getPropertyNameLabel();
    }

    // (Commented-out fillShape(Graphics) gradient experiment removed.)

    /**
     * @generated
     */
    public WrappingLabel getSequenceLabelFigure() {
        return fSequenceLabelFigure;
    }

    public String getIconPath() {
        return "icons/ico20/sequence-mediator.gif";
    }

    public String getNodeName() {
        return "Sequence";
    }

}

/**
 * @generated
 */
static final Color THIS_BACK = new Color(null, 255, 255, 255);

/*
 * This will be used for arrange the connectors to the right side of the figure.
 */
public void moveConnectorsRightSide() {
    // Re-anchor every input connector on the EAST edge (upper position, 0.30),
    // swapping in the "reverse" primary shape.
    for (int i = 0; i < this.getChildren().size(); ++i) {
        if (this.getChildren().get(i) instanceof AbstractMediatorInputConnectorEditPart) {
            IFigure inputConnector = ((AbstractMediatorInputConnectorEditPart) this
                    .getChildren().get(i)).getFigure();
            NodeFigure figureInput = ((AbstractMediatorInputConnectorEditPart) this
                    .getChildren().get(i)).getNodeFigureInput();
            figureInput.removeAll();
            figureInput
                    .add(((AbstractMediatorInputConnectorEditPart) this.getChildren().get(i))
                            .getPrimaryShapeReverse());
            BorderItemLocator inputLocator = new FixedBorderItemLocator(this.getMainFigure(),
                    inputConnector, PositionConstants.EAST, 0.30);
            this.getBorderedFigure().getBorderItemContainer().remove(inputConnector);
            this.getBorderedFigure().getBorderItemContainer().add(inputConnector, inputLocator);
        }
    }
    // Re-anchor every output connector on the EAST edge (lower position, 0.70),
    // replacing its shape with an empty figure so it is effectively hidden.
    for (int i = 0; i < this.getChildren().size(); ++i) {
        if (this.getChildren().get(i) instanceof AbstractMediatorOutputConnectorEditPart) {
            IFigure outputConnector = ((AbstractMediatorOutputConnectorEditPart) this
                    .getChildren().get(i)).getFigure();
            NodeFigure figureOutput = ((AbstractMediatorOutputConnectorEditPart) this
                    .getChildren().get(i)).getNodeFigureOutput();
            figureOutput.removeAll();
            Figure emptyFigure = new Figure();
            figureOutput.add(emptyFigure);
            // (Commented-out forward-shape variant removed.)
            BorderItemLocator outputLocator = new FixedBorderItemLocator(this.getMainFigure(),
                    outputConnector, PositionConstants.EAST, 0.70);
            this.getBorderedFigure().getBorderItemContainer().remove(outputConnector);
            this.getBorderedFigure().getBorderItemContainer()
                    .add(outputConnector, outputLocator);
        }
    }
}

/**
 * Palette entry that creates one of the given element types via an
 * UnspecifiedTypeCreationTool.
 */
public static class NodeToolEntry extends PaletteToolEntry {

    private final List<IElementType> elementTypes;

    private NodeToolEntry(String title, String description, List<IElementType> elementTypes) {
        // super(title, description, null, null);
        super(null, title, null);
        this.setDescription(description);
        this.elementTypes = elementTypes;
    }

    public Tool createTool() {
        Tool tool = new UnspecifiedTypeCreationTool(elementTypes);
        tool.setProperties(getToolProperties());
        return tool;
    }
}

/**
 * Carries receive-sequence metadata (owning proxy name) into diagram creation.
 */
public class SequencesInfo {

    private String associatedProxy;
    private boolean recieveSequence;

    public void setAssociatedProxy(String associatedProxy) {
        this.associatedProxy = associatedProxy;
    }

    public String getAssociatedProxy() {
        return associatedProxy;
    }

    public void setRecieveSequence(boolean recieveSequence) {
        this.recieveSequence = recieveSequence;
    }

    public boolean isRecieveSequence() {
        return recieveSequence;
    }
}

}
package org.docksidestage.sqlite.dbflute.allcommon;

import javax.sql.DataSource;

import org.dbflute.bhv.core.InvokerAssistant;
import org.dbflute.bhv.core.context.ResourceParameter;
import org.dbflute.bhv.core.melodicsql.MelodicSqlAnalyzerFactory;
import org.dbflute.bhv.core.supplement.SequenceCacheHandler;
import org.dbflute.bhv.core.supplement.SequenceCacheKeyGenerator;
import org.dbflute.bhv.exception.BehaviorExceptionThrower;
import org.dbflute.bhv.exception.DefaultSQLExceptionHandlerFactory;
import org.dbflute.bhv.exception.SQLExceptionHandlerFactory;
import org.dbflute.cbean.cipher.GearedCipherManager;
import org.dbflute.cbean.sqlclause.SqlClauseCreator;
import org.dbflute.dbmeta.DBMetaProvider;
import org.dbflute.dbway.DBDef;
import org.dbflute.jdbc.DataSourceHandler;
import org.dbflute.jdbc.HandlingDataSourceWrapper;
import org.dbflute.jdbc.SQLExceptionDigger;
import org.dbflute.jdbc.StatementConfig;
import org.dbflute.jdbc.StatementFactory;
import org.dbflute.optional.RelationOptionalFactory;
import org.dbflute.outsidesql.OutsideSqlOption;
import org.dbflute.outsidesql.factory.DefaultOutsideSqlExecutorFactory;
import org.dbflute.outsidesql.factory.OutsideSqlExecutorFactory;
import org.dbflute.s2dao.extension.TnBeanMetaDataFactoryExtension;
import org.dbflute.s2dao.jdbc.TnResultSetHandlerFactory;
import org.dbflute.s2dao.jdbc.TnResultSetHandlerFactoryImpl;
import org.dbflute.s2dao.jdbc.TnStatementFactoryImpl;
import org.dbflute.s2dao.metadata.TnBeanMetaDataFactory;
import org.dbflute.twowaysql.factory.SqlAnalyzerFactory;

/**
 * Generated assistant that supplies the runtime components DBFlute's behavior
 * invoker needs (DB meta, SQL clause/statement factories, exception handling,
 * sequence caching, etc.). Heavy components are created lazily and cached in
 * volatile fields guarded by double-checked locking.
 *
 * @author DBFlute(AutoGenerator)
 */
public class MaImplementedInvokerAssistant implements InvokerAssistant {

    // ===== Attribute =====
    // Suffixes used to recognize "client" frames in invocation stack traces.
    protected static final String[] DEFAULT_CLIENT_INVOKE_NAMES = new String[] { "Page", "Action", "Controller", "ControllerImpl", "Job", "Task", "Test" };

    // Suffixes used to recognize intermediate ("by-pass") frames in stack traces.
    protected static final String[] DEFAULT_BYPASS_INVOKE_NAMES = new String[] { "Service", "ServiceImpl", "Facade", "FacadeImpl", "Logic", "LogicImpl" };

    // ----- DI Component -----
    protected DataSource _dataSource;
    protected MaDBFluteInitializer _introduction;

    // ----- Lazy Component (double-checked, hence volatile) -----
    protected volatile DBMetaProvider _dbmetaProvider;
    protected volatile SqlClauseCreator _sqlClauseCreator;
    protected volatile StatementFactory _statementFactory;
    protected volatile TnBeanMetaDataFactory _beanMetaDataFactory;
    protected volatile TnResultSetHandlerFactory _resultSetHandlerFactory;
    protected volatile RelationOptionalFactory _relationOptionalFactory;
    protected volatile SqlAnalyzerFactory _sqlAnalyzerFactory;
    protected volatile OutsideSqlExecutorFactory _outsideSqlExecutorFactory;
    protected volatile SQLExceptionHandlerFactory _sqlExceptionHandlerFactory;
    protected volatile SequenceCacheHandler _sequenceCacheHandler;

    // ----- Disposable Flag -----
    protected volatile boolean _disposable;

    // ===== Assistant Main Work =====
    // ----- Current DBDef -----
    /** {@inheritDoc} */
    public DBDef assistCurrentDBDef() {
        return MaDBCurrent.getInstance().currentDBDef();
    }

    // ----- Data Source -----
    /** {@inheritDoc} */
    public DataSource assistDataSource() { // DI component
        // this instance will be cached in SQL executions
        // so the handler should be set before initialization of DBFlute
        // (and it means you cannot switch data source after initialization)
        DataSourceHandler handler = MaDBFluteConfig.getInstance().getDataSourceHandler();
        return handler != null ? new HandlingDataSourceWrapper(_dataSource, handler) : _dataSource;
    }

    // ----- DBMeta Provider -----
    /** {@inheritDoc} */
    public DBMetaProvider assistDBMetaProvider() { // lazy component
        if (_dbmetaProvider != null) {
            return _dbmetaProvider;
        }
        synchronized (this) {
            if (_dbmetaProvider != null) {
                return _dbmetaProvider;
            }
            _dbmetaProvider = createDBMetaProvider();
        }
        return _dbmetaProvider;
    }

    protected DBMetaProvider createDBMetaProvider() {
        return MaDBMetaInstanceHandler.getProvider();
    }

    // ----- SQL Clause Creator -----
    /** {@inheritDoc} */
    public SqlClauseCreator assistSqlClauseCreator() { // lazy component
        if (_sqlClauseCreator != null) {
            return _sqlClauseCreator;
        }
        synchronized (this) {
            if (_sqlClauseCreator != null) {
                return _sqlClauseCreator;
            }
            _sqlClauseCreator = createSqlClauseCreator();
        }
        return _sqlClauseCreator;
    }

    protected SqlClauseCreator createSqlClauseCreator() {
        // A creator configured on MaDBFluteConfig wins over the generated default.
        SqlClauseCreator creator = MaDBFluteConfig.getInstance().getSqlClauseCreator();
        if (creator != null) {
            return creator;
        }
        return newImplementedSqlClauseCreator(); // as default
    }

    protected MaImplementedSqlClauseCreator newImplementedSqlClauseCreator() {
        return new MaImplementedSqlClauseCreator();
    }

    // ----- Statement Factory -----
    /** {@inheritDoc} */
    public StatementFactory assistStatementFactory() { // lazy component
        if (_statementFactory != null) {
            return _statementFactory;
        }
        synchronized (this) {
            if (_statementFactory != null) {
                return _statementFactory;
            }
            _statementFactory = createStatementFactory();
        }
        return _statementFactory;
    }

    protected StatementFactory createStatementFactory() {
        final TnStatementFactoryImpl factory = newStatementFactoryImpl();
        factory.setDefaultStatementConfig(assistDefaultStatementConfig());
        MaDBFluteConfig config = MaDBFluteConfig.getInstance();
        factory.setInternalDebug(config.isInternalDebug());
        factory.setCursorSelectFetchSize(config.getCursorSelectFetchSize());
        factory.setEntitySelectFetchSize(config.getEntitySelectFetchSize());
        factory.setUsePagingByCursorSkipSynchronizedFetchSize(config.isUsePagingByCursorSkipSynchronizedFetchSize());
        factory.setFixedPagingByCursorSkipSynchronizedFetchSize(config.getFixedPagingByCursorSkipSynchronizedFetchSize());
        return factory;
    }

    protected TnStatementFactoryImpl newStatementFactoryImpl() {
        return new TnStatementFactoryImpl();
    }

    // ----- Bean Meta Data Factory -----
    /** {@inheritDoc} */
    public TnBeanMetaDataFactory assistBeanMetaDataFactory() { // lazy component
        if (_beanMetaDataFactory != null) {
            return _beanMetaDataFactory;
        }
        synchronized (this) {
            if (_beanMetaDataFactory != null) {
                return _beanMetaDataFactory;
            }
            _beanMetaDataFactory = createBeanMetaDataFactory();
        }
        return _beanMetaDataFactory;
    }

    protected TnBeanMetaDataFactory createBeanMetaDataFactory() {
        RelationOptionalFactory relationOptionalFactory = assistRelationOptionalFactory();
        final TnBeanMetaDataFactoryExtension factory = newBeanMetaDataFactoryExtension(relationOptionalFactory);
        factory.setDataSource(_dataSource);
        factory.setInternalDebug(MaDBFluteConfig.getInstance().isInternalDebug());
        return factory;
    }

    protected TnBeanMetaDataFactoryExtension newBeanMetaDataFactoryExtension(RelationOptionalFactory relationOptionalFactory) {
        return new TnBeanMetaDataFactoryExtension(relationOptionalFactory);
    }

    // ----- Result Set Handler Factory -----
    /** {@inheritDoc} */
    public TnResultSetHandlerFactory assistResultSetHandlerFactory() { // lazy component
        if (_resultSetHandlerFactory != null) {
            return _resultSetHandlerFactory;
        }
        synchronized (this) {
            if (_resultSetHandlerFactory != null) {
                return _resultSetHandlerFactory;
            }
            _resultSetHandlerFactory = createResultSetHandlerFactory();
        }
        return _resultSetHandlerFactory;
    }

    protected TnResultSetHandlerFactory createResultSetHandlerFactory() {
        return newResultSetHandlerFactoryImpl();
    }

    protected TnResultSetHandlerFactoryImpl newResultSetHandlerFactoryImpl() {
        return new TnResultSetHandlerFactoryImpl();
    }

    // ----- Relation Optional Factory -----
    /** {@inheritDoc} */
    public RelationOptionalFactory assistRelationOptionalFactory() {
        if (_relationOptionalFactory != null) {
            return _relationOptionalFactory;
        }
        synchronized (this) {
            if (_relationOptionalFactory != null) {
                return _relationOptionalFactory;
            }
            _relationOptionalFactory = createRelationOptionalFactory();
        }
        return _relationOptionalFactory;
    }

    protected RelationOptionalFactory createRelationOptionalFactory() {
        return newRelationOptionalFactory();
    }

    protected RelationOptionalFactory newRelationOptionalFactory() {
        return new RelationOptionalFactory();
    }

    // ----- SQL Analyzer Factory -----
    /** {@inheritDoc} */
    public SqlAnalyzerFactory assistSqlAnalyzerFactory() { // lazy component
        if (_sqlAnalyzerFactory != null) {
            return _sqlAnalyzerFactory;
        }
        synchronized (this) {
            if (_sqlAnalyzerFactory != null) {
                return _sqlAnalyzerFactory;
            }
            _sqlAnalyzerFactory = createSqlAnalyzerFactory();
        }
        return _sqlAnalyzerFactory;
    }

    protected SqlAnalyzerFactory createSqlAnalyzerFactory() {
        return newMelodicSqlAnalyzerFactory();
    }

    protected MelodicSqlAnalyzerFactory newMelodicSqlAnalyzerFactory() {
        return new MelodicSqlAnalyzerFactory();
    }

    // ----- First OutsideSql Option -----
    /** {@inheritDoc} */
    public OutsideSqlOption assistFirstOutsideSqlOption(String tableDbName) {
        return prepareFirstOutsideSqlOption(tableDbName);
    }

    protected OutsideSqlOption prepareFirstOutsideSqlOption(String tableDbName) {
        if (MaDBFluteConfig.getInstance().isNonSpecifiedColumnAccessAllowed()) {
            OutsideSqlOption option = new OutsideSqlOption();
            option.setTableDbName(tableDbName);
            return option.enableNonSpecifiedColumnAccess();
        }
        return null; // no instance (lazy-loaded) as default
    }

    // ----- OutsideSql Executor Factory -----
    /** {@inheritDoc} */
    public OutsideSqlExecutorFactory assistOutsideSqlExecutorFactory() {
        if (_outsideSqlExecutorFactory != null) {
            return _outsideSqlExecutorFactory;
        }
        synchronized (this) {
            if (_outsideSqlExecutorFactory != null) {
                return _outsideSqlExecutorFactory;
            }
            _outsideSqlExecutorFactory = createOutsideSqlExecutorFactory();
        }
        return _outsideSqlExecutorFactory;
    }

    protected OutsideSqlExecutorFactory createOutsideSqlExecutorFactory() {
        OutsideSqlExecutorFactory factory = MaDBFluteConfig.getInstance().getOutsideSqlExecutorFactory();
        if (factory != null) {
            return factory;
        }
        return newDefaultOutsideSqlExecutorFactory(); // as default
    }

    protected DefaultOutsideSqlExecutorFactory newDefaultOutsideSqlExecutorFactory() {
        return new DefaultOutsideSqlExecutorFactory();
    }

    // ----- SQLException Digger -----
    /** {@inheritDoc} */
    public SQLExceptionDigger assistSQLExceptionDigger() {
        return createSQLExceptionDigger();
    }

    protected SQLExceptionDigger createSQLExceptionDigger() {
        return MaDBFluteConfig.getInstance().getSQLExceptionDigger();
    }

    // ----- SQLException Handler Factory -----
    /** {@inheritDoc} */
    public SQLExceptionHandlerFactory assistSQLExceptionHandlerFactory() { // lazy component
        if (_sqlExceptionHandlerFactory != null) {
            return _sqlExceptionHandlerFactory;
        }
        synchronized (this) {
            if (_sqlExceptionHandlerFactory != null) {
                return _sqlExceptionHandlerFactory;
            }
            _sqlExceptionHandlerFactory = createSQLExceptionHandlerFactory();
        }
        return _sqlExceptionHandlerFactory;
    }

    protected SQLExceptionHandlerFactory createSQLExceptionHandlerFactory() {
        return newDefaultSQLExceptionHandlerFactory();
    }

    protected DefaultSQLExceptionHandlerFactory newDefaultSQLExceptionHandlerFactory() {
        return new DefaultSQLExceptionHandlerFactory();
    }

    // ----- Sequence Cache Handler -----
    /** {@inheritDoc} */
    public SequenceCacheHandler assistSequenceCacheHandler() { // lazy component
        if (_sequenceCacheHandler != null) {
            return _sequenceCacheHandler;
        }
        synchronized (this) {
            if (_sequenceCacheHandler != null) {
                return _sequenceCacheHandler;
            }
            _sequenceCacheHandler = createSequenceCacheHandler();
        }
        return _sequenceCacheHandler;
    }

    protected SequenceCacheHandler createSequenceCacheHandler() {
        SequenceCacheHandler handler = newSequenceCacheHandler();
        SequenceCacheKeyGenerator generator = MaDBFluteConfig.getInstance().getSequenceCacheKeyGenerator();
        if (generator != null) {
            handler.setSequenceCacheKeyGenerator(generator);
        }
        handler.setInternalDebug(MaDBFluteConfig.getInstance().isInternalDebug());
        return handler;
    }

    protected SequenceCacheHandler newSequenceCacheHandler() {
        return new SequenceCacheHandler();
    }

    // ----- SQL File Encoding -----
    /** {@inheritDoc} */
    public String assistSqlFileEncoding() {
        return "UTF-8";
    }

    // ----- Statement Configuration -----
    /** {@inheritDoc} */
    public StatementConfig assistDefaultStatementConfig() {
        return MaDBFluteConfig.getInstance().getDefaultStatementConfig();
    }

    // ----- Behavior Exception Thrower -----
    /** {@inheritDoc} */
    public BehaviorExceptionThrower assistBehaviorExceptionThrower() {
        return new BehaviorExceptionThrower();
    }

    // ----- Geared Cipher Manager -----
    /** {@inheritDoc} */
    public GearedCipherManager assistGearedCipherManager() {
        return MaDBFluteConfig.getInstance().getGearedCipherManager();
    }

    // ----- Resource Parameter -----
    /** {@inheritDoc} */
    public ResourceParameter assistResourceParameter() {
        return createResourceParameter();
    }

    protected ResourceParameter createResourceParameter() {
        ResourceParameter parameter = newResourceParameter();
        parameter.setOutsideSqlPackage(MaDBFluteConfig.getInstance().getOutsideSqlPackage());
        parameter.setMappingDateTimeZoneProvider(MaDBFluteConfig.getInstance().getMappingDateTimeZoneProvider());
        parameter.setLogDatePattern(MaDBFluteConfig.getInstance().getLogDatePattern());
        parameter.setLogTimestampPattern(MaDBFluteConfig.getInstance().getLogTimestampPattern());
        parameter.setLogTimePattern(MaDBFluteConfig.getInstance().getLogTimePattern());
        parameter.setLogTimeZoneProvider(MaDBFluteConfig.getInstance().getLogTimeZoneProvider());
        parameter.setInternalDebug(MaDBFluteConfig.getInstance().isInternalDebug());
        return parameter;
    }

    protected ResourceParameter newResourceParameter() {
        return new ResourceParameter();
    }

    // ----- Invoke Names -----
    /** {@inheritDoc} */
    public String[] assistClientInvokeNames() {
        return DEFAULT_CLIENT_INVOKE_NAMES;
    }

    /** {@inheritDoc} */
    public String[] assistByPassInvokeNames() {
        return DEFAULT_BYPASS_INVOKE_NAMES;
    }

    // ===== Dispose =====
    /** {@inheritDoc} */
    public void toBeDisposable(final DisposableProcess callerProcess) { // for HotDeploy
        // do nothing: unsupported at this DI container
    }

    public boolean isDisposable() {
        return _disposable;
    }

    // ===== Accessor =====
    public void setDataSource(DataSource dataSource) {
        _dataSource = dataSource;
    }

    // to check the initializer is an instance of DBFluteInitializer
    // when the initializer is extended by DBFlute property
    // so this variable is actually unused in this class
    // (needs to be injected only when the DI container is set by its DI setting file)
    public void setIntroduction(MaDBFluteInitializer introduction) {
        _introduction = introduction;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.metrics;

import static org.apache.geode.test.awaitility.GeodeAwaitility.await;
import static org.assertj.core.api.Assertions.assertThat;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collection;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;

import io.micrometer.core.instrument.Counter;
import org.apache.commons.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TemporaryFolder;

import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionContext;
import org.apache.geode.internal.AvailablePortHelper;
import org.apache.geode.test.compiler.ClassBuilder;
import org.apache.geode.test.junit.categories.MetricsTest;
import org.apache.geode.test.junit.rules.gfsh.GfshRule;

/**
 * Acceptance test for the gateway-receiver "events received" metric.
 *
 * <p>Starts two single-server WAN clusters (a sender site and a receiver site) via gfsh, deploys a
 * {@code MetricsPublishingService} and a metric-reading function to the receiver, performs region
 * operations on the sender side, and asserts that the receiver's
 * {@code cache.gatewayreceiver.events.received} counter reflects the replicated events.
 */
@Category(MetricsTest.class)
public class GatewayReceiverMetricsTest {

  @Rule
  public GfshRule gfshRule = new GfshRule();
  @Rule
  public TemporaryFolder temporaryFolder = new TemporaryFolder();

  private static final String SENDER_LOCATOR_NAME = "sender-locator";
  private static final String RECEIVER_LOCATOR_NAME = "receiver-locator";
  private static final String SENDER_SERVER_NAME = "sender-server";
  private static final String RECEIVER_SERVER_NAME = "receiver-server";
  private static final String REGION_NAME = "region";
  private static final String GFSH_COMMAND_SEPARATOR = " ";

  private String senderLocatorFolder;
  private String receiverLocatorFolder;
  private String senderServerFolder;
  private String receiverServerFolder;
  private int receiverLocatorPort;
  private int senderLocatorPort;

  /**
   * Starts both WAN sites: a locator and a server per site, a serial gateway sender and a
   * replicated region on the sender site, and a gateway receiver plus matching region on the
   * receiver site. Also deploys the metric-reading function to the receiver cluster.
   */
  @Before
  public void startClusters() throws IOException {
    int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(8);

    receiverLocatorPort = ports[0];
    senderLocatorPort = ports[1];
    int senderServerPort = ports[2];
    int receiverServerPort = ports[3];
    int senderLocatorJmxPort = ports[4];
    int receiverLocatorJmxPort = ports[5];
    int senderLocatorHttpPort = ports[6];
    int receiverLocatorHttpPort = ports[7];

    // Distinct distributed-system ids are required for WAN replication between the two sites.
    int senderSystemId = 2;
    int receiverSystemId = 1;

    senderLocatorFolder = newFolder(SENDER_LOCATOR_NAME);
    receiverLocatorFolder = newFolder(RECEIVER_LOCATOR_NAME);
    senderServerFolder = newFolder(SENDER_SERVER_NAME);
    receiverServerFolder = newFolder(RECEIVER_SERVER_NAME);

    String startSenderLocatorCommand = String.join(GFSH_COMMAND_SEPARATOR,
        "start locator",
        "--name=" + SENDER_LOCATOR_NAME,
        "--dir=" + senderLocatorFolder,
        "--port=" + senderLocatorPort,
        "--locators=localhost[" + senderLocatorPort + "]",
        "--J=-Dgemfire.remote-locators=localhost[" + receiverLocatorPort + "]",
        "--J=-Dgemfire.distributed-system-id=" + senderSystemId,
        "--J=-Dgemfire.jmx-manager-start=true",
        "--J=-Dgemfire.jmx-manager-http-port=" + senderLocatorHttpPort,
        "--J=-Dgemfire.jmx-manager-port=" + senderLocatorJmxPort);

    String startReceiverLocatorCommand = String.join(GFSH_COMMAND_SEPARATOR,
        "start locator",
        "--name=" + RECEIVER_LOCATOR_NAME,
        "--dir=" + receiverLocatorFolder,
        "--port=" + receiverLocatorPort,
        "--locators=localhost[" + receiverLocatorPort + "]",
        "--J=-Dgemfire.remote-locators=localhost[" + senderLocatorPort + "]",
        "--J=-Dgemfire.distributed-system-id=" + receiverSystemId,
        // Fixed: the original had a stray trailing space inside this option.
        "--J=-Dgemfire.jmx-manager-start=true",
        "--J=-Dgemfire.jmx-manager-http-port=" + receiverLocatorHttpPort,
        "--J=-Dgemfire.jmx-manager-port=" + receiverLocatorJmxPort);

    String startSenderServerCommand = String.join(GFSH_COMMAND_SEPARATOR,
        "start server",
        "--name=" + SENDER_SERVER_NAME,
        "--dir=" + senderServerFolder,
        "--locators=localhost[" + senderLocatorPort + "]",
        "--server-port=" + senderServerPort,
        "--J=-Dgemfire.distributed-system-id=" + senderSystemId);

    String metricsPublishingServiceJarPath =
        newJarForMetricsPublishingServiceClass(SimpleMetricsPublishingService.class,
            "metrics-publishing-service.jar");

    String startReceiverServerCommand = String.join(GFSH_COMMAND_SEPARATOR,
        "start server",
        "--name=" + RECEIVER_SERVER_NAME,
        "--dir=" + receiverServerFolder,
        "--locators=localhost[" + receiverLocatorPort + "]",
        "--server-port=" + receiverServerPort,
        "--classpath=" + metricsPublishingServiceJarPath,
        "--J=-Dgemfire.distributed-system-id=" + receiverSystemId);

    gfshRule.execute(startSenderLocatorCommand, startReceiverLocatorCommand,
        startSenderServerCommand, startReceiverServerCommand);

    String gatewaySenderId = "gs";

    String connectToSenderLocatorCommand = "connect --locator=localhost[" + senderLocatorPort + "]";

    String startGatewaySenderCommand = String.join(GFSH_COMMAND_SEPARATOR,
        "create gateway-sender",
        "--id=" + gatewaySenderId,
        "--parallel=false",
        "--remote-distributed-system-id=" + receiverSystemId);

    String createSenderRegionCommand = String.join(GFSH_COMMAND_SEPARATOR,
        "create region",
        "--name=" + REGION_NAME,
        "--type=" + RegionShortcut.REPLICATE.name(),
        "--gateway-sender-id=" + gatewaySenderId);

    gfshRule.execute(connectToSenderLocatorCommand, startGatewaySenderCommand,
        createSenderRegionCommand);

    String connectToReceiverLocatorCommand =
        "connect --locator=localhost[" + receiverLocatorPort + "]";
    String startGatewayReceiverCommand = "create gateway-receiver";
    String createReceiverRegionCommand = String.join(GFSH_COMMAND_SEPARATOR,
        "create region",
        "--name=" + REGION_NAME,
        "--type=" + RegionShortcut.REPLICATE.name());

    gfshRule.execute(connectToReceiverLocatorCommand, startGatewayReceiverCommand,
        createReceiverRegionCommand);

    // Deploy function to members
    String functionJarPath =
        newJarForFunctionClass(GetEventsReceivedCountFunction.class, "function.jar");
    String deployCommand = "deploy --jar=" + functionJarPath;
    String listFunctionsCommand = "list functions";

    gfshRule.execute(connectToReceiverLocatorCommand, deployCommand, listFunctionsCommand);
  }

  /** Stops all four members, receiver side first. */
  @After
  public void stopClusters() {
    String stopReceiverServerCommand = "stop server --dir=" + receiverServerFolder;
    String stopSenderServerCommand = "stop server --dir=" + senderServerFolder;
    String stopReceiverLocatorCommand = "stop locator --dir=" + receiverLocatorFolder;
    String stopSenderLocatorCommand = "stop locator --dir=" + senderLocatorFolder;

    gfshRule.execute(stopReceiverServerCommand, stopSenderServerCommand,
        stopReceiverLocatorCommand, stopSenderLocatorCommand);
  }

  /**
   * Performs a put and a remove on the sender site (plus a region creation, which is not a
   * gateway event) and awaits until the receiver-side counter reports exactly the two
   * replicated events.
   */
  @Test
  public void whenPerformingOperations_thenGatewayReceiverEventsReceivedIncreases() {
    String connectToSenderLocatorCommand = "connect --locator=localhost[" + senderLocatorPort + "]";

    String doPutCommand = String.join(GFSH_COMMAND_SEPARATOR,
        "put",
        "--region=" + REGION_NAME,
        "--key=foo",
        "--value=bar");

    String doRemoveCommand = String.join(GFSH_COMMAND_SEPARATOR,
        "remove",
        "--region=" + REGION_NAME,
        "--key=foo");

    String doCreateRegionCommand = String.join(GFSH_COMMAND_SEPARATOR,
        "create region",
        "--name=blah",
        "--type=" + RegionShortcut.REPLICATE.name());

    gfshRule.execute(connectToSenderLocatorCommand, doPutCommand, doRemoveCommand,
        doCreateRegionCommand);

    String connectToReceiverLocatorCommand =
        "connect --locator=localhost[" + receiverLocatorPort + "]";
    String executeFunctionCommand = "execute function --id=" + GetEventsReceivedCountFunction.ID;

    // Only the put and the remove travel over the gateway; the region creation does not.
    Collection<String> gatewayEventsExpectedToReceive =
        Arrays.asList(doPutCommand, doRemoveCommand);

    await().untilAsserted(() -> {
      String output =
          gfshRule.execute(connectToReceiverLocatorCommand, executeFunctionCommand).getOutputText();
      assertThat(output.trim())
          .as("Returned count of events received.")
          .endsWith("[" + gatewayEventsExpectedToReceive.size() + ".0]");
    });
  }

  /** Creates a uniquely named working directory for a member and returns its absolute path. */
  private String newFolder(String folderName) throws IOException {
    return temporaryFolder.newFolder(folderName).getAbsolutePath();
  }

  /**
   * Compiles {@code clazz} into a fresh jar suitable for {@code deploy --jar}.
   *
   * @param clazz class to package
   * @param jarName file name for the jar inside the temporary folder
   * @return absolute path of the created jar
   */
  private String newJarForFunctionClass(Class<?> clazz, String jarName) throws IOException {
    File jar = temporaryFolder.newFile(jarName);
    new ClassBuilder().writeJarFromClass(clazz, jar);
    return jar.getAbsolutePath();
  }

  /**
   * Packages {@code clazz} into a jar together with a {@code META-INF/services} entry registering
   * it as a {@code MetricsPublishingService}, so the receiver server picks it up via
   * {@link java.util.ServiceLoader}.
   *
   * @param clazz service implementation to package
   * @param jarName file name for the jar inside the temporary folder
   * @return absolute path of the created jar
   * @throws IOException if the class bytes cannot be read or the jar cannot be written
   */
  private String newJarForMetricsPublishingServiceClass(Class<?> clazz, String jarName)
      throws IOException {
    File jar = temporaryFolder.newFile(jarName);

    String className = clazz.getName();
    String classAsPath = className.replace('.', '/') + ".class";

    byte[] classBytes;
    // try-with-resources: the original version leaked this stream.
    try (InputStream stream = clazz.getClassLoader().getResourceAsStream(classAsPath)) {
      if (stream == null) {
        throw new IOException("Class resource not found on classpath: " + classAsPath);
      }
      classBytes = IOUtils.toByteArray(stream);
    }

    // try-with-resources guarantees the jar stream is closed (and flushed) even if a write fails.
    try (JarOutputStream jarOutputStream = new JarOutputStream(new FileOutputStream(jar))) {
      // Add the class file to the JAR file
      JarEntry classEntry = new JarEntry(classAsPath);
      classEntry.setTime(System.currentTimeMillis());
      jarOutputStream.putNextEntry(classEntry);
      jarOutputStream.write(classBytes);
      jarOutputStream.closeEntry();

      // ServiceLoader registration entry naming the implementation class.
      String metaInfPath = "META-INF/services/org.apache.geode.metrics.MetricsPublishingService";
      JarEntry metaInfEntry = new JarEntry(metaInfPath);
      metaInfEntry.setTime(System.currentTimeMillis());
      jarOutputStream.putNextEntry(metaInfEntry);
      jarOutputStream.write(className.getBytes(StandardCharsets.UTF_8));
      jarOutputStream.closeEntry();
    }

    return jar.getAbsolutePath();
  }

  /**
   * Function executed on the receiver server; returns the current value of the
   * {@code cache.gatewayreceiver.events.received} counter, or a "Meter not found." marker if the
   * meter has not been registered.
   */
  public static class GetEventsReceivedCountFunction implements Function<Void> {
    static final String ID = "GetEventsReceivedCountFunction";

    @Override
    public void execute(FunctionContext<Void> context) {
      Counter eventsReceivedCounter = SimpleMetricsPublishingService.getRegistry()
          .find("cache.gatewayreceiver.events.received")
          .counter();

      Object result = eventsReceivedCounter == null
          ? "Meter not found."
          : eventsReceivedCounter.count();

      context.getResultSender().lastResult(result);
    }

    @Override
    public String getId() {
      return ID;
    }
  }
}
package org.apache.solr.schema; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Currency; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathFactory; import org.apache.lucene.analysis.util.ResourceLoader; import org.apache.lucene.analysis.util.ResourceLoaderAware; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.IndexableField; import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.FieldValueQuery; import org.apache.lucene.search.Filter; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryWrapperFilter; import org.apache.lucene.search.SortField; import 
org.apache.lucene.uninverting.UninvertingReader.Type; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.response.TextResponseWriter; import org.apache.solr.search.QParser; import org.apache.solr.search.SolrConstantScoreQuery; import org.apache.solr.search.function.ValueSourceRangeFilter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; /** * Field type for support of monetary values. * <p> * See <a href="http://wiki.apache.org/solr/CurrencyField">http://wiki.apache.org/solr/CurrencyField</a> */ public class CurrencyField extends FieldType implements SchemaAware, ResourceLoaderAware { protected static final String PARAM_DEFAULT_CURRENCY = "defaultCurrency"; protected static final String PARAM_RATE_PROVIDER_CLASS = "providerClass"; protected static final Object PARAM_PRECISION_STEP = "precisionStep"; protected static final String DEFAULT_RATE_PROVIDER_CLASS = "solr.FileExchangeRateProvider"; protected static final String DEFAULT_DEFAULT_CURRENCY = "USD"; protected static final String DEFAULT_PRECISION_STEP = "0"; protected static final String FIELD_SUFFIX_AMOUNT_RAW = "_amount_raw"; protected static final String FIELD_SUFFIX_CURRENCY = "_currency"; private IndexSchema schema; protected FieldType fieldTypeCurrency; protected FieldType fieldTypeAmountRaw; private String exchangeRateProviderClass; private String defaultCurrency; private ExchangeRateProvider provider; public static Logger log = LoggerFactory.getLogger(CurrencyField.class); /** * A wrapper arround <code>Currency.getInstance</code> that returns null * instead of throwing <code>IllegalArgumentException</code> * if the specified Currency does not exist in this JVM. 
* * @see Currency#getInstance(String) */ public static Currency getCurrency(final String code) { try { return Currency.getInstance(code); } catch (IllegalArgumentException e) { /* :NOOP: */ } return null; } @Override protected void init(IndexSchema schema, Map<String, String> args) { super.init(schema, args); if (this.isMultiValued()) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "CurrencyField types can not be multiValued: " + this.typeName); } this.schema = schema; this.exchangeRateProviderClass = args.get(PARAM_RATE_PROVIDER_CLASS); this.defaultCurrency = args.get(PARAM_DEFAULT_CURRENCY); if (this.defaultCurrency == null) { this.defaultCurrency = DEFAULT_DEFAULT_CURRENCY; } if (this.exchangeRateProviderClass == null) { this.exchangeRateProviderClass = DEFAULT_RATE_PROVIDER_CLASS; } if (null == getCurrency(this.defaultCurrency)) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Default currency code is not supported by this JVM: " + this.defaultCurrency); } String precisionStepString = args.get(PARAM_PRECISION_STEP); if (precisionStepString == null) { precisionStepString = DEFAULT_PRECISION_STEP; } // Initialize field type for amount fieldTypeAmountRaw = new TrieLongField(); fieldTypeAmountRaw.setTypeName("amount_raw_type_tlong"); Map<String,String> map = new HashMap<>(1); map.put("precisionStep", precisionStepString); fieldTypeAmountRaw.init(schema, map); // Initialize field type for currency string fieldTypeCurrency = new StrField(); fieldTypeCurrency.setTypeName("currency_type_string"); fieldTypeCurrency.init(schema, new HashMap<String,String>()); args.remove(PARAM_RATE_PROVIDER_CLASS); args.remove(PARAM_DEFAULT_CURRENCY); args.remove(PARAM_PRECISION_STEP); try { Class<? 
extends ExchangeRateProvider> c = schema.getResourceLoader().findClass(exchangeRateProviderClass, ExchangeRateProvider.class); provider = c.newInstance(); provider.init(args); } catch (Exception e) { throw new SolrException(ErrorCode.BAD_REQUEST, "Error instantiating exchange rate provider "+exchangeRateProviderClass+": " + e.getMessage(), e); } } @Override public boolean isPolyField() { return true; } @Override public void checkSchemaField(final SchemaField field) throws SolrException { super.checkSchemaField(field); if (field.multiValued()) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "CurrencyFields can not be multiValued: " + field.getName()); } } @Override public List<IndexableField> createFields(SchemaField field, Object externalVal, float boost) { CurrencyValue value = CurrencyValue.parse(externalVal.toString(), defaultCurrency); List<IndexableField> f = new ArrayList<>(); SchemaField amountField = getAmountField(field); f.add(amountField.createField(String.valueOf(value.getAmount()), amountField.indexed() && !amountField.omitNorms() ? boost : 1F)); SchemaField currencyField = getCurrencyField(field); f.add(currencyField.createField(value.getCurrencyCode(), currencyField.indexed() && !currencyField.omitNorms() ? 
boost : 1F)); if (field.stored()) { org.apache.lucene.document.FieldType customType = new org.apache.lucene.document.FieldType(); assert !customType.omitNorms(); customType.setStored(true); String storedValue = externalVal.toString().trim(); if (storedValue.indexOf(",") < 0) { storedValue += "," + defaultCurrency; } f.add(createField(field.getName(), storedValue, customType, 1F)); } return f; } private SchemaField getAmountField(SchemaField field) { return schema.getField(field.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_AMOUNT_RAW); } private SchemaField getCurrencyField(SchemaField field) { return schema.getField(field.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_CURRENCY); } private void createDynamicCurrencyField(String suffix, FieldType type) { String name = "*" + POLY_FIELD_SEPARATOR + suffix; Map<String, String> props = new HashMap<>(); props.put("indexed", "true"); props.put("stored", "false"); props.put("multiValued", "false"); props.put("omitNorms", "true"); int p = SchemaField.calcProps(name, type, props); schema.registerDynamicFields(SchemaField.create(name, type, p, null)); } /** * When index schema is informed, add dynamic fields "*____currency" and "*____amount_raw". * * {@inheritDoc} * * @param schema {@inheritDoc} */ @Override public void inform(IndexSchema schema) { this.schema = schema; createDynamicCurrencyField(FIELD_SUFFIX_CURRENCY, fieldTypeCurrency); createDynamicCurrencyField(FIELD_SUFFIX_AMOUNT_RAW, fieldTypeAmountRaw); } /** * Load the currency config when resource loader initialized. * * @param resourceLoader The resource loader. 
*/ @Override public void inform(ResourceLoader resourceLoader) { provider.inform(resourceLoader); boolean reloaded = provider.reload(); if(!reloaded) { log.warn("Failed reloading currencies"); } } @Override public Query getFieldQuery(QParser parser, SchemaField field, String externalVal) { CurrencyValue value = CurrencyValue.parse(externalVal, defaultCurrency); CurrencyValue valueDefault; valueDefault = value.convertTo(provider, defaultCurrency); return getRangeQuery(parser, field, valueDefault, valueDefault, true, true); } /** * <p> * Returns a ValueSource over this field in which the numeric value for * each document represents the indexed value as converted to the default * currency for the field, normalized to its most granular form based * on the default fractional digits. * </p> * <p> * For example: If the default Currency specified for a field is * <code>USD</code>, then the values returned by this value source would * represent the equivilent number of "cents" (ie: value in dollars * 100) * after converting each document's native currency to USD -- because the * default fractional digits for <code>USD</code> is "<code>2</code>". * So for a document whose indexed value was currently equivilent to * "<code>5.43,USD</code>" using the the exchange provider for this field, * this ValueSource would return a value of "<code>543</code>" * </p> * * @see #PARAM_DEFAULT_CURRENCY * @see #DEFAULT_DEFAULT_CURRENCY * @see Currency#getDefaultFractionDigits * @see #getConvertedValueSource */ public RawCurrencyValueSource getValueSource(SchemaField field, QParser parser) { field.checkFieldCacheSource(parser); return new RawCurrencyValueSource(field, defaultCurrency, parser); } /** * <p> * Returns a ValueSource over this field in which the numeric value for * each document represents the value from the underlying * <code>RawCurrencyValueSource</code> as converted to the specified target * Currency. 
* </p> * <p> * For example: If the <code>targetCurrencyCode</code> param is set to * <code>USD</code>, then the values returned by this value source would * represent the equivilent number of dollars after converting each * document's raw value to <code>USD</code>. So for a document whose * indexed value was currently equivilent to "<code>5.43,USD</code>" * using the the exchange provider for this field, this ValueSource would * return a value of "<code>5.43</code>" * </p> * * @param targetCurrencyCode The target currency for the resulting value source, if null the defaultCurrency for this field type will be used * @param source the raw ValueSource to wrap * @see #PARAM_DEFAULT_CURRENCY * @see #DEFAULT_DEFAULT_CURRENCY * @see #getValueSource */ public ValueSource getConvertedValueSource(String targetCurrencyCode, RawCurrencyValueSource source) { if (null == targetCurrencyCode) { targetCurrencyCode = defaultCurrency; } return new ConvertedCurrencyValueSource(targetCurrencyCode, source); } @Override public Query getRangeQuery(QParser parser, SchemaField field, String part1, String part2, final boolean minInclusive, final boolean maxInclusive) { final CurrencyValue p1 = CurrencyValue.parse(part1, defaultCurrency); final CurrencyValue p2 = CurrencyValue.parse(part2, defaultCurrency); if (p1 != null && p2 != null && !p1.getCurrencyCode().equals(p2.getCurrencyCode())) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Cannot parse range query " + part1 + " to " + part2 + ": range queries only supported when upper and lower bound have same currency."); } return getRangeQuery(parser, field, p1, p2, minInclusive, maxInclusive); } public Query getRangeQuery(QParser parser, SchemaField field, final CurrencyValue p1, final CurrencyValue p2, final boolean minInclusive, final boolean maxInclusive) { String currencyCode = (p1 != null) ? p1.getCurrencyCode() : (p2 != null) ? 
p2.getCurrencyCode() : defaultCurrency; // ValueSourceRangeFilter doesn't check exists(), so we have to final Filter docsWithValues = new QueryWrapperFilter(new FieldValueQuery(getAmountField(field).getName())); final Filter vsRangeFilter = new ValueSourceRangeFilter (new RawCurrencyValueSource(field, currencyCode, parser), p1 == null ? null : p1.getAmount() + "", p2 == null ? null : p2.getAmount() + "", minInclusive, maxInclusive); final BooleanQuery docsInRange = new BooleanQuery(); docsInRange.add(docsWithValues, Occur.FILTER); docsInRange.add(vsRangeFilter, Occur.FILTER); return new SolrConstantScoreQuery(new QueryWrapperFilter(docsInRange)); } @Override public SortField getSortField(SchemaField field, boolean reverse) { // Convert all values to default currency for sorting. return (new RawCurrencyValueSource(field, defaultCurrency, null)).getSortField(reverse); } @Override public Type getUninversionType(SchemaField sf) { return null; } @Override public void write(TextResponseWriter writer, String name, IndexableField field) throws IOException { writer.writeStr(name, field.stringValue(), true); } public ExchangeRateProvider getProvider() { return provider; } /** * <p> * A value source whose values represent the "normal" values * in the specified target currency. 
* </p> * @see RawCurrencyValueSource */ class ConvertedCurrencyValueSource extends ValueSource { private final Currency targetCurrency; private final RawCurrencyValueSource source; private final double rate; public ConvertedCurrencyValueSource(String targetCurrencyCode, RawCurrencyValueSource source) { this.source = source; this.targetCurrency = getCurrency(targetCurrencyCode); if (null == targetCurrency) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Currency code not supported by this JVM: " + targetCurrencyCode); } // the target digits & currency of our source, // become the source digits & currency of ourselves this.rate = provider.getExchangeRate (source.getTargetCurrency().getCurrencyCode(), targetCurrency.getCurrencyCode()); } @Override public FunctionValues getValues(Map context, LeafReaderContext reader) throws IOException { final FunctionValues amounts = source.getValues(context, reader); // the target digits & currency of our source, // become the source digits & currency of ourselves final String sourceCurrencyCode = source.getTargetCurrency().getCurrencyCode(); final int sourceFractionDigits = source.getTargetCurrency().getDefaultFractionDigits(); final double divisor = Math.pow(10D, targetCurrency.getDefaultFractionDigits()); return new FunctionValues() { @Override public boolean exists(int doc) { return amounts.exists(doc); } @Override public long longVal(int doc) { return (long) doubleVal(doc); } @Override public int intVal(int doc) { return (int) doubleVal(doc); } @Override public double doubleVal(int doc) { return CurrencyValue.convertAmount(rate, sourceCurrencyCode, amounts.longVal(doc), targetCurrency.getCurrencyCode()) / divisor; } @Override public float floatVal(int doc) { return CurrencyValue.convertAmount(rate, sourceCurrencyCode, amounts.longVal(doc), targetCurrency.getCurrencyCode()) / ((float)divisor); } @Override public String strVal(int doc) { return Double.toString(doubleVal(doc)); } @Override public String 
toString(int doc) { return name() + '(' + strVal(doc) + ')'; } }; } public String name() { return "currency"; } @Override public String description() { return name() + "(" + source.getField().getName() + "," + targetCurrency.getCurrencyCode()+")"; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ConvertedCurrencyValueSource that = (ConvertedCurrencyValueSource) o; return !(source != null ? !source.equals(that.source) : that.source != null) && (rate == that.rate) && !(targetCurrency != null ? !targetCurrency.equals(that.targetCurrency) : that.targetCurrency != null); } @Override public int hashCode() { int result = targetCurrency != null ? targetCurrency.hashCode() : 0; result = 31 * result + (source != null ? source.hashCode() : 0); result = 31 * (int) Double.doubleToLongBits(rate); return result; } } /** * <p> * A value source whose values represent the "raw" (ie: normalized using * the number of default fractional digits) values in the specified * target currency). 
* </p> * <p> * For example: if the specified target currency is "<code>USD</code>" * then the numeric values are the number of pennies in the value * (ie: <code>$n * 100</code>) since the number of defalt fractional * digits for <code>USD</code> is "<code>2</code>") * </p> * @see ConvertedCurrencyValueSource */ class RawCurrencyValueSource extends ValueSource { private static final long serialVersionUID = 1L; private final Currency targetCurrency; private ValueSource currencyValues; private ValueSource amountValues; private final SchemaField sf; public RawCurrencyValueSource(SchemaField sfield, String targetCurrencyCode, QParser parser) { this.sf = sfield; this.targetCurrency = getCurrency(targetCurrencyCode); if (null == targetCurrency) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Currency code not supported by this JVM: " + targetCurrencyCode); } SchemaField amountField = schema.getField(sf.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_AMOUNT_RAW); SchemaField currencyField = schema.getField(sf.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_CURRENCY); currencyValues = currencyField.getType().getValueSource(currencyField, parser); amountValues = amountField.getType().getValueSource(amountField, parser); } public SchemaField getField() { return sf; } public Currency getTargetCurrency() { return targetCurrency; } @Override public FunctionValues getValues(Map context, LeafReaderContext reader) throws IOException { final FunctionValues amounts = amountValues.getValues(context, reader); final FunctionValues currencies = currencyValues.getValues(context, reader); return new FunctionValues() { private final int MAX_CURRENCIES_TO_CACHE = 256; private final int[] fractionDigitCache = new int[MAX_CURRENCIES_TO_CACHE]; private final String[] currencyOrdToCurrencyCache = new String[MAX_CURRENCIES_TO_CACHE]; private final double[] exchangeRateCache = new double[MAX_CURRENCIES_TO_CACHE]; private int targetFractionDigits = -1; private int targetCurrencyOrd 
= -1; private boolean initializedCache; private String getDocCurrencyCode(int doc, int currencyOrd) { if (currencyOrd < MAX_CURRENCIES_TO_CACHE) { String currency = currencyOrdToCurrencyCache[currencyOrd]; if (currency == null) { currencyOrdToCurrencyCache[currencyOrd] = currency = currencies.strVal(doc); } if (currency == null) { currency = defaultCurrency; } if (targetCurrencyOrd == -1 && currency.equals(targetCurrency.getCurrencyCode() )) { targetCurrencyOrd = currencyOrd; } return currency; } else { return currencies.strVal(doc); } } /** throws a (Server Error) SolrException if the code is not valid */ private Currency getDocCurrency(int doc, int currencyOrd) { String code = getDocCurrencyCode(doc, currencyOrd); Currency c = getCurrency(code); if (null == c) { throw new SolrException (SolrException.ErrorCode.SERVER_ERROR, "Currency code of document is not supported by this JVM: "+code); } return c; } @Override public boolean exists(int doc) { return amounts.exists(doc); } @Override public long longVal(int doc) { long amount = amounts.longVal(doc); // bail fast using whatever ammounts defaults to if no value // (if we don't do this early, currencyOrd may be < 0, // causing index bounds exception if ( ! 
exists(doc) ) { return amount; } if (!initializedCache) { for (int i = 0; i < fractionDigitCache.length; i++) { fractionDigitCache[i] = -1; } initializedCache = true; } int currencyOrd = currencies.ordVal(doc); if (currencyOrd == targetCurrencyOrd) { return amount; } double exchangeRate; int sourceFractionDigits; if (targetFractionDigits == -1) { targetFractionDigits = targetCurrency.getDefaultFractionDigits(); } if (currencyOrd < MAX_CURRENCIES_TO_CACHE) { exchangeRate = exchangeRateCache[currencyOrd]; if (exchangeRate <= 0.0) { String sourceCurrencyCode = getDocCurrencyCode(doc, currencyOrd); exchangeRate = exchangeRateCache[currencyOrd] = provider.getExchangeRate(sourceCurrencyCode, targetCurrency.getCurrencyCode()); } sourceFractionDigits = fractionDigitCache[currencyOrd]; if (sourceFractionDigits == -1) { sourceFractionDigits = fractionDigitCache[currencyOrd] = getDocCurrency(doc, currencyOrd).getDefaultFractionDigits(); } } else { Currency source = getDocCurrency(doc, currencyOrd); exchangeRate = provider.getExchangeRate(source.getCurrencyCode(), targetCurrency.getCurrencyCode()); sourceFractionDigits = source.getDefaultFractionDigits(); } return CurrencyValue.convertAmount(exchangeRate, sourceFractionDigits, amount, targetFractionDigits); } @Override public int intVal(int doc) { return (int) longVal(doc); } @Override public double doubleVal(int doc) { return (double) longVal(doc); } @Override public float floatVal(int doc) { return (float) longVal(doc); } @Override public String strVal(int doc) { return Long.toString(longVal(doc)); } @Override public String toString(int doc) { return name() + '(' + amounts.toString(doc) + ',' + currencies.toString(doc) + ')'; } }; } public String name() { return "rawcurrency"; } @Override public String description() { return name() + "(" + sf.getName() + ",target="+targetCurrency.getCurrencyCode()+")"; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) 
// ---------------------------------------------------------------------------
// Tail of RawCurrencyValueSource.equals(Object); the method opens above this
// chunk, so only the comparison logic is visible here.
// ---------------------------------------------------------------------------
        return false;

      RawCurrencyValueSource that = (RawCurrencyValueSource) o;

      // Two sources are equal iff all three components match (null-safe comparison).
      return !(amountValues != null ? !amountValues.equals(that.amountValues) : that.amountValues != null) &&
        !(currencyValues != null ? !currencyValues.equals(that.currencyValues) : that.currencyValues != null) &&
        !(targetCurrency != null ? !targetCurrency.equals(that.targetCurrency) : that.targetCurrency != null);
    }

    /** Hash code built from the same three fields consulted by {@code equals(Object)}. */
    @Override
    public int hashCode() {
      int result = targetCurrency != null ? targetCurrency.hashCode() : 0;
      result = 31 * result + (currencyValues != null ? currencyValues.hashCode() : 0);
      result = 31 * result + (amountValues != null ? amountValues.hashCode() : 0);
      return result;
    }
  }
}

/**
 * Configuration for currency. Provides currency exchange rates loaded from an
 * XML configuration file resolved through a {@code ResourceLoader}.
 */
class FileExchangeRateProvider implements ExchangeRateProvider {
  public static Logger log = LoggerFactory.getLogger(FileExchangeRateProvider.class);

  // Name of the init parameter that points at the rates XML file.
  protected static final String PARAM_CURRENCY_CONFIG = "currencyConfig";

  // Exchange rate map, maps Currency Code -> Currency Code -> Rate
  private Map<String, Map<String, Double>> rates = new HashMap<>();

  private String currencyConfigFile;
  private ResourceLoader loader;

  /**
   * Returns the currently known exchange rate between two currencies. If a direct rate has been loaded,
   * it is used. Otherwise, if a rate is known to convert the target currency to the source, the inverse
   * exchange rate is computed.
   *
   * @param sourceCurrencyCode The source currency being converted from.
   * @param targetCurrencyCode The target currency being converted to.
   * @return The exchange rate.
   * @throws SolrException if the requested currency pair cannot be found
   */
  @Override
  public double getExchangeRate(String sourceCurrencyCode, String targetCurrencyCode) {
    if (sourceCurrencyCode == null || targetCurrencyCode == null) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Cannot get exchange rate; currency was null.");
    }

    // Identity conversion needs no rate entry.
    if (sourceCurrencyCode.equals(targetCurrencyCode)) {
      return 1.0;
    }

    Double directRate = lookupRate(sourceCurrencyCode, targetCurrencyCode);

    if (directRate != null) {
      return directRate;
    }

    // No direct rate: try the reverse pair and invert it.
    Double symmetricRate = lookupRate(targetCurrencyCode, sourceCurrencyCode);

    if (symmetricRate != null) {
      return 1.0 / symmetricRate;
    }

    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No available conversion rate between " + sourceCurrencyCode + " to " + targetCurrencyCode);
  }

  /**
   * Looks up the current known rate, if any, between the source and target currencies.
   *
   * @param sourceCurrencyCode The source currency being converted from.
   * @param targetCurrencyCode The target currency being converted to.
   * @return The exchange rate, or null if no rate has been registered.
   */
  private Double lookupRate(String sourceCurrencyCode, String targetCurrencyCode) {
    Map<String, Double> rhs = rates.get(sourceCurrencyCode);

    if (rhs != null) {
      return rhs.get(targetCurrencyCode);
    }

    return null;
  }

  /**
   * Registers the specified exchange rate.
   *
   * @param ratesMap           The map to add rate to
   * @param sourceCurrencyCode The source currency.
   * @param targetCurrencyCode The target currency.
   * @param rate               The known exchange rate.
   */
  private void addRate(Map<String, Map<String, Double>> ratesMap, String sourceCurrencyCode, String targetCurrencyCode, double rate) {
    Map<String, Double> rhs = ratesMap.get(sourceCurrencyCode);

    if (rhs == null) {
      rhs = new HashMap<>();
      ratesMap.put(sourceCurrencyCode, rhs);
    }

    rhs.put(targetCurrencyCode, rate);
  }

  /** Equality is defined solely by the loaded rates map (null-safe). */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;

    FileExchangeRateProvider that = (FileExchangeRateProvider) o;

    return !(rates != null ? !rates.equals(that.rates) : that.rates != null);
  }

  @Override
  public int hashCode() {
    return rates != null ? rates.hashCode() : 0;
  }

  @Override
  public String toString() {
    // Note: rates.size() counts distinct *source* currencies, not rate pairs.
    return "["+this.getClass().getName()+" : " + rates.size() + " rates.]";
  }

  /** Returns every currency code appearing on either side of a loaded rate. */
  @Override
  public Set<String> listAvailableCurrencies() {
    Set<String> currencies = new HashSet<>();
    for(String from : rates.keySet()) {
      currencies.add(from);
      for(String to : rates.get(from).keySet()) {
        currencies.add(to);
      }
    }
    return currencies;
  }

  /**
   * Re-reads the currency configuration file and replaces the in-memory rate
   * table only after a fully successful parse.
   *
   * @throws SolrException wrapping any I/O or XML parsing failure
   */
  @Override
  public boolean reload() throws SolrException {
    InputStream is = null;
    // Build into a temporary map so a failed parse leaves the old rates intact.
    Map<String, Map<String, Double>> tmpRates = new HashMap<>();
    try {
      log.info("Reloading exchange rates from file "+this.currencyConfigFile);

      is = loader.openResource(currencyConfigFile);
      javax.xml.parsers.DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
      // NOTE(review): external entities / DTDs are not disabled here; if the
      // config file can come from an untrusted source this is XXE-exposed — confirm.
      try {
        dbf.setXIncludeAware(true);
        dbf.setNamespaceAware(true);
      } catch (UnsupportedOperationException e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "XML parser doesn't support XInclude option", e);
      }

      try {
        Document doc = dbf.newDocumentBuilder().parse(is);
        XPathFactory xpathFactory = XPathFactory.newInstance();
        XPath xpath = xpathFactory.newXPath();

        // Parse exchange rates.
        NodeList nodes = (NodeList) xpath.evaluate("/currencyConfig/rates/rate", doc, XPathConstants.NODESET);

        for (int i = 0; i < nodes.getLength(); i++) {
          Node rateNode = nodes.item(i);
          NamedNodeMap attributes = rateNode.getAttributes();
          Node from = attributes.getNamedItem("from");
          Node to = attributes.getNamedItem("to");
          Node rate = attributes.getNamedItem("rate");

          if (from == null || to == null || rate == null) {
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Exchange rate missing attributes (required: from, to, rate) " + rateNode);
          }

          String fromCurrency = from.getNodeValue();
          String toCurrency = to.getNodeValue();
          Double exchangeRate;

          // Both currency codes must be resolvable in this JVM's ISO table.
          if (null == CurrencyField.getCurrency(fromCurrency)) {
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Specified 'from' currency not supported in this JVM: " + fromCurrency);
          }

          if (null == CurrencyField.getCurrency(toCurrency)) {
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Specified 'to' currency not supported in this JVM: " + toCurrency);
          }

          try {
            exchangeRate = Double.parseDouble(rate.getNodeValue());
          } catch (NumberFormatException e) {
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Could not parse exchange rate: " + rateNode, e);
          }

          addRate(tmpRates, fromCurrency, toCurrency, exchangeRate);
        }
      } catch (SAXException | XPathExpressionException | ParserConfigurationException | IOException e) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error parsing currency config.", e);
      }
    } catch (IOException e) {
      // Only reached by the openResource() call; parse-time IOExceptions are
      // caught by the inner multi-catch above.
      throw new SolrException(ErrorCode.SERVER_ERROR, "Error while opening Currency configuration file "+currencyConfigFile, e);
    } finally {
      try {
        if (is != null) {
          is.close();
        }
      } catch (IOException e) {
        // NOTE(review): swallows the close failure to stderr; should use log.error instead.
        e.printStackTrace();
      }
    }

    // Atomically swap in the new rates map, if it loaded successfully
    this.rates = tmpRates;

    return true;
  }

  /**
   * Captures the config-file name from the init params and strips out the
   * parameters this provider consumed.
   *
   * @throws SolrException if the required {@code currencyConfig} param is absent
   */
  @Override
  public void init(Map<String,String> params) throws SolrException {
    this.currencyConfigFile = params.get(PARAM_CURRENCY_CONFIG);
    if(currencyConfigFile == null) {
      throw new SolrException(ErrorCode.NOT_FOUND, "Missing required configuration "+PARAM_CURRENCY_CONFIG);
    }

    // Removing config params custom to us
    params.remove(PARAM_CURRENCY_CONFIG);
  }

  /** Stores the resource loader and immediately performs the first rates load. */
  @Override
  public void inform(ResourceLoader loader) throws SolrException {
    if(loader == null) {
      throw new SolrException(ErrorCode.SERVER_ERROR, "Needs ResourceLoader in order to load config file");
    }

    this.loader = loader;

    reload();
  }
}

/**
 * Represents a Currency field value, which includes a long amount and ISO currency code.
 */
class CurrencyValue {
  // Amount in the currency's minor units (e.g. cents), not whole units.
  private long amount;
  private String currencyCode;

  /**
   * Constructs a new currency value.
   *
   * @param amount       The amount.
   * @param currencyCode The currency code.
   */
  public CurrencyValue(long amount, String currencyCode) {
    this.amount = amount;
    this.currencyCode = currencyCode;
  }

  /**
   * Constructs a new currency value by parsing the specific input.
   * <p/>
   * Currency values are expected to be in the format &lt;amount&gt;,&lt;currency code&gt;,
   * for example, "500,USD" would represent 5 U.S. Dollars.
   * <p/>
   * If no currency code is specified, the default is assumed.
   *
   * @param externalVal     The value to parse.
   * @param defaultCurrency The default currency.
   * @return The parsed CurrencyValue, or null for null input or the wildcard "*".
   */
  public static CurrencyValue parse(String externalVal, String defaultCurrency) {
    if (externalVal == null) {
      return null;
    }
    String amount = externalVal;
    String code = defaultCurrency;

    if (externalVal.contains(",")) {
      String[] amountAndCode = externalVal.split(",");
      amount = amountAndCode[0];
      code = amountAndCode[1];
    }

    // "*" is the open-ended range endpoint; it carries no value.
    if (amount.equals("*")) {
      return null;
    }

    Currency currency = CurrencyField.getCurrency(code);

    if (currency == null) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Currency code not supported by this JVM: " + code);
    }

    try {
      // Scale the decimal amount into minor units (10^fractionDigits), rounding.
      double value = Double.parseDouble(amount);
      long currencyValue = Math.round(value * Math.pow(10.0, currency.getDefaultFractionDigits()));

      return new CurrencyValue(currencyValue, code);
    } catch (NumberFormatException e) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
    }
  }

  /**
   * The amount of the CurrencyValue.
   *
   * @return The amount.
   */
  public long getAmount() {
    return amount;
  }

  /**
   * The ISO currency code of the CurrencyValue.
   *
   * @return The currency code.
   */
  public String getCurrencyCode() {
    return currencyCode;
  }

  /**
   * Performs a currency conversion &amp; unit conversion.
   *
   * @param exchangeRates      Exchange rates to apply.
   * @param sourceCurrencyCode The source currency code.
   * @param sourceAmount       The source amount.
   * @param targetCurrencyCode The target currency code.
   * @return The converted indexable units after the exchange rate and currency fraction digits are applied.
   */
  public static long convertAmount(ExchangeRateProvider exchangeRates, String sourceCurrencyCode, long sourceAmount, String targetCurrencyCode) {
    double exchangeRate = exchangeRates.getExchangeRate(sourceCurrencyCode, targetCurrencyCode);
    return convertAmount(exchangeRate, sourceCurrencyCode, sourceAmount, targetCurrencyCode);
  }

  /**
   * Performs a currency conversion &amp; unit conversion.
   *
   * @param exchangeRate         Exchange rate to apply.
   * @param sourceFractionDigits The fraction digits of the source.
   * @param sourceAmount         The source amount.
   * @param targetFractionDigits The fraction digits of the target.
   * @return The converted indexable units after the exchange rate and currency fraction digits are applied.
   */
  public static long convertAmount(final double exchangeRate, final int sourceFractionDigits, final long sourceAmount, final int targetFractionDigits) {
    int digitDelta = targetFractionDigits - sourceFractionDigits;
    double value = ((double) sourceAmount * exchangeRate);

    // Rescale between currencies with differing minor-unit precision.
    if (digitDelta != 0) {
      if (digitDelta < 0) {
        for (int i = 0; i < -digitDelta; i++) {
          value *= 0.1;
        }
      } else {
        for (int i = 0; i < digitDelta; i++) {
          value *= 10.0;
        }
      }
    }

    // Cast truncates toward zero (no rounding); repeated *0.1 also accumulates
    // floating-point error — NOTE(review): confirm this matches expectations.
    return (long) value;
  }

  /**
   * Performs a currency conversion &amp; unit conversion.
   *
   * @param exchangeRate       Exchange rate to apply.
   * @param sourceCurrencyCode The source currency code.
   * @param sourceAmount       The source amount.
   * @param targetCurrencyCode The target currency code.
   * @return The converted indexable units after the exchange rate and currency fraction digits are applied.
   */
  public static long convertAmount(double exchangeRate, String sourceCurrencyCode, long sourceAmount, String targetCurrencyCode) {
    // Same currency: the rate (even if not exactly 1.0) is ignored.
    if (targetCurrencyCode.equals(sourceCurrencyCode)) {
      return sourceAmount;
    }

    int sourceFractionDigits = Currency.getInstance(sourceCurrencyCode).getDefaultFractionDigits();
    Currency targetCurrency = Currency.getInstance(targetCurrencyCode);
    int targetFractionDigits = targetCurrency.getDefaultFractionDigits();
    return convertAmount(exchangeRate, sourceFractionDigits, sourceAmount, targetFractionDigits);
  }

  /**
   * Returns a new CurrencyValue that is the conversion of this CurrencyValue to the specified currency.
   *
   * @param exchangeRates      The exchange rate provider.
   * @param targetCurrencyCode The target currency code to convert this CurrencyValue to.
   * @return The converted CurrencyValue.
   */
  public CurrencyValue convertTo(ExchangeRateProvider exchangeRates, String targetCurrencyCode) {
    return new CurrencyValue(convertAmount(exchangeRates, this.getCurrencyCode(), this.getAmount(), targetCurrencyCode), targetCurrencyCode);
  }

  @Override
  public String toString() {
    // Round-trips through parse(): "<minorUnits>,<code>".
    return String.valueOf(amount) + "," + currencyCode;
  }
}
package gov.va.med.lom.kaajee.jboss.security.auth;

import gov.va.med.authentication.kernel.KaajeeInstitutionResourceException;
import gov.va.med.authentication.kernel.VistaDivisionVO;
import gov.va.med.term.access.Institution;

import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.TreeMap;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;

/**
 * For internal KAAJEE use only. Value object for obtaining / manipulating configuration values.
 * <p>
 * This class is public and implemented as a javabean so that the KAAJEE login JSP pages can access these
 * configuration values.
 * @author VHIT Security and Other Common Services (S&amp;OCS)
 * @version 1.1.0.007
 */
public class ConfigurationVO {

  // Lazily-created singleton instance; guarded by syncObj.
  private static ConfigurationVO me = null;

  private String hostApplicationName;
  private String introductoryText;
  private String institutionLogonDropDownList;
  //AC/OIFO - Next line added to support sorted institution list by name on the JSP Institution Drop-Down:
  private String institutionLogonDropDownListByName;
  // Keyed by station number (string-sorted).
  private TreeMap<String, VistaDivisionVO> institutionMap;
  //AC/OIFO - Next line added to support sorted institution list by name on the JSP Institution Drop-Down:
  private TreeMap<String, VistaDivisionVO> institutionMapByName;
  private String oracleDAOFactoryJndiDataSourceName;
  private String daoFactoryDatabaseChoice;
  private boolean retrieveUserNewPersonDivisions;
  private boolean retrieveComputingFacilityDivisions;
  private boolean cactusModeEnabled;
  private String contextName;

  private static Log logger = LogFactory.getLog(ConfigurationVO.class);
  // Lock object for singleton creation/access.
  private static Object syncObj = new Object();

  /**
   * private constructor. This class does not need to be instantiated.
   */
  private ConfigurationVO(Document configDoc) {
    try {
      init(configDoc);
    } catch (KaajeeInstitutionResourceException e) {
      // Initialization failure is logged, not rethrown: the singleton is still
      // created with default (empty) settings.
      logger.error("Could not initialize institutions.", e);
    }
  }

  /**
   * retrieves the instance of this singleton object. Returns null if singleton has somehow not been
   * initialized yet.
   * @return the singleton instance of ConfigurationVO
   * @throws IllegalStateException thrown if singleton not yet initialized
   */
  public static ConfigurationVO getInstance() throws IllegalStateException {
    if (logger.isDebugEnabled()) {
      logger.debug("In getInstance().");
    }
    synchronized (syncObj) {
      if (me == null) {
        String exceptionString = "ConfigurationVO singleton not yet initialized.";
        if (logger.isDebugEnabled()) {
          logger.error(exceptionString);
        }
        throw new IllegalStateException(exceptionString);
      }
      return me;
    }
  }

  /**
   * Initializes the singleton.
   * @param configDoc Document containing KAAJEE config settings
   * @throws IllegalStateException if singleton already initialized
   */
  static void createInstance(Document configDoc) throws IllegalStateException {
    if (logger.isDebugEnabled()) {
      logger.debug("In createInstance().");
    }
    synchronized (syncObj) {
      if (me == null) {
        if (logger.isDebugEnabled()) {
          logger.debug("creating a new ConfigurationVO.");
        }
        me = new ConfigurationVO(configDoc);
      } else {
        String exceptionString = "ConfigurationVO instance already exists.";
        if (logger.isDebugEnabled()) {
          logger.error(exceptionString);
        }
        throw new IllegalStateException(exceptionString);
      }
    }
  }

  /**
   * Populates all configuration fields from the supplied KAAJEE config DOM,
   * falling back to defaults for anything missing, then pre-renders the JSP
   * drop-down option lists.
   * @param configDoc parsed KAAJEE configuration document (may be null)
   * @throws KaajeeInstitutionResourceException if an institution lookup fails
   */
  private void init(Document configDoc) throws KaajeeInstitutionResourceException {
    if (logger.isDebugEnabled()) {
      logger.debug("initializing.");
    }
    // initialize defaults
    this.institutionMap = new TreeMap<String, VistaDivisionVO>();
    //AC/OIFO - Next line added to support sorted institution list by name on the JSP Institution Drop-Down:
    this.institutionMapByName = new TreeMap<String, VistaDivisionVO>();
    this.introductoryText = "[Introductory Text for the system not found. Adminstrators should fill in the KAAJEE settings file to furnish the required introductory text.]";
    this.hostApplicationName = "[unidentified application]";
    this.oracleDAOFactoryJndiDataSourceName = "";
    this.daoFactoryDatabaseChoice = "";
    this.institutionLogonDropDownList = "";
    this.retrieveComputingFacilityDivisions = false;
    this.retrieveUserNewPersonDivisions = false;
    this.cactusModeEnabled = false;
    this.contextName = "";

    // process document
    if (configDoc == null) {
      logger.error("Configuration file document object was null. Can't retrieve KAAJEE settings.");
    } else {
      // retrieve application name
      {
        NodeList appNameNodeList = configDoc.getElementsByTagName("host-application-name");
        if (appNameNodeList.getLength() > 0) {
          hostApplicationName = ((Text) appNameNodeList.item(0).getFirstChild()).getData();
        }
        if (logger.isDebugEnabled()) {
          logger.debug("set host application name: " + hostApplicationName);
        }
      }

      // retrieve context-name
      {
        NodeList ContextNode = configDoc.getElementsByTagName("context-root-name");
        if (ContextNode.getLength() > 0) {
          contextName = ((Text)ContextNode.item(0).getFirstChild()).getData();
        }
        if (logger.isDebugEnabled()) {
          logger.debug("set ContextName : " + contextName);
        }
      }

      // retrieve introductory text; '~' characters are treated as paragraph
      // breaks and replaced with "&nbsp;<br>" for JSP rendering.
      {
        String rawText = "";
        StringBuffer sb = new StringBuffer();
        NodeList introTextNodeList = configDoc.getElementsByTagName("system-announcement");
        if (introTextNodeList.getLength() > 0) {
          rawText = ((Text) introTextNodeList.item(0).getFirstChild()).getData();
        }
        StringTokenizer st = new StringTokenizer(rawText, "~");
        while (st.hasMoreTokens()) {
          sb.append(st.nextToken());
          sb.append("&nbsp;<br>");
        }
        introductoryText = sb.toString();
        if (logger.isDebugEnabled()) {
          logger.debug("retrieved introductory text: " + introductoryText);
        }
      }

      // retrieve database choice for DAO database choice
      {
        NodeList databaseChoiceNodeList = configDoc.getElementsByTagName("database-choice");
        if (databaseChoiceNodeList.getLength() > 0) {
          daoFactoryDatabaseChoice = ((Text) databaseChoiceNodeList.item(0).getFirstChild()).getData();
        }
        if (logger.isDebugEnabled()) {
          logger.debug("retrieved DAO database choice: " + daoFactoryDatabaseChoice);
        }
      }

      // retrieve Oracle DAO JNDI Datasource Name
      {
        NodeList jndiDataSourceNodeList = configDoc.getElementsByTagName("database-jndi-data-source-name");
        if (jndiDataSourceNodeList.getLength() > 0) {
          oracleDAOFactoryJndiDataSourceName = ((Text) jndiDataSourceNodeList.item(0).getFirstChild()).getData();
        }
        if (logger.isDebugEnabled()) {
          logger.debug( "retrieved Oracle DAO Factory JNDI DataSource Name: " + oracleDAOFactoryJndiDataSourceName);
        }
      }

      // retrieve divisions from user's new Person file entries?
      // <user-new-person-divisions retrieve="true" />
      {
        NodeList npDivisionNodeList = configDoc.getElementsByTagName("user-new-person-divisions");
        if (npDivisionNodeList.getLength() > 0) {
          Node npDivisionNode = npDivisionNodeList.item(0);
          NamedNodeMap attributes = npDivisionNode.getAttributes();
          // NOTE(review): item(0) assumes "retrieve" is the element's first/only
          // attribute; getNamedItem("retrieve") would be more robust.
          Attr retrieveAttr = (Attr) attributes.item(0);
          if ("retrieve".equals(retrieveAttr.getName()) && ("true".equals(retrieveAttr.getValue()))) {
            retrieveUserNewPersonDivisions = true;
            if (logger.isDebugEnabled()) {
              logger.debug("Setting 'retrieve new person divisions' to true.");
            }
          }
        }
      }

      // retrieve all divisions related to the login computing facility?
      // <computing-facility-divisions retrieve="true" />
      {
        NodeList cfDivisionNodeList = configDoc.getElementsByTagName("computing-facility-divisions");
        if (cfDivisionNodeList.getLength() > 0) {
          Node cfDivisionNode = cfDivisionNodeList.item(0);
          NamedNodeMap attributes = cfDivisionNode.getAttributes();
          // NOTE(review): same first-attribute assumption as above.
          Attr retrieveAttr = (Attr) attributes.item(0);
          if ("retrieve".equals(retrieveAttr.getName()) && ("true".equals(retrieveAttr.getValue()))) {
            retrieveComputingFacilityDivisions = true;
            if (logger.isDebugEnabled()) {
              logger.debug("Setting 'retrieve computing facility divisions' to true.");
            }
          }
        }
      }

      // Cactus Mode
      // <cactus-insecure-mode enabled="true" />
      {
        NodeList cfDivisionNodeList = configDoc.getElementsByTagName("cactus-insecure-mode");
        if (cfDivisionNodeList.getLength() > 0) {
          Node cfDivisionNode = cfDivisionNodeList.item(0);
          NamedNodeMap attributes = cfDivisionNode.getAttributes();
          Attr retrieveAttr = (Attr) attributes.item(0);
          if ("enabled".equals(retrieveAttr.getName()) && ("true".equals(retrieveAttr.getValue()))) {
            cactusModeEnabled = true;
            //TODO add a Production check if there's a production system marker to check against
            if (logger.isDebugEnabled()) {
              logger.debug( "Setting Cactus mode to True. This mode should NEVER be enabled for a production application.");
            }
          }
        }
      }

      //Stations
      {
        NodeList institutionMappingNodeList = configDoc.getElementsByTagName("station-number");

        for (int i = 0; i < institutionMappingNodeList.getLength(); i++) {
          String stationNumber = ((Text) institutionMappingNodeList.item(i).getFirstChild()).getData();
          String logonDisplayName = null;
          if (stationNumber == null) {
            if (logger.isDebugEnabled()) {
              logger.debug( "Attempt to retrieve station number from KAAJEE configuration resulted in NULL station number.");
            }
          } else {
            if (logger.isDebugEnabled()) {
              logger.debug("retrieved station number from KAAJEE configuration: " + stationNumber);
            }
            // getting real name from Institution table/SDS
            Institution inst = null;
            try {
              inst = Institution.factory.obtainByStationNumber(stationNumber);
            } catch (Throwable t) {
              // NOTE(review): the original cause and stack trace are discarded
              // here (only the message is kept) — a JRE 1.3 limitation per the
              // comment below; consider chaining the cause if the platform allows.
              StringBuffer sb = new StringBuffer("Error retrieving institution. ");
              sb.append(t.getMessage());
              // following can't be done for JRE 1.3.x
              // sb.append(t.getStackTrace());
              throw new KaajeeInstitutionResourceException(sb.toString());
            }
            if (inst == null) {
              // NOTE(review): when the institution is not found, this warning
              // display name is computed but never used — the station is NOT
              // added to either map and is silently omitted from the drop-downs.
              logonDisplayName = stationNumber + " [WARNING: NOT FOUND IN INSTITUTION TABLE]";
              if (logger.isDebugEnabled()) {
                logger.debug("SDS returned null institution for station#: " + stationNumber);
              }
            } else {
              logonDisplayName = inst.getName();
              if (logger.isDebugEnabled()) {
                logger.debug("Used SDS for login display name: " + logonDisplayName);
              }
              VistaDivisionVO icdVO = new VistaDivisionVO();
              icdVO.setName(logonDisplayName);
              icdVO.setNumber(stationNumber);
              //TODO check against double-entering (e.g. overwriting) first?
              institutionMap.put(stationNumber, icdVO);
              //AC/OIFO - Next line added to support sorted institution list by name on the JSP Institution Drop-Down:
              institutionMapByName.put(logonDisplayName, icdVO);
              if (logger.isDebugEnabled()) {
                logger.debug("added new TreeMap entry: " + icdVO.toString());
              }
            }
          }
        }
      }

      // generate the logon DropDownList string
      institutionLogonDropDownList = createJspDropDownListOptions();
      institutionLogonDropDownListByName = createJspDropDownListOptionsByName();
    }
  }

  /**
   * Returns a list of &lt;OPTION&gt;s for inclusion in a JSP page's SELECT drop down list, created from
   * the set of division mappings maintained by this object. The value of
   * each option is the station number from the division mapping; the text
   * of each option is the value of the logonDisplayName element of the
   * mapping, concatenated with the station number in parentheses. The list
   * is sorted in station number order, sorted as strings.
   * @return Concatenated string of &lt;OPTION&gt;s for use in a JSP login page's SELECT drop down list.
   */
  public String getJspDropDownListLoginOptions() {
    return institutionLogonDropDownList;
  }

  /**
   * Returns a list of &lt;OPTION&gt;s for inclusion in a JSP page's SELECT drop down list, created from
   * the set of division mappings maintained by this object. The value of
   * each option is the station number from the division mapping; the text
   * of each option is the value of the logonDisplayName element of the
   * mapping, concatenated with the station number in parentheses. The list
   * is sorted in SDS logon display name order, sorted as strings.
   * @return Concatenated string of &lt;OPTION&gt;s for use in a JSP login page's SELECT drop down list.
   */
  public String getJspDropDownListLoginOptionsByName() {
    return institutionLogonDropDownListByName;
  }

  /**
   * internal method to create the list of &lt;OPTION&gt;s for inclusion in a JSP page's SELECT drop down list.
   * @return Concatenated string of &lt;OPTION&gt;s for use in a JSP login page's SELECT drop down list.
   */
  private String createJspDropDownListOptions() {
    //TODO need a way to select one option...? Or can it be done through javascript.
    Set<String> instSet = this.institutionMap.keySet();
    Iterator<String> instSetIterator = instSet.iterator();
    StringBuffer sb = new StringBuffer();
    // NOTE(review): this loop deliberately iterates until the iterator throws
    // NoSuchElementException; while (instSetIterator.hasNext()) would avoid
    // using exceptions for control flow.
    while (true) {
      try {
        String stationNumber = (String) instSetIterator.next();
        VistaDivisionVO im = (VistaDivisionVO) institutionMap.get(stationNumber);
        sb.append("<OPTION value=");
        sb.append(im.getNumber());
        sb.append(">");
        sb.append(im.getName());
        sb.append(" (");
        sb.append(im.getNumber());
        sb.append(")");
        sb.append("</OPTION>\n");
      } catch (NoSuchElementException e) {
        break;
      }
    }
    if (logger.isDebugEnabled()) {
      logger.debug(sb.toString());
    }
    return sb.toString();
  }

  /**
   * internal method to create the list of &lt;OPTION&gt;s for inclusion in a JSP page's SELECT drop down list.
   * @return Concatenated string of &lt;OPTION&gt;s for use in a JSP login page's SELECT drop down list.
   */
  private String createJspDropDownListOptionsByName() {
    //TODO need a way to select one option...? Or can it be done through javascript.
    Set<String> instSet = this.institutionMapByName.keySet();
    Iterator<String> instSetIterator = instSet.iterator();
    StringBuffer sb = new StringBuffer();
    // NOTE(review): same exception-terminated loop pattern as
    // createJspDropDownListOptions(); see note there.
    while (true) {
      try {
        String logonDisplayName = (String) instSetIterator.next();
        VistaDivisionVO im = (VistaDivisionVO) institutionMapByName.get(logonDisplayName);
        sb.append("<OPTION value=");
        sb.append(im.getNumber());
        sb.append(">");
        sb.append(im.getName());
        sb.append(" (");
        sb.append(im.getNumber());
        sb.append(")");
        sb.append("</OPTION>\n");
      } catch (NoSuchElementException e) {
        break;
      }
    }
    if (logger.isDebugEnabled()) {
      logger.debug(sb.toString());
    }
    return sb.toString();
  }

  /**
   * Introductory Text: Returns the text stored in KAAJEE configuration file. Interprets/replaces the tilde
   * character &quot;~&quot; as a paragraph break, replacing (in the returned output) with &lt;BR&gt; characters.
   * @return a string containing the system announcement/introductory text.
   */
  public String getIntroductoryText() {
    return introductoryText;
  }

  /**
   * Used internally by KAAJEE. Returns the database choice configured for KAAJEE.
   * @return database choice
   */
  public String getDaoFactoryDatabaseChoice() {
    return daoFactoryDatabaseChoice;
  }

  /**
   * Used internally by KAAJEE. Returns the JNDI Data Source name configured for KAAJEE.
   * @return JNDI Data Source name
   */
  public String getOracleDAOFactoryJndiDataSourceName() {
    return oracleDAOFactoryJndiDataSourceName;
  }

  /**
   * Controls whether or not KAAJEE should retrieve the New Person divisions for a user.
   * @return true if NP divisions should be retrieved
   */
  public boolean getRetrieveNewPersonDivisions() {
    return this.retrieveUserNewPersonDivisions;
  }

  /**
   * Controls whether or not KAAJEE should retrieve the computing facility divisions for a user.
   * @return true if computing facility divisions should be retrieved
   */
  public boolean getRetrieveComputingFacilityDivisions() {
    return this.retrieveComputingFacilityDivisions;
  }

  /**
   * Checks if a given division is in the KAAJEE login list
   * @param stationNumber division to check against Kaajee login list
   * @return true if in list, false if not
   */
  boolean isKaajeeLoginDivision(String stationNumber) {
    boolean returnVal = false;
    if (institutionMap.containsKey(stationNumber)) {
      returnVal = true;
    }
    return returnVal;
  }

  /**
   * Reports whether Cactus (insecure test) mode is enabled.
   * @return true if enabled, false if not.
   */
  public boolean getIsCactusModeEnabled() {
    return this.cactusModeEnabled;
  }

  /**
   * Return the hosting application's name
   * @return host app name
   */
  public String getHostApplicationName() {
    return this.hostApplicationName;
  }

  /**
   * Return the Context root Name
   * @return Context root Name
   */
  public String getContextName() {
    return this.contextName;
  }

  /**
   * String representation of this object.
   * @return String representation of the data values held by this object.
   */
  public String toString() {
    StringBuffer sb = new StringBuffer();
    sb.append("; Database Choice: ");
    sb.append(this.getDaoFactoryDatabaseChoice());
    sb.append("Oracle JNDI Source Name: ");
    sb.append(this.getOracleDAOFactoryJndiDataSourceName());
    sb.append("Cactus Mode: ");
    sb.append(this.getIsCactusModeEnabled());
    sb.append("Retrieve computing facility divisions: ");
    sb.append(this.getRetrieveComputingFacilityDivisions());
    sb.append("; retreive NP divisions: ");
    sb.append(this.getRetrieveNewPersonDivisions());
    sb.append("; JSP Drop Down List Options: ");
    sb.append(this.getJspDropDownListLoginOptions());
    sb.append("; Introductory Text: ");
    sb.append(this.getIntroductoryText());
    return sb.toString();
  }

  /** Exposes the station-number-keyed institution map (live, not a copy). */
  public TreeMap<String, VistaDivisionVO> getInstitutionMap() {
    return institutionMap;
  }
}
package com.booking.replication.applier.hbase.mutation; import com.booking.replication.applier.hbase.HBaseApplier; import com.booking.replication.augmenter.model.event.AugmentedEventType; import com.booking.replication.augmenter.model.row.AugmentedRow; import com.booking.replication.commons.metrics.Metrics; import com.google.common.collect.ImmutableMap; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.util.Bytes; import org.eclipse.jetty.server.Server; import org.junit.Assert; import org.junit.Test; import java.util.*; public class MutationGeneratorTest { // TODO: timestamp organizer test // TODO: delete event test @Test public void mutationGeneratorInsertTest() { Map<String, Object> configuration = getConfiguration(); Metrics<?> metrics = Metrics.build(configuration, new Server()); AugmentedRow testInsertRow = getTestInsertAugmentedRow(); HBaseApplierMutationGenerator mutationGenerator = new HBaseApplierMutationGenerator(configuration, metrics); HBaseApplierMutationGenerator.PutMutation insertMutation = mutationGenerator.getPutForMirroredTable(testInsertRow); String expectedRowKey = "202cb962;123;456"; Long expectedTimestamp = testInsertRow.getRowMicrosecondTimestamp(); String expectedRowStatus = "I"; Map<String, String> expectedValues = ImmutableMap.of( "id", "123", "ca", "456", "cb", "789", "cc", "987", "_transaction_uuid", "85503b0c-ae28-473a-ae70-86af3e61458b:1234567" ); String resultRowKey = Bytes.toString(insertMutation.getPut().getRow()); Map<String, String> resultValues = new HashMap<>(); List<Long> cellMutationTimestamps = new ArrayList<>(); List<Cell> cells = insertMutation.getPut().getFamilyCellMap().get(Bytes.toBytes("d")); ListIterator<Cell> cellsIterator = cells.listIterator(); while (cellsIterator.hasNext()) { Cell cell = cellsIterator.next(); String cellQualifier = Bytes.toString( Arrays.copyOfRange( cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierOffset() + cell.getQualifierLength() ) ); String cellValue = 
Bytes.toString( Arrays.copyOfRange( cell.getValueArray(), cell.getValueOffset(), cell.getValueOffset() + cell.getValueLength() ) ); Long cellMutationTimestamp = cell.getTimestamp(); cellMutationTimestamps.add(cellMutationTimestamp); resultValues.put(cellQualifier,cellValue); } Assert.assertEquals(expectedRowKey, resultRowKey); for (Long cellMutationTimestamp: cellMutationTimestamps) { Assert.assertEquals(expectedTimestamp, cellMutationTimestamp); } for (String cellQualifier: expectedValues.keySet()) { Assert.assertTrue(resultValues.containsKey(cellQualifier)); Assert.assertEquals(expectedValues.get(cellQualifier),resultValues.get(cellQualifier)); } Assert.assertTrue(resultValues.containsKey("row_status")); Assert.assertEquals(expectedRowStatus, resultValues.get("row_status")); } @Test public void mutationGeneratorUpdateTest() { Map<String, Object> configuration = getConfiguration(); Metrics<?> metrics = Metrics.build(configuration, new Server()); AugmentedRow testUpdateRow = getTestUpdateAugmentedRow(); HBaseApplierMutationGenerator mutationGenerator = new HBaseApplierMutationGenerator(configuration, metrics); HBaseApplierMutationGenerator.PutMutation updateMutation = mutationGenerator.getPutForMirroredTable(testUpdateRow); String expectedRowKey = "202cb962;123;456"; Long expectedTimestamp = testUpdateRow.getRowMicrosecondTimestamp(); String expectedRowStatus = "U"; Map<String, String> expectedValues = ImmutableMap.of( "cb", "800", "_transaction_uuid", "85503b0c-ae28-473a-ae70-86af3e61458b:1234568", "_transaction_xid", "0", "row_status", "U" ); String resultRowKey = Bytes.toString(updateMutation.getPut().getRow()); Map<String, String> resultValues = new HashMap<>(); List<Long> cellMutationTimestamps = new ArrayList<>(); List<Cell> cells = updateMutation.getPut().getFamilyCellMap().get(Bytes.toBytes("d")); ListIterator<Cell> cellsIterator = cells.listIterator(); while (cellsIterator.hasNext()) { Cell cell = cellsIterator.next(); String cellQualifier = 
Bytes.toString( Arrays.copyOfRange( cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierOffset() + cell.getQualifierLength() ) ); String cellValue = Bytes.toString( Arrays.copyOfRange( cell.getValueArray(), cell.getValueOffset(), cell.getValueOffset() + cell.getValueLength() ) ); resultValues.put(cellQualifier,cellValue); Long cellMutationTimestamp = cell.getTimestamp(); cellMutationTimestamps.add(cellMutationTimestamp); } Assert.assertEquals(expectedRowKey, resultRowKey); for (Long cellMutationTimestamp: cellMutationTimestamps) { Assert.assertEquals(expectedTimestamp, cellMutationTimestamp); } for (String cellQualifier: expectedValues.keySet()) { Assert.assertTrue(resultValues.containsKey(cellQualifier)); Assert.assertEquals(expectedValues.get(cellQualifier),resultValues.get(cellQualifier)); } Assert.assertTrue(!resultValues.containsKey("cc")); } private AugmentedRow getTestInsertAugmentedRow() { Long commitTimestamp = 1575566080000L; Long transactionSequenceNumber = 100L; AugmentedRow ar = new AugmentedRow( AugmentedEventType.INSERT, "test", "SomeTable", commitTimestamp, "85503b0c-ae28-473a-ae70-86af3e61458b:1234567", 0L, Arrays.asList("id", "ca"), ImmutableMap.of( "id", 123, "ca", 456, "cb", 789, "cc", 987 ) ); ar.setTransactionSequenceNumber(transactionSequenceNumber); Long microsOverride = commitTimestamp * 1000 + ar.getTransactionSequenceNumber(); ar.setRowMicrosecondTimestamp(microsOverride); return ar; } private AugmentedRow getTestUpdateAugmentedRow() { Long commitTimestamp = 1575566080000L; Long transactionSequenceNumber = 101L; Map<String, Object> values = ImmutableMap.of( "id", ImmutableMap.of("a", 123, "b", 123), "ca", ImmutableMap.of("a", 456, "b", 456), "cb", ImmutableMap.of("a", 800, "b", 789), // <- change "cc", ImmutableMap.of("a", 987, "b", 987) ); AugmentedRow ar = new AugmentedRow( AugmentedEventType.UPDATE, "test", "SomeTable", commitTimestamp, "85503b0c-ae28-473a-ae70-86af3e61458b:1234568", 0L, Arrays.asList("id", "ca"), 
values ); ar.setTransactionSequenceNumber(transactionSequenceNumber); Long microsOverride = commitTimestamp * 1000 + ar.getTransactionSequenceNumber(); System.out.println(microsOverride); ar.setRowMicrosecondTimestamp(microsOverride); return ar; } private Map<String, Object> getConfiguration() { Map<String, Object> configuration = new HashMap<>(); configuration.put(HBaseApplier.Configuration.TARGET_NAMESPACE, "namespaceTest"); configuration.put(HBaseApplier.Configuration.PAYLOAD_TABLE_NAME, "payloadTableTest"); configuration.put(Metrics.Configuration.TYPE, Metrics.Type.CONSOLE.name()); return configuration; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.leaderelection; import org.apache.flink.util.Preconditions; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.recipes.cache.ChildData; import org.apache.curator.framework.recipes.cache.NodeCache; import org.apache.curator.framework.recipes.cache.NodeCacheListener; import org.apache.curator.framework.recipes.leader.LeaderLatch; import org.apache.curator.framework.recipes.leader.LeaderLatchListener; import org.apache.curator.framework.state.ConnectionState; import org.apache.curator.framework.state.ConnectionStateListener; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.data.Stat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.UUID; /** * Leader election service for multiple JobManager. The leading JobManager is elected using * ZooKeeper. The current leader's address as well as its leader session ID is published via * ZooKeeper as well. 
*/ public class ZooKeeperLeaderElectionService implements LeaderElectionService, LeaderLatchListener, NodeCacheListener { private static final Logger LOG = LoggerFactory.getLogger(ZooKeeperLeaderElectionService.class); /** Client to the ZooKeeper quorum */ private final CuratorFramework client; /** Curator recipe for leader election */ private final LeaderLatch leaderLatch; /** Curator recipe to watch a given ZooKeeper node for changes */ private final NodeCache cache; /** ZooKeeper path of the node which stores the current leader information */ private final String leaderPath; private UUID issuedLeaderSessionID; private UUID confirmedLeaderSessionID; /** The leader contender which applies for leadership */ private volatile LeaderContender leaderContender; private final Object lock = new Object(); private final ConnectionStateListener listener = new ConnectionStateListener() { @Override public void stateChanged(CuratorFramework client, ConnectionState newState) { handleStateChange(newState); } }; /** * Creates a ZooKeeperLeaderElectionService object. * * @param client Client which is connected to the ZooKeeper quorum * @param latchPath ZooKeeper node path for the leader election latch * @param leaderPath ZooKeeper node path for the node which stores the current leader information */ public ZooKeeperLeaderElectionService(CuratorFramework client, String latchPath, String leaderPath) { this.client = client; this.leaderPath = leaderPath; leaderLatch = new LeaderLatch(client, latchPath); cache = new NodeCache(client, leaderPath); } /** * Returns the current leader session ID or null, if the contender is not the leader. 
* * @return The last leader session ID or null, if the contender is not the leader */ public UUID getLeaderSessionID() { return confirmedLeaderSessionID; } @Override public void start(LeaderContender contender) throws Exception { Preconditions.checkNotNull(contender, "Contender must not be null."); Preconditions.checkState(leaderContender == null, "Contender was already set."); LOG.info("Starting ZooKeeperLeaderElectionService."); leaderContender = contender; leaderLatch.addListener(this); leaderLatch.start(); cache.getListenable().addListener(this); cache.start(); client.getConnectionStateListenable().addListener(listener); } @Override public void stop() throws Exception{ LOG.info("Stopping ZooKeeperLeaderElectionService."); client.getConnectionStateListenable().removeListener(listener); cache.close(); leaderLatch.close(); client.close(); synchronized (lock) { confirmedLeaderSessionID = null; issuedLeaderSessionID = null; } } @Override public void confirmLeaderSessionID(UUID leaderSessionID) { if (LOG.isDebugEnabled()) { LOG.debug( "Confirm leader session ID {} for leader {}.", leaderSessionID, leaderContender.getAddress()); } Preconditions.checkNotNull(leaderSessionID); if(leaderLatch.hasLeadership()) { // check if this is an old confirmation call synchronized (lock) { if (leaderSessionID.equals(this.issuedLeaderSessionID)) { confirmedLeaderSessionID = leaderSessionID; writeLeaderInformation(confirmedLeaderSessionID); } } } else { LOG.warn("The leader session ID {} was confirmed even though the" + "corresponding JobManager was not elected as the leader.", leaderSessionID); } } @Override public boolean hasLeadership() { return leaderLatch.hasLeadership(); } @Override public void isLeader() { synchronized (lock) { issuedLeaderSessionID = UUID.randomUUID(); confirmedLeaderSessionID = null; if (LOG.isDebugEnabled()) { LOG.debug( "Grant leadership to contender {} with session ID {}.", leaderContender.getAddress(), issuedLeaderSessionID); } 
leaderContender.grantLeadership(issuedLeaderSessionID); } } @Override public void notLeader() { synchronized (lock) { issuedLeaderSessionID = null; confirmedLeaderSessionID = null; if (LOG.isDebugEnabled()) { LOG.debug("Revoke leadership of {}.", leaderContender.getAddress()); } leaderContender.revokeLeadership(); } } @Override public void nodeChanged() throws Exception { try { // leaderSessionID is null if the leader contender has not yet confirmed the session ID if (leaderLatch.hasLeadership()) { synchronized (lock) { if (LOG.isDebugEnabled()) { LOG.debug( "Leader node changed while {} is the leader with session ID {}.", leaderContender.getAddress(), confirmedLeaderSessionID); } if (confirmedLeaderSessionID != null) { ChildData childData = cache.getCurrentData(); if (childData == null) { if (LOG.isDebugEnabled()) { LOG.debug( "Writing leader information into empty node by {}.", leaderContender.getAddress()); } writeLeaderInformation(confirmedLeaderSessionID); } else { byte[] data = childData.getData(); if (data == null || data.length == 0) { // the data field seems to be empty, rewrite information if (LOG.isDebugEnabled()) { LOG.debug( "Writing leader information into node with empty data field by {}.", leaderContender.getAddress()); } writeLeaderInformation(confirmedLeaderSessionID); } else { ByteArrayInputStream bais = new ByteArrayInputStream(data); ObjectInputStream ois = new ObjectInputStream(bais); String leaderAddress = ois.readUTF(); UUID leaderSessionID = (UUID) ois.readObject(); if (!leaderAddress.equals(this.leaderContender.getAddress()) || (leaderSessionID == null || !leaderSessionID.equals(confirmedLeaderSessionID))) { // the data field does not correspond to the expected leader information if (LOG.isDebugEnabled()) { LOG.debug( "Correcting leader information by {}.", leaderContender.getAddress()); } writeLeaderInformation(confirmedLeaderSessionID); } } } } } } } catch (Exception e) { leaderContender.handleError(new Exception("Could not handle node 
changed event.", e)); throw e; } } /** * Writes the current leader's address as well the given leader session ID to ZooKeeper. * * @param leaderSessionID Leader session ID which is written to ZooKeeper */ protected void writeLeaderInformation(UUID leaderSessionID) { // this method does not have to be synchronized because the curator framework client // is thread-safe try { if (LOG.isDebugEnabled()) { LOG.debug( "Write leader information: Leader={}, session ID={}.", leaderContender.getAddress(), leaderSessionID); } ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(baos); oos.writeUTF(leaderContender.getAddress()); oos.writeObject(leaderSessionID); oos.close(); boolean dataWritten = false; while(!dataWritten && leaderLatch.hasLeadership()) { Stat stat = client.checkExists().forPath(leaderPath); if (stat != null) { long owner = stat.getEphemeralOwner(); long sessionID = client.getZookeeperClient().getZooKeeper().getSessionId(); if(owner == sessionID) { try { client.setData().forPath(leaderPath, baos.toByteArray()); dataWritten = true; } catch (KeeperException.NoNodeException noNode) { // node was deleted in the meantime } } else { try { client.delete().forPath(leaderPath); } catch (KeeperException.NoNodeException noNode) { // node was deleted in the meantime --> try again } } } else { try { client.create().creatingParentsIfNeeded().withMode(CreateMode.EPHEMERAL).forPath( leaderPath, baos.toByteArray()); dataWritten = true; } catch (KeeperException.NodeExistsException nodeExists) { // node has been created in the meantime --> try again } } } if (LOG.isDebugEnabled()) { LOG.debug( "Successfully wrote leader information: Leader={}, session ID={}.", leaderContender.getAddress(), leaderSessionID); } } catch (Exception e) { leaderContender.handleError( new Exception("Could not write leader address and leader session ID to " + "ZooKeeper.", e)); } } protected void handleStateChange(ConnectionState newState) { switch 
(newState) { case CONNECTED: LOG.debug("Connected to ZooKeeper quorum. Leader election can start."); break; case SUSPENDED: LOG.warn("Connection to ZooKeeper suspended. The contender " + leaderContender.getAddress() + " no longer participates in the leader election."); break; case RECONNECTED: LOG.info("Connection to ZooKeeper was reconnected. Leader election can be restarted."); break; case LOST: // Maybe we have to throw an exception here to terminate the JobManager LOG.warn("Connection to ZooKeeper lost. The contender " + leaderContender.getAddress() + " no longer participates in the leader election."); break; } } }
/* Copyright (C) 2005-2011 Fabio Riccardi */

package com.lightcrafts.ui.layout;

import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.util.prefs.Preferences;

/**
 * A JSplitPane with a fixed style that respects the minimum and maximum sizes
 * of its children, can swap its two child Components, can swap its orientation,
 * can show and hide its children, and remembers these settings as well as its
 * divider location between VM instances.
 */
public class SmartSplitPane extends JSplitPane {

    // Remember the split pane divider locations
    private final static Preferences Prefs =
        Preferences.userNodeForPackage(SmartSplitPane.class);

    // Constants for constructing Preferences key Strings
    private final static String LocationKey = "Location";
    private final static String HideLeftKey = "HideLeft";
    private final static String HideRightKey = "HideRight";
    private final static String HiddenLocKey = "HiddenLocation";

    // This divider size matches values in PreviewSplit and TemplateSplit.
    // (Previously a magic "4" duplicated in the constructor and unhide().)
    private final static int DividerSize = 4;

    // A Preferences key for the divider location at initialization
    private final String prefsKey;

    // Either of the children may be hidden, in the manner of the JSplitPane
    // oneTouchExpandable property.
    private boolean hideLeft;
    private boolean hideRight;
    private int hiddenDividerLoc;
    private Component hidden;

    // Either LEFT, RIGHT, TOP or BOTTOM, to indicate which of the two
    // children should have its maximum size be respected when the divider
    // location is constrained; or null, meaning that the minimum sizes of
    // both should be respected.
    private final String respectedSide;

    /**
     * Define a SplitTreeNode that will use the given String key in reading
     * and writing Preference values.
     */
    public SmartSplitPane(String prefsKey, String respectedSide) {
        this.prefsKey = prefsKey;
        this.respectedSide = respectedSide;
        setContinuousLayout(true);
        setBorder(null);
        setDividerSize(DividerSize);
        if (respectedSide != null) {
            // Add the daemon that adjusts minimum size of the opposite child
            // in order to enforce the maximum size of the respected child.
            addComponentListener();
        }
    }

    /**
     * This method from the base class is overridden so the argument may be
     * copied into Preferences for restoring later.
     */
    @Override
    public void setDividerLocation(int i) {
        super.setDividerLocation(i);
        Prefs.putInt(prefsKey + LocationKey, i);
    }

    /**
     * Update the left child component, without moving the divider.
     */
    public void setLeftFrozenDivider(JComponent comp) {
        int loc = getDividerLocation();
        setLeftComponent(comp);
        // super call avoids persisting the unchanged location to Preferences
        super.setDividerLocation(loc);
    }

    /**
     * Update the right child component, without moving the divider.
     */
    public void setRightFrozenDivider(JComponent comp) {
        int loc = getDividerLocation();
        setRightComponent(comp);
        super.setDividerLocation(loc);
    }

    /**
     * Call this when it is the time to restore the orientation and divider
     * location from Preferences: after the SplitTreeNode has been set up,
     * had its children defined, resize weights adjusted, etc.
     */
    public void restoreFromPrefs(int defaultDivider, int defaultOrientation) {
        int dividerLoc = Prefs.getInt(prefsKey + LocationKey, defaultDivider);
        if (dividerLoc >= 0) {
            super.setDividerLocation(dividerLoc);
        }
        super.setOrientation(defaultOrientation);

        hideLeft = Prefs.getBoolean(prefsKey + HideLeftKey, false);
        hideRight = Prefs.getBoolean(prefsKey + HideRightKey, false);
        hiddenDividerLoc = Prefs.getInt(prefsKey + HiddenLocKey, defaultDivider);

        if (hideLeft) {
            hidden = getLeftComponent();
            remove(hidden);
        }
        if (hideRight) {
            hidden = getRightComponent();
            remove(hidden);
        }
        if (hideLeft || hideRight) {
            setDividerSize(0);
        }
    }

    /**
     * Make sure the min/max size constraints are satisfied by the current
     * divider position. (These may become violated after restoring from
     * preferences or changes in properties of the respected component.)
     */
    public void checkConstraints() {
        if (respectedSide == null) {
            // We could be in the symmetrical configuration.
            return;
        }
        // (Removed a duplicated null check and commented-out code here.)
        Component respected = getRespectedChild();
        if (respected == null) {
            // The respected child may be hidden.
            return;
        }
        Dimension size = respected.getSize();
        Dimension min = respected.getMinimumSize();
        Dimension max = respected.getMaximumSize();
        switch (getOrientation()) {
            case VERTICAL_SPLIT:
                int minH = min.height;
                int maxH = max.height;
                if ((size.height < minH) || (size.height > maxH)) {
                    if (respected == getTopComponent()) {
                        setDividerLocation(minH + getDividerSize());
                    }
                    else {
                        setDividerLocation(
                            getSize().height - minH - getDividerSize()
                        );
                    }
                    updateOppositeMinSize();
                }
                break;
            case HORIZONTAL_SPLIT:
                int minW = min.width;
                int maxW = max.width;
                if ((size.width < minW) || (size.width > maxW)) {
                    if (respected == getLeftComponent()) {
                        setDividerLocation(minW + getDividerSize());
                    }
                    else {
                        setDividerLocation(
                            getSize().width - minW - getDividerSize()
                        );
                    }
                    updateOppositeMinSize();
                }
                break;
        }
    }

    /**
     * Find out whether the left component has been hidden.
     */
    public boolean isHiddenLeft() {
        return hideLeft;
    }

    /**
     * Find out whether the right component has been hidden.
     */
    public boolean isHiddenRight() {
        return hideRight;
    }

    /**
     * Set the split pane divider location so that the left component has
     * zero size, remembering the current location for unhide().
     */
    public void hideLeft() {
        if (hideLeft) {
            return;
        }
        if (hideRight) {
            unhide();
        }
        hiddenDividerLoc = getDividerLocation();
        hidden = getLeftComponent();
        remove(hidden);
        setDividerLocation(0d);
        setDividerSize(0);
        hideLeft = true;
        hideRight = false;
        Prefs.putBoolean(prefsKey + HideLeftKey, hideLeft);
        Prefs.putBoolean(prefsKey + HideRightKey, hideRight);
        Prefs.putInt(prefsKey + HiddenLocKey, hiddenDividerLoc);
    }

    /**
     * Set the split pane divider location so that the right component has
     * zero size, remembering the current location for unhide().
     */
    public void hideRight() {
        if (hideRight) {
            return;
        }
        if (hideLeft) {
            unhide();
        }
        hiddenDividerLoc = getDividerLocation();
        hidden = getRightComponent();
        remove(hidden);
        setDividerLocation(1d);
        setDividerSize(0);
        hideLeft = false;
        hideRight = true;
        Prefs.putBoolean(prefsKey + HideLeftKey, hideLeft);
        Prefs.putBoolean(prefsKey + HideRightKey, hideRight);
        Prefs.putInt(prefsKey + HiddenLocKey, hiddenDividerLoc);
    }

    /**
     * Undo the effects of hideLeft() and hideRight().
     */
    public void unhide() {
        if (hideLeft || hideRight) {
            if (hideRight) {
                setRightComponent(hidden);
            }
            if (hideLeft) {
                setLeftComponent(hidden);
            }
            setDividerLocation(hiddenDividerLoc);
            setDividerSize(DividerSize);
            hideLeft = false;
            hideRight = false;
            validate();
        }
        Prefs.putBoolean(prefsKey + HideLeftKey, hideLeft);
        Prefs.putBoolean(prefsKey + HideRightKey, hideRight);
        Prefs.putInt(prefsKey + HiddenLocKey, hiddenDividerLoc);
    }

    /**
     * If this is a horizontal split, make it vertical. If it's vertical,
     * make it horizontal.
     * <p>
     * NOTE(review): the previous comment claimed the new setting is written
     * to Preferences, but setOrientation() is not overridden to persist it;
     * confirm whether orientation persistence is handled elsewhere.
     */
    public void toggleOrientation() {
        int orientation = getOrientation();
        switch (orientation) {
            case HORIZONTAL_SPLIT:
                setOrientation(VERTICAL_SPLIT);
                break;
            case VERTICAL_SPLIT:
                setOrientation(HORIZONTAL_SPLIT);
                break;
        }
    }

    /**
     * Add the ComponentListener that monitors size changes of this split pane
     * and updates the minimum size of the "opposite" child so that the divider
     * location constraint will respect the maximum size of the "respected"
     * child.
     */
    private void addComponentListener() {
        addComponentListener(
            new ComponentAdapter() {
                public void componentResized(ComponentEvent event) {
                    updateOppositeMinSize();
                }
            }
        );
    }

    /**
     * Slave the minimum size of the "opposite" child to this split pane's
     * current size minus the "respected" child's maximum size.
     */
    public void updateOppositeMinSize() {
        if (isHiddenLeft() || isHiddenRight()) {
            return;
        }
        Component respected = getRespectedChild();
        Component opposite = getOppositeChild();
        if ((respected == null) || (opposite == null)) {
            // Symmetrical configuration (respectedSide == null): nothing to
            // constrain. Guards the NPE the previous version would have hit
            // if this public method were called in that configuration.
            return;
        }
        Dimension size = getSize();
        Dimension min = opposite.getMinimumSize();
        Dimension max = respected.getMaximumSize();
        switch (getOrientation()) {
            case HORIZONTAL_SPLIT:
                min.width = size.width - max.width;
                opposite.setMinimumSize(min);
                break;
            case VERTICAL_SPLIT:
                min.height = size.height - max.height;
                opposite.setMinimumSize(min);
                break;
        }
    }

    /**
     * Find out which child should have its maximum size respected by the
     * split pane divider location constraint.
     */
    private Component getRespectedChild() {
        if (respectedSide == null) {
            return null;
        }
        if (respectedSide.equals(TOP)) {
            return getTopComponent();
        }
        if (respectedSide.equals(BOTTOM)) {
            return getBottomComponent();
        }
        if (respectedSide.equals(LEFT)) {
            return getLeftComponent();
        }
        if (respectedSide.equals(RIGHT)) {
            return getRightComponent();
        }
        assert false : "Illegal SmartSplitPane respectedSide: " + respectedSide;
        return null;
    }

    /**
     * Find out which child should allow its minimum size to be slaved to the
     * split pane divider location constraint.
     */
    private Component getOppositeChild() {
        if (respectedSide == null) {
            return null;
        }
        if (respectedSide.equals(TOP)) {
            return getBottomComponent();
        }
        if (respectedSide.equals(BOTTOM)) {
            return getTopComponent();
        }
        if (respectedSide.equals(LEFT)) {
            return getRightComponent();
        }
        if (respectedSide.equals(RIGHT)) {
            return getLeftComponent();
        }
        assert false : "Illegal SmartSplitPane respectedSide: " + respectedSide;
        return null;
    }
}
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.java.abi; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.zip.Unzip; import com.google.common.base.Joiner; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Lists; import com.google.common.io.ByteStreams; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.objectweb.asm.Opcodes; import org.objectweb.asm.tree.AnnotationNode; import org.objectweb.asm.tree.ClassNode; import org.objectweb.asm.tree.FieldNode; import org.objectweb.asm.tree.MethodNode; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.List; import java.util.SortedSet; import java.util.jar.JarOutputStream; import 
java.util.zip.ZipEntry; import javax.tools.JavaCompiler; import javax.tools.JavaFileObject; import javax.tools.StandardJavaFileManager; import javax.tools.ToolProvider; public class MirrorTest { private static final ImmutableSortedSet<Path> EMPTY_CLASSPATH = ImmutableSortedSet.of(); @Rule public TemporaryFolder temp = new TemporaryFolder(); private ProjectFilesystem filesystem; private Path stubJar; @Before public void createStubJar() throws IOException { File out = temp.newFolder(); filesystem = new ProjectFilesystem(out.toPath()); stubJar = Paths.get("stub.jar"); } @Test public void emptyClass() throws IOException { Path jar = compileToJar( EMPTY_CLASSPATH, "A.java", "package com.example.buck; public class A {}"); new StubJar(jar).writeTo(filesystem, stubJar); // Verify that the stub jar works by compiling some code that depends on A. compileToJar( ImmutableSortedSet.of(stubJar), "B.java", "package com.example.buck; public class B extends A {}"); } @Test public void emptyClassWithAnnotation() throws IOException { Path jar = compileToJar( EMPTY_CLASSPATH, "A.java", "package com.example.buck; @Deprecated public class A {}"); new StubJar(jar).writeTo(filesystem, stubJar); // Examine the jar to see if the "A" class is deprecated. ClassNode classNode = readClass(stubJar, "com/example/buck/A.class").getClassNode(); assertNotEquals(0, classNode.access & Opcodes.ACC_DEPRECATED); } @Test public void classWithTwoMethods() throws IOException { Path jar = compileToJar( EMPTY_CLASSPATH, "A.java", Joiner.on("\n").join(ImmutableList.of( "package com.example.buck;", "public class A {", " public String toString() { return null; }", " public void eatCake() {}", "}"))); new StubJar(jar).writeTo(filesystem, stubJar); // Verify that both methods are present and given in alphabetical order. ClassNode classNode = readClass(stubJar, "com/example/buck/A.class").getClassNode(); List<MethodNode> methods = classNode.methods; // Index 0 is the <init> method. Skip that. 
    // Tail of the preceding test: after the implicit <init> at index 0, the
    // stubbed methods must appear in alphabetical order.
    assertEquals("eatCake", methods.get(1).name);
    assertEquals("toString", methods.get(2).name);
  }

  // Pre-erasure generic "signature" attributes on the class and its methods
  // must survive stubbing, not just the erased descriptors.
  @Test
  public void genericClassSignaturesShouldBePreserved() throws IOException {
    Path jar = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A<T> {",
                " public T get(String key) { return null; }",
                "}"
            )));

    // With generic classes, there are typically two interesting things we want to keep an eye on.
    // First is the "descriptor", which is the signature of the method with type erasure complete.
    // Optionally, compilers (and the OpenJDK, Oracle and Eclipse compilers all do this) can also
    // include a "signature", which is the signature of the method before type erasure. See
    // http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.3 for more.
    AbiClass original = readClass(jar, "com/example/buck/A.class");
    String classSig = original.getClassNode().signature;
    MethodNode originalGet = original.findMethod("get");

    new StubJar(jar).writeTo(filesystem, stubJar);

    AbiClass stubbed = readClass(stubJar, "com/example/buck/A.class");
    assertEquals(classSig, stubbed.getClassNode().signature);

    MethodNode stubbedGet = stubbed.findMethod("get");
    assertMethodEquals(originalGet, stubbedGet);
  }

  // Private methods are not part of the ABI and must be stripped from the stub;
  // package-private and wider visibilities are kept.
  @Test
  public void shouldIgnorePrivateMethods() throws IOException {
    Path jar = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A {",
                " private void privateMethod() {}",
                " void packageMethod() {}",
                " protected void protectedMethod() {}",
                " public void publicMethod() {}",
                "}"
            )));

    new StubJar(jar).writeTo(filesystem, stubJar);

    AbiClass stubbed = readClass(stubJar, "com/example/buck/A.class");
    for (MethodNode method : stubbed.getClassNode().methods) {
      assertFalse(method.name.contains("private"));
    }
  }

  // A non-private field must be kept with its access flags intact.
  @Test
  public void shouldPreserveAField() throws IOException {
    Path jar = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A {",
                " protected String protectedField;",
                "}"
            )));

    new StubJar(jar).writeTo(filesystem, stubJar);

    AbiClass stubbed = readClass(stubJar, "com/example/buck/A.class");
    FieldNode field = stubbed.findField("protectedField");
    assertEquals("protectedField", field.name);
    assertTrue((field.access & Opcodes.ACC_PROTECTED) > 0);
  }

  // Private fields are invisible to consumers and must be dropped entirely.
  @Test
  public void shouldIgnorePrivateFields() throws IOException {
    Path jar = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A {",
                " private String privateField;",
                "}"
            )));

    new StubJar(jar).writeTo(filesystem, stubJar);

    AbiClass stubbed = readClass(stubJar, "com/example/buck/A.class");
    assertEquals(0, stubbed.getClassNode().fields.size());
  }

  // A field typed by a class type parameter keeps its generic signature.
  @Test
  public void shouldPreserveGenericTypesOnFields() throws IOException {
    Path jar = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A<T> {",
                " public T theField;",
                "}")));

    new StubJar(jar).writeTo(filesystem, stubJar);

    AbiClass original = readClass(jar, "com/example/buck/A.class");
    AbiClass stubbed = readClass(stubJar, "com/example/buck/A.class");

    FieldNode originalField = original.findField("theField");
    FieldNode stubbedField = stubbed.findField("theField");

    assertFieldEquals(originalField, stubbedField);
  }

  // Both class-level and method-level type parameters must survive on methods
  // (signature as well as erased descriptor).
  @Test
  public void shouldPreserveGenericTypesOnMethods() throws IOException {
    Path jar = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A<T> {",
                " public T get(String key) { return null; }",
                " public <X extends Comparable<T>> X compareWith(T other) { return null; }",
                "}")));

    new StubJar(jar).writeTo(filesystem, stubJar);

    AbiClass original = readClass(jar, "com/example/buck/A.class");
    AbiClass stubbed = readClass(stubJar, "com/example/buck/A.class");

    MethodNode originalGet = original.findMethod("get");
    MethodNode stubbedGet = stubbed.findMethod("get");

    assertEquals(originalGet.signature, stubbedGet.signature);
    assertEquals(originalGet.desc, stubbedGet.desc);

    MethodNode originalCompare = original.findMethod("compareWith");
    MethodNode stubbedCompare = stubbed.findMethod("compareWith");

    assertEquals(originalCompare.signature, stubbedCompare.signature);
    assertEquals(originalCompare.desc, stubbedCompare.desc);
  }

  // Runtime-visible annotations on methods must be carried over.
  @Test
  public void preservesAnnotationsOnMethods() throws IOException {
    Path annotations = buildAnnotationJar();
    Path jar = compileToJar(
        ImmutableSortedSet.of(annotations),
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A {",
                " @Foo",
                " public void cheese(String key) {}",
                "}")));

    new StubJar(jar).writeTo(filesystem, stubJar);

    AbiClass stubbed = readClass(stubJar, "com/example/buck/A.class");
    MethodNode method = stubbed.findMethod("cheese");

    List<AnnotationNode> seen = method.visibleAnnotations;
    assertEquals(1, seen.size());
    assertEquals("Lcom/example/buck/Foo;", seen.get(0).desc);
  }

  // Runtime-visible annotations on fields must be carried over.
  @Test
  public void preservesAnnotationsOnFields() throws IOException {
    Path annotations = buildAnnotationJar();
    Path jar = compileToJar(
        ImmutableSortedSet.of(annotations),
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A {",
                " @Foo",
                " public String name;",
                "}")));

    new StubJar(jar).writeTo(filesystem, stubJar);

    AbiClass stubbed = readClass(stubJar, "com/example/buck/A.class");
    FieldNode field = stubbed.findField("name");

    List<AnnotationNode> seen = field.visibleAnnotations;
    assertEquals(1, seen.size());
    assertEquals("Lcom/example/buck/Foo;", seen.get(0).desc);
  }

  // Parameter annotations must be kept; the array is sized per parameter,
  // so a 2-arg method yields a length-2 array even if only one is annotated.
  @Test
  public void preservesAnnotationsOnParameters() throws IOException {
    Path annotations = buildAnnotationJar();
    Path jar = compileToJar(
        ImmutableSortedSet.of(annotations),
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A {",
                " public void peynir(@Foo String very, int tasty) {}",
                "}")));

    new StubJar(jar).writeTo(filesystem, stubJar);

    AbiClass stubbed = readClass(stubJar, "com/example/buck/A.class");
    MethodNode method = stubbed.findMethod("peynir");
    List<AnnotationNode>[] parameterAnnotations = method.visibleParameterAnnotations;
    assertEquals(2, parameterAnnotations.length);
  }

  // Annotation members with primitive values must round-trip through the stub.
  @Test
  public void preservesAnnotationsWithPrimitiveValues() throws IOException {
    Path annotations = buildAnnotationJar();
    Path jar = compileToJar(
        ImmutableSortedSet.of(annotations),
        "A.java",
        Joiner.on("\n").join(
            "package com.example.buck;",
            "@Foo(primitiveValue=1)",
            "public @interface A {}"));

    new StubJar(jar).writeTo(filesystem, stubJar);

    // ASM stores annotation values as alternating name/value pairs, so a
    // single member produces a two-element list.
    ClassNode classNode = readClass(stubJar, "com/example/buck/A.class").getClassNode();
    List<AnnotationNode> classAnnotations = classNode.visibleAnnotations;
    assertEquals(1, classAnnotations.size());

    AnnotationNode annotation = classAnnotations.get(0);
    assertNotNull(annotation.values);
    assertEquals(2, annotation.values.size());

    assertEquals("primitiveValue", annotation.values.get(0));
    assertEquals(1, annotation.values.get(1));
  }

  // Annotation members with String[] values must round-trip as a List.
  @Test
  public void preservesAnnotationsWithStringArrayValues() throws IOException {
    Path annotations = buildAnnotationJar();
    Path jar = compileToJar(
        ImmutableSortedSet.of(annotations),
        "A.java",
        Joiner.on("\n").join(
            "package com.example.buck;",
            "@Foo(stringArrayValue={\"1\", \"2\"})",
            "public @interface A {}"));

    new StubJar(jar).writeTo(filesystem, stubJar);

    // Check the class-level annotation and its name/value pair list.
    ClassNode classNode = readClass(stubJar, "com/example/buck/A.class").getClassNode();
    List<AnnotationNode> classAnnotations = classNode.visibleAnnotations;
    assertEquals(1, classAnnotations.size());

    AnnotationNode annotation = classAnnotations.get(0);
    assertNotNull(annotation.values);
    assertEquals(2, annotation.values.size());

    assertEquals("stringArrayValue", annotation.values.get(0));
    assertEquals(ImmutableList.of("1", "2"), annotation.values.get(1));
  }

  // Annotation members with enum values (here @Retention's RetentionPolicy)
  // must round-trip; ASM encodes an enum value as {descriptor, constant name}.
  @Test
  public void preservesAnnotationsWithEnumValues() throws IOException {
    Path jar = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on("\n").join(
            "package com.example.buck;",
            "import java.lang.annotation.*;",
            "@Retention(RetentionPolicy.RUNTIME)",
            "public @interface A {}"));

    new StubJar(jar).writeTo(filesystem, stubJar);

    // Check the class-level annotation and its name/value pair list.
    ClassNode classNode = readClass(stubJar, "com/example/buck/A.class").getClassNode();
    List<AnnotationNode> classAnnotations = classNode.visibleAnnotations;
    assertEquals(1, classAnnotations.size());

    AnnotationNode annotation = classAnnotations.get(0);
    assertNotNull(annotation.values);
    assertEquals(2, annotation.values.size());

    assertEquals("value", annotation.values.get(0));
    assertEnumAnnotationValue(
        annotation.values,
        1,
        "Ljava/lang/annotation/RetentionPolicy;",
        "RUNTIME");
  }

  // Enum-array annotation members (e.g. @Target's ElementType[]) must keep
  // every element, in order.
  @Test
  public void preservesAnnotationsWithEnumArrayValues() throws IOException {
    Path jar = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on("\n").join(
            "package com.example.buck;",
            "import java.lang.annotation.*;",
            "@Target({ElementType.CONSTRUCTOR, ElementType.FIELD})",
            "public @interface A {}"));

    new StubJar(jar).writeTo(filesystem, stubJar);

    // Check the class-level annotation and its name/value pair list.
    ClassNode classNode = readClass(stubJar, "com/example/buck/A.class").getClassNode();
    List<AnnotationNode> classAnnotations = classNode.visibleAnnotations;
    assertEquals(1, classAnnotations.size());

    AnnotationNode annotation = classAnnotations.get(0);
    assertNotNull(annotation.values);
    assertEquals(2, annotation.values.size());
    assertEquals("value", annotation.values.get(0));

    @SuppressWarnings("unchecked")
    List<Object> enumArray = (List<Object>) annotation.values.get(1);
    assertEquals(2, enumArray.size());

    assertEnumAnnotationValue(enumArray, 0, "Ljava/lang/annotation/ElementType;", "CONSTRUCTOR");
    assertEnumAnnotationValue(enumArray, 1, "Ljava/lang/annotation/ElementType;", "FIELD");
  }

  // Nested annotation members (an annotation used as a value inside another
  // annotation) must round-trip, including the nested annotation's own values.
  @Test
  public void preservesAnnotationsWithAnnotationValues() throws IOException {
    Path annotations = buildAnnotationJar();
    Path jar = compileToJar(
        ImmutableSortedSet.of(annotations),
        "A.java",
        Joiner.on("\n").join(
            "package com.example.buck;",
            "import java.lang.annotation.*;",
            "@Foo(annotationValue=@Retention(RetentionPolicy.RUNTIME))",
            "public @interface A {}"));

    new StubJar(jar).writeTo(filesystem, stubJar);

    // Check the class-level annotation and its name/value pair list.
    ClassNode classNode = readClass(stubJar, "com/example/buck/A.class").getClassNode();
    List<AnnotationNode> classAnnotations = classNode.visibleAnnotations;
    assertEquals(1, classAnnotations.size());

    AnnotationNode annotation = classAnnotations.get(0);
    assertNotNull(annotation.values);
    assertEquals(2, annotation.values.size());
    assertEquals("annotationValue", annotation.values.get(0));

    AnnotationNode nestedAnnotation = (AnnotationNode) annotation.values.get(1);
    assertEquals("Ljava/lang/annotation/Retention;", nestedAnnotation.desc);
    assertNotNull(nestedAnnotation.values);
    assertEquals(2, nestedAnnotation.values.size());
    assertEquals("value", nestedAnnotation.values.get(0));
    assertEnumAnnotationValue(
        nestedAnnotation.values,
        1,
        "Ljava/lang/annotation/RetentionPolicy;",
        "RUNTIME");
  }

  // Annotation-array members must round-trip; note the single element given
  // here is still wrapped in a (length-1) list by ASM.
  @Test
  public void preservesAnnotationsWithAnnotationArrayValues() throws IOException {
    Path annotations = buildAnnotationJar();
    Path jar = compileToJar(
        ImmutableSortedSet.of(annotations),
        "A.java",
        Joiner.on("\n").join(
            "package com.example.buck;",
            "import java.lang.annotation.*;",
            "@Foo(annotationArrayValue=@Retention(RetentionPolicy.RUNTIME))",
            "public @interface A {}"));

    new StubJar(jar).writeTo(filesystem, stubJar);

    // Check the class-level annotation and its name/value pair list.
    ClassNode classNode = readClass(stubJar, "com/example/buck/A.class").getClassNode();
    List<AnnotationNode> classAnnotations = classNode.visibleAnnotations;
    assertEquals(1, classAnnotations.size());

    AnnotationNode annotation = classAnnotations.get(0);
    assertNotNull(annotation.values);
    assertEquals(2, annotation.values.size());
    assertEquals("annotationArrayValue", annotation.values.get(0));

    @SuppressWarnings("unchecked")
    List<Object> annotationArray = (List<Object>) annotation.values.get(1);
    assertEquals(1, annotationArray.size());

    AnnotationNode nestedAnnotation = (AnnotationNode) annotationArray.get(0);
    assertEquals("Ljava/lang/annotation/Retention;", nestedAnnotation.desc);
    assertNotNull(nestedAnnotation.values);
    assertEquals(2, nestedAnnotation.values.size());
    assertEquals("value", nestedAnnotation.values.get(0));
    assertEnumAnnotationValue(
        nestedAnnotation.values,
        1,
        "Ljava/lang/annotation/RetentionPolicy;",
        "RUNTIME");
  }

  // ASM represents an enum annotation value as a two-element String[]:
  // index 0 is the enum type descriptor, index 1 the constant name.
  private void assertEnumAnnotationValue(
      List<Object> annotationValueList,
      int index,
      String enumType,
      String enumValue) {
    String[] enumArray = (String[]) annotationValueList.get(index);
    assertEquals(enumType, enumArray[0]);
    assertEquals(enumValue, enumArray[1]);
  }

  // Inner classes get their own A$B.class entry; it must be stubbed too.
  @Test
  public void stubsInnerClasses() throws IOException {
    Path jar = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A {",
                " public class B {",
                " public int count;",
                " public void foo() {}",
                " }",
                "}"
            )));

    new StubJar(jar).writeTo(filesystem, stubJar);

    AbiClass original = readClass(jar, "com/example/buck/A$B.class");
    AbiClass stubbed = readClass(stubJar, "com/example/buck/A$B.class");

    MethodNode originalFoo = original.findMethod("foo");
    MethodNode stubbedFoo = stubbed.findMethod("foo");
    assertMethodEquals(originalFoo, stubbedFoo);

    FieldNode originalCount = original.findField("count");
    FieldNode stubbedCount = stubbed.findField("count");
    assertFieldEquals(originalCount, stubbedCount);
  }

  // The whole point of stub jars: changes that don't affect the ABI (method
  // bodies, field initializers, member order) must produce a byte-identical
  // stub, compared here via SHA-1.
  @Test
  public void abiSafeChangesResultInTheSameOutputJar() throws IOException {
    Path jar = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A {",
                " protected final static int count = 42;",
                " public String getGreeting() { return \"hello\"; }",
                " Class<?> clazz;",
                " public int other;",
                "}"
            )));
    new StubJar(jar).writeTo(filesystem, stubJar);
    String originalHash = filesystem.computeSha1(stubJar);

    Path jar2 = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A {",
                " Class<?> clazz = String.class;",
                " public String getGreeting() { return \"merhaba\"; }",
                " protected final static int count = 42;",
                " public int other = 32;",
                "}"
            )));
    filesystem.deleteFileAtPath(stubJar);
    new StubJar(jar2).writeTo(filesystem, stubJar);
    String secondHash = filesystem.computeSha1(stubJar);

    assertEquals(originalHash, secondHash);
  }

  // Static members are part of the ABI; compile-time constants must keep
  // their ConstantValue (44... er, 42) so consumers can inline them.
  @Test
  public void shouldIncludeStaticFields() throws IOException {
    Path jar = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A {",
                " public static String foo;",
                " public final static int count = 42;",
                " protected static void method() {}",
                "}")));

    new StubJar(jar).writeTo(filesystem, stubJar);

    AbiClass stubbed = readClass(stubJar, "com/example/buck/A.class");
    stubbed.findMethod("method");  // Presence is enough
    stubbed.findField("foo");  // Presence is enough
    FieldNode count = stubbed.findField("count");
    assertEquals(42, count.value);
  }

  // End-to-end: code in another package must compile against an inner class
  // taken from the stub jar (i.e. InnerClasses attributes are usable).
  @Test
  public void innerClassesInStubsCanBeCompiledAgainst() throws IOException {
    Path original = compileToJar(
        EMPTY_CLASSPATH,
        "Outer.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class Outer {",
                " public class Inner {",
                " public String getGreeting() { return \"hola\"; }",
                " }",
                "}")));

    new StubJar(original).writeTo(filesystem, stubJar);

    compileToJar(
        ImmutableSortedSet.of(stubJar),
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck2;",  // Note: different package
                "import com.example.buck.Outer;",  // Inner class becomes available
                "public class A {",
                " private Outer.Inner field;",  // Reference the inner class
                "}")));
  }

  // ACC_SYNCHRONIZED is an ABI-visible access flag and must be preserved.
  @Test
  public void shouldPreserveSynchronizedKeywordOnMethods() throws IOException {
    Path original = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A {",
                " public synchronized void doMagic() {}",
                "}")));

    new StubJar(original).writeTo(filesystem, stubJar);

    AbiClass stub = readClass(stubJar, "com/example/buck/A.class");
    MethodNode magic = stub.findMethod("doMagic");
    assertTrue((magic.access & Opcodes.ACC_SYNCHRONIZED) > 0);
  }

  // Regression guard: several fields sharing the same descriptor must all be
  // kept (they must not be deduplicated by desc).
  @Test
  public void shouldKeepMultipleFieldsWithSameDescValue() throws IOException {
    Path original = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A {",
                " public static final A SEVERE = new A();",
                " public static final A NOT_SEVERE = new A();",
                " public static final A QUITE_MILD = new A();",
                "}")));

    new StubJar(original).writeTo(filesystem, stubJar);

    AbiClass stubbed = readClass(stubJar, "com/example/buck/A.class");
    stubbed.findField("SEVERE");
    stubbed.findField("NOT_SEVERE");
    stubbed.findField("QUITE_MILD");
  }

  // StubJar must also accept an exploded directory of .class files as input,
  // not only a jar, and produce the same deterministic ordering.
  @Test
  public void stubJarIsEquallyAtHomeWalkingADirectoryOfClassFiles() throws IOException {
    Path jar = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on("\n").join(
            ImmutableList.of(
                "package com.example.buck;",
                "public class A {",
                " public String toString() { return null; }",
                " public void eatCake() {}",
                "}")));

    Path classDir = temp.newFolder().toPath();
    Unzip.extractZipFile(jar, classDir, Unzip.ExistingFileMode.OVERWRITE);

    new StubJar(classDir).writeTo(filesystem, stubJar);

    // Verify that both methods are present and given in alphabetical order.
    AbiClass classNode = readClass(stubJar, "com/example/buck/A.class");
    List<MethodNode> methods = classNode.getClassNode().methods;
    // Index 0 is the <init> method. Skip that.
    assertEquals("eatCake", methods.get(1).name);
    assertEquals("toString", methods.get(2).name);
  }

  // Compiler-synthesized bridge methods (here compareTo(Object) bridging to
  // compareTo(A)) are ABI-relevant and must be kept alongside the declared one.
  @Test
  public void shouldIncludeBridgeMethods() throws IOException {
    Path original = compileToJar(
        EMPTY_CLASSPATH,
        "A.java",
        Joiner.on('\n').join(ImmutableList.of(
            "package com.example.buck;",
            "public class A implements Comparable<A> {",
            " public int compareTo(A other) {",
            " return 0;",
            " }",
            "}")));

    new StubJar(original).writeTo(filesystem, stubJar);

    AbiClass stubbed = readClass(stubJar, "com/example/buck/A.class");
    int count = 0;
    for (MethodNode method : stubbed.getClassNode().methods) {
      if ("compareTo".equals(method.name)) {
        count++;
      }
    }

    // One for the declared compareTo(A), one for the synthetic bridge
    // compareTo(Object) required by the raw Comparable interface.
    assertEquals(2, count);
  }

  /**
   * Compiles a single source file with the system javac and packages the
   * resulting .class files into a jar.
   *
   * @param classpath jars to compile against (may be empty)
   * @param fileName simple name of the source file to create
   * @param source full Java source text
   * @return absolute path to the created jar
   */
  private Path compileToJar(
      SortedSet<Path> classpath,
      String fileName,
      String source) throws IOException {
    File inputs = temp.newFolder();

    File file = new File(inputs, fileName);

    Files.write(file.toPath(), source.getBytes(StandardCharsets.UTF_8));

    JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
    StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null, null);
    Iterable<? extends JavaFileObject> sourceObjects =
        fileManager.getJavaFileObjectsFromFiles(ImmutableSet.of(file));
    final File outputDir = temp.newFolder();
    // "-g" keeps full debug info so stubbing has the richest input to strip.
    List<String> args = Lists.newArrayList("-g", "-d", outputDir.getAbsolutePath());
    if (!classpath.isEmpty()) {
      args.add("-classpath");
      args.add(Joiner.on(File.pathSeparator).join(FluentIterable.from(classpath)
          .transform(filesystem.getAbsolutifier())));
    }
    JavaCompiler.CompilationTask compilation =
        compiler.getTask(null, fileManager, null, args, null, sourceObjects);
    Boolean result = compilation.call();
    fileManager.close();
    assertNotNull(result);
    assertTrue(result);

    File jar = new File(outputDir, "output.jar");

    try (
        FileOutputStream fos = new FileOutputStream(jar);
        final JarOutputStream os = new JarOutputStream(fos)) {
      // Walk the compiler output and add every .class file to the jar,
      // preserving its path relative to the output directory.
      SimpleFileVisitor<Path> visitor = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
          if (file.getFileName().toString().endsWith(".class")) {
            ZipEntry entry = new ZipEntry(outputDir.toPath().relativize(file).toString());
            os.putNextEntry(entry);
            ByteStreams.copy(Files.newInputStream(file), os);
            os.closeEntry();
          }
          return FileVisitResult.CONTINUE;
        }
      };

      Files.walkFileTree(outputDir.toPath(), visitor);
    }

    return jar.toPath().toAbsolutePath();
  }

  // Parses a single named class out of a (possibly stub) jar via ASM.
  private AbiClass readClass(Path pathToJar, String className) throws IOException {
    return AbiClass.extract(filesystem.getPathForRelativePath(pathToJar), className);
  }

  // Builds the jar containing @Foo, the runtime-retained annotation used by
  // the annotation-preservation tests above.
  private Path buildAnnotationJar() throws IOException {
    return compileToJar(
        EMPTY_CLASSPATH,
        "Foo.java",
        Joiner.on("\n").join(ImmutableList.of(
            "package com.example.buck;",
            "import java.lang.annotation.*;",
            "import static java.lang.annotation.ElementType.*;",
            "@Retention(RetentionPolicy.RUNTIME)",
            "@Target(value={CONSTRUCTOR, FIELD, METHOD, PARAMETER, TYPE})",
            "public @interface Foo {",
            " int primitiveValue() default 0;",
            " String[] stringArrayValue() default {};",
            " Retention annotationValue() default @Retention(RetentionPolicy.SOURCE);",
            " Retention[] annotationArrayValue() default {};",
            "}"
        )));
  }

  // ABI equality for methods: access flags, erased descriptor, generic signature.
  private void assertMethodEquals(MethodNode expected, MethodNode seen) {
    assertEquals(expected.access, seen.access);
    assertEquals(expected.desc, seen.desc);
    assertEquals(expected.signature, seen.signature);
  }

  // ABI equality for fields: name, erased descriptor, generic signature.
  private void assertFieldEquals(FieldNode expected, FieldNode seen) {
    assertEquals(expected.name, seen.name);
    assertEquals(expected.desc, seen.desc);
    assertEquals(expected.signature, seen.signature);
  }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ui;

import com.intellij.codeInsight.hint.HintUtil;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.Consumer;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.List;

/**
 * A slim horizontal or vertical slider that selects an integer value in the
 * range 0..255 and shows a lightweight balloon tooltip with the current value
 * while the user drags, hovers or scrolls over it.
 *
 * <p>Two coordinate systems are used throughout: the "value" (0..255) and the
 * "pointer value" (pixel position of the knob along the track, clamped to
 * [OFFSET, size - 12]).
 *
 * @author Alexey Pegov
 * @author Konstantin Bulenkov
 */
class SlideComponent extends JComponent {
  // Pixels reserved before the start of the usable track (knob half-width + border).
  private static final int OFFSET = 11;
  // Knob position along the major axis, in component pixels.
  private int myPointerValue = 0;
  // Current value in the 0..255 range.
  private int myValue = 0;
  private final boolean myVertical;
  private final String myTitle;

  // Lock-free list: listeners may be notified from mouse-event handling while
  // others are being added.
  private final List<Consumer<Integer>> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();
  // Lazily created tooltip; null while hidden.
  private LightweightHint myTooltipHint;
  private final JLabel myLabel = new JLabel();
  private Unit myUnit = Unit.LEVEL;

  /** How the tooltip renders the 0..255 value: raw level or as a percentage. */
  enum Unit {
    PERCENT,
    LEVEL;

    private static final float PERCENT_MAX_VALUE = 100f;
    private static final float LEVEL_MAX_VALUE = 255f;

    private static float getMaxValue(Unit unit) {
      return LEVEL.equals(unit) ? LEVEL_MAX_VALUE : PERCENT_MAX_VALUE;
    }

    // Rescales the 0..255 value to the unit's own range and appends "%" for PERCENT.
    private static String formatValue(int value, Unit unit) {
      return String.format("%d%s", (int) (getMaxValue(unit) / LEVEL_MAX_VALUE * value),
                           unit.equals(PERCENT) ? "%" : "");
    }
  }

  void setUnits(Unit unit) {
    myUnit = unit;
  }

  SlideComponent(String title, boolean vertical) {
    myTitle = title;
    myVertical = vertical;

    // Dragging moves the knob.
    addMouseMotionListener(new MouseAdapter() {
      @Override
      public void mouseDragged(MouseEvent e) {
        processMouse(e);
      }
    });

    addMouseListener(new MouseAdapter() {
      @Override
      public void mousePressed(MouseEvent e) {
        processMouse(e);
      }

      @Override
      public void mouseEntered(MouseEvent e) {
        updateBalloonText();
      }

      @Override
      public void mouseMoved(MouseEvent e) {
        updateBalloonText();
      }

      @Override
      public void mouseExited(MouseEvent e) {
        // Hide and drop the tooltip so it gets recreated fresh next time.
        if (myTooltipHint != null) {
          myTooltipHint.hide();
          myTooltipHint = null;
        }
      }
    });

    // Mouse wheel nudges the knob by the scroll amount, clamped to the track.
    // NOTE(review): same clamping as processMouse — the two must stay in sync.
    addMouseWheelListener(event -> {
      int units = event.getUnitsToScroll();
      if (units == 0) return;
      int pointerValue = myPointerValue + units;
      pointerValue = pointerValue < OFFSET ? OFFSET : pointerValue;
      int size = myVertical ? getHeight() : getWidth();
      pointerValue = pointerValue > (size - 12) ? size - 12 : pointerValue;

      myPointerValue = pointerValue;
      myValue = pointerValueToValue(myPointerValue);

      repaint();
      fireValueChanged();
    });

    // On resize the pixel<->value mapping changes, so recompute the knob
    // position from the logical value.
    addComponentListener(new ComponentAdapter() {
      @Override
      public void componentResized(ComponentEvent e) {
        setValue(getValue());
        fireValueChanged();
        repaint();
      }
    });
  }

  // Shows (or moves) the balloon tooltip anchored at the knob position.
  private void updateBalloonText() {
    final Point point = myVertical ? new Point(0, myPointerValue) : new Point(myPointerValue, 0);
    myLabel.setText(myTitle + ": " + Unit.formatValue(myValue, myUnit));
    if (myTooltipHint == null) {
      myTooltipHint = new LightweightHint(myLabel);
      myTooltipHint.setCancelOnClickOutside(false);
      myTooltipHint.setCancelOnOtherWindowOpen(false);

      final HintHint hint = new HintHint(this, point)
        .setPreferredPosition(myVertical ? Balloon.Position.atLeft : Balloon.Position.above)
        .setBorderColor(Color.BLACK)
        .setAwtTooltip(true)
        .setFont(UIUtil.getLabelFont().deriveFont(Font.BOLD))
        .setTextBg(HintUtil.getInformationColor())
        .setShowImmediately(true);

      final Component owner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
      myTooltipHint.show(this, point.x, point.y, owner instanceof JComponent ? (JComponent)owner : null, hint);
    }
    else {
      // Tooltip already visible: just follow the knob.
      myTooltipHint.setLocation(new RelativePoint(this, point));
    }
  }

  @Override
  protected void processMouseMotionEvent(MouseEvent e) {
    super.processMouseMotionEvent(e);
    // Keep the tooltip text/position current for any motion event.
    updateBalloonText();
  }

  // Maps a mouse event to a clamped knob position and the derived value,
  // then repaints and notifies listeners.
  private void processMouse(MouseEvent e) {
    int pointerValue = myVertical ? e.getY() : e.getX();
    pointerValue = pointerValue < OFFSET ? OFFSET : pointerValue;
    int size = myVertical ? getHeight() : getWidth();
    pointerValue = pointerValue > (size - 12) ? size - 12 : pointerValue;

    myPointerValue = pointerValue;

    myValue = pointerValueToValue(myPointerValue);

    repaint();
    fireValueChanged();
  }

  public void addListener(Consumer<Integer> listener) {
    myListeners.add(listener);
  }

  private void fireValueChanged() {
    for (Consumer<Integer> listener : myListeners) {
      listener.consume(myValue);
    }
  }

  // 0 - 255
  public void setValue(int value) {
    myPointerValue = valueToPointerValue(value);
    myValue = value;
  }

  public int getValue() {
    return myValue;
  }

  // Converts a knob pixel position into the 0..255 value.
  // (size - 23) is the usable track length in pixels — presumably
  // OFFSET + 12 reserved at the two ends; keep in sync with the clamping above.
  private int pointerValueToValue(int pointerValue) {
    pointerValue -= OFFSET;
    final int size = myVertical ? getHeight() : getWidth();
    float proportion = (size - 23) / 255f;
    return Math.round((pointerValue / proportion));
  }

  // Inverse of pointerValueToValue: value 0..255 -> knob pixel position.
  private int valueToPointerValue(int value) {
    final int size = myVertical ? getHeight() : getWidth();
    float proportion = (size - 23) / 255f;
    return OFFSET + (int)(value * proportion);
  }

  @Override
  public Dimension getPreferredSize() {
    return myVertical ? new Dimension(22, 100) : new Dimension(100, 22);
  }

  @Override
  public Dimension getMinimumSize() {
    return myVertical ? new Dimension(22, 50) : new Dimension(50, 22);
  }

  @Override
  public final void setToolTipText(String text) {
    //disable tooltips - the balloon hint above replaces them
  }

  @Override
  protected void paintComponent(Graphics g) {
    final Graphics2D g2d = (Graphics2D)g;

    // Track: white-to-black gradient along the major axis, with a light
    // inner and darker outer border.
    if (myVertical) {
      g2d.setPaint(UIUtil.getGradientPaint(0f, 0f, Color.WHITE, 0f, getHeight(), Color.BLACK));
      g.fillRect(7, 10, 12, getHeight() - 20);

      g.setColor(Gray._150);
      g.drawRect(7, 10, 12, getHeight() - 20);

      g.setColor(Gray._250);
      g.drawRect(8, 11, 10, getHeight() - 22);
    }
    else {
      g2d.setPaint(UIUtil.getGradientPaint(0f, 0f, Color.WHITE, getWidth(), 0f, Color.BLACK));
      g.fillRect(10, 7, getWidth() - 20, 12);

      g.setColor(Gray._150);
      g.drawRect(10, 7, getWidth() - 20, 12);

      g.setColor(Gray._250);
      g.drawRect(11, 8, getWidth() - 22, 10);
    }

    drawKnob(g2d, myVertical ? 7 : myPointerValue, myVertical ? myPointerValue : 7, myVertical);
  }

  // Paints the triangular knob (a drop shadow polygon under a colored arrow)
  // pointing at the given track position.
  private static void drawKnob(Graphics2D g2d, int x, int y, boolean vertical) {
    g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);

    if (vertical) {
      y -= 6;  // center the 12px-tall arrow on the pointer position

      Polygon arrowShadow = new Polygon();
      arrowShadow.addPoint(x - 5, y + 1);
      arrowShadow.addPoint(x + 7, y + 7);
      arrowShadow.addPoint(x - 5, y + 13);

      g2d.setColor(new Color(0, 0, 0, 70));
      g2d.fill(arrowShadow);

      Polygon arrowHead = new Polygon();
      arrowHead.addPoint(x - 6, y);
      arrowHead.addPoint(x + 6, y + 6);
      arrowHead.addPoint(x - 6, y + 12);

      g2d.setColor(new Color(153, 51, 0));
      g2d.fill(arrowHead);
    }
    else {
      x -= 6;  // center the 12px-wide arrow on the pointer position

      Polygon arrowShadow = new Polygon();
      arrowShadow.addPoint(x + 1, y - 5);
      arrowShadow.addPoint(x + 13, y - 5);
      arrowShadow.addPoint(x + 7, y + 7);

      g2d.setColor(new Color(0, 0, 0, 70));
      g2d.fill(arrowShadow);

      Polygon arrowHead = new Polygon();
      arrowHead.addPoint(x, y - 6);
      arrowHead.addPoint(x + 12, y - 6);
      arrowHead.addPoint(x + 6, y + 6);

      g2d.setColor(new Color(153, 51, 0));
      g2d.fill(arrowHead);
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.builder.component.dsl; import javax.annotation.Generated; import org.apache.camel.Component; import org.apache.camel.builder.component.AbstractComponentBuilder; import org.apache.camel.builder.component.ComponentBuilder; import org.apache.camel.component.milo.client.MiloClientComponent; /** * Connect to OPC UA servers using the binary protocol for acquiring telemetry * data. * * Generated by camel-package-maven-plugin - do not edit this file! */ @Generated("org.apache.camel.maven.packaging.ComponentDslMojo") public interface MiloClientComponentBuilderFactory { /** * OPC UA Client (camel-milo) * Connect to OPC UA servers using the binary protocol for acquiring * telemetry data. * * Category: iot * Since: 2.19 * Maven coordinates: org.apache.camel:camel-milo */ static MiloClientComponentBuilder miloClient() { return new MiloClientComponentBuilderImpl(); } /** * Builder for the OPC UA Client component. */ interface MiloClientComponentBuilder extends ComponentBuilder<MiloClientComponent> { /** * A virtual client id to force the creation of a new connection * instance. * * The option is a: <code>java.lang.String</code> type. 
* * Group: common */ default MiloClientComponentBuilder clientId(java.lang.String clientId) { doSetProperty("clientId", clientId); return this; } /** * All default options for client configurations. * * The option is a: * <code>org.apache.camel.component.milo.client.MiloClientConfiguration</code> type. * * Group: common */ default MiloClientComponentBuilder configuration( org.apache.camel.component.milo.client.MiloClientConfiguration configuration) { doSetProperty("configuration", configuration); return this; } /** * A suffix for endpoint URI when discovering. * * The option is a: <code>java.lang.String</code> type. * * Group: common */ default MiloClientComponentBuilder discoveryEndpointSuffix( java.lang.String discoveryEndpointSuffix) { doSetProperty("discoveryEndpointSuffix", discoveryEndpointSuffix); return this; } /** * An alternative discovery URI. * * The option is a: <code>java.lang.String</code> type. * * Group: common */ default MiloClientComponentBuilder discoveryEndpointUri( java.lang.String discoveryEndpointUri) { doSetProperty("discoveryEndpointUri", discoveryEndpointUri); return this; } /** * Allows for bridging the consumer to the Camel routing Error Handler, * which mean any exceptions occurred while the consumer is trying to * pickup incoming messages, or the likes, will now be processed as a * message and handled by the routing Error Handler. By default the * consumer will use the org.apache.camel.spi.ExceptionHandler to deal * with exceptions, that will be logged at WARN or ERROR level and * ignored. * * The option is a: <code>boolean</code> type. * * Default: false * Group: consumer */ default MiloClientComponentBuilder bridgeErrorHandler( boolean bridgeErrorHandler) { doSetProperty("bridgeErrorHandler", bridgeErrorHandler); return this; } /** * Whether the producer should be started lazy (on the first message). 
* By starting lazy you can use this to allow CamelContext and routes to * startup in situations where a producer may otherwise fail during * starting and cause the route to fail being started. By deferring this * startup to be lazy then the startup failure can be handled during * routing messages via Camel's routing error handlers. Beware that when * the first message is processed then creating and starting the * producer may take a little time and prolong the total processing time * of the processing. * * The option is a: <code>boolean</code> type. * * Default: false * Group: producer */ default MiloClientComponentBuilder lazyStartProducer( boolean lazyStartProducer) { doSetProperty("lazyStartProducer", lazyStartProducer); return this; } /** * Whether autowiring is enabled. This is used for automatic autowiring * options (the option must be marked as autowired) by looking up in the * registry to find if there is a single instance of matching type, * which then gets configured on the component. This can be used for * automatic configuring JDBC data sources, JMS connection factories, * AWS Clients, etc. * * The option is a: <code>boolean</code> type. * * Default: true * Group: advanced */ default MiloClientComponentBuilder autowiredEnabled( boolean autowiredEnabled) { doSetProperty("autowiredEnabled", autowiredEnabled); return this; } /** * A set of allowed security policy URIs. Default is to accept all and * use the highest. * * The option is a: <code>java.lang.String</code> type. * * Group: client */ default MiloClientComponentBuilder allowedSecurityPolicies( java.lang.String allowedSecurityPolicies) { doSetProperty("allowedSecurityPolicies", allowedSecurityPolicies); return this; } /** * The application name. * * The option is a: <code>java.lang.String</code> type. 
* * Default: Apache Camel adapter for Eclipse Milo * Group: client */ default MiloClientComponentBuilder applicationName( java.lang.String applicationName) { doSetProperty("applicationName", applicationName); return this; } /** * The application URI. * * The option is a: <code>java.lang.String</code> type. * * Default: http://camel.apache.org/EclipseMilo/Client * Group: client */ default MiloClientComponentBuilder applicationUri( java.lang.String applicationUri) { doSetProperty("applicationUri", applicationUri); return this; } /** * Channel lifetime in milliseconds. * * The option is a: <code>java.lang.Long</code> type. * * Group: client */ default MiloClientComponentBuilder channelLifetime( java.lang.Long channelLifetime) { doSetProperty("channelLifetime", channelLifetime); return this; } /** * The name of the key in the keystore file. * * The option is a: <code>java.lang.String</code> type. * * Group: client */ default MiloClientComponentBuilder keyAlias(java.lang.String keyAlias) { doSetProperty("keyAlias", keyAlias); return this; } /** * The key password. * * The option is a: <code>java.lang.String</code> type. * * Group: client */ default MiloClientComponentBuilder keyPassword( java.lang.String keyPassword) { doSetProperty("keyPassword", keyPassword); return this; } /** * The keystore password. * * The option is a: <code>java.lang.String</code> type. * * Group: client */ default MiloClientComponentBuilder keyStorePassword( java.lang.String keyStorePassword) { doSetProperty("keyStorePassword", keyStorePassword); return this; } /** * The key store type. * * The option is a: <code>java.lang.String</code> type. * * Group: client */ default MiloClientComponentBuilder keyStoreType( java.lang.String keyStoreType) { doSetProperty("keyStoreType", keyStoreType); return this; } /** * The URL where the key should be loaded from. * * The option is a: <code>java.lang.String</code> type. 
* * Group: client */ default MiloClientComponentBuilder keyStoreUrl( java.lang.String keyStoreUrl) { doSetProperty("keyStoreUrl", keyStoreUrl); return this; } /** * The maximum number of pending publish requests. * * The option is a: <code>java.lang.Long</code> type. * * Group: client */ default MiloClientComponentBuilder maxPendingPublishRequests( java.lang.Long maxPendingPublishRequests) { doSetProperty("maxPendingPublishRequests", maxPendingPublishRequests); return this; } /** * The maximum number of bytes a response message may have. * * The option is a: <code>java.lang.Long</code> type. * * Group: client */ default MiloClientComponentBuilder maxResponseMessageSize( java.lang.Long maxResponseMessageSize) { doSetProperty("maxResponseMessageSize", maxResponseMessageSize); return this; } /** * Override the server reported endpoint host with the host from the * endpoint URI. * * The option is a: <code>boolean</code> type. * * Default: false * Group: client */ default MiloClientComponentBuilder overrideHost(boolean overrideHost) { doSetProperty("overrideHost", overrideHost); return this; } /** * The product URI. * * The option is a: <code>java.lang.String</code> type. * * Default: http://camel.apache.org/EclipseMilo * Group: client */ default MiloClientComponentBuilder productUri( java.lang.String productUri) { doSetProperty("productUri", productUri); return this; } /** * The requested publishing interval in milliseconds. * * The option is a: <code>java.lang.Double</code> type. * * Default: 1_000.0 * Group: client */ default MiloClientComponentBuilder requestedPublishingInterval( java.lang.Double requestedPublishingInterval) { doSetProperty("requestedPublishingInterval", requestedPublishingInterval); return this; } /** * Request timeout in milliseconds. * * The option is a: <code>java.lang.Long</code> type. 
* * Group: client */ default MiloClientComponentBuilder requestTimeout( java.lang.Long requestTimeout) { doSetProperty("requestTimeout", requestTimeout); return this; } /** * Session name. * * The option is a: <code>java.lang.String</code> type. * * Group: client */ default MiloClientComponentBuilder sessionName( java.lang.String sessionName) { doSetProperty("sessionName", sessionName); return this; } /** * Session timeout in milliseconds. * * The option is a: <code>java.lang.Long</code> type. * * Group: client */ default MiloClientComponentBuilder sessionTimeout( java.lang.Long sessionTimeout) { doSetProperty("sessionTimeout", sessionTimeout); return this; } } class MiloClientComponentBuilderImpl extends AbstractComponentBuilder<MiloClientComponent> implements MiloClientComponentBuilder { @Override protected MiloClientComponent buildConcreteComponent() { return new MiloClientComponent(); } private org.apache.camel.component.milo.client.MiloClientConfiguration getOrCreateConfiguration( org.apache.camel.component.milo.client.MiloClientComponent component) { if (component.getConfiguration() == null) { component.setConfiguration(new org.apache.camel.component.milo.client.MiloClientConfiguration()); } return component.getConfiguration(); } @Override protected boolean setPropertyOnComponent( Component component, String name, Object value) { switch (name) { case "clientId": getOrCreateConfiguration((MiloClientComponent) component).setClientId((java.lang.String) value); return true; case "configuration": ((MiloClientComponent) component).setConfiguration((org.apache.camel.component.milo.client.MiloClientConfiguration) value); return true; case "discoveryEndpointSuffix": getOrCreateConfiguration((MiloClientComponent) component).setDiscoveryEndpointSuffix((java.lang.String) value); return true; case "discoveryEndpointUri": getOrCreateConfiguration((MiloClientComponent) component).setDiscoveryEndpointUri((java.lang.String) value); return true; case "bridgeErrorHandler": 
((MiloClientComponent) component).setBridgeErrorHandler((boolean) value); return true; case "lazyStartProducer": ((MiloClientComponent) component).setLazyStartProducer((boolean) value); return true; case "autowiredEnabled": ((MiloClientComponent) component).setAutowiredEnabled((boolean) value); return true; case "allowedSecurityPolicies": getOrCreateConfiguration((MiloClientComponent) component).setAllowedSecurityPolicies((java.lang.String) value); return true; case "applicationName": getOrCreateConfiguration((MiloClientComponent) component).setApplicationName((java.lang.String) value); return true; case "applicationUri": getOrCreateConfiguration((MiloClientComponent) component).setApplicationUri((java.lang.String) value); return true; case "channelLifetime": getOrCreateConfiguration((MiloClientComponent) component).setChannelLifetime((java.lang.Long) value); return true; case "keyAlias": getOrCreateConfiguration((MiloClientComponent) component).setKeyAlias((java.lang.String) value); return true; case "keyPassword": getOrCreateConfiguration((MiloClientComponent) component).setKeyPassword((java.lang.String) value); return true; case "keyStorePassword": getOrCreateConfiguration((MiloClientComponent) component).setKeyStorePassword((java.lang.String) value); return true; case "keyStoreType": getOrCreateConfiguration((MiloClientComponent) component).setKeyStoreType((java.lang.String) value); return true; case "keyStoreUrl": getOrCreateConfiguration((MiloClientComponent) component).setKeyStoreUrl((java.lang.String) value); return true; case "maxPendingPublishRequests": getOrCreateConfiguration((MiloClientComponent) component).setMaxPendingPublishRequests((java.lang.Long) value); return true; case "maxResponseMessageSize": getOrCreateConfiguration((MiloClientComponent) component).setMaxResponseMessageSize((java.lang.Long) value); return true; case "overrideHost": getOrCreateConfiguration((MiloClientComponent) component).setOverrideHost((boolean) value); return true; case 
"productUri": getOrCreateConfiguration((MiloClientComponent) component).setProductUri((java.lang.String) value); return true; case "requestedPublishingInterval": getOrCreateConfiguration((MiloClientComponent) component).setRequestedPublishingInterval((java.lang.Double) value); return true; case "requestTimeout": getOrCreateConfiguration((MiloClientComponent) component).setRequestTimeout((java.lang.Long) value); return true; case "sessionName": getOrCreateConfiguration((MiloClientComponent) component).setSessionName((java.lang.String) value); return true; case "sessionTimeout": getOrCreateConfiguration((MiloClientComponent) component).setSessionTimeout((java.lang.Long) value); return true; default: return false; } } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.rpc.data;

import com.google.common.base.Stopwatch;
import com.google.common.collect.Lists;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigValueFactory;
import io.netty.buffer.ByteBuf;
import mockit.Injectable;
import mockit.Mock;
import mockit.MockUp;
import mockit.NonStrictExpectations;
import org.apache.drill.BaseTestQuery;
import org.apache.drill.categories.SecurityTest;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.config.DrillProperties;
import org.apache.drill.common.exceptions.UserRemoteException;
import org.apache.drill.common.scanner.ClassPathScanner;
import org.apache.drill.common.scanner.persistence.ScanResult;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.exception.DrillbitStartupException;
import org.apache.drill.exec.exception.FragmentSetupException;
import org.apache.drill.exec.expr.TypeHelper;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
import org.apache.drill.exec.proto.UserBitShared;
import org.apache.drill.exec.proto.UserBitShared.QueryId;
import org.apache.drill.exec.record.FragmentWritableBatch;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.RawFragmentBatch;
import org.apache.drill.exec.record.WritableBatch;
import org.apache.drill.exec.rpc.RpcException;
import org.apache.drill.exec.rpc.RpcOutcomeListener;
import org.apache.drill.exec.rpc.control.WorkEventBus;
import org.apache.drill.exec.rpc.security.KerberosHelper;
import org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl;
import org.apache.drill.exec.server.BootStrapContext;
import org.apache.drill.exec.server.options.SystemOptionManager;
import org.apache.drill.exec.vector.Float8Vector;
import org.apache.drill.exec.vector.ValueVector;
import org.apache.drill.exec.work.WorkManager.WorkerBee;
import org.apache.drill.exec.work.fragment.FragmentManager;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.authentication.util.KerberosUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import java.io.IOException;
import java.lang.reflect.Field;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

import static junit.framework.TestCase.fail;
import static org.junit.Assert.assertTrue;

/**
 * Bit-to-Bit (data channel) Kerberos authentication tests. A test KDC is
 * started via {@link KerberosHelper}, the test cluster is reconfigured for
 * kerberos auth, and the {@code DataServer}/{@code DataTunnel} pair is
 * exercised with and without SASL encryption, plus negative configuration
 * cases.
 */
@Ignore("See DRILL-5387")
@Category(SecurityTest.class)
public class TestBitBitKerberos extends BaseTestQuery {
  //private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestBitBitKerberos.class);

  private static KerberosHelper krbHelper;
  private static DrillConfig newConfig;
  private static BootStrapContext c1;
  private static FragmentManager manager;

  // Starting port for the test DataServer; server.bind(port, true) may hunt
  // upward, so the returned port is stored back.
  private int port = 1234;

  /**
   * Starts the KDC, rebuilds the Drill config for kerberos bit auth,
   * refreshes JDK/Hadoop kerberos state, and restarts the test cluster.
   */
  @BeforeClass
  public static void setupTest() throws Exception {
    final Config config = DrillConfig.create(cloneDefaultTestConfigProperties());

    krbHelper = new KerberosHelper(TestBitBitKerberos.class.getSimpleName());
    krbHelper.setupKdc();

    newConfig = new DrillConfig(
        config.withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
            ConfigValueFactory.fromIterable(Lists.newArrayList("kerberos")))
        .withValue(ExecConstants.BIT_AUTHENTICATION_ENABLED,
            ConfigValueFactory.fromAnyRef(true))
        .withValue(ExecConstants.BIT_AUTHENTICATION_MECHANISM,
            ConfigValueFactory.fromAnyRef("kerberos"))
        .withValue(ExecConstants.USE_LOGIN_PRINCIPAL,
            ConfigValueFactory.fromAnyRef(true))
        .withValue(BootStrapContext.SERVICE_PRINCIPAL,
            ConfigValueFactory.fromAnyRef(krbHelper.SERVER_PRINCIPAL))
        .withValue(BootStrapContext.SERVICE_KEYTAB_LOCATION,
            ConfigValueFactory.fromAnyRef(krbHelper.serverKeytab.toString())),
        false);

    // Ignore the compile time warning caused by the code below.
    // Config is statically initialized at this point. But the above configuration results in a different
    // initialization which causes the tests to fail. So the following two changes are required.

    // (1) Refresh Kerberos config.
    sun.security.krb5.Config.refresh();
    // (2) Reset the default realm (cached in a private static field).
    final Field defaultRealm = KerberosName.class.getDeclaredField("defaultRealm");
    defaultRealm.setAccessible(true);
    defaultRealm.set(null, KerberosUtil.getDefaultRealm());

    updateTestCluster(1, newConfig);

    ScanResult result = ClassPathScanner.fromPrescan(newConfig);
    c1 = new BootStrapContext(newConfig, SystemOptionManager.createDefaultOptionDefinitions(), result);
    setupFragmentContextAndManager();
  }

  /**
   * Installs JMockit mocks: a FragmentContext backed by c1's allocator and a
   * FragmentManager whose handle() acks every batch, sleeping 3s on every
   * 10th batch to create back-pressure the timing tests assert on.
   */
  private static void setupFragmentContextAndManager() {
    final FragmentContext fcontext = new MockUp<FragmentContext>() {
      @SuppressWarnings("unused")
      BufferAllocator getAllocator() {
        return c1.getAllocator();
      }
    }.getMockInstance();

    manager = new MockUp<FragmentManager>() {
      int v = 0;

      @Mock
      boolean handle(IncomingDataBatch batch) throws FragmentSetupException, IOException {
        try {
          v++;
          if (v % 10 == 0) {
            System.out.println("sleeping.");
            Thread.sleep(3000);
          }
        } catch (InterruptedException e) {
          // Fix: restore the interrupt status instead of silently swallowing
          // the InterruptedException, so a cancelled test thread can still
          // observe the interruption.
          Thread.currentThread().interrupt();
        }
        RawFragmentBatch rfb = batch.newRawFragmentBatch(c1.getAllocator());
        rfb.sendOk();
        rfb.release();
        return true;
      }

      @SuppressWarnings("unused")
      public FragmentContext getFragmentContext() {
        return fcontext;
      }
    }.getMockInstance();
  }

  /** Builds a writable batch of 5 FLOAT8 vectors filled with test data. */
  private static WritableBatch getRandomBatch(BufferAllocator allocator, int records) {
    List<ValueVector> vectors = Lists.newArrayList();
    for (int i = 0; i < 5; i++) {
      @SuppressWarnings("resource")
      Float8Vector v = (Float8Vector) TypeHelper.getNewVector(
          MaterializedField.create("a", Types.required(MinorType.FLOAT8)),
          allocator);
      v.allocateNew(records);
      v.getMutator().generateTestData(records);
      vectors.add(v);
    }
    return WritableBatch.getBatchNoHV(records, vectors, false);
  }

  /**
   * Ack listener that records the maximum elapsed send time (in
   * milliseconds) across all outcomes into a shared AtomicLong.
   */
  private class TimingOutcome implements RpcOutcomeListener<Ack> {
    private AtomicLong max;
    private Stopwatch watch = Stopwatch.createStarted();

    TimingOutcome(AtomicLong max) {
      super();
      this.max = max;
    }

    @Override
    public void failed(RpcException ex) {
      ex.printStackTrace();
    }

    @Override
    public void success(Ack value, ByteBuf buffer) {
      // Fix: this value is measured in MILLISECONDS; the local was previously
      // named "micros", which misrepresented the unit. Output unchanged.
      long elapsedMillis = watch.elapsed(TimeUnit.MILLISECONDS);
      System.out.println(String.format("Total time to send: %d, start time %d",
          elapsedMillis, System.currentTimeMillis() - elapsedMillis));
      // CAS loop: fold this sample into the running maximum.
      while (true) {
        long nowMax = max.get();
        if (nowMax < elapsedMillis) {
          if (max.compareAndSet(nowMax, elapsedMillis)) {
            break;
          }
        } else {
          break;
        }
      }
    }

    @Override
    public void interrupted(final InterruptedException e) {
      // TODO(We don't have any interrupts in test code)
    }
  }

  /**
   * Happy path: kerberos-authenticated data channel. Sends 40 batches; the
   * mocked manager's periodic 3s sleep guarantees the max send time exceeds
   * the asserted 2700 ms threshold.
   */
  @Test
  public void success(@Injectable WorkerBee bee, @Injectable final WorkEventBus workBus) throws Exception {
    new NonStrictExpectations() {{
      workBus.getFragmentManagerIfExists((FragmentHandle) any);
      result = manager;
      workBus.getFragmentManager((FragmentHandle) any);
      result = manager;
    }};

    DataConnectionConfig config = new DataConnectionConfig(c1.getAllocator(), c1,
        new DataServerRequestHandler(workBus, bee));
    DataServer server = new DataServer(config);

    port = server.bind(port, true);
    DrillbitEndpoint ep = DrillbitEndpoint.newBuilder().setAddress("localhost").setDataPort(port).build();
    DataConnectionManager connectionManager = new DataConnectionManager(ep, config);
    DataTunnel tunnel = new DataTunnel(connectionManager);
    AtomicLong max = new AtomicLong(0);
    for (int i = 0; i < 40; i++) {
      long t1 = System.currentTimeMillis();
      tunnel.sendRecordBatch(new TimingOutcome(max),
          new FragmentWritableBatch(false, QueryId.getDefaultInstance(), 1, 1, 1, 1,
              getRandomBatch(c1.getAllocator(), 5000)));
      System.out.println(System.currentTimeMillis() - t1);
      // System.out.println("sent.");
    }
    System.out.println(String.format("Max time: %d", max.get()));
    assertTrue(max.get() > 2700);
    Thread.sleep(5000);
  }

  /** Same as {@link #success}, but with SASL encryption enabled. */
  @Test
  public void successEncryption(@Injectable WorkerBee bee,
                                @Injectable final WorkEventBus workBus) throws Exception {
    newConfig = new DrillConfig(
        config.withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
            ConfigValueFactory.fromIterable(Lists.newArrayList("kerberos")))
        .withValue(ExecConstants.BIT_AUTHENTICATION_ENABLED,
            ConfigValueFactory.fromAnyRef(true))
        .withValue(ExecConstants.BIT_AUTHENTICATION_MECHANISM,
            ConfigValueFactory.fromAnyRef("kerberos"))
        .withValue(ExecConstants.BIT_ENCRYPTION_SASL_ENABLED,
            ConfigValueFactory.fromAnyRef(true))
        .withValue(ExecConstants.USE_LOGIN_PRINCIPAL,
            ConfigValueFactory.fromAnyRef(true))
        .withValue(BootStrapContext.SERVICE_PRINCIPAL,
            ConfigValueFactory.fromAnyRef(krbHelper.SERVER_PRINCIPAL))
        .withValue(BootStrapContext.SERVICE_KEYTAB_LOCATION,
            ConfigValueFactory.fromAnyRef(krbHelper.serverKeytab.toString())),
        false);

    updateTestCluster(1, newConfig);

    new NonStrictExpectations() {{
      workBus.getFragmentManagerIfExists((FragmentHandle) any);
      result = manager;
      workBus.getFragmentManager((FragmentHandle) any);
      result = manager;
    }};

    DataConnectionConfig config = new DataConnectionConfig(c1.getAllocator(), c1,
        new DataServerRequestHandler(workBus, bee));
    DataServer server = new DataServer(config);

    port = server.bind(port, true);
    DrillbitEndpoint ep = DrillbitEndpoint.newBuilder().setAddress("localhost").setDataPort(port).build();
    DataConnectionManager connectionManager = new DataConnectionManager(ep, config);
    DataTunnel tunnel = new DataTunnel(connectionManager);
    AtomicLong max = new AtomicLong(0);

    for (int i = 0; i < 40; i++) {
      long t1 = System.currentTimeMillis();
      tunnel.sendRecordBatch(new TimingOutcome(max),
          new FragmentWritableBatch(false, QueryId.getDefaultInstance(), 1, 1, 1, 1,
              getRandomBatch(c1.getAllocator(), 5000)));
      System.out.println(System.currentTimeMillis() - t1);
    }
    System.out.println(String.format("Max time: %d", max.get()));
    assertTrue(max.get() > 2700);
    Thread.sleep(5000);
  }

  /** Same as {@link #successEncryption}, with a capped SASL wrap size so messages are chunked. */
  @Test
  public void successEncryptionChunkMode(@Injectable WorkerBee bee,
                                         @Injectable final WorkEventBus workBus) throws Exception {
    newConfig = new DrillConfig(
        config.withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
            ConfigValueFactory.fromIterable(Lists.newArrayList("kerberos")))
        .withValue(ExecConstants.BIT_AUTHENTICATION_ENABLED,
            ConfigValueFactory.fromAnyRef(true))
        .withValue(ExecConstants.BIT_AUTHENTICATION_MECHANISM,
            ConfigValueFactory.fromAnyRef("kerberos"))
        .withValue(ExecConstants.BIT_ENCRYPTION_SASL_ENABLED,
            ConfigValueFactory.fromAnyRef(true))
        .withValue(ExecConstants.BIT_ENCRYPTION_SASL_MAX_WRAPPED_SIZE,
            ConfigValueFactory.fromAnyRef(100000))
        .withValue(ExecConstants.USE_LOGIN_PRINCIPAL,
            ConfigValueFactory.fromAnyRef(true))
        .withValue(BootStrapContext.SERVICE_PRINCIPAL,
            ConfigValueFactory.fromAnyRef(krbHelper.SERVER_PRINCIPAL))
        .withValue(BootStrapContext.SERVICE_KEYTAB_LOCATION,
            ConfigValueFactory.fromAnyRef(krbHelper.serverKeytab.toString())),
        false);

    updateTestCluster(1, newConfig);

    new NonStrictExpectations() {{
      workBus.getFragmentManagerIfExists((FragmentHandle) any);
      result = manager;
      workBus.getFragmentManager((FragmentHandle) any);
      result = manager;
    }};

    DataConnectionConfig config = new DataConnectionConfig(c1.getAllocator(), c1,
        new DataServerRequestHandler(workBus, bee));
    DataServer server = new DataServer(config);

    port = server.bind(port, true);
    DrillbitEndpoint ep = DrillbitEndpoint.newBuilder().setAddress("localhost").setDataPort(port).build();
    DataConnectionManager connectionManager = new DataConnectionManager(ep, config);
    DataTunnel tunnel = new DataTunnel(connectionManager);
    AtomicLong max = new AtomicLong(0);

    for (int i = 0; i < 40; i++) {
      long t1 = System.currentTimeMillis();
      tunnel.sendRecordBatch(new TimingOutcome(max),
          new FragmentWritableBatch(false, QueryId.getDefaultInstance(), 1, 1, 1, 1,
              getRandomBatch(c1.getAllocator(), 5000)));
      System.out.println(System.currentTimeMillis() - t1);
    }
    System.out.println(String.format("Max time: %d", max.get()));
    assertTrue(max.get() > 2700);
    Thread.sleep(5000);
  }

  /** Requesting encryption with only the "plain" mechanism must fail Drillbit startup. */
  @Test
  public void failureEncryptionOnlyPlainMechanism() throws Exception {
    try {
      newConfig = new DrillConfig(
          config.withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
              ConfigValueFactory.fromIterable(Lists.newArrayList("plain")))
          .withValue(ExecConstants.BIT_AUTHENTICATION_ENABLED,
              ConfigValueFactory.fromAnyRef(true))
          .withValue(ExecConstants.BIT_AUTHENTICATION_MECHANISM,
              ConfigValueFactory.fromAnyRef("kerberos"))
          .withValue(ExecConstants.BIT_ENCRYPTION_SASL_ENABLED,
              ConfigValueFactory.fromAnyRef(true))
          .withValue(ExecConstants.USE_LOGIN_PRINCIPAL,
              ConfigValueFactory.fromAnyRef(true))
          .withValue(BootStrapContext.SERVICE_PRINCIPAL,
              ConfigValueFactory.fromAnyRef(krbHelper.SERVER_PRINCIPAL))
          .withValue(BootStrapContext.SERVICE_KEYTAB_LOCATION,
              ConfigValueFactory.fromAnyRef(krbHelper.serverKeytab.toString())),
          false);

      updateTestCluster(1, newConfig);
      fail();
    } catch (Exception ex) {
      assertTrue(ex.getCause() instanceof DrillbitStartupException);
    }
  }

  /**
   * Test to validate that a query which is running only on local Foreman node runs fine even if the Bit-Bit
   * Auth config is wrong. With DRILL-5721, all the local fragment setup and status update
   * doesn't happen over Control tunnel but instead happens locally. Without the fix in DRILL-5721 these queries will
   * hang.
   *
   * This test only starts up 1 Drillbit so that all fragments are scheduled on Foreman Drillbit node
   * @throws Exception
   */
  @Test
  public void localQuerySuccessWithWrongBitAuthConfig() throws Exception {
    final Properties connectionProps = new Properties();
    connectionProps.setProperty(DrillProperties.SERVICE_PRINCIPAL, krbHelper.SERVER_PRINCIPAL);
    connectionProps.setProperty(DrillProperties.USER, krbHelper.CLIENT_PRINCIPAL);
    connectionProps.setProperty(DrillProperties.KEYTAB, krbHelper.clientKeytab.getAbsolutePath());

    newConfig = new DrillConfig(DrillConfig.create(cloneDefaultTestConfigProperties())
        .withValue(ExecConstants.USER_AUTHENTICATION_ENABLED,
            ConfigValueFactory.fromAnyRef(true))
        .withValue(ExecConstants.USER_AUTHENTICATOR_IMPL,
            ConfigValueFactory.fromAnyRef(UserAuthenticatorTestImpl.TYPE))
        .withValue(BootStrapContext.SERVICE_PRINCIPAL,
            ConfigValueFactory.fromAnyRef(krbHelper.SERVER_PRINCIPAL))
        .withValue(BootStrapContext.SERVICE_KEYTAB_LOCATION,
            ConfigValueFactory.fromAnyRef(krbHelper.serverKeytab.toString()))
        .withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
            ConfigValueFactory.fromIterable(Lists.newArrayList("plain", "kerberos")))
        .withValue(ExecConstants.BIT_AUTHENTICATION_ENABLED,
            ConfigValueFactory.fromAnyRef(true))
        .withValue(ExecConstants.BIT_AUTHENTICATION_MECHANISM,
            ConfigValueFactory.fromAnyRef("kerberos"))
        .withValue(ExecConstants.USE_LOGIN_PRINCIPAL,
            ConfigValueFactory.fromAnyRef(false))
        , false);

    updateTestCluster(1, newConfig, connectionProps);

    // Run a query using the new client
    final String query = getFile("queries/tpch/01.sql");
    test(query);
  }

  /**
   * Test to validate that query setup fails while scheduling remote fragments when multiple Drillbits are running with
   * wrong Bit-to-Bit Authentication configuration.
   *
   * This test starts up 2 Drillbit so that there are combination of local and remote fragments for query
   * execution. Note: When test runs with wrong config then for control connection Drillbit's uses wrong
   * service principal to talk to another Drillbit, and due to this Kerby server also fails with NullPointerException.
   * But for unit testing this should be fine.
   * @throws Exception
   */
  @Test
  public void queryFailureWithWrongBitAuthConfig() throws Exception {
    try {
      final Properties connectionProps = new Properties();
      connectionProps.setProperty(DrillProperties.SERVICE_PRINCIPAL, krbHelper.SERVER_PRINCIPAL);
      connectionProps.setProperty(DrillProperties.USER, krbHelper.CLIENT_PRINCIPAL);
      connectionProps.setProperty(DrillProperties.KEYTAB, krbHelper.clientKeytab.getAbsolutePath());

      newConfig = new DrillConfig(DrillConfig.create(cloneDefaultTestConfigProperties())
          .withValue(ExecConstants.USER_AUTHENTICATION_ENABLED,
              ConfigValueFactory.fromAnyRef(true))
          .withValue(ExecConstants.USER_AUTHENTICATOR_IMPL,
              ConfigValueFactory.fromAnyRef(UserAuthenticatorTestImpl.TYPE))
          .withValue(BootStrapContext.SERVICE_PRINCIPAL,
              ConfigValueFactory.fromAnyRef(krbHelper.SERVER_PRINCIPAL))
          .withValue(BootStrapContext.SERVICE_KEYTAB_LOCATION,
              ConfigValueFactory.fromAnyRef(krbHelper.serverKeytab.toString()))
          .withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
              ConfigValueFactory.fromIterable(Lists.newArrayList("plain", "kerberos")))
          .withValue(ExecConstants.BIT_AUTHENTICATION_ENABLED,
              ConfigValueFactory.fromAnyRef(true))
          .withValue(ExecConstants.BIT_AUTHENTICATION_MECHANISM,
              ConfigValueFactory.fromAnyRef("kerberos"))
          .withValue(ExecConstants.USE_LOGIN_PRINCIPAL,
              ConfigValueFactory.fromAnyRef(false))
          , false);

      updateTestCluster(2, newConfig, connectionProps);

      test("alter session set `planner.slice_target` = 10");
      final String query = getFile("queries/tpch/01.sql");
      test(query);
      fail();
    } catch (Exception ex) {
      assertTrue(ex instanceof UserRemoteException);
      assertTrue(((UserRemoteException) ex).getErrorType() == UserBitShared.DrillPBError.ErrorType.CONNECTION);
    }
  }

  @AfterClass
  public static void cleanTest() throws Exception {
    krbHelper.stopKdc();
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.heron.metricscachemgr.metricscache;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;

import org.junit.Test;

import org.apache.heron.metricscachemgr.metricscache.query.MetricDatum;
import org.apache.heron.metricscachemgr.metricscache.query.MetricRequest;
import org.apache.heron.metricscachemgr.metricscache.query.MetricResponse;
import org.apache.heron.metricscachemgr.metricscache.query.MetricTimeRangeValue;
import org.apache.heron.proto.tmanager.TopologyManager;
import org.apache.heron.spi.metricsmgr.metrics.MetricsFilter;

import static org.apache.heron.metricscachemgr.metricscache.query.MetricGranularity.RAW;
import static org.junit.Assert.assertEquals;

/**
 * Unit tests for CacheCore's hash-indexed metric queries. Each test seeds the
 * cache with one batch of 8 metric values (2 components x 2 instances x
 * 2 metric names, all at timestamp now-90s) and then queries by various
 * combinations of component / instance / metric selectors.
 */
public class CacheCoreTest {
  // test target
  private static CacheCore cacheCore;
  // cache timestamp: time origin used to compute all query windows
  private static long now;
  // aggregation type
  private static MetricsFilter metricsFilter;
  // sort MetricTimeRangeValue
  // NOTE(review): never assigned in the visible code; List.sort(null) falls
  // back to natural ordering, which is harmless only because every expected
  // value list here has at most one element — confirm it is initialized
  // elsewhere in the file.
  private static Comparator<MetricTimeRangeValue> timeRangeValueComparator;
  // sort MetricDatum
  private static Comparator<MetricDatum> datumComparator;

  /**
   * Asserts that the (sorted) actual time-range values match the expected
   * list element-by-element: start time, end time, and value.
   */
  private static void assertMetricValue(
      List<MetricTimeRangeValue> expected,
      List<MetricTimeRangeValue> actualIn) {
    // copy before sorting so the caller's list is not mutated
    List<MetricTimeRangeValue> actual = new ArrayList<>(actualIn);
    actual.sort(timeRangeValueComparator);

    int len = expected.size();
    assertEquals(len, actual.size());
    for (int i = 0; i < len; i++) {
      MetricTimeRangeValue expectedVal = expected.get(i);
      MetricTimeRangeValue actualVal = actual.get(i);

      assertEquals(expectedVal.getStartTime(), actualVal.getStartTime());
      assertEquals(expectedVal.getEndTime(), actualVal.getEndTime());
      assertEquals(expectedVal.getValue(), actualVal.getValue());
    }
  }

  /**
   * Asserts that the response datum list, after sorting by
   * (component, instance, metric), matches the expected data exactly.
   */
  private static void assertMetricResponse(
      List<MetricDatum> metricListIn, MetricDatum... metricData) {
    // copy before sorting so the caller's list is not mutated
    List<MetricDatum> metricList = new ArrayList<>(metricListIn);

    assertEquals(metricData.length, metricList.size());

    metricList.sort(datumComparator);
    for (int i = 0; i < metricData.length; i++) {
      MetricDatum expected = metricData[i];
      MetricDatum actual = metricList.get(i);

      assertEquals(expected.getComponentName(), actual.getComponentName());
      assertEquals(expected.getInstanceId(), actual.getInstanceId());
      assertEquals(expected.getMetricName(), actual.getMetricName());
      assertMetricValue(expected.getMetricValue(), actual.getMetricValue());
    }
  }

  /**
   * Seeds a fresh CacheCore with one metric batch at now-90s:
   * components c1/c2 x instances i1/i2 x metrics m1/m2 with values
   * 0.1 .. 0.8 in nested-loop order. Also (re)builds metricsFilter and
   * datumComparator.
   */
  private void prepareDataForHashIndex() {
    // create cache with time window 100 seconds, bucket size 30 seconds and no exception store.
    // the cache should be initialized with 4 buckets:
    // bucket 1: [now-100 seconds ~ now-70 seconds)
    // bucket 2: [now-70 seconds ~ now-40 seconds)
    // bucket 3: [now-40 seconds ~ now-10 seconds)
    // bucket 4: [now-10 seconds ~ now]
    cacheCore = new CacheCore(Duration.ofSeconds(100), Duration.ofSeconds(30), 0);

    // current timestamp used as time origin
    // although it may be slightly different from the time origin
    // in the CacheCore initialization.
    now = System.currentTimeMillis();

    TopologyManager.PublishMetrics.Builder builder =
        TopologyManager.PublishMetrics.newBuilder();
    // should be in bucket 1
    long ts = now - 90 * 1000;
    String[] components = new String[]{
        "c1", "c2"
    };
    String[] instances = new String[]{
        "i1", "i2"
    };
    String[] metrics = new String[]{
        "m1", "m2"
    };
    String[] vals = new String[]{
        "0.1", "0.2", "0.3", "0.4", "0.5", "0.6", "0.7", "0.8"
    };
    int valIdx = 0;
    // nested-loop order fixes which value lands on which tuple, e.g.
    // (c1,i1,m1)=0.1, (c1,i2,m1)=0.3, (c2,i1,m1)=0.5, (c2,i2,m1)=0.7
    for (String component : components) {
      for (String instance : instances) {
        for (String metric : metrics) {
          builder.addMetrics(TopologyManager.MetricDatum.newBuilder()
              .setTimestamp(ts)
              .setComponentName(component).setInstanceId(instance)
              .setName(metric)
              .setValue(vals[valIdx++]));
        }
      }
    }
    // NOTE(review): despite the name, this call appears to ingest the metric
    // batch built above — confirm against the CacheCore API.
    cacheCore.addMetricException(builder.build());

    metricsFilter = new MetricsFilter();
    metricsFilter.setMetricToType("m1", MetricsFilter.MetricAggregationType.SUM);
    metricsFilter.setMetricToType("m2", MetricsFilter.MetricAggregationType.SUM);

    // order datums by (component, instance, metric) for stable comparison
    datumComparator = new Comparator<MetricDatum>() {
      @Override
      public int compare(MetricDatum o1, MetricDatum o2) {
        if (!o1.getComponentName().equals(o2.getComponentName())) {
          return o1.getComponentName().compareTo(o2.getComponentName());
        }
        if (!o1.getInstanceId().equals(o2.getInstanceId())) {
          return o1.getInstanceId().compareTo(o2.getInstanceId());
        }
        if (!o1.getMetricName().equals(o2.getMetricName())) {
          return o1.getMetricName().compareTo(o2.getMetricName());
        }
        return 0;
      }
    };
  }

  /*
   * query 1 metric
   */
  @Test
  public void test1() {
    prepareDataForHashIndex();

    // window [now-95s, now-85s] brackets the single sample at now-90s
    long startTime = now - 95 * 1000;
    long endTime = now - 85 * 1000;
    Map<String, Set<String>> componentNameInstanceId = new HashMap<>();
    componentNameInstanceId.put("c1", new HashSet<String>());
    componentNameInstanceId.get("c1").add("i1");
    Set<String> metricNames = new HashSet<String>();
    metricNames.add("m1");
    MetricRequest request = new MetricRequest(componentNameInstanceId,
        metricNames, startTime, endTime, RAW);
    MetricResponse response = cacheCore.getMetrics(request, metricsFilter);

    // there is only one <component, instance, metric> tuple
    assertMetricResponse(response.getMetricList(),
        new MetricDatum("c1", "i1", "m1", Arrays.asList(
            new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.1")
        ))
    );
  }

  /*
   * query instances: null (null instance set = all instances of the component)
   */
  @Test
  public void testInstanceNull() {
    prepareDataForHashIndex();

    long startTime = now - 95 * 1000;
    long endTime = now - 85 * 1000;
    Map<String, Set<String>> componentNameInstanceId = new HashMap<>();
    componentNameInstanceId.put("c1", null);
    Set<String> metricNames = new HashSet<String>();
    metricNames.add("m1");
    MetricRequest request = new MetricRequest(componentNameInstanceId,
        metricNames, startTime, endTime, RAW);
    MetricResponse response = cacheCore.getMetrics(request, metricsFilter);

    // there is 2 <component, instance, metric> tuples
    MetricDatum[] expected = new MetricDatum[]{
        new MetricDatum("c1", "i1", "m1", Arrays.asList(
            // there should be 1 metric for each instance
            new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.1")
        )),
        new MetricDatum("c1", "i2", "m1", Arrays.asList(
            new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.3")
        ))
    };
    assertMetricResponse(response.getMetricList(), expected);
  }

  /*
   * query instances: i1, i2 (explicit enumeration — same result as null)
   */
  @Test
  public void testInstances() {
    prepareDataForHashIndex();

    long startTime = now - 95 * 1000;
    long endTime = now - 85 * 1000;
    Map<String, Set<String>> componentNameInstanceId = new HashMap<>();
    componentNameInstanceId.put("c1", new HashSet<String>());
    componentNameInstanceId.get("c1").add("i1");
    componentNameInstanceId.get("c1").add("i2");
    Set<String> metricNames = new HashSet<String>();
    metricNames.add("m1");
    MetricRequest request = new MetricRequest(componentNameInstanceId,
        metricNames, startTime, endTime, RAW);
    MetricResponse response = cacheCore.getMetrics(request, metricsFilter);

    // there is 2 <component, instance, metric> tuples
    MetricDatum[] expected = new MetricDatum[]{
        new MetricDatum("c1", "i1", "m1", Arrays.asList(
            // there should be 1 metric for each instance
            new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.1")
        )),
        new MetricDatum("c1", "i2", "m1", Arrays.asList(
            new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.3")
        ))
    };
    assertMetricResponse(response.getMetricList(), expected);
  }

  /*
   * query instances: empty (empty set selects no instances at all)
   */
  @Test
  public void testInstanceEmpty() {
    prepareDataForHashIndex();

    long startTime = now - 95 * 1000;
    long endTime = now - 85 * 1000;
    Map<String, Set<String>> componentNameInstanceId = new HashMap<>();
    componentNameInstanceId.put("c1", new HashSet<String>());
    Set<String> metricNames = new HashSet<String>();
    metricNames.add("m1");
    MetricRequest request = new MetricRequest(componentNameInstanceId,
        metricNames, startTime, endTime, RAW);
    MetricResponse response = cacheCore.getMetrics(request, metricsFilter);

    // there is 0 <component, instance, metric> tuples
    // NOTE(review): arguments are reversed vs the JUnit convention
    // (expected first); harmless for equality but flips failure messages.
    assertEquals(response.getMetricList().size(), 0);
  }

  /*
   * query components: null (null map = all components)
   */
  @Test
  public void testComponentNull() {
    prepareDataForHashIndex();

    long startTime = now - 95 * 1000;
    long endTime = now - 85 * 1000;
    Set<String> metricNames = new HashSet<String>();
    metricNames.add("m1");
    MetricRequest request = new MetricRequest(null,
        metricNames, startTime, endTime, RAW);
    MetricResponse response = cacheCore.getMetrics(request, metricsFilter);

    // there is 4 <component, instance, metric> tuples
    MetricDatum[] expected = new MetricDatum[]{
        new MetricDatum("c1", "i1", "m1", Arrays.asList(
            // there should be 1 metric for each instance
            new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.1")
        )),
        new MetricDatum("c1", "i2", "m1", Arrays.asList(
            new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.3")
        )),
        new MetricDatum("c2", "i1", "m1", Arrays.asList(
            new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.5")
        )),
        new MetricDatum("c2", "i2", "m1", Arrays.asList(
            new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.7")
        ))
    };
    assertMetricResponse(response.getMetricList(), expected);
  }

  /*
   * query components: c1, c2 (explicit enumeration — same result as null map)
   */
  @Test
  public void testComponent() {
    prepareDataForHashIndex();

    long startTime = now - 95 * 1000;
    long endTime = now - 85 * 1000;
    Map<String, Set<String>> componentNameInstanceId = new HashMap<>();
    componentNameInstanceId.put("c1", null);
    componentNameInstanceId.put("c2", null);
    Set<String> metricNames = new HashSet<String>();
    metricNames.add("m1");
    MetricRequest request = new MetricRequest(componentNameInstanceId,
        metricNames, startTime, endTime, RAW);
    MetricResponse response = cacheCore.getMetrics(request, metricsFilter);

    // there is 4 <component, instance, metric> tuples
    MetricDatum[] expected = new MetricDatum[]{
        new MetricDatum("c1", "i1", "m1", Arrays.asList(
            // there should be 1 metric for each instance
            new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.1")
        )),
        new MetricDatum("c1", "i2", "m1", Arrays.asList(
            new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.3")
        )),
        new MetricDatum("c2", "i1", "m1", Arrays.asList(
            new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.5")
        )),
        new MetricDatum("c2", "i2", "m1", Arrays.asList(
            new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.7")
        ))
    };
    assertMetricResponse(response.getMetricList(), expected);
  }

  /*
   * query components: empty
   * NOTE(review): method name suggests "many components", but this actually
   * exercises the empty-map case (no components selected).
   */
  @Test
  public void testComponents() {
    prepareDataForHashIndex();

    long startTime = now - 95 * 1000;
    long endTime = now - 85 * 1000;
    Map<String, Set<String>> componentNameInstanceId = new HashMap<>();
    Set<String> metricNames = new HashSet<String>();
    metricNames.add("m1");
    MetricRequest request = new MetricRequest(componentNameInstanceId,
        metricNames, startTime, endTime, RAW);
    MetricResponse response = cacheCore.getMetrics(request, metricsFilter);

    // there is 0 <component, instance, metric> tuples
    assertEquals(response.getMetricList().size(), 0);
  }

  /*
   * query metrics: m1, m2
   */
  @Test
  public void testMetricsSameComponentInstance() {
    prepareDataForHashIndex();

    long startTime = now - 95 * 1000;
    long endTime = now - 85 * 1000;
Map<String, Set<String>> componentNameInstanceId = new HashMap<>(); componentNameInstanceId.put("c1", new HashSet<String>()); componentNameInstanceId.get("c1").add("i1"); Set<String> metricNames = new HashSet<String>(); metricNames.add("m1"); metricNames.add("m2"); MetricRequest request = new MetricRequest(componentNameInstanceId, metricNames, startTime, endTime, RAW); MetricResponse response = cacheCore.getMetrics(request, metricsFilter); // there is 2 <component, instance, metric> tuples MetricDatum[] expected = new MetricDatum[]{ new MetricDatum("c1", "i1", "m1", Arrays.asList( // there should be 1 metric for each instance new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.1") )), new MetricDatum("c1", "i1", "m2", Arrays.asList( new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.2") )) }; assertMetricResponse(response.getMetricList(), expected); } /* * query metrics: c1, c2, i1, m1, m2 */ @Test public void testMetrics() { prepareDataForHashIndex(); long startTime = now - 95 * 1000; long endTime = now - 85 * 1000; Map<String, Set<String>> componentNameInstanceId = new HashMap<>(); componentNameInstanceId.put("c1", new HashSet<String>()); componentNameInstanceId.get("c1").add("i1"); componentNameInstanceId.put("c2", new HashSet<String>()); componentNameInstanceId.get("c2").add("i1"); Set<String> metricNames = new HashSet<String>(); metricNames.add("m1"); metricNames.add("m2"); MetricRequest request = new MetricRequest(componentNameInstanceId, metricNames, startTime, endTime, RAW); MetricResponse response = cacheCore.getMetrics(request, metricsFilter); // there is 4 <component, instance, metric> tuples MetricDatum[] expected = new MetricDatum[]{ new MetricDatum("c1", "i1", "m1", Arrays.asList( // there should be 1 metric for each instance new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.1") )), new MetricDatum("c1", "i1", "m2", Arrays.asList( new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.2") )), new 
MetricDatum("c2", "i1", "m1", Arrays.asList( new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.5") )), new MetricDatum("c2", "i1", "m2", Arrays.asList( new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.6") )) }; assertMetricResponse(response.getMetricList(), expected); } /* * query metrics: null */ @Test public void testMetricsNull() { prepareDataForHashIndex(); long startTime = now - 95 * 1000; long endTime = now - 85 * 1000; Map<String, Set<String>> componentNameInstanceId = new HashMap<>(); componentNameInstanceId.put("c1", new HashSet<String>()); componentNameInstanceId.get("c1").add("i1"); MetricRequest request = new MetricRequest(componentNameInstanceId, null, startTime, endTime, RAW); MetricResponse response = cacheCore.getMetrics(request, metricsFilter); // there is 2 <component, instance, metric> tuples MetricDatum[] expected = new MetricDatum[]{ new MetricDatum("c1", "i1", "m1", Arrays.asList( // there should be 1 metric for each instance new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.1") )), new MetricDatum("c1", "i1", "m2", Arrays.asList( new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.2") )) }; assertMetricResponse(response.getMetricList(), expected); } /* * query metrics: empty */ @Test public void testMetricsEmpty() { prepareDataForHashIndex(); long startTime = now - 95 * 1000; long endTime = now - 85 * 1000; Map<String, Set<String>> componentNameInstanceId = new HashMap<>(); componentNameInstanceId.put("c1", new HashSet<String>()); componentNameInstanceId.get("c1").add("i1"); Set<String> metricNames = new HashSet<String>(); MetricRequest request = new MetricRequest(componentNameInstanceId, metricNames, startTime, endTime, RAW); MetricResponse response = cacheCore.getMetrics(request, metricsFilter); // there is 0 <component, instance, metric> tuples assertEquals(response.getMetricList().size(), 0); } private void prepareDataForTreeIndex() { // create cache with time window 100 seconds, bucket size 
30 seconds and no exception store. // the cache should be initialized with 4 buckets: // bucket 1: [now-100 seconds ~ now-70 seconds) // bucket 2: [now-70 seconds ~ now-40 seconds) // bucket 3: [now-40 seconds ~ now-10 seconds) // bucket 4: [now-10 seconds ~ now] cacheCore = new CacheCore(Duration.ofSeconds(100), Duration.ofSeconds(30), 0); // current timestamp used as time origin // although it may be slightly different from the time origin // in the CacheCore initialization. now = System.currentTimeMillis(); TopologyManager.PublishMetrics.Builder builder = TopologyManager.PublishMetrics.newBuilder(); long[] ts = new long[]{ // the timestamp falls outside cache time window. too old to be in the cache now - 120 * 1000, // should be in bucket 1 now - 90 * 1000, // should be in bucket 1 now - 80 * 1000, // should be in bucket 2 now - 60 * 1000, // should be in bucket 2 now - 50 * 1000, // should be in bucket 3 now - 30 * 1000, // should be in bucket 3 now - 20 * 1000, // should be in bucket 4 now }; String[] vals = new String[]{ "0.0", "0.1", "0.2", "0.3", "0.4", "0.5", "0.6", "0.7" }; for (int i = 0; i < ts.length; i++) { builder.addMetrics(TopologyManager.MetricDatum.newBuilder() .setTimestamp(ts[i]) .setComponentName("c1").setInstanceId("i1") .setName("m1") .setValue(vals[i])); } cacheCore.addMetricException(builder.build()); // initialization metricsFilter = new MetricsFilter(); metricsFilter.setMetricToType("m1", MetricsFilter.MetricAggregationType.SUM); timeRangeValueComparator = new Comparator<MetricTimeRangeValue>() { @Override public int compare(MetricTimeRangeValue o1, MetricTimeRangeValue o2) { return (int) (o1.getStartTime() - o2.getStartTime()); } }; } /* * query 1 bucket */ @Test public void testTreeIndex1() { prepareDataForTreeIndex(); long startTime = now - 95 * 1000; long endTime = now - 75 * 1000; MetricRequest request = new MetricRequest(null, null, startTime, endTime, RAW); MetricResponse response = cacheCore.getMetrics(request, metricsFilter); // 
there is only one <component, instance, metric> tuple List<MetricDatum> metricList = response.getMetricList(); assertEquals(metricList.size(), 1); // there should be 2 metrics List<MetricTimeRangeValue> expected = Arrays.asList( new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.1"), new MetricTimeRangeValue(now - 80 * 1000, now - 80 * 1000, "0.2") ); assertMetricValue(expected, metricList.get(0).getMetricValue()); } /* * query 2 buckets */ @Test public void testTreeIndex2() { prepareDataForTreeIndex(); long startTime = now - 95 * 1000; long endTime = now - 45 * 1000; MetricRequest request = new MetricRequest(null, null, startTime, endTime, RAW); MetricResponse response = cacheCore.getMetrics(request, metricsFilter); // there is only one <component, instance, metric> tuple List<MetricDatum> metricList = response.getMetricList(); assertEquals(metricList.size(), 1); // there should be 4 metrics List<MetricTimeRangeValue> expected = Arrays.asList( new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.1"), new MetricTimeRangeValue(now - 80 * 1000, now - 80 * 1000, "0.2"), new MetricTimeRangeValue(now - 60 * 1000, now - 60 * 1000, "0.3"), new MetricTimeRangeValue(now - 50 * 1000, now - 50 * 1000, "0.4") ); assertMetricValue(expected, metricList.get(0).getMetricValue()); } /* * query all buckets */ @Test public void testTreeIndexAll() { prepareDataForTreeIndex(); long startTime = now - 200 * 1000; long endTime = now; MetricRequest request = new MetricRequest(null, null, startTime, endTime, RAW); MetricResponse response = cacheCore.getMetrics(request, metricsFilter); // there is only one <component, instance, metric> tuple List<MetricDatum> metricList = response.getMetricList(); assertEquals(metricList.size(), 1); // there should be 7 metrics List<MetricTimeRangeValue> expected = Arrays.asList( new MetricTimeRangeValue(now - 90 * 1000, now - 90 * 1000, "0.1"), new MetricTimeRangeValue(now - 80 * 1000, now - 80 * 1000, "0.2"), new 
MetricTimeRangeValue(now - 60 * 1000, now - 60 * 1000, "0.3"), new MetricTimeRangeValue(now - 50 * 1000, now - 50 * 1000, "0.4"), new MetricTimeRangeValue(now - 30 * 1000, now - 30 * 1000, "0.5"), new MetricTimeRangeValue(now - 20 * 1000, now - 20 * 1000, "0.6"), new MetricTimeRangeValue(now, now, "0.7") ); assertMetricValue(expected, metricList.get(0).getMetricValue()); } /* * query the last bucket */ @Test public void testTreeIndexLast() { prepareDataForTreeIndex(); long startTime = now - 5 * 1000; long endTime = now; MetricRequest request = new MetricRequest(null, null, startTime, endTime, RAW); MetricResponse response = cacheCore.getMetrics(request, metricsFilter); // there is only one <component, instance, metric> tuple List<MetricDatum> metricList = response.getMetricList(); assertEquals(metricList.size(), 1); // there should be 1 metric List<MetricTimeRangeValue> list = new ArrayList<>(metricList.get(0).getMetricValue()); assertEquals(list.size(), 1); // check value assertEquals(list.get(0).getValue(), "0.7"); } @Test public void testPurge() throws InterruptedException { // create cache with time window 10 seconds, bucket size 3 seconds and no exception store. // the cache should be initialized with 4 buckets: // bucket 1: [now-10 seconds ~ now-7 seconds) // bucket 2: [now-7 seconds ~ now-4 seconds) // bucket 3: [now-4 seconds ~ now-1 seconds) // bucket 4: [now-1 seconds ~ now] FakeTicker ticker = new FakeTicker(); cacheCore = new CacheCore(Duration.ofSeconds(10), Duration.ofSeconds(3), 0, ticker); // current timestamp used as time origin // although it may be slightly different from the time origin // in the CacheCore initialization. 
now = ticker.read(); TopologyManager.PublishMetrics.Builder builder = TopologyManager.PublishMetrics.newBuilder(); // should be in bucket 1 long ts = now - 9 * 1000; // c1-i1, m1: 0.1 builder.addMetrics(TopologyManager.MetricDatum.newBuilder() .setTimestamp(ts) .setComponentName("c1").setInstanceId("i1") .setName("m1") .setValue("0.1")); cacheCore.addMetricException(builder.build()); metricsFilter = new MetricsFilter(); metricsFilter.setMetricToType("m1", MetricsFilter.MetricAggregationType.SUM); // query before purge long startTime = now - 20 * 1000; long endTime = now; HashMap<String, Set<String>> componentNameInstanceId = new HashMap<>(); componentNameInstanceId.put("c1", new HashSet<String>()); componentNameInstanceId.get("c1").add("i1"); Set<String> metricNames = new HashSet<>(); metricNames.add("m1"); MetricRequest request = new MetricRequest(componentNameInstanceId, metricNames, startTime, endTime, RAW); MetricResponse response = cacheCore.getMetrics(request, metricsFilter); // there is only one <component, instance, metric> tuple assertMetricResponse(response.getMetricList(), new MetricDatum("c1", "i1", "m1", Arrays.asList( // there should be 1 metric for each instance new MetricTimeRangeValue(now - 9 * 1000, now - 9 * 1000, "0.1") )) ); // purge ticker.advance(Duration.ofSeconds(3)); // assure more than 1 bucket is purged cacheCore.purge(); // query after purge response = cacheCore.getMetrics(request, metricsFilter); // there is 1 <component, instance, metric> tuple: how to trim the gone metric in metadata? 
assertMetricResponse(response.getMetricList(), new MetricDatum("c1", "i1", "m1", Arrays.asList(new MetricTimeRangeValue[]{})) ); // insert-select after purge TopologyManager.PublishMetrics.Builder builder2 = TopologyManager.PublishMetrics.newBuilder(); // should be in bucket 1 ts = now - 3 * 1000; // c1-i1, m1: 0.1 builder2.addMetrics(TopologyManager.MetricDatum.newBuilder() .setTimestamp(ts) .setComponentName("c1").setInstanceId("i1") .setName("m1") .setValue("0.2")); cacheCore.addMetricException(builder2.build()); response = cacheCore.getMetrics(request, metricsFilter); // there is only one <component, instance, metric> tuple assertMetricResponse(response.getMetricList(), new MetricDatum("c1", "i1", "m1", Arrays.asList( // there should be 1 metric for each instance new MetricTimeRangeValue(now - 3 * 1000, now - 3 * 1000, "0.2") )) ); } private static final class FakeTicker extends CacheCore.Ticker { private AtomicLong now = new AtomicLong(System.currentTimeMillis()); void advance(Duration duration) { now.addAndGet(duration.toMillis()); } @Override long read() { return now.get(); } } }
package com.zimbra.qa.selenium.projects.desktop.core; import java.io.*; import java.lang.reflect.Method; import java.util.*; import org.apache.log4j.*; import org.testng.ITestContext; import org.testng.ITestNGMethod; import org.testng.ITestResult; import org.testng.annotations.*; import org.xml.sax.SAXException; import com.thoughtworks.selenium.*; import com.zimbra.qa.selenium.framework.core.ClientSessionFactory; import com.zimbra.qa.selenium.framework.core.Repository; import com.zimbra.qa.selenium.framework.ui.AbsTab; import com.zimbra.qa.selenium.framework.util.*; import com.zimbra.qa.selenium.framework.util.BuildUtility.*; import com.zimbra.qa.selenium.framework.util.GeneralUtility.WAIT_FOR_OPERAND; import com.zimbra.qa.selenium.framework.util.OperatingSystem.OsType; import com.zimbra.qa.selenium.framework.util.ZimbraAccount.SOAP_DESTINATION_HOST_TYPE; import com.zimbra.qa.selenium.framework.util.ZimbraSeleniumProperties.AppType; import com.zimbra.qa.selenium.framework.util.staf.StafServicePROCESS; import com.zimbra.qa.selenium.framework.util.staf.Stafzmtlsctl; import com.zimbra.qa.selenium.framework.util.staf.Stafzmtlsctl.SERVER_ACCESS; import com.zimbra.qa.selenium.projects.desktop.ui.*; /** * The <code>AjaxCommonTest</code> class is the base test case class * for normal Ajax client test case classes. * <p> * The AjaxCommonTest provides two basic functionalities: * <ol> * <li>{@link AbsTab} {@link #startingPage} - navigate to this * page before each test case method</li> * <li>{@link ZimbraAccount} {@link #startingAccountPreferences} - ensure this * account is authenticated before each test case method</li> * </ol> * <p> * It is important to note that no re-authentication (i.e. logout * followed by login) will occur if {@link #startingAccountPreferences} is * already the currently authenticated account. * <p> * The same rule applies to the {@link #startingPage}, as well. 
If * the "Contact App" is the specified starting page, and the contact * app is already opened, albiet in a "new contact" view, then the * "new contact" view will not be closed. * <p> * Typical usage:<p> * <pre> * {@code * public class TestCaseClass extends AjaxCommonTest { * * public TestCaseClass() { * * // All tests start at the Mail page * super.startingPage = app.zPageMail; * * // Create a new account to log into * ZimbraAccount account = new ZimbraAccount(); * super.startingAccount = account; * * // ... * * } * * // ... * * } * } * </pre> * * @author Matt Rhoades * */ public class AjaxCommonTest { protected static Logger logger = LogManager.getLogger(AjaxCommonTest.class); public final boolean isRunningDesktopTest = ZimbraSeleniumProperties.getStringProperty( ZimbraSeleniumProperties.getLocalHost() + ".desktop.test", "false").toLowerCase().equals("true") ? true : false; /** * The AdminConsole application object */ protected AppAjaxClient app = null; protected OsType osType = null; private String _downloadFilePath = null; private String[] _executableFilePath = null; private String [] _params = null; public final static String accountFlavor = "Zimbra"; public final static String defaultAccountName = ZimbraSeleniumProperties.getUniqueString(); public final static String yahooUserName = ZimbraSeleniumProperties.getStringProperty( ZimbraSeleniumProperties.getLocalHost() + ".desktop.yahoo.login"); public final static String yahooPassword = ZimbraSeleniumProperties.getStringProperty( ZimbraSeleniumProperties.getLocalHost() + ".desktop.yahoo.password"); public final static String gmailUserName = ZimbraSeleniumProperties.getStringProperty("desktop.gmail.login"); public final static String gmailPassword = ZimbraSeleniumProperties.getStringProperty("desktop.gmail.password"); public final static String hotmailUserName = ZimbraSeleniumProperties.getStringProperty("desktop.hotmail.login"); public final static String hotmailPassword = 
ZimbraSeleniumProperties.getStringProperty("desktop.hotmail.password"); public final static String hotmailUserName2 = ZimbraSeleniumProperties.getStringProperty("desktop.hotmail2.login"); public final static String hotmailPassword2 = ZimbraSeleniumProperties.getStringProperty("desktop.hotmail2.password"); public final static String gmailImapReceivingServer = "imap.gmail.com"; public final static String gmailImapSmtpServer = "smtp.gmail.com"; public final static String hotmailPopReceivingServer = "pop3.live.com"; public final static String hotmailPopSmtpServer = "smtp.live.com"; // This variable is to track desktop current account, if new account is created // then, desktop has to add that newly created account, while removing the // existing ones. For desktop purpose, this cannot use app.zGetActiveAccount // because the implementation is different where in Ajax client, active account // is set in login and logout, while in desktop, it is only set in addDefaultAccount private static ZimbraAccount _currentAccount = null; // Configurable from config file or input parameters private PRODUCT_NAME _productName = PRODUCT_NAME.ZDESKTOP; private BRANCH _branchName = BRANCH.HELIX; private ARCH _arch = null; private boolean _uninstallAppAfterTest = false; private boolean _forceInstall = false; protected String[] desktopZimlets = null; private static StartDesktopClient _startDesktopClient = null; private Repository _repository = new Repository(); /** * BeforeMethod variables * startingPage = the starting page before the test method starts * startingAccount = the account to log in as */ protected AbsTab startingPage = null; protected Map<String, String> startingAccountPreferences = null; protected Map<String, String> startingAccountZimletPreferences = null; protected AjaxCommonTest() { logger.info("New "+ AjaxCommonTest.class.getCanonicalName()); app = new AppAjaxClient(); startingPage = app.zPageMain; startingAccountPreferences = new HashMap<String, String>(); 
startingAccountZimletPreferences = new HashMap<String, String>(); } /** * Global BeforeSuite * <p> * <ol> * <li>Start the DefaultSelenium client</li> * </ol> * <p> * @throws HarnessException * @throws InterruptedException * @throws IOException * @throws SAXException */ @BeforeSuite(alwaysRun=true) public void commonTestBeforeSuite() throws HarnessException, IOException, InterruptedException, SAXException { logger.info("commonTestBeforeSuite: start"); // Most of the tests require the HTTP and HTTPS to be enabled, thus // enabling both mode at the very beginning of the test, so that it is buying // enough time for the server to recover after resetting the mailbox service, // which is usually down for 1 - 2 minutes after restart. Stafzmtlsctl stafzmtlsctl = new Stafzmtlsctl(); stafzmtlsctl.setServerAccess(SERVER_ACCESS.BOTH); StafServicePROCESS stafServiceProcess = new StafServicePROCESS(); // Disable the zimbraMtaTlsAuthOnly if it is true stafServiceProcess.execute("zmprov gs `zmhostname` zimbraMtaTlsAuthOnly"); String mode = stafServiceProcess.getStafResponse().split("zimbraMtaTlsAuthOnly:")[1].trim(); logger.debug("==================> Current zimbraMtaTlsAuthOnly: " + mode); if (mode.contains("TRUE")) { logger.debug("Setting zimbraMtaTlsAuthOnly to false"); String serverName = ZimbraSeleniumProperties.getStringProperty("server.host", "localhost"); stafServiceProcess.execute("zmprov ms " + serverName + " zimbraMtaTlsAuthOnly FALSE"); logger.debug("Restarting zmmtactl..."); stafServiceProcess.execute("zmmtactl restart"); } //Racetrack String DbHostURL = ZimbraSeleniumProperties.getStringProperty("racetrack.dbUrl", "racetrack.eng.vmware.com"); String buildNumber = ZimbraSeleniumProperties.getStringProperty("racetrack.buildNumber", "000000"); String userName = ZimbraSeleniumProperties.getStringProperty("racetrack.username", "anonymous"); String product = ZimbraSeleniumProperties.getStringProperty("racetrack.product", "zdesktop"); String description = 
ZimbraSeleniumProperties.getStringProperty("racetrack.description", "zdesktop description"); String branch = ZimbraSeleniumProperties.getStringProperty("racetrack.branch", "ZDESKTOP_7_1_2"); String buildType = ZimbraSeleniumProperties.getStringProperty("racetrack.buildType", "beta"); String testType = ZimbraSeleniumProperties.getStringProperty("racetrack.testType", "functional"); String recordToRacetrack = ZimbraSeleniumProperties.getStringProperty("racetrack.recordToRacetrack", "false"); String appendToExisting = ZimbraSeleniumProperties.getStringProperty("racetrack.appendToExisting", "false"); String resultId = ZimbraSeleniumProperties.getStringProperty("racetrack.resultId", ""); _repository.connectingToRacetrack(DbHostURL); _repository.beginTestSet( buildNumber, userName, product, description, branch, buildType, testType, Boolean.parseBoolean(recordToRacetrack), Boolean.parseBoolean(appendToExisting), resultId); // Make sure there is a new default account ZimbraAccount.ResetAccountZDC(); osType = OperatingSystem.getOSType(); try { DefaultSelenium _selenium = ClientSessionFactory.session().selenium(); logger.debug("Starting selenium"); // This is needed only in Mac OS because when selenium invokes the test browser window, // the window is not active (in background), thus any methods involving robot will not work // properly // Also for Mac OS, selenium start has to be at the very beginning in order for the robot to // activate the browser if (osType == OsType.MAC) { _selenium.start(); app.zPageMain.zMouseClick(100, 100); } if (isRunningDesktopTest) { ZimbraSeleniumProperties.setAppType(ZimbraSeleniumProperties.AppType.DESKTOP); _forceInstall = ZimbraSeleniumProperties.getStringProperty("desktop.forceInstall", "true").toLowerCase().equals("true") ? true : false; _uninstallAppAfterTest = ZimbraSeleniumProperties.getStringProperty("desktop.uninstallAfterTest", "false").toLowerCase().equals("true") ? 
true : false; String productName = ZimbraSeleniumProperties.getStringProperty("desktop.productName", "ZDESKTOP").toUpperCase(); try { logger.info("productName: " + productName); _productName = PRODUCT_NAME.valueOf(productName); } catch (IllegalArgumentException e) { _productName = PRODUCT_NAME.ZDESKTOP; } String productBranch = ZimbraSeleniumProperties.getStringProperty("desktop.productBranch", "HELIX").toUpperCase(); try { logger.info("productBranch: " + productBranch); _branchName = BRANCH.valueOf(productBranch); } catch (IllegalArgumentException e) { _branchName = BRANCH.HELIX; } logger.info("_forceInstall: " + _forceInstall); logger.info("_uninstallAppAfterTest: " + _uninstallAppAfterTest); logger.info("_productName: " + _productName); logger.info("_branchName: " + _branchName); logger.info("osType: " + osType); switch (osType){ case WINDOWS: case WINDOWS_XP: _downloadFilePath = "C:\\download-zimbra-qa-test\\"; _arch = ARCH.WINDOWS; String filePath = "C:\\Program Files (x86)"; File root = new File(filePath); if (root.exists()) { // 64 bit _executableFilePath = new String[] {"C:\\WINDOWS\\SysWOW64\\cscript.exe", "C:\\Program Files (x86)\\Zimbra\\Zimbra Desktop\\win32\\zdrun.vbs"}; } else { // 32 bit _executableFilePath = new String[] {"C:\\WINDOWS\\system32\\cscript.exe", "C:\\Program Files\\Zimbra\\Zimbra Desktop\\win32\\zdrun.vbs"}; } break; case LINUX: _downloadFilePath = "/download-zimbra-qa-test/"; _arch = ARCH.RHEL4; String username = ZimbraDesktopProperties.getInstance().getUserName(); String command = "/opt/zimbra/zdesktop/linux/prism/zdclient -webapp /home/<USER_NAME>/zdesktop/zdesktop.webapp -override /home/<USER_NAME>/zdesktop/zdesktop.webapp/override.ini -profile /home/<USER_NAME>/zdesktop/profile"; command = command.replaceAll("<USER_NAME>", username); _executableFilePath = new String[] {"su", "-", username, "-c", command}; _params = null; break; case MAC: _downloadFilePath = "/download-zimbra-qa-test/"; _arch = ARCH.MACOSX_X86_10_6; username = 
ZimbraDesktopProperties.getInstance().getUserName(); command = "/Applications/Zimbra\\ Desktop/Zimbra\\ Desktop.app/Contents/MacOS/zdrun"; _executableFilePath = new String[] {"su", "-", username, "-c", command}; _params = null; } logger.info("_forceInstall: " + _forceInstall); if (_forceInstall) { DesktopInstallUtil.forceInstallLatestBuild(_productName, _branchName, _arch, _downloadFilePath); } else { if (!DesktopInstallUtil.isDesktopAppInstalled()) { String buildUrl = ZimbraSeleniumProperties.getStringProperty("desktop.buildUrl", ""); String downloadPath = null; if (buildUrl.equals("")) { downloadPath = BuildUtility.downloadLatestBuild(_downloadFilePath, _productName, _branchName, _arch); } else { downloadPath = BuildUtility.downloadBuild(_downloadFilePath, buildUrl); } logger.info("Now installing: " + downloadPath); DesktopInstallUtil.installDesktopApp(downloadPath); } else { // Running test with already installed Desktop App. logger.info("Running with already installed app"); } } if (!DesktopInstallUtil.isDesktopAppRunning()) { logger.info("Executable file path: " + Arrays.toString(_executableFilePath)); _startDesktopClient = new StartDesktopClient(_executableFilePath, _params); _startDesktopClient.start(); } else { logger.info("App is already running..."); } GeneralUtility.waitFor(null, ZimbraAccount.AccountZDC(), false, "authenticateToMailClientHost", null, WAIT_FOR_OPERAND.NEQ, null, 60000, 3000); } else { // AJAX test ZimbraSeleniumProperties.setAppType(ZimbraSeleniumProperties.AppType.AJAX); } // For non Mac OS, selenium start is done after the installation and app initialization. 
if (osType != OsType.MAC) { _selenium.start(); } _selenium.windowMaximize(); _selenium.windowFocus(); _selenium.allowNativeXpath("true"); _selenium.setTimeout("30000");// Use 30 second timeout for opening the browser // Dynamic wait for App to be ready int maxRetry = 10; int retry = 0; boolean appIsReady = false; while (retry < maxRetry && !appIsReady) { try { logger.info("Retry #" + retry); retry ++; _selenium.open(ZimbraSeleniumProperties.getBaseURL()); appIsReady = true; } catch (SeleniumException e) { if (retry == maxRetry) { logger.error("Unable to open admin app." + " Is a valid cert installed?", e); throw e; } else { logger.info("App is still not ready...", e); SleepUtil.sleep(10000); continue; } } } logger.info("App is ready!"); } catch (SeleniumException e) { throw new HarnessException("Unable to open app", e); } catch (Exception e) { throw new HarnessException("Error in Before Suite", e); } logger.info("commonTestBeforeSuite: finish"); } /** * Global BeforeClass * * @throws HarnessException */ @BeforeClass(alwaysRun=true) public void commonTestBeforeClass() throws HarnessException { logger.info("commonTestBeforeClass: start"); if (isRunningDesktopTest) { logger.info("Wait dynamically until the application is loaded"); boolean isLoaded = (Boolean) GeneralUtility.waitFor(null, app, false, "zIsLoaded", null, WAIT_FOR_OPERAND.EQ, true, 30000, 1000); // Navigating to login page is important because for new App is created // in each different class, and tests are using zGetActiveAcount. 
// The only way zSetActiveAccount is called is whenever logging in
// and logging out, so unlike Ajax, Desktop is comparing the AccountZWC
// with the current added account, not necessarily ActiveAccount in Ajax
app.zPageLogin.zNavigateTo();
if (!isLoaded) {
    throw new HarnessException("Nothing is loaded, please check the connection");
}
}
logger.info("commonTestBeforeClass: finish");
}

/**
 * Going to login page, then going back to the starting page.
 * @throws HarnessException
 */
public void relogin() throws HarnessException {
    app.zPageLogin.zNavigateTo();
    startingPage.zNavigateTo();
}

/**
 * Add default account using HTTP post against the desktop client's accsetup.jsp,
 * then reopen the client UI and wait for the desktop login button.
 * @throws HarnessException
 */
public void addDefaultAccount() throws HarnessException {
    logger.info("Creating new account...");
    String serverScheme = ZimbraSeleniumProperties.getStringProperty("server.scheme", "http");
    String serverName = ZimbraSeleniumProperties.getStringProperty("desktop.server.host", "localhost");
    ZimbraDesktopProperties zdp = ZimbraDesktopProperties.getInstance();
    String connectionPort = zdp.getConnectionPort();
    // Mail server host is taken from the domain part of the configured admin address.
    String emailServerName = ZimbraSeleniumProperties.getStringProperty(
            "adminName", "admin@localhost").split("@")[1];
    String emailServerPort = ZimbraSeleniumProperties.getStringProperty("server.port", "80");
    // Plain-http servers need the cleartext security flag on the setup URL.
    String securityType = serverScheme.equals("http") ? "&security=cleartext" : "";
    String accountSetupUrl = new StringBuilder(serverScheme).append("://")
            .append(serverName).
append(":")
            .append(connectionPort).append("/")
            .append("zimbra/desktop/accsetup.jsp?at=")
            .append(zdp.getSerialNumber()).append("&accountId=&verb=add&accountFlavor=")
            .append(accountFlavor).append("&accountName=")
            .append(defaultAccountName).append("&email=")
            .append(ZimbraAccount.AccountZDC().EmailAddress).append("&password=")
            .append(ZimbraAccount.AccountZDC().Password).append("&host=")
            .append(emailServerName).append("&port=")
            .append(emailServerPort).append("&syncFreqSecs=900&debugTraceEnabled=on")
            .append(securityType)
            .append("&syncEmailDate=0")
            .append("&syncFixedDate=")
            .append("&syncRelativeDate=")
            .append("&syncFieldName=Week").toString();
    //.append("&dev=1&scripterrors=1").toString();
    logger.info("accountSetupUrl: " + accountSetupUrl);
    GeneralUtility.doHttpPost(accountSetupUrl);
    String accountUrl = new StringBuilder(serverScheme).append("://")
            .append(serverName).
append(":")
            .append(connectionPort).append("/")
            .append("?at=")
            .append(zdp.getSerialNumber()).toString();
    //append("&dev=1&scripterrors=1").toString();
    logger.debug("Selenium is opening: " + accountUrl);
    ClientSessionFactory.session().selenium().open(accountUrl);
    GeneralUtility.waitForElementPresent(app.zPageLogin, PageLogin.Locators.zBtnLoginDesktop);
}

/**
 * Global BeforeMethod
 * <p>
 * <ol>
 * <li>For all tests, make sure {@link #startingPage} is active</li>
 * <li>For all tests, make sure {@link #startingAccountPreferences} is logged in</li>
 * <li>For all tests, make any compose tabs are closed</li>
 * </ol>
 * <p>
 * @throws HarnessException
 */
@BeforeMethod(alwaysRun=true)
public void commonTestBeforeMethod(Method method, ITestContext testContext) throws HarnessException {
    logger.info("commonTestBeforeMethod: start");
    String packageName = method.getDeclaringClass().getPackage().getName();
    String methodName = method.getName();
    // Get the test description
    // By default, the test description is set to method's name
    // if it is set, then change it to the specified one
    String testDescription =
methodName;
// Look up this method in the TestNG context to pick up its @Test description, if any.
for (ITestNGMethod ngMethod : testContext.getAllTestMethods()) {
    String methodClass = ngMethod.getRealClass().getSimpleName();
    if (methodClass.equals(method.getDeclaringClass().getSimpleName())
            && ngMethod.getMethodName().equals(method.getName())) {
        // Serialize the banner logging across concurrently running test classes.
        synchronized (AjaxCommonTest.class) {
            logger.info("---------BeforeMethod-----------------------");
            logger.info("Test       : " + methodClass + "." + ngMethod.getMethodName());
            logger.info("Description: " + ngMethod.getDescription());
            logger.info("----------------------------------------");
            testDescription = ngMethod.getDescription();
        }
        break;
    }
}
Repository.testCaseBegin(methodName, packageName, testDescription);
SOAP_DESTINATION_HOST_TYPE destType = null;
AppType appType = ZimbraSeleniumProperties.getAppType();
switch (appType) {
case AJAX:
    destType = SOAP_DESTINATION_HOST_TYPE.SERVER;
    break;
case DESKTOP:
    destType = SOAP_DESTINATION_HOST_TYPE.CLIENT;
    if (_currentAccount != ZimbraAccount.AccountZDC()) {
        app.zPageLogin.zNavigateTo();
        if (app.zPageLogin.sIsElementPresent(PageLogin.Locators.zBtnLoginDesktop)) {
            boolean bFoundOtherUser = true;
            logger.debug("Cleaning up all existing users");
            String deleteButtonLocator = null;
            // If this is the first time checking, then cleaning up all the pre-existing user
            // Otherwise, only cleans the non-default users, which is second user and so on...
            // Second user is located in row 3.
            // NOTE(review): this inner condition is identical to the enclosing
            // 'if (_currentAccount != ZimbraAccount.AccountZDC())' above, so the
            // else branch (row-3 locator) is unreachable — likely a bug; confirm
            // what condition was intended before changing.
            if (_currentAccount != ZimbraAccount.AccountZDC()) {
                deleteButtonLocator = PageLogin.Locators.zDeleteButton;
            } else {
                String[] temp = PageLogin.Locators.zDeleteButton.trim().split(" ");
                deleteButtonLocator = new StringBuffer(temp[0])
                        .append(" tr:nth-child(3)>td div[class^='ZAccount'] ").
append(temp[1]).toString();
            }
            int maxRetry = 30;
            int retry = 0;
            // Delete accounts one at a time until no delete button remains or we time out.
            while (bFoundOtherUser && retry < maxRetry) {
                SleepUtil.sleepSmall();
                if (app.zPageLogin.sIsElementPresent(PageLogin.Locators.zMyAccountsTab)) {
                    app.zPageLogin.sClick(PageLogin.Locators.zMyAccountsTab);
                    GeneralUtility.waitForElementPresent(app.zPageLogin,
                            PageLogin.Locators.zBtnLoginDesktop);
                }
                if (app.zPageLogin.sIsElementPresent(deleteButtonLocator)) {
                    // The delete link's href embeds 'id','name','flavor','type' as
                    // single-quoted arguments; split on quotes to extract them.
                    String attribute = app.zPageLogin.sGetAttribute(deleteButtonLocator + "@href");
                    String accountId = attribute.split("'")[1];
                    String accountName = attribute.split("'")[3];
                    String accountFlavor = attribute.split("'")[5];
                    String accountType = attribute.split("'")[7];
                    // NOTE(review): accountFlavor is extracted but never used, and
                    // accountType is passed twice — the third argument is probably
                    // meant to be accountFlavor; verify zDeleteDesktopAccount's signature.
                    app.zDeleteDesktopAccount(accountName, accountId, accountType, accountType);
                    String nthChildString = "nth-child(3)";
                    if (deleteButtonLocator.contains(nthChildString)) {
                        // It is switched from 3 to 4 because after clicking the delete button the first time
                        // , there will be confirmation message which appears to be on the 3rd row.
                        deleteButtonLocator = deleteButtonLocator.replace(nthChildString, "nth-child(4)");
                    }
                }
                if (!(Boolean)GeneralUtility.waitForElementPresent(app.zPageLogin,
                        deleteButtonLocator, 5000)) {
                    bFoundOtherUser = false;
                }
                retry++;
            }
            if (retry == maxRetry) {
                throw new HarnessException("Retry deleting the user timed out");
            }
        }
        if (startingPage != app.zPageAddNewAccount) {
            addDefaultAccount();
            _currentAccount = ZimbraAccount.AccountZDC();
        }
    }
    if (startingPage != app.zPageAddNewAccount) {
        ZimbraAdminAccount.GlobalAdmin().authenticateToMailClientHost();
        ZimbraAccount.AccountZDC().authenticateToMailClientHost();
    }
    break;
default:
    throw new HarnessException("Please add a support for appType: " + appType);
}

// If test account preferences are defined, then make sure the test account
// uses those preferences
//
if ( (startingAccountPreferences != null) && (!startingAccountPreferences.isEmpty()) ) {
    logger.debug("commonTestBeforeMethod: startingAccountPreferences are defined");
    ZimbraAccount.AccountZDC().modifyPreferences(startingAccountPreferences, destType);
    /**StringBuilder settings = new StringBuilder();
    for (Map.Entry<String, String> entry : startingAccountPreferences.entrySet()) {
        settings.append(String.format("<a n='%s'>%s</a>", entry.getKey(), entry.getValue()));
    }
    ZimbraAdminAccount.GlobalAdmin().soapSend(
            "<ModifyAccountRequest xmlns='urn:zimbraAdmin'>"
            + "<id>"+ ZimbraAccount.AccountZWC().ZimbraId +"</id>"
            + settings.toString()
            + "</ModifyAccountRequest>", destType); */
    // Set the flag so the account is reset for the next test
    ZimbraAccount.AccountZDC().accountIsDirty = true;
}

// If test account zimlet preferences are defined, then make sure the test account
// uses those zimlet preferences
//
if ( (startingAccountZimletPreferences != null) && (!startingAccountZimletPreferences.isEmpty()) ) {
    // NOTE(review): log message says "startingAccountPreferences" but this is the
    // *zimlet* preferences branch — copy-paste; message text left unchanged here.
    logger.debug("commonTestBeforeMethod: startingAccountPreferences are defined");
    ZimbraAccount.AccountZDC().modifyZimletPreferences(startingAccountZimletPreferences, destType);
}
// If
// a startingPage is defined, then make sure we are on that page
if ( startingPage != null ) {
    logger.debug("commonTestBeforeMethod: startingPage is defined");
    // If the starting page is not active, navigate to it
    if ( !startingPage.zIsActive() ) {
        startingPage.zNavigateTo();
    }
    // Confirm that the page is active
    if ( !startingPage.zIsActive() ) {
        throw new HarnessException("Unable to navigate to "+ startingPage.myPageName());
    }
    if (ZimbraSeleniumProperties.getAppType() == AppType.DESKTOP
            && startingPage != app.zPageLogin
            && startingPage != app.zPageAddNewAccount) {
        // Lazily cache the active account's zimlet list for the desktop client.
        if (desktopZimlets == null) {
            desktopZimlets = app.zGetActiveAccount().getAvailableZimlets(
                    SOAP_DESTINATION_HOST_TYPE.CLIENT);
        }
        logger.debug("Desktop Zimlets are: ");
        for (int i = 0; i < desktopZimlets.length; i++) {
            logger.debug("==> Zimlet " + i + " is: " + desktopZimlets[i]);
        }
        app.zTreeMail.zExpandAll();
    }
}

// Make sure any extra compose tabs are closed
app.zPageMain.zCloseComposeTabs();
logger.info("commonTestBeforeMethod: finish");
}

/**
 * Global AfterSuite
 * <p>
 * <ol>
 * <li>Stop the DefaultSelenium client</li>
 * </ol>
 *
 * @throws HarnessException
 */
@AfterSuite(alwaysRun=true)
public void commonTestAfterSuite() throws HarnessException {
    logger.info("commonTestAfterSuite: start");
    // Only for linux, kill the desktop process because
    // in linux, the app is holding up the thread
    // NOTE(review): the comment says "only for linux" but the condition also
    // matches MAC — comment and code disagree; confirm which is intended.
    if (OperatingSystem.getOSType() == OsType.LINUX ||
            OperatingSystem.getOSType() == OsType.MAC) {
        DesktopInstallUtil.killDesktopProcess();
    }
    _startDesktopClient = null;
    ClientSessionFactory.session().selenium().stop();
    _repository.endRepository();
    logger.info("commonTestAfterSuite: finish");
}

/**
 * Global AfterClass
 *
 * @throws HarnessException
 */
@AfterClass(alwaysRun=true)
public void commonTestAfterClass() throws HarnessException {
    logger.info("commonTestAfterClass: start");
    logger.info("commonTestAfterClass: finish");
}

/**
 * Global AfterMethod
 *
 * @throws HarnessException
 */
@AfterMethod(alwaysRun=true)
public void commonTestAfterMethod(Method method, ITestResult testResult) throws HarnessException {
    logger.info("commonTestAfterMethod: start");

    // For Ajax, if account is considered dirty (modified),
    // then recreate a new account, but for desktop, the zimlet
    // preferences has to be reset to default, all core zimlets are enabled
    ZimbraAccount currentAccount = app.zGetActiveAccount();
    if (currentAccount != null) {
        if (startingPage != app.zPageLogin &&
                startingPage != app.zPageAddNewAccount &&
                desktopZimlets == null) {
            throw new HarnessException("Desktop zimlets are null for unknown reason");
        }
        // Reset the zimlets preferences to default
        // NOTE(review): when startingPage IS the login/add-account page,
        // desktopZimlets may still be null here and the loop below would NPE —
        // confirm whether that path can be reached with a non-null active account.
        Map<String, String> defaultZimlets = new HashMap<String, String>();
        for (int i = 0; i < desktopZimlets.length; i++) {
            defaultZimlets.put(desktopZimlets[i], "enabled");
        }
        currentAccount.authenticateToMailClientHost();
        currentAccount.modifyZimletPreferences(defaultZimlets,
                SOAP_DESTINATION_HOST_TYPE.CLIENT);
    }

    String testCaseResult = String.valueOf(testResult.getStatus());
    Repository.testCaseEnd(testCaseResult);

    logger.info("commonTestAfterMethod: finish");
}
}
/*
 * Copyright LWJGL. All rights reserved.
 * License terms: https://www.lwjgl.org/license
 * MACHINE GENERATED FILE, DO NOT EDIT
 */
// NOTE(review): this file is emitted by the LWJGL generator; any fixes belong in
// the generator templates, not here. Code below is intentionally left unchanged.
package org.lwjgl.vulkan;

import javax.annotation.*;

import java.nio.*;

import org.lwjgl.*;
import org.lwjgl.system.*;

import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;

/**
 * Structure specifying an attachment reference.
 *
 * <h5>Description</h5>
 *
 * <p>Parameters defined by this structure with the same name as those in {@link VkAttachmentReference} have the identical effect to those parameters.</p>
 *
 * <p>{@code aspectMask} is ignored when this structure is used to describe anything other than an input attachment reference.</p>
 *
 * <p>If the <a target="_blank" href="https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#features-separateDepthStencilLayouts">{@code separateDepthStencilLayouts}</a> feature is enabled, and {@code attachment} has a depth/stencil format, {@code layout} <b>can</b> be set to a layout that only specifies the layout of the depth aspect.</p>
 *
 * <p>If {@code layout} only specifies the layout of the depth aspect of the attachment, the layout of the stencil aspect is specified by the {@code stencilLayout} member of a {@link VkAttachmentReferenceStencilLayout} structure included in the {@code pNext} chain. Otherwise, {@code layout} describes the layout for all relevant image aspects.</p>
 *
 * <h5>Valid Usage</h5>
 *
 * <ul>
 * <li>If {@code attachment} is not {@link VK10#VK_ATTACHMENT_UNUSED ATTACHMENT_UNUSED}, {@code layout} <b>must</b> not be {@link VK10#VK_IMAGE_LAYOUT_UNDEFINED IMAGE_LAYOUT_UNDEFINED}, {@link VK10#VK_IMAGE_LAYOUT_PREINITIALIZED IMAGE_LAYOUT_PREINITIALIZED}, or {@link KHRSwapchain#VK_IMAGE_LAYOUT_PRESENT_SRC_KHR IMAGE_LAYOUT_PRESENT_SRC_KHR}</li>
 * <li>If the <a target="_blank" href="https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#features-separateDepthStencilLayouts">{@code separateDepthStencilLayouts}</a> feature is not enabled, and {@code attachment} is not {@link VK10#VK_ATTACHMENT_UNUSED ATTACHMENT_UNUSED}, {@code layout} <b>must</b> not be {@link VK12#VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL}, {@link VK12#VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL}, {@link VK12#VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL}, or {@link VK12#VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL},</li>
 * <li>If {@code attachment} is not {@link VK10#VK_ATTACHMENT_UNUSED ATTACHMENT_UNUSED}, and the format of the referenced attachment is a color format, {@code layout} <b>must</b> not be {@link VK12#VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL}, {@link VK12#VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL}, {@link VK12#VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL} or {@link VK12#VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL}</li>
 * <li>If {@code attachment} is not {@link VK10#VK_ATTACHMENT_UNUSED ATTACHMENT_UNUSED}, and the format of the referenced attachment is a depth/stencil format which includes both depth and stencil aspects, and {@code layout} is {@link VK12#VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL} or {@link VK12#VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL}, the {@code pNext} chain <b>must</b> include a {@link VkAttachmentReferenceStencilLayout} structure</li>
 * <li>If {@code attachment} is not {@link VK10#VK_ATTACHMENT_UNUSED ATTACHMENT_UNUSED}, and the format of the referenced attachment is a depth/stencil format which includes only the depth aspect, {@code layout} <b>must</b> not be {@link VK12#VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL} or {@link VK12#VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL}</li>
 * <li>If {@code attachment} is not {@link VK10#VK_ATTACHMENT_UNUSED ATTACHMENT_UNUSED}, and the format of the referenced attachment is a depth/stencil format which includes only the stencil aspect, {@code layout} <b>must</b> not be {@link VK12#VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL} or {@link VK12#VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL}</li>
 * </ul>
 *
 * <h5>Valid Usage (Implicit)</h5>
 *
 * <ul>
 * <li>{@code sType} <b>must</b> be {@link VK12#VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2 STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2}</li>
 * <li>{@code pNext} <b>must</b> be {@code NULL} or a pointer to a valid instance of {@link VkAttachmentReferenceStencilLayout}</li>
 * <li>The {@code sType} value of each struct in the {@code pNext} chain <b>must</b> be unique</li>
 * <li>{@code layout} <b>must</b> be a valid {@code VkImageLayout} value</li>
 * </ul>
 *
 * <h5>See Also</h5>
 *
 * <p>{@link VkFragmentShadingRateAttachmentInfoKHR}, {@link VkSubpassDescription2}, {@link VkSubpassDescriptionDepthStencilResolve}</p>
 *
 * <h3>Layout</h3>
 *
 * <pre><code>
 * struct VkAttachmentReference2 {
 *     VkStructureType {@link #sType};
 *     void const * {@link #pNext};
 *     uint32_t {@link #attachment};
 *     VkImageLayout {@link #layout};
 *     VkImageAspectFlags {@link #aspectMask};
 * }</code></pre>
 */
public class VkAttachmentReference2 extends Struct implements NativeResource {

    /** The struct size in bytes. */
    public static final int SIZEOF;

    /** The struct alignment in bytes. */
    public static final int ALIGNOF;

    /** The struct member offsets. */
    public static final int
        STYPE,
        PNEXT,
        ATTACHMENT,
        LAYOUT,
        ASPECTMASK;

    static {
        // Member sizes mirror the native struct: 3 x 4-byte ints plus one pointer.
        Layout layout = __struct(
            __member(4),
            __member(POINTER_SIZE),
            __member(4),
            __member(4),
            __member(4)
        );

        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();

        STYPE = layout.offsetof(0);
        PNEXT = layout.offsetof(1);
        ATTACHMENT = layout.offsetof(2);
        LAYOUT = layout.offsetof(3);
        ASPECTMASK = layout.offsetof(4);
    }

    /**
     * Creates a {@code VkAttachmentReference2} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public VkAttachmentReference2(ByteBuffer container) {
        super(memAddress(container), __checkContainer(container, SIZEOF));
    }

    @Override
    public int sizeof() { return SIZEOF; }

    /** the type of this structure. */
    @NativeType("VkStructureType")
    public int sType() { return nsType(address()); }
    /** {@code NULL} or a pointer to a structure extending this structure. */
    @NativeType("void const *")
    public long pNext() { return npNext(address()); }
    /** either an integer value identifying an attachment at the corresponding index in {@link VkRenderPassCreateInfo2}{@code ::pAttachments}, or {@link VK10#VK_ATTACHMENT_UNUSED ATTACHMENT_UNUSED} to signify that this attachment is not used. */
    @NativeType("uint32_t")
    public int attachment() { return nattachment(address()); }
    /** a {@code VkImageLayout} value specifying the layout the attachment uses during the subpass. */
    @NativeType("VkImageLayout")
    public int layout() { return nlayout(address()); }
    /** a mask of which aspect(s) <b>can</b> be accessed within the specified subpass as an input attachment. */
    @NativeType("VkImageAspectFlags")
    public int aspectMask() { return naspectMask(address()); }

    /** Sets the specified value to the {@link #sType} field. */
    public VkAttachmentReference2 sType(@NativeType("VkStructureType") int value) { nsType(address(), value); return this; }
    /** Sets the {@link VK12#VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2 STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2} value to the {@link #sType} field. */
    public VkAttachmentReference2 sType$Default() { return sType(VK12.VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2); }
    /** Sets the specified value to the {@link #pNext} field. */
    public VkAttachmentReference2 pNext(@NativeType("void const *") long value) { npNext(address(), value); return this; }
    /** Prepends the specified {@link VkAttachmentReferenceStencilLayout} value to the {@code pNext} chain. */
    public VkAttachmentReference2 pNext(VkAttachmentReferenceStencilLayout value) { return this.pNext(value.pNext(this.pNext()).address()); }
    /** Prepends the specified {@link VkAttachmentReferenceStencilLayoutKHR} value to the {@code pNext} chain. */
    public VkAttachmentReference2 pNext(VkAttachmentReferenceStencilLayoutKHR value) { return this.pNext(value.pNext(this.pNext()).address()); }
    /** Sets the specified value to the {@link #attachment} field. */
    public VkAttachmentReference2 attachment(@NativeType("uint32_t") int value) { nattachment(address(), value); return this; }
    /** Sets the specified value to the {@link #layout} field. */
    public VkAttachmentReference2 layout(@NativeType("VkImageLayout") int value) { nlayout(address(), value); return this; }
    /** Sets the specified value to the {@link #aspectMask} field. */
    public VkAttachmentReference2 aspectMask(@NativeType("VkImageAspectFlags") int value) { naspectMask(address(), value); return this; }

    /** Initializes this struct with the specified values. */
    public VkAttachmentReference2 set(
        int sType,
        long pNext,
        int attachment,
        int layout,
        int aspectMask
    ) {
        sType(sType);
        pNext(pNext);
        attachment(attachment);
        layout(layout);
        aspectMask(aspectMask);

        return this;
    }

    /**
     * Copies the specified struct data to this struct.
     *
     * @param src the source struct
     *
     * @return this struct
     */
    public VkAttachmentReference2 set(VkAttachmentReference2 src) {
        memCopy(src.address(), address(), SIZEOF);
        return this;
    }

    // -----------------------------------

    /** Returns a new {@code VkAttachmentReference2} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static VkAttachmentReference2 malloc() {
        return wrap(VkAttachmentReference2.class, nmemAllocChecked(SIZEOF));
    }

    /** Returns a new {@code VkAttachmentReference2} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static VkAttachmentReference2 calloc() {
        return wrap(VkAttachmentReference2.class, nmemCallocChecked(1, SIZEOF));
    }

    /** Returns a new {@code VkAttachmentReference2} instance allocated with {@link BufferUtils}. */
    public static VkAttachmentReference2 create() {
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(VkAttachmentReference2.class, memAddress(container), container);
    }

    /** Returns a new {@code VkAttachmentReference2} instance for the specified memory address. */
    public static VkAttachmentReference2 create(long address) {
        return wrap(VkAttachmentReference2.class, address);
    }

    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkAttachmentReference2 createSafe(long address) {
        return address == NULL ? null : wrap(VkAttachmentReference2.class, address);
    }

    /**
     * Returns a new {@link VkAttachmentReference2.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkAttachmentReference2.Buffer malloc(int capacity) {
        return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
    }

    /**
     * Returns a new {@link VkAttachmentReference2.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkAttachmentReference2.Buffer calloc(int capacity) {
        return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkAttachmentReference2.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static VkAttachmentReference2.Buffer create(int capacity) {
        ByteBuffer container = __create(capacity, SIZEOF);
        return wrap(Buffer.class, memAddress(container), capacity, container);
    }

    /**
     * Create a {@link VkAttachmentReference2.Buffer} instance at the specified memory.
     *
     * @param address  the memory address
     * @param capacity the buffer capacity
     */
    public static VkAttachmentReference2.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }

    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkAttachmentReference2.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }

    /**
     * Returns a new {@code VkAttachmentReference2} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static VkAttachmentReference2 malloc(MemoryStack stack) {
        return wrap(VkAttachmentReference2.class, stack.nmalloc(ALIGNOF, SIZEOF));
    }

    /**
     * Returns a new {@code VkAttachmentReference2} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static VkAttachmentReference2 calloc(MemoryStack stack) {
        return wrap(VkAttachmentReference2.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }

    /**
     * Returns a new {@link VkAttachmentReference2.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkAttachmentReference2.Buffer malloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkAttachmentReference2.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkAttachmentReference2.Buffer calloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }

    // -----------------------------------

    /** Unsafe version of {@link #sType}. */
    public static int nsType(long struct) { return UNSAFE.getInt(null, struct + VkAttachmentReference2.STYPE); }
    /** Unsafe version of {@link #pNext}. */
    public static long npNext(long struct) { return memGetAddress(struct + VkAttachmentReference2.PNEXT); }
    /** Unsafe version of {@link #attachment}. */
    public static int nattachment(long struct) { return UNSAFE.getInt(null, struct + VkAttachmentReference2.ATTACHMENT); }
    /** Unsafe version of {@link #layout}. */
    public static int nlayout(long struct) { return UNSAFE.getInt(null, struct + VkAttachmentReference2.LAYOUT); }
    /** Unsafe version of {@link #aspectMask}. */
    public static int naspectMask(long struct) { return UNSAFE.getInt(null, struct + VkAttachmentReference2.ASPECTMASK); }

    /** Unsafe version of {@link #sType(int) sType}. */
    public static void nsType(long struct, int value) { UNSAFE.putInt(null, struct + VkAttachmentReference2.STYPE, value); }
    /** Unsafe version of {@link #pNext(long) pNext}. */
    public static void npNext(long struct, long value) { memPutAddress(struct + VkAttachmentReference2.PNEXT, value); }
    /** Unsafe version of {@link #attachment(int) attachment}. */
    public static void nattachment(long struct, int value) { UNSAFE.putInt(null, struct + VkAttachmentReference2.ATTACHMENT, value); }
    /** Unsafe version of {@link #layout(int) layout}. */
    public static void nlayout(long struct, int value) { UNSAFE.putInt(null, struct + VkAttachmentReference2.LAYOUT, value); }
    /** Unsafe version of {@link #aspectMask(int) aspectMask}. */
    public static void naspectMask(long struct, int value) { UNSAFE.putInt(null, struct + VkAttachmentReference2.ASPECTMASK, value); }

    // -----------------------------------

    /** An array of {@link VkAttachmentReference2} structs. */
    public static class Buffer extends StructBuffer<VkAttachmentReference2, Buffer> implements NativeResource {

        private static final VkAttachmentReference2 ELEMENT_FACTORY = VkAttachmentReference2.create(-1L);

        /**
         * Creates a new {@code VkAttachmentReference2.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link VkAttachmentReference2#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container, container.remaining() / SIZEOF);
        }

        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }

        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            return this;
        }

        @Override
        protected VkAttachmentReference2 getElementFactory() {
            return ELEMENT_FACTORY;
        }

        /** @return the value of the {@link VkAttachmentReference2#sType} field. */
        @NativeType("VkStructureType")
        public int sType() { return VkAttachmentReference2.nsType(address()); }
        /** @return the value of the {@link VkAttachmentReference2#pNext} field. */
        @NativeType("void const *")
        public long pNext() { return VkAttachmentReference2.npNext(address()); }
        /** @return the value of the {@link VkAttachmentReference2#attachment} field. */
        @NativeType("uint32_t")
        public int attachment() { return VkAttachmentReference2.nattachment(address()); }
        /** @return the value of the {@link VkAttachmentReference2#layout} field. */
        @NativeType("VkImageLayout")
        public int layout() { return VkAttachmentReference2.nlayout(address()); }
        /** @return the value of the {@link VkAttachmentReference2#aspectMask} field. */
        @NativeType("VkImageAspectFlags")
        public int aspectMask() { return VkAttachmentReference2.naspectMask(address()); }

        /** Sets the specified value to the {@link VkAttachmentReference2#sType} field. */
        public VkAttachmentReference2.Buffer sType(@NativeType("VkStructureType") int value) { VkAttachmentReference2.nsType(address(), value); return this; }
        /** Sets the {@link VK12#VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2 STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2} value to the {@link VkAttachmentReference2#sType} field. */
        public VkAttachmentReference2.Buffer sType$Default() { return sType(VK12.VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2); }
        /** Sets the specified value to the {@link VkAttachmentReference2#pNext} field. */
        public VkAttachmentReference2.Buffer pNext(@NativeType("void const *") long value) { VkAttachmentReference2.npNext(address(), value); return this; }
        /** Prepends the specified {@link VkAttachmentReferenceStencilLayout} value to the {@code pNext} chain. */
        public VkAttachmentReference2.Buffer pNext(VkAttachmentReferenceStencilLayout value) { return this.pNext(value.pNext(this.pNext()).address()); }
        /** Prepends the specified {@link VkAttachmentReferenceStencilLayoutKHR} value to the {@code pNext} chain. */
        public VkAttachmentReference2.Buffer pNext(VkAttachmentReferenceStencilLayoutKHR value) { return this.pNext(value.pNext(this.pNext()).address()); }
        /** Sets the specified value to the {@link VkAttachmentReference2#attachment} field. */
        public VkAttachmentReference2.Buffer attachment(@NativeType("uint32_t") int value) { VkAttachmentReference2.nattachment(address(), value); return this; }
        /** Sets the specified value to the {@link VkAttachmentReference2#layout} field. */
        public VkAttachmentReference2.Buffer layout(@NativeType("VkImageLayout") int value) { VkAttachmentReference2.nlayout(address(), value); return this; }
        /** Sets the specified value to the {@link VkAttachmentReference2#aspectMask} field. */
        public VkAttachmentReference2.Buffer aspectMask(@NativeType("VkImageAspectFlags") int value) { VkAttachmentReference2.naspectMask(address(), value); return this; }

    }

}
package org.apache.lucene.index; /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.search.DefaultSimilarity; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Similarity; import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util._TestUtil; public class TestIndexReaderOnDiskFull extends LuceneTestCase { /** * Make sure if reader tries to commit but hits disk * full that reader remains consistent and usable. 
*/ public void testDiskFull() throws IOException { Term searchTerm = new Term("content", "aaa"); int START_COUNT = 157; int END_COUNT = 144; // First build up a starting index: MockDirectoryWrapper startDir = newDirectory(); IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))); if (VERBOSE) { System.out.println("TEST: create initial index"); writer.setInfoStream(System.out); } for(int i=0;i<157;i++) { Document d = new Document(); d.add(newField("id", Integer.toString(i), Field.Store.YES, Field.Index.NOT_ANALYZED)); d.add(newField("content", "aaa " + i, Field.Store.NO, Field.Index.ANALYZED)); writer.addDocument(d); if (0==i%10) writer.commit(); } writer.close(); { IndexReader r = IndexReader.open(startDir); IndexSearcher searcher = newSearcher(r); ScoreDoc[] hits = null; try { hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs; } catch (IOException e) { e.printStackTrace(); fail("exception when init searching: " + e); } searcher.close(); r.close(); } long diskUsage = startDir.getRecomputedActualSizeInBytes(); long diskFree = diskUsage+_TestUtil.nextInt(random, 50, 200); IOException err = null; boolean done = false; boolean gotExc = false; // Iterate w/ ever increasing free disk space: while(!done) { MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir)); // If IndexReader hits disk full, it can write to // the same files again. 
dir.setPreventDoubleWrite(false); IndexReader reader = IndexReader.open(dir, false); // For each disk size, first try to commit against // dir that will hit random IOExceptions & disk // full; after, give it infinite disk space & turn // off random IOExceptions & retry w/ same reader: boolean success = false; for(int x=0;x<2;x++) { double rate = 0.05; double diskRatio = ((double) diskFree)/diskUsage; long thisDiskFree; String testName; if (0 == x) { thisDiskFree = diskFree; if (diskRatio >= 2.0) { rate /= 2; } if (diskRatio >= 4.0) { rate /= 2; } if (diskRatio >= 6.0) { rate = 0.0; } if (VERBOSE) { System.out.println("\ncycle: " + diskFree + " bytes"); } testName = "disk full during reader.close() @ " + thisDiskFree + " bytes"; } else { thisDiskFree = 0; rate = 0.0; if (VERBOSE) { System.out.println("\ncycle: same writer: unlimited disk space"); } testName = "reader re-use after disk full"; } dir.setMaxSizeInBytes(thisDiskFree); dir.setRandomIOExceptionRate(rate); Similarity sim = new DefaultSimilarity(); try { if (0 == x) { int docId = 12; for(int i=0;i<13;i++) { reader.deleteDocument(docId); reader.setNorm(docId, "content", sim.encodeNormValue(2.0f)); docId += 12; } } reader.close(); success = true; if (0 == x) { done = true; } } catch (IOException e) { if (VERBOSE) { System.out.println(" hit IOException: " + e); e.printStackTrace(System.out); } err = e; gotExc = true; if (1 == x) { e.printStackTrace(); fail(testName + " hit IOException after disk space was freed up"); } } // Finally, verify index is not corrupt, and, if // we succeeded, we see all docs changed, and if // we failed, we see either all docs or no docs // changed (transactional semantics): IndexReader newReader = null; try { newReader = IndexReader.open(dir, false); } catch (IOException e) { e.printStackTrace(); fail(testName + ":exception when creating IndexReader after disk full during close: " + e); } /* int result = newReader.docFreq(searchTerm); if (success) { if (result != END_COUNT) { 
fail(testName + ": method did not throw exception but docFreq('aaa') is " + result + " instead of expected " + END_COUNT); } } else { // On hitting exception we still may have added // all docs: if (result != START_COUNT && result != END_COUNT) { err.printStackTrace(); fail(testName + ": method did throw exception but docFreq('aaa') is " + result + " instead of expected " + START_COUNT + " or " + END_COUNT); } } */ IndexSearcher searcher = newSearcher(newReader); ScoreDoc[] hits = null; try { hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs; } catch (IOException e) { e.printStackTrace(); fail(testName + ": exception when searching: " + e); } int result2 = hits.length; if (success) { if (result2 != END_COUNT) { fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT); } } else { // On hitting exception we still may have added // all docs: if (result2 != START_COUNT && result2 != END_COUNT) { err.printStackTrace(); fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT); } } searcher.close(); newReader.close(); if (result2 == END_COUNT) { if (!gotExc) fail("never hit disk full"); break; } } dir.close(); // Try again with more bytes of free space: diskFree += TEST_NIGHTLY ? _TestUtil.nextInt(random, 5, 20) : _TestUtil.nextInt(random, 50, 200); } startDir.close(); } }
/** * *Copyright (C) 2012 Atef Haouari - VEEUP * *This program is free software; you can redistribute it and/or *modify it under the terms of the GNU Lesser General Public *License as published by the Free Software Foundation; either *version 3 of the License, or (at your option) any later version. * *This program is distributed in the hope that it will be useful, *but WITHOUT ANY WARRANTY; without even the implied warranty of *MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *Lesser General Public License for more details. * *You should have received a copy of the GNU Lesser General Public *License along with this program; if not, write to the Free Software *Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA* */ package org.arengine.engine; import java.util.ArrayList; import java.util.List; import org.arengine.devices.SensorsHandler; import org.arengine.painter.DefaultPainter; import org.arengine.painter.DefaultSonar; import android.app.Activity; import android.content.pm.ActivityInfo; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.view.Display; import android.view.Surface; import android.view.View; import android.view.Window; import android.view.WindowManager; import android.view.WindowManager.LayoutParams; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.ImageView.ScaleType; /** * The Class AREngineActivity. * * @author Atef Haouari * * The main Activity that contains the CameraView and the AREngineView. * * By default, the CameraView and AREngineView fill all the screen. If a * custom layout is specified (trough overriding getCustomLayoutId), the * the CameraView and AREngineView will fill the frame layout with id * org.arengine.R.id.arEngineContent. 
* * Also by default, only the default sonar is drawn as a custom overlay * and a basic painter is user. * * override this activity the create their own augmented reality * activity. */ public class AREngineActivity extends Activity { /** The Constant DEFAULT_MAX_DISTANCE_FILTER. */ public static final double DEFAULT_MAX_DISTANCE_FILTER = 10000; /** The Constant AR_VIEW_TAG. */ public static final String AR_VIEW_TAG = "AR_VIEW"; /** The capteur handler. */ private SensorsHandler capteurHandler; /** The overlays. */ private List<CustomOverlay> overlays = new ArrayList<CustomOverlay>(); /** The poi list. */ private List<Poi> poiList = new ArrayList<Poi>(); /** The poi touch listener. */ private OnPoiTouchListener poiTouchListener; /** The poi painter. */ private PoiPainter poiPainter; /** The max distance filter. */ private double maxDistanceFilter = DEFAULT_MAX_DISTANCE_FILTER; /** The is filter distance. */ private boolean isFilterDistance = false; /** The ar engine view. */ private AREngineView arEngineView; /** The ar engine content. */ private FrameLayout arEngineContent = null; /** The camera view. */ private CameraView cameraView; /** The captue images. */ private ImageView captueImages = null; /** The sonar. 
*/ private DefaultSonar sonar = null; private PoiPainter defaultPoiPainter = null; /* * (non-Javadoc) * * @see android.app.Activity#onCreate(android.os.Bundle) */ @Override final protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); requestWindowFeature(Window.FEATURE_NO_TITLE); int defaultScreenOrientation = checkDefaultScreenOrientation(); setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN); int layoutId = getCustomLayoutId(); if (layoutId != -1) { setContentView(layoutId); arEngineContent = (FrameLayout) ((FrameLayout) this .findViewById(android.R.id.content)) .findViewWithTag(AR_VIEW_TAG); if (arEngineContent == null) { throw new IllegalStateException( "Must contain FrameLayout with tag AR_VIEW_TAG"); } } else { arEngineContent = new FrameLayout(this); setContentView(arEngineContent); } LayoutParams layoutParams = new LayoutParams(); layoutParams.width = LayoutParams.MATCH_PARENT; layoutParams.height = LayoutParams.MATCH_PARENT; cameraView = new CameraView(this, null); cameraView.setKeepScreenOn(true); cameraView.setLayoutParams(layoutParams); arEngineContent.addView(cameraView); captueImages = new ImageView(this); captueImages.setScaleType(ScaleType.FIT_XY); captueImages.setLayoutParams(layoutParams); arEngineContent.addView(captueImages); arEngineView = new AREngineView(this); arEngineView.setKeepScreenOn(true); arEngineView.setLayoutParams(layoutParams); capteurHandler = new SensorsHandler(arEngineView, defaultScreenOrientation); Projector projector = new Projector(capteurHandler, cameraView.getCameraManager()); arEngineView.setProjector(projector); arEngineContent.addView(arEngineView); sonar = new DefaultSonar(); defaultPoiPainter = new DefaultPainter(); onInitAREngine(); onAddOverlays(); onAddPoiList(); } /** * On init ar engine. */ public void onInitAREngine() { } /** * On add poi list. 
*/ public void onAddPoiList() { } /** * On add overlays. */ public void onAddOverlays() { } /** * Adds the sonar overlay. */ public void addSonarOverlay() { this.addOverlay(sonar); } /** * Removes the sonar. */ public void removeSonarOverlay() { this.removeOverlay(sonar); } /** * Gets the custom layout id. * * @return the custom layout id */ public int getCustomLayoutId() { return -1; } /* * (non-Javadoc) * * @see android.app.Activity#onStart() */ @Override protected void onStart() { super.onStart(); capteurHandler.startCapteurs(); } /* * (non-Javadoc) * * @see android.app.Activity#onStop() */ @Override protected void onStop() { super.onStart(); capteurHandler.stopCapteurs(); } /** * Adds the overlay. * * @param overlay * the overlay */ public void addOverlay(CustomOverlay overlay) { overlays.add(overlay); } /** * Removes the overlay. * * @param overlay * the overlay */ public void removeOverlay(CustomOverlay overlay) { overlays.remove(overlay); } /** * Adds the poi. * * @param poi * the poi */ public void addPoi(Poi poi) { poiList.add(poi); } /** * Removes the poi. * * @param poi * the poi */ public void removePoi(Poi poi) { poiList.remove(poi); } /** * Gets the overlays. * * @return the overlays */ public CustomOverlay[] getOverlays() { return overlays.toArray(new CustomOverlay[0]); } /** * Gets the poi list. * * @return the poi list */ public Poi[] getPoiList() { return poiList.toArray(new Poi[0]); } /** * Gets the poi touch listener. * * @return the poi touch listener */ public OnPoiTouchListener getPoiTouchListener() { return poiTouchListener; } /** * Sets the poi touch listener. * * @param poiTouchListener * the new poi touch listener */ public void setPoiTouchListener(OnPoiTouchListener poiTouchListener) { this.poiTouchListener = poiTouchListener; } /** * Gets the poi painter. * * @return the poi painter */ public PoiPainter getPoiPainter() { return poiPainter; } /** * Gets the default poi painter. 
* * @return the default poi painter */ public PoiPainter getDefaultPoiPainter() { return defaultPoiPainter; } /** * Sets the poi painter. * * @param poiPainter * the new poi painter */ public void setPoiPainter(PoiPainter poiPainter) { this.poiPainter = poiPainter; } /** * Check default screen orientation. * * @return the int */ private int checkDefaultScreenOrientation() { int lastOrientation = getRequestedOrientation(); setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED); Display display; display = getWindow().getWindowManager().getDefaultDisplay(); int rotation = display.getOrientation(); int width = 0; int height = 0; switch (rotation) { case Surface.ROTATION_0: case Surface.ROTATION_180: width = display.getWidth(); height = display.getHeight(); break; case Surface.ROTATION_90: case Surface.ROTATION_270: width = display.getHeight(); height = display.getWidth(); break; default: break; } setRequestedOrientation(lastOrientation); if (width > height) { return ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE; } else { return ActivityInfo.SCREEN_ORIENTATION_PORTRAIT; } } /** * Gets the max distance filter. * * @return the max distance filter */ public double getMaxDistanceFilter() { return maxDistanceFilter; } /** * Sets the max distance filter. * * @param maxDistanceFilter * the new max distance filter */ public void setMaxDistanceFilter(double maxDistanceFilter) { this.maxDistanceFilter = maxDistanceFilter; } /** * Checks if is filter distance enabled. * * @return true, if is filter distance enabled */ public boolean isFilterDistanceEnabled() { return isFilterDistance; } /** * Enable filter distance. */ public void enableFilterDistance() { isFilterDistance = true; } /** * Disable filter distance. */ public void disableFilterDistance() { isFilterDistance = false; } /** * Captrue. 
* * @param callback * the callback * @return the bitmap */ public void captrue(final CaptureCallback callback) { new Thread(new CaptrueHandler(this, callback)).start(); } /** * The Class CaptrueHandler. */ private static class CaptrueHandler extends Handler implements Runnable { /** The capture bitmap. */ private Bitmap captureBitmap; /** The capture drawble. */ private Drawable captureDrawble; /** The activity. */ private AREngineActivity activity; /** The callback. */ private CaptureCallback callback; /** * Instantiates a new captrue handler. * * @param activity * the activity * @param callback * the callback */ public CaptrueHandler(AREngineActivity activity, CaptureCallback callback) { this.activity = activity; this.callback = callback; } /* * (non-Javadoc) * * @see android.os.Handler#handleMessage(android.os.Message) */ @Override public void handleMessage(Message msg) { activity.captueImages.setImageDrawable(captureDrawble); activity.captueImages.setVisibility(View.VISIBLE); Canvas canvas = new Canvas(captureBitmap); activity.arEngineContent.draw(canvas); activity.captueImages.setVisibility(View.GONE); if (callback != null) { callback.onCapture(captureBitmap); } } /* * (non-Javadoc) * * @see java.lang.Runnable#run() */ @Override public void run() { captureBitmap = Bitmap.createBitmap(activity.cameraView.getWidth(), activity.cameraView.getHeight(), Bitmap.Config.ARGB_8888); captureDrawble = new BitmapDrawable(activity.cameraView.capture()); if (activity.captueImages.getDrawable() != null) { ((BitmapDrawable) activity.captueImages.getDrawable()) .getBitmap().recycle(); } sendEmptyMessage(1); } } }
package eu.newsreader.eventcoreference.storyline; import eu.newsreader.eventcoreference.input.*; import eu.newsreader.eventcoreference.util.EuroVoc; import eu.newsreader.eventcoreference.util.Util; import org.json.JSONException; import org.json.JSONObject; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.Set; /** * Created by piek on 1/3/14. */ @Deprecated public class TrigToJsonStoryPerspectives { static TrigTripleData trigTripleData = new TrigTripleData(); static HashMap<String, ArrayList<String>> iliMap = new HashMap<String, ArrayList<String>>(); static ArrayList<String> blacklist = new ArrayList<String>(); static boolean ONESTORY = false; static boolean ALL = false; /// if true we do not filter events static boolean SKIPPEVENTS = false; /// if true we we exclude perspective events from the stories static boolean MERGE = false; static String timeGran = "D"; static String actionOnt = ""; static int actionSim = 1; static int interSect = 1; static boolean PERSPECTIVE = true; // @Deprecated, can be taken out eventually static boolean COMBINE = true; // @Deprecated, can be taken out eventually static EsoReader esoReader = new EsoReader(); static FrameNetReader frameNetReader = new FrameNetReader(); static ArrayList<String> topFrames = new ArrayList<String>(); static int fnLevel = 0; static int esoLevel = 0; static int climaxThreshold = 0; static String entityFilter = ""; static Integer actorThreshold = -1; static int topicThreshold = 0; static int nEvents = 0; static int nActors = 0; static int nMentions = 0; static int nStories = 0; static String year = ""; static String EVENTSCHEMA = ""; static EuroVoc euroVoc = new EuroVoc(); static EuroVoc euroVocBlackList = new EuroVoc(); static String log = ""; static public void main (String[] args) { trigTripleData = new TrigTripleData(); String project = "NewsReader storyline"; String pathToILIfile = ""; String sparqlQuery = ""; String eventQuery = ""; 
String entityQuery = "";
String trigfolder = "";
String trigfile = "";
String pathToRawTextIndexFile = "";
String pathToFtDataFile = "";
String blackListFile = "";
String fnFile = "";
String esoFile = "";
String euroVocFile = "";
String euroVocBlackListFile = "";
String pathToTokenIndex = "";
log = "";
fnLevel = 0;
esoLevel = 0;
// Command-line options come as "--flag value" pairs (a few are bare flags);
// every value-taking branch guards with args.length>(i+1) before reading.
for (int i = 0; i < args.length; i++) {
    String arg = args[i];
    if (arg.equals("--trig-folder") && args.length>(i+1)) {
        trigfolder = args[i+1];
    }
    else if (arg.equals("--sparql") && args.length>(i+1)) {
        sparqlQuery = args[i+1];
    }
    else if (arg.equals("--event") && args.length>(i+1)) {
        eventQuery = args[i+1];
        // BUGFIX: the optional "SCHEMA:query" prefix must be parsed from the
        // --event value itself. The original split entityQuery (still "" at
        // this point, since --entity is handled below) and then overwrote
        // entityQuery with the query part — a copy-paste slip that both
        // dropped the schema parsing for --event and clobbered --entity.
        String[] fields = eventQuery.split(":");
        if (fields.length==2) {
            EVENTSCHEMA = fields[0];
            eventQuery = fields[1];
        }
    }
    else if (arg.equals("--tokens") && args.length>(i+1)) {
        pathToTokenIndex = args[i+1];
    }
    else if (arg.equals("--entity") && args.length>(i+1)) {
        entityQuery = args[i+1];
    }
    else if (arg.equals("--year") && args.length>(i+1)) {
        year = args[i+1];
    }
    else if (arg.equals("--onestory")) {
        ONESTORY = true;
    }
    else if (arg.equals("--ft") && args.length>(i+1)) {
        pathToFtDataFile = args[i+1];
    }
    else if (arg.equals("--time") && args.length>(i+1)) {
        timeGran = args[i+1];
    }
    else if (arg.equals("--actor-intersect") && args.length>(i+1)) {
        try {
            interSect = Integer.parseInt(args[i+1]);
        } catch (NumberFormatException e) {
            e.printStackTrace();
        }
    }
    else if (arg.equals("--action-sim") && args.length>(i+1)) {
        try {
            actionSim = Integer.parseInt(args[i+1]);
        } catch (NumberFormatException e) {
            e.printStackTrace();
        }
    }
    else if (arg.equals("--action-ont") && args.length>(i+1)) {
        actionOnt = args[i+1];
    }
    else if (arg.equals("--action-schema") && args.length>(i+1)) {
        EVENTSCHEMA = args[i+1];
    }
    else if (arg.equals("--merge")) {
        MERGE = true;
    }/* else if (arg.equals("--perspective")) {
        PERSPECTIVE = true;
    }*/
    else if (arg.equals("--eurovoc") && args.length>(i+1)) {
        euroVocFile = args[i+1];
        euroVoc.readEuroVoc(euroVocFile,"en");
    }
    else if
(arg.equals("--eurovoc-blacklist") && args.length>(i+1)) { euroVocBlackListFile = args[i+1]; euroVocBlackList.readEuroVoc(euroVocBlackListFile, "en"); System.out.println("euroVocBlackList = " + euroVocBlackList.uriLabelMap.size()); } else if (arg.equals("--project") && args.length>(i+1)) { project = args[i+1]; } else if (arg.equals("--trig-file") && args.length>(i+1)) { trigfile = args[i+1]; } else if (arg.equals("--ili") && args.length>(i+1)) { pathToILIfile = args[i+1]; } else if (arg.equals("--raw-text") && args.length>(i+1)) { pathToRawTextIndexFile = args[i+1]; } else if (arg.equals("--black-list") && args.length>(i+1)) { blackListFile = args[i+1]; } else if (arg.equals("--actor-cnt") && args.length>(i+1)) { actorThreshold = Integer.parseInt(args[i+1]); } else if (arg.equals("--all")){ ALL = true; } else if (arg.equals("--frame-relations") && args.length>(i+1)) { fnFile = args[i+1]; } else if (arg.equals("--frame-level") && args.length>(i+1)) { try { fnLevel = Integer.parseInt(args[i+1]); } catch (NumberFormatException e) { e.printStackTrace(); } } else if (arg.equals("--eso-relations") && args.length>(i+1)) { esoFile = args[i+1]; } else if (arg.equals("--eso-level") && args.length>(i+1)) { try { esoLevel = Integer.parseInt(args[i+1]); } catch (NumberFormatException e) { e.printStackTrace(); } } else if (arg.equals("--climax-level") && args.length>(i+1)) { try { climaxThreshold = Integer.parseInt(args[i+1]); } catch (NumberFormatException e) { e.printStackTrace(); } } else if (arg.equals("--topic-level") && args.length>(i+1)) { try { topicThreshold = Integer.parseInt(args[i+1]); } catch (NumberFormatException e) { e.printStackTrace(); } } } System.out.println("climaxThreshold = " + climaxThreshold); System.out.println("topicThreshold = " + topicThreshold); System.out.println("actionOnt = " + actionOnt); System.out.println("actionSim = " + actionSim); System.out.println("actorThreshold = " + actorThreshold); System.out.println("actor interSect = " + interSect); 
System.out.println("pathToRawTextIndexFile = " + pathToRawTextIndexFile); System.out.println("MERGE = " + MERGE); System.out.println("PERSPECTIVE = " + PERSPECTIVE); if (!blackListFile.isEmpty()) { blacklist = Util.ReadFileToStringArrayList(blackListFile); } if (!fnFile.isEmpty()) { frameNetReader.parseFile(fnFile); topFrames = frameNetReader.getTopsFrameNetTree(); frameNetReader.flatRelations(fnLevel); } if (!esoFile.isEmpty()) { esoReader.parseFile(esoFile); } iliMap = Util.ReadFileToStringHashMap(pathToILIfile); ArrayList<File> trigFiles = new ArrayList<File>(); if (!trigfolder.isEmpty()) { System.out.println("trigfolder = " + trigfolder); trigFiles = Util.makeRecursiveFileList(new File(trigfolder), ".trig"); } else if (!trigfile.isEmpty()) { System.out.println("trigfile = " + trigfile); trigFiles.add(new File(trigfile)); } if (trigFiles.size()>0) { System.out.println("trigFiles.size() = " + trigFiles.size()); trigTripleData = TrigTripleReader.readTripleFromTrigFiles(trigFiles); } try { ArrayList<JSONObject> jsonObjects = JsonStoryUtil.getJSONObjectArray(trigTripleData, ALL,SKIPPEVENTS, EVENTSCHEMA, blacklist, iliMap, fnLevel, frameNetReader, topFrames, esoLevel, esoReader); System.out.println("Events in SEM-RDF files = " + jsonObjects.size()); if (blacklist.size()>0) { jsonObjects = JsonStoryUtil.filterEventsForBlackList(jsonObjects, blacklist); System.out.println("Events after blacklist filter= " + jsonObjects.size()); } if (actorThreshold>0) { jsonObjects = JsonStoryUtil.filterEventsForActors(jsonObjects, entityFilter, actorThreshold); System.out.println("Events after actor count filter = " + jsonObjects.size()); } /* jsonObjects = JsonStoryUtil.removePerspectiveEvents(trigTripleData, jsonObjects); System.out.println("Events after removing perspective events = " + jsonObjects.size()); */ if (ONESTORY) { System.out.println("creating one story..."); jsonObjects = JsonStoryUtil.createOneStoryForJSONArrayList(jsonObjects, climaxThreshold, MERGE, timeGran, 
actionOnt, actionSim); } else { jsonObjects = JsonStoryUtil.createStoryLinesForJSONArrayList(jsonObjects, topicThreshold, climaxThreshold, entityFilter, MERGE, timeGran, actionOnt, actionSim, interSect); } System.out.println("Events after storyline filter = " + jsonObjects.size()); //JsonStoryUtil.augmentEventLabelsWithArguments(jsonObjects); JsonStoryUtil.minimalizeActors(jsonObjects); // System.out.println("eurovoc = " + euroVoc.uriLabelMap.size()); if (euroVoc.uriLabelMap.size()>0) { JsonStoryUtil.renameStories(jsonObjects, euroVoc, euroVocBlackList); } ArrayList<JSONObject> rawTextArrayList = new ArrayList<JSONObject>(); ArrayList<JSONObject> perspectiveEvents = new ArrayList<JSONObject>(); ArrayList<JSONObject> structuredEvents = new ArrayList<JSONObject>(); if (PERSPECTIVE && jsonObjects.size()>0) { if (!entityQuery.isEmpty() || !eventQuery.isEmpty() ||!sparqlQuery.isEmpty()) { System.out.println("Getting perspectives for: " + jsonObjects.size() + " events"); TrigKSTripleReader.integrateAttributionFromKs(jsonObjects); } else { JsonStoryUtil.integratePerspectivesInEventObjects(trigTripleData, jsonObjects, project); } } if (!pathToTokenIndex.isEmpty()) { log += MentionResolver.createSnippetIndexFromMentions(jsonObjects, pathToTokenIndex); } else if (!pathToRawTextIndexFile.isEmpty()) { // rawTextArrayList = Util.ReadFileToUriTextArrayList(pathToRawTextIndexFile); MentionResolver.ReadFileToUriTextArrayList(pathToRawTextIndexFile, jsonObjects); } nEvents = jsonObjects.size(); nActors = JsonStoryUtil.countActors(jsonObjects); nMentions = JsonStoryUtil.countMentions(jsonObjects); nStories = JsonStoryUtil.countGroups(jsonObjects); JsonSerialization.writeJsonObjectArrayWithStructuredData(trigfolder, "", project, jsonObjects, rawTextArrayList, nEvents, nStories, nActors, nMentions, "polls", structuredEvents); /// @Deprecated /// Creates separate JSON files for each story. 
@Deprecated //splitStories(jsonObjects,rawTextArrayList,structuredEvents,project,trigfolder); /// @Deprecated /* if (!COMBINE) { if (PERSPECTIVE && perspectiveEvents.size()>0) { JsonSerialization.writeJsonPerspectiveArray(trigfolder, project, perspectiveEvents); } if (!pathToFtDataFile.isEmpty() && structuredEvents.size()>0) { JsonSerialization.writeJsonStructuredArray(trigfolder, project, structuredEvents); } }*/ } catch (JSONException e) { e.printStackTrace(); } System.out.println("story_cnt = " + nStories); System.out.println("event_cnt = " + nEvents); System.out.println("mention_cnt = "+ nMentions); System.out.println("actor_cnt = " + nActors); } static void splitStories (ArrayList<JSONObject> events, ArrayList<JSONObject> rawTextArrayList, ArrayList<JSONObject> structuredEvents, String project, String trigFolder ) { HashMap<String, ArrayList<JSONObject>> storyMap = new HashMap<String, ArrayList<JSONObject>>(); for (int i = 0; i < events.size(); i++) { JSONObject event = events.get(i); try { String group = event.getString("group"); if (storyMap.containsKey(group)) { ArrayList<JSONObject> groupEvents = storyMap.get(group); groupEvents.add(event); storyMap.put(group,groupEvents); } else { ArrayList<JSONObject> groupEvents = new ArrayList<JSONObject>(); groupEvents.add(event); storyMap.put(group,groupEvents); } } catch (JSONException e) { e.printStackTrace(); } } Set keySet = storyMap.keySet(); Iterator<String> keys = keySet.iterator(); while ((keys.hasNext())) { String group = keys.next(); ArrayList<JSONObject> groupEvents = storyMap.get(group); int nActors = JsonStoryUtil.countActors(groupEvents); int nMentions = JsonStoryUtil.countMentions(groupEvents); // System.out.println("group = " + group); JsonSerialization.writeJsonObjectArrayWithStructuredData(trigFolder, group, project, groupEvents, rawTextArrayList, groupEvents.size(), 1, nActors, nMentions, "polls", structuredEvents); } } }
/* * Copyright 2005-2007 WSO2, Inc. (http://wso2.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.core.util; import org.apache.commons.dbcp.BasicDataSource; import org.wso2.carbon.user.api.RealmConfiguration; import org.wso2.carbon.user.core.UserStoreException; import org.wso2.carbon.user.core.UserStoreManager; import org.wso2.carbon.user.core.config.RealmConfigXMLProcessor; import org.wso2.carbon.user.core.jdbc.JDBCUserStoreManager; import org.wso2.carbon.utils.InputReader; import org.wso2.carbon.utils.ServerConstants; import java.io.File; import java.io.IOException; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; /** * A tool for updating the password of a user */ public class PasswordUpdater { private static final String USAGE_MSG_INDENT_SPACES = " "; public static final String DB_URL = "--db-url"; public static final String DB_DRIVER = "--db-driver"; public static final String DB_USERNAME = "--db-username"; public static final String DB_PASSWORD = "--db-password"; public static final String USERNAME = "--username"; public static final String NEW_PASSWORD = "--new-password"; public static void main(String[] args) { new PasswordUpdater().run(args); } private void run(String[] args) { String wso2wsasHome = System.getProperty(ServerConstants.CARBON_HOME); if (wso2wsasHome == null) { wso2wsasHome = new File(".").getAbsolutePath(); System.setProperty(ServerConstants.CARBON_HOME, wso2wsasHome); } if (args.length 
== 0) { printUsage(); System.exit(0); } String dbURL = getParam(DB_URL, args); if (dbURL == null || dbURL.indexOf("jdbc:") != 0) { System.err.println(" Invalid database DB_URL : " + dbURL); printUsage(); System.exit(0); } // ------- DB Connection params String dbDriver = getParam(DB_DRIVER, args); if (dbDriver == null) { dbDriver = "org.h2.Driver"; } String dbUsername = getParam(DB_USERNAME, args); if (dbUsername == null) { dbUsername = "wso2carbon"; } String dbPassword = getParam(DB_PASSWORD, args); if (dbPassword == null) { dbPassword = "wso2carbon"; } // ------------ Load the DB Driver try { Class.forName(dbDriver); } catch (ClassNotFoundException e) { System.err.println(" Database driver [" + dbDriver + "] not found in classpath."); System.exit(1); } // Connect to the database Connection conn = null; try { conn = DriverManager.getConnection(dbURL, dbUsername, dbPassword); } catch (Exception e) { System.err.println("Cannot connect to database. \n" + "Please make sure that the JDBC URL is correct and that you have \n" + "stopped WSO2 Carbon before running this script. 
Root cause is : \n" + e); System.exit(1); } finally { if (conn != null) { try { conn.close(); } catch (SQLException e) { e.printStackTrace(); } } } // --------- Capture the service username and password String username = getParam(USERNAME, args); while (username == null || username.trim().length() == 0) { System.out.print("Username: "); try { username = InputReader.readInput(); } catch (IOException e) { System.err.println(" Could not read username : " + e); System.exit(1); } } String password = getParam(NEW_PASSWORD, args); if (password == null || password.trim().length() == 0) { String passwordRepeat = null; while (password == null || password.trim().length() == 0) { try { password = InputReader.readPassword("New password: "); } catch (IOException e) { System.err.println("Unable to read password : " + e); System.exit(1); } } while (passwordRepeat == null || passwordRepeat.trim().length() == 0) { try { passwordRepeat = InputReader.readPassword("Re-enter new password: "); } catch (IOException e) { System.err.println("Unable to read re-entered password : " + e); System.exit(1); } } if (!password.equals(passwordRepeat)) { System.err.println(" Password and re-entered password do not match"); System.exit(1); } } // DataSource is created to connect to user store DB using input parameters given by user BasicDataSource ds = new BasicDataSource(); ds.setUrl(dbURL); ds.setDriverClassName(dbDriver); ds.setUsername(dbUsername); ds.setPassword(dbPassword); try { RealmConfiguration realmConfig = new RealmConfigXMLProcessor() .buildRealmConfigurationFromFile(); JDBCUserStoreManager userStore = new JDBCUserStoreManager(ds, realmConfig); userStore.doUpdateCredentialByAdmin(username, password); System.out.println("Password updated successfully."); } catch (UserStoreException ex) { System.err.println("Error updating credentials for user " + username + " : " + ex); } } /** * This will check the given parameter in the array and will return, if available * * @param param * @param args * 
@return the value following the parameter flag if present, otherwise {@code null}
     */
    private String getParam(String param, String[] args) {
        if (param == null || "".equals(param)) {
            return null;
        }
        // Arguments arrive as "--flag value" pairs, so step through the flags two at a time.
        for (int i = 0; i < args.length; i = i + 2) {
            String arg = args[i];
            // BUGFIX: the original bound check was (args.length >= (i + 1)), which is always
            // true for any valid index i (since i < args.length). That allowed args[i + 1] to
            // throw ArrayIndexOutOfBoundsException when the matching flag was the last element
            // of an odd-length argument array. Require the value index itself to be in range.
            if (param.equalsIgnoreCase(arg) && (i + 1 < args.length)) {
                return args[i + 1];
            }
        }
        return null;
    }

    /**
     * Prints the command-line usage of this tool to standard output, listing the
     * mandatory database URL and all optional flags.
     */
    private void printUsage() {
        System.out.println("Usage: chpasswd --db-url DB_URL [OPTIONS]\n");
        System.out.println(USAGE_MSG_INDENT_SPACES + DB_URL + " : The JDBC database URL. " +
                "e.g. jdbc:h2:/home/carbon/database/WSO2CARBON_DB\n");
        System.out.println("Options");
        System.out.println(USAGE_MSG_INDENT_SPACES + DB_DRIVER + " : The database driver class. " +
                "e.g. org.h2.Driver");
        System.out.println(USAGE_MSG_INDENT_SPACES + DB_USERNAME + " : The database username");
        System.out.println(USAGE_MSG_INDENT_SPACES + DB_PASSWORD + " : The database password");
        System.out.println(USAGE_MSG_INDENT_SPACES + USERNAME + " : The username of the user whose " +
                "password is to be changed. If this is not given, " +
                "you will be prompted for this field later.");
        System.out.println(USAGE_MSG_INDENT_SPACES + NEW_PASSWORD + " : The new password of the user " +
                "whose password is to be changed. If this is not given, " +
                "you will be prompted for this field later.");
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.projectRoots.ui;

import com.google.common.collect.Lists;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.SdkEditorAdditionalOptionsProvider;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectBundle;
import com.intellij.openapi.projectRoots.*;
import com.intellij.openapi.projectRoots.impl.ProjectJdkImpl;
import com.intellij.openapi.projectRoots.impl.SdkConfigurationUtil;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.roots.ui.OrderRootTypeUIFactory;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.TabbedPaneWrapper;
import com.intellij.ui.navigation.History;
import com.intellij.ui.navigation.Place;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.io.File;
import java.util.*;
import java.util.List;

/**
 * Swing editor panel for a single {@link Sdk}: a home-path field with browse button,
 * one classpath-style tab per applicable {@link OrderRootType}, and an area/tabs for
 * SDK-type-specific "additional data" configurables. Implements {@link Configurable}
 * (isModified/apply/reset lifecycle) and {@link Place.Navigator} (tab navigation history).
 *
 * @author MYakovlev
 * @since Aug 15, 2002
 */
public class SdkEditor implements Configurable, Place.Navigator {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.projectRoots.ui.SdkEditor");
  // Key under which the selected tab title is stored in a navigation Place.
  private static final String SDK_TAB = "sdkTab";

  private Sdk mySdk;
  // One path editor per root type shown as a tab.
  private final Map<OrderRootType, SdkPathEditor> myPathEditors = new HashMap<>();

  private TextFieldWithBrowseButton myHomeComponent;
  // Cached additional-data configurables, keyed by SDK type; created lazily in initAdditionalDataConfigurable.
  private final Map<SdkType, List<AdditionalDataConfigurable>> myAdditionalDataConfigurables = new HashMap<>();
  // Cached Swing components produced by the configurables above.
  private final Map<AdditionalDataConfigurable, JComponent> myAdditionalDataComponents = new HashMap<>();
  private JPanel myAdditionalDataPanel;
  // Modificator facade handed to additional-data configurables; writes through to the UI state.
  private final SdkModificator myEditedSdkModificator = new EditedSdkModificator();

  // GUI components
  private JPanel myMainPanel;
  private TabbedPaneWrapper myTabbedPane;
  private Project myProject;
  private final SdkModel mySdkModel;
  private JLabel myHomeFieldLabel;
  private String myVersionString;

  // Snapshot of name/path at last reset/apply; used by isModified().
  private String myInitialName;
  private String myInitialPath;
  private final History myHistory;

  private final Disposable myDisposable = Disposer.newDisposable();

  public SdkEditor(Project project, SdkModel sdkModel, History history, final ProjectJdkImpl sdk) {
    myProject = project;
    mySdkModel = sdkModel;
    myHistory = history;
    mySdk = sdk;
    createMainPanel();
    initSdk(sdk);
  }

  /**
   * Points the editor at {@code sdk}: records its initial name/path (empty strings for a
   * null SDK), propagates the SDK to all additional-data configurables, and resets the UI
   * if the main panel already exists.
   */
  private void initSdk(Sdk sdk) {
    mySdk = sdk;
    if (mySdk != null) {
      myInitialName = mySdk.getName();
      myInitialPath = mySdk.getHomePath();
    }
    else {
      myInitialName = "";
      myInitialPath = "";
    }
    for (final AdditionalDataConfigurable additionalDataConfigurable : getAdditionalDataConfigurable()) {
      additionalDataConfigurable.setSdk(sdk);
    }
    if (myMainPanel != null) {
      reset();
    }
  }

  @Override
  public String getDisplayName() {
    return ProjectBundle.message("sdk.configure.editor.title");
  }

  @Override
  public String getHelpTopic() {
    return null;
  }

  @Override
  public JComponent createComponent() {
    return myMainPanel;
  }

  /**
   * Builds the panel layout: home-path label + field on top, additional-data panel below,
   * and the per-root-type tabbed pane filling the remaining space.
   */
  private void createMainPanel() {
    myMainPanel = new JPanel(new GridBagLayout());

    myTabbedPane = new TabbedPaneWrapper(myDisposable);
    for (OrderRootType type : OrderRootType.getAllTypes()) {
      if (mySdk == null || showTabForType(type)) {
        final SdkPathEditor pathEditor = OrderRootTypeUIFactory.FACTORY.getByKey(type).createPathEditor(mySdk);
        if (pathEditor != null) {
          // NOTE(review): when mySdk == null this branch is still reachable and
          // mySdk.getHomeDirectory() would NPE if pathEditor is non-null — confirm
          // that createPathEditor(null) always returns null in that case.
          pathEditor.setAddBaseDir(mySdk.getHomeDirectory());
          myTabbedPane.addTab(pathEditor.getDisplayName(), pathEditor.createComponent());
          myPathEditors.put(type, pathEditor);
        }
      }
    }
    // Record tab switches in the navigation history.
    myTabbedPane.addChangeListener(e -> myHistory.pushQueryPlace());

    myHomeComponent = createHomeComponent();
    // Home path is chosen via the browse dialog only, never typed directly.
    myHomeComponent.getTextField().setEditable(false);

    myHomeFieldLabel = new JLabel(getHomeFieldLabelValue());
    myMainPanel.add(myHomeFieldLabel,
                    new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 0.0, 0.0, GridBagConstraints.WEST,
                                           GridBagConstraints.NONE, JBUI.insets(2, 10, 2, 2), 0, 0));
    myMainPanel.add(myHomeComponent,
                    new GridBagConstraints(1, GridBagConstraints.RELATIVE, 1, 1, 1.0, 0.0, GridBagConstraints.CENTER,
                                           GridBagConstraints.HORIZONTAL, JBUI.insets(2, 2, 2, 10), 0, 0));

    myAdditionalDataPanel = new JPanel(new BorderLayout());
    myMainPanel.add(myAdditionalDataPanel,
                    new GridBagConstraints(0, GridBagConstraints.RELATIVE, 2, 1, 1.0, 0.0, GridBagConstraints.CENTER,
                                           GridBagConstraints.BOTH, JBUI.insets(2, 10, 0, 10), 0, 0));

    myMainPanel.add(myTabbedPane.getComponent(),
                    new GridBagConstraints(0, GridBagConstraints.RELATIVE, 2, 1, 1.0, 1.0, GridBagConstraints.CENTER,
                                           GridBagConstraints.BOTH, JBUI.insetsTop(2), 0, 0));
  }

  /** Creates the home-path text field; the browse button opens the SDK home chooser. */
  protected TextFieldWithBrowseButton createHomeComponent() {
    return new TextFieldWithBrowseButton(e -> doSelectHomePath());
  }

  /** Whether a tab for {@code type} applies to the current SDK's type. */
  protected boolean showTabForType(@NotNull OrderRootType type) {
    return ((SdkType)mySdk.getSdkType()).isRootTypeApplicable(type);
  }

  /** Label text for the home-path field; SDK-type-specific when an SDK is set. */
  private String getHomeFieldLabelValue() {
    return mySdk != null ? ((SdkType)mySdk.getSdkType()).getHomeFieldLabel()
                         : ProjectBundle.message("sdk.configure.general.home.path");
  }

  @Override
  public boolean isModified() {
    // Modified if the name, the home path, any path tab, or any additional-data
    // configurable has diverged from the last reset/apply snapshot.
    boolean isModified = !Comparing.equal(mySdk == null ? null : mySdk.getName(), myInitialName);
    isModified = isModified || !Comparing.equal(FileUtil.toSystemIndependentName(getHomeValue()),
                                                FileUtil.toSystemIndependentName(myInitialPath));
    for (PathEditor pathEditor : myPathEditors.values()) {
      isModified = isModified || pathEditor.isModified();
    }
    for (final AdditionalDataConfigurable configurable : getAdditionalDataConfigurable()) {
      isModified = isModified || configurable.isModified();
    }
    return isModified;
  }

  /**
   * Commits the UI state into the SDK: validates the (renamed) name is non-empty, writes
   * home path and all root-type paths through a modificator committed in a write action,
   * then applies each additional-data configurable.
   *
   * @throws ConfigurationException if the SDK name was changed to an empty value
   */
  @Override
  public void apply() throws ConfigurationException {
    if (!Comparing.equal(myInitialName, mySdk == null ? "" : mySdk.getName())) {
      if (mySdk == null || mySdk.getName().isEmpty()) {
        throw new ConfigurationException(ProjectBundle.message("sdk.list.name.required.error"));
      }
    }
    if (mySdk != null) {
      myInitialName = mySdk.getName();
      myInitialPath = mySdk.getHomePath();
      final SdkModificator sdkModificator = mySdk.getSdkModificator();
      sdkModificator.setHomePath(FileUtil.toSystemDependentName(getHomeValue()));
      for (SdkPathEditor pathEditor : myPathEditors.values()) {
        pathEditor.apply(sdkModificator);
      }
      // Root/path changes must be committed inside a write action.
      ApplicationManager.getApplication().runWriteAction(() -> sdkModificator.commitChanges());
      for (final AdditionalDataConfigurable configurable : getAdditionalDataConfigurable()) {
        if (configurable != null) {
          configurable.apply();
        }
      }
    }
  }

  /**
   * Re-populates the UI from the current SDK (or blanks it when no SDK is set), refreshes
   * the home label, rebuilds the additional-data area, and enables/disables controls.
   */
  @Override
  public void reset() {
    if (mySdk == null) {
      setHomePathValue("");
      for (SdkPathEditor pathEditor : myPathEditors.values()) {
        pathEditor.reset(null);
      }
    }
    else {
      final SdkModificator sdkModificator = mySdk.getSdkModificator();
      for (OrderRootType type : myPathEditors.keySet()) {
        myPathEditors.get(type).reset(sdkModificator);
      }
      sdkModificator.commitChanges();
      setHomePathValue(FileUtil.toSystemDependentName(ObjectUtils.notNull(mySdk.getHomePath(), "")));
    }
    myVersionString = null;
    myHomeFieldLabel.setText(getHomeFieldLabelValue());
    updateAdditionalDataComponent();

    for (final AdditionalDataConfigurable configurable : getAdditionalDataConfigurable()) {
      configurable.reset();
    }

    myHomeComponent.setEnabled(mySdk != null);

    for (int i = 0; i < myTabbedPane.getTabCount(); i++) {
      myTabbedPane.setEnabledAt(i, mySdk != null);
    }
  }

  /** Disposes all cached configurables and this editor's Disposable. */
  @Override
  public void disposeUIResources() {
    for (final SdkType sdkType : myAdditionalDataConfigurables.keySet()) {
      for (final AdditionalDataConfigurable configurable : myAdditionalDataConfigurables.get(sdkType)) {
        configurable.disposeUIResources();
      }
    }
    myAdditionalDataConfigurables.clear();
    myAdditionalDataComponents.clear();

    Disposer.dispose(myDisposable);
  }

  /** Current home path as shown in the text field, trimmed. */
  private String getHomeValue() {
    return myHomeComponent.getText().trim();
  }

  /** Clears every root-type path list. */
  private void clearAllPaths() {
    for (PathEditor editor : myPathEditors.values()) {
      editor.clearList();
    }
  }

  /**
   * Sets the home-path field text and colors it red (PathEditor.INVALID_COLOR) when a
   * local SDK's path does not exist or has the wrong file/directory kind.
   */
  private void setHomePathValue(String absolutePath) {
    myHomeComponent.setText(absolutePath);
    final Color fg;
    if (absolutePath != null && !absolutePath.isEmpty() && mySdk != null && mySdk.getSdkType().isLocalSdk(mySdk)) {
      final File homeDir = new File(absolutePath);
      boolean homeMustBeDirectory = ((SdkType)mySdk.getSdkType()).getHomeChooserDescriptor().isChooseFolders();
      fg = homeDir.exists() && homeDir.isDirectory() == homeMustBeDirectory ? UIUtil.getFieldForegroundColor() : PathEditor.INVALID_COLOR;
    }
    else {
      fg = UIUtil.getFieldForegroundColor();
    }
    myHomeComponent.getTextField().setForeground(fg);
  }

  /** Opens the SDK-type-specific home chooser and applies the selected path. */
  private void doSelectHomePath() {
    final SdkType sdkType = (SdkType)mySdk.getSdkType();
    SdkConfigurationUtil.selectSdkHome(sdkType, path -> doSetHomePath(path, sdkType));
  }

  /**
   * Applies a newly chosen home path: renames the SDK to a suggested unique name, then
   * sets up paths on a cloned "dummy" SDK (so failures don't corrupt the real one),
   * copies the resulting roots into the path tabs, and notifies the SDK model.
   */
  private void doSetHomePath(final String homePath, final SdkType sdkType) {
    if (homePath == null) {
      return;
    }
    setHomePathValue(homePath.replace('/', File.separatorChar));

    final String newSdkName = suggestSdkName(homePath);
    ((ProjectJdkImpl)mySdk).setName(newSdkName);

    try {
      final Sdk dummySdk = (Sdk)mySdk.clone();
      SdkModificator sdkModificator = dummySdk.getSdkModificator();
      sdkModificator.setHomePath(homePath);
      sdkModificator.removeAllRoots();
      sdkModificator.commitChanges();

      sdkType.setupSdkPaths(dummySdk, mySdkModel);

      clearAllPaths();
      myVersionString = dummySdk.getVersionString();
      if (myVersionString == null) {
        // A null version string is treated as a corrupt installation.
        Messages.showMessageDialog(ProjectBundle.message("sdk.java.corrupt.error", homePath),
                                   ProjectBundle.message("sdk.java.corrupt.title"), Messages.getErrorIcon());
      }
      sdkModificator = dummySdk.getSdkModificator();
      for (OrderRootType type : myPathEditors.keySet()) {
        SdkPathEditor pathEditor = myPathEditors.get(type);
        pathEditor.setAddBaseDir(dummySdk.getHomeDirectory());
        pathEditor.addPaths(sdkModificator.getRoots(type));
      }
      mySdkModel.getMulticaster().sdkHomeSelected(dummySdk, homePath);
    }
    catch (CloneNotSupportedException e) {
      LOG.error(e); // should not happen in normal program
    }
  }

  /**
   * Returns the SDK-type-suggested name for {@code homePath}, suffixed with " (n)" as
   * needed to avoid clashing with any existing SDK name in the model.
   */
  private String suggestSdkName(final String homePath) {
    final String currentName = mySdk.getName();
    final String suggestedName = ((SdkType)mySdk.getSdkType()).suggestSdkName(currentName, homePath);
    if (Comparing.equal(currentName, suggestedName)) return currentName;
    String newSdkName = suggestedName;
    final Set<String> allNames = new HashSet<>();
    Sdk[] sdks = mySdkModel.getSdks();
    for (Sdk sdk : sdks) {
      allNames.add(sdk.getName());
    }
    int i = 0;
    while (allNames.contains(newSdkName)) {
      newSdkName = suggestedName + " (" + ++i + ")";
    }
    return newSdkName;
  }

  /**
   * Rebuilds the additional-data area: components with a tab name are (re)inserted as
   * tabs, the rest are placed in the panel below the home field.
   */
  private void updateAdditionalDataComponent() {
    myAdditionalDataPanel.removeAll();
    for (AdditionalDataConfigurable configurable : getAdditionalDataConfigurable()) {
      JComponent component = myAdditionalDataComponents.get(configurable);
      if (component == null) {
        component = configurable.createComponent();
        myAdditionalDataComponents.put(configurable, component);
      }
      if (component != null) {
        if (configurable.getTabName() != null) {
          // Remove any stale tab with the same title before re-adding.
          for (int i = 0; i < myTabbedPane.getTabCount(); i++) {
            if (configurable.getTabName().equals(myTabbedPane.getTitleAt(i))) {
              myTabbedPane.removeTabAt(i);
            }
          }
          myTabbedPane.addTab(configurable.getTabName(), component);
        }
        else {
          myAdditionalDataPanel.add(component, BorderLayout.CENTER);
        }
      }
    }
  }

  /** Configurables for the current SDK's type; empty list when no SDK is set. */
  @NotNull
  private List<AdditionalDataConfigurable> getAdditionalDataConfigurable() {
    if (mySdk == null) {
      return ContainerUtil.emptyList();
    }
    return initAdditionalDataConfigurable(mySdk);
  }

  /**
   * Lazily creates and caches the additional-data configurables for {@code sdk}'s type:
   * the type's own configurable (if any) plus one per registered
   * {@link SdkEditorAdditionalOptionsProvider}.
   */
  @NotNull
  private List<AdditionalDataConfigurable> initAdditionalDataConfigurable(Sdk sdk) {
    final SdkType sdkType = (SdkType)sdk.getSdkType();
    List<AdditionalDataConfigurable> configurables = myAdditionalDataConfigurables.get(sdkType);
    if (configurables == null) {
      configurables = Lists.newArrayList();
      myAdditionalDataConfigurables.put(sdkType, configurables);

      AdditionalDataConfigurable sdkConfigurable = sdkType.createAdditionalDataConfigurable(mySdkModel, myEditedSdkModificator);
      if (sdkConfigurable != null) {
        configurables.add(sdkConfigurable);
      }

      for (SdkEditorAdditionalOptionsProvider factory : SdkEditorAdditionalOptionsProvider.getSdkOptionsFactory(mySdk.getSdkType())) {
        AdditionalDataConfigurable options = factory.createOptions(myProject, mySdk);
        if (options != null) {
          configurables.add(options);
        }
      }
    }

    return configurables;
  }

  /**
   * SdkModificator view over this editor's live UI state: reads/writes go to the text
   * field and path tabs rather than to the SDK itself. Mutating version string or
   * additional data is unsupported; commitChanges is a no-op.
   */
  private class EditedSdkModificator implements SdkModificator {
    @Override
    public String getName() {
      return mySdk.getName();
    }

    @Override
    public void setName(String name) {
      ((ProjectJdkImpl)mySdk).setName(name);
    }

    @Override
    public String getHomePath() {
      return getHomeValue();
    }

    @Override
    public void setHomePath(String path) {
      doSetHomePath(path, (SdkType)mySdk.getSdkType());
    }

    @Override
    public String getVersionString() {
      return myVersionString != null ? myVersionString : mySdk.getVersionString();
    }

    @Override
    public void setVersionString(String versionString) {
      throw new UnsupportedOperationException(); // not supported for this editor
    }

    @Override
    public SdkAdditionalData getSdkAdditionalData() {
      return mySdk.getSdkAdditionalData();
    }

    @Override
    public void setSdkAdditionalData(SdkAdditionalData data) {
      throw new UnsupportedOperationException(); // not supported for this editor
    }

    @Override
    public VirtualFile[] getRoots(OrderRootType rootType) {
      final PathEditor editor = myPathEditors.get(rootType);
      if (editor == null) throw new IllegalStateException("no editor for root type " + rootType);
      return editor.getRoots();
    }

    @Override
    public void addRoot(VirtualFile root, OrderRootType rootType) {
      myPathEditors.get(rootType).addPaths(root);
    }

    @Override
    public void removeRoot(VirtualFile root, OrderRootType rootType) {
      myPathEditors.get(rootType).removePaths(root);
    }

    @Override
    public void removeRoots(OrderRootType rootType) {
      myPathEditors.get(rootType).clearList();
    }

    @Override
    public void removeAllRoots() {
      for (PathEditor editor : myPathEditors.values()) {
        editor.clearList();
      }
    }

    @Override
    public void commitChanges() { }

    @Override
    public boolean isWritable() {
      return true;
    }
  }

  /** Selects the tab recorded in {@code place} (under SDK_TAB), if any. */
  @Override
  public ActionCallback navigateTo(@Nullable final Place place, final boolean requestFocus) {
    if (place == null) return ActionCallback.DONE;
    myTabbedPane.setSelectedTitle((String)place.getPath(SDK_TAB));
    return ActionCallback.DONE;
  }

  /** Records the currently selected tab title into {@code place} under SDK_TAB. */
  @Override
  public void queryPlace(@NotNull final Place place) {
    place.putPath(SDK_TAB, myTabbedPane.getSelectedTitle());
  }

  @Override
  public void setHistory(final History history) { }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.indices.breaker;

import org.elasticsearch.common.breaker.ChildMemoryCircuitBreaker;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.breaker.CircuitBreakingException;
import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;

/**
 * CircuitBreakerService that attempts to redistribute space between breakers
 * if tripped
 */
public class HierarchyCircuitBreakerService extends CircuitBreakerService {
    private static final String CHILD_LOGGER_PREFIX = "org.elasticsearch.indices.breaker.";

    // All registered child breakers, keyed by breaker name; the "parent" breaker is
    // virtual (derived from the sum of children) and is never stored in this map.
    private final ConcurrentMap<String, CircuitBreaker> breakers = new ConcurrentHashMap<>();

    // Overall (parent) limit across all child breakers combined.
    public static final Setting<ByteSizeValue> TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING =
        Setting.memorySizeSetting("indices.breaker.total.limit", "70%", Property.Dynamic, Property.NodeScope);

    // Field-data breaker: limit, overhead multiplier, and breaker implementation type.
    public static final Setting<ByteSizeValue> FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING =
        Setting.memorySizeSetting("indices.breaker.fielddata.limit", "60%", Property.Dynamic, Property.NodeScope);
    public static final Setting<Double> FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING =
        Setting.doubleSetting("indices.breaker.fielddata.overhead", 1.03d, 0.0d, Property.Dynamic, Property.NodeScope);
    public static final Setting<CircuitBreaker.Type> FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING =
        new Setting<>("indices.breaker.fielddata.type", "memory", CircuitBreaker.Type::parseValue, Property.NodeScope);

    // Request breaker: limit, overhead multiplier, and breaker implementation type.
    public static final Setting<ByteSizeValue> REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING =
        Setting.memorySizeSetting("indices.breaker.request.limit", "60%", Property.Dynamic, Property.NodeScope);
    public static final Setting<Double> REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING =
        Setting.doubleSetting("indices.breaker.request.overhead", 1.0d, 0.0d, Property.Dynamic, Property.NodeScope);
    public static final Setting<CircuitBreaker.Type> REQUEST_CIRCUIT_BREAKER_TYPE_SETTING =
        new Setting<>("indices.breaker.request.type", "memory", CircuitBreaker.Type::parseValue, Property.NodeScope);

    // In-flight-requests breaker: limit, overhead multiplier, and breaker implementation type.
    public static final Setting<ByteSizeValue> IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_LIMIT_SETTING =
        Setting.memorySizeSetting("network.breaker.inflight_requests.limit", "100%", Property.Dynamic, Property.NodeScope);
    public static final Setting<Double> IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_OVERHEAD_SETTING =
        Setting.doubleSetting("network.breaker.inflight_requests.overhead", 1.0d, 0.0d, Property.Dynamic, Property.NodeScope);
    public static final Setting<CircuitBreaker.Type> IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_TYPE_SETTING =
        new Setting<>("network.breaker.inflight_requests.type", "memory", CircuitBreaker.Type::parseValue, Property.NodeScope);

    // volatile: these are replaced wholesale by dynamic-settings update callbacks and
    // read without locking by checkParentLimit()/stats().
    private volatile BreakerSettings parentSettings;
    private volatile BreakerSettings fielddataSettings;
    private volatile BreakerSettings inFlightRequestsSettings;
    private volatile BreakerSettings requestSettings;

    // Tripped count for when redistribution was attempted but wasn't successful
    private final AtomicLong parentTripCount = new AtomicLong(0);

    /**
     * Builds the initial breaker settings from node settings, registers the three
     * standard child breakers (request, fielddata, in-flight requests), and subscribes
     * to dynamic cluster-setting updates for each limit/overhead pair.
     */
    public HierarchyCircuitBreakerService(Settings settings, ClusterSettings clusterSettings) {
        super(settings);
        this.fielddataSettings = new BreakerSettings(CircuitBreaker.FIELDDATA,
                FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).getBytes(),
                FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.get(settings),
                FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING.get(settings)
        );

        this.inFlightRequestsSettings = new BreakerSettings(CircuitBreaker.IN_FLIGHT_REQUESTS,
                IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).getBytes(),
                IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_OVERHEAD_SETTING.get(settings),
                IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_TYPE_SETTING.get(settings)
        );

        this.requestSettings = new BreakerSettings(CircuitBreaker.REQUEST,
                REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).getBytes(),
                REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING.get(settings),
                REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.get(settings)
        );

        // The parent breaker is purely virtual: fixed overhead of 1.0 and type PARENT.
        this.parentSettings = new BreakerSettings(CircuitBreaker.PARENT,
                TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).getBytes(), 1.0, CircuitBreaker.Type.PARENT);
        if (logger.isTraceEnabled()) {
            logger.trace("parent circuit breaker with settings {}", this.parentSettings);
        }

        registerBreaker(this.requestSettings);
        registerBreaker(this.fielddataSettings);
        registerBreaker(this.inFlightRequestsSettings);

        clusterSettings.addSettingsUpdateConsumer(TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, this::setTotalCircuitBreakerLimit,
            this::validateTotalCircuitBreakerLimit);
        clusterSettings.addSettingsUpdateConsumer(FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING,
            this::setFieldDataBreakerLimit);
        clusterSettings.addSettingsUpdateConsumer(IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_LIMIT_SETTING,
            IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_OVERHEAD_SETTING, this::setInFlightRequestsBreakerLimit);
        clusterSettings.addSettingsUpdateConsumer(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING, REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING,
            this::setRequestBreakerLimit);
    }

    /** Dynamic-settings callback: re-registers the request breaker with new limit/overhead. */
    private void setRequestBreakerLimit(ByteSizeValue newRequestMax, Double newRequestOverhead) {
        BreakerSettings newRequestSettings = new BreakerSettings(CircuitBreaker.REQUEST, newRequestMax.getBytes(), newRequestOverhead,
            HierarchyCircuitBreakerService.this.requestSettings.getType());
        registerBreaker(newRequestSettings);
        HierarchyCircuitBreakerService.this.requestSettings = newRequestSettings;
        logger.info("Updated breaker settings request: {}", newRequestSettings);
    }

    /** Dynamic-settings callback: re-registers the in-flight-requests breaker with new limit/overhead. */
    private void setInFlightRequestsBreakerLimit(ByteSizeValue newInFlightRequestsMax, Double newInFlightRequestsOverhead) {
        BreakerSettings newInFlightRequestsSettings = new BreakerSettings(CircuitBreaker.IN_FLIGHT_REQUESTS,
            newInFlightRequestsMax.getBytes(), newInFlightRequestsOverhead,
            HierarchyCircuitBreakerService.this.inFlightRequestsSettings.getType());
        registerBreaker(newInFlightRequestsSettings);
        HierarchyCircuitBreakerService.this.inFlightRequestsSettings = newInFlightRequestsSettings;
        logger.info("Updated breaker settings for in-flight requests: {}", newInFlightRequestsSettings);
    }

    /**
     * Dynamic-settings callback for the fielddata breaker. Unlike the other callbacks,
     * either argument may be null, in which case the previous limit/overhead is kept.
     */
    private void setFieldDataBreakerLimit(ByteSizeValue newFielddataMax, Double newFielddataOverhead) {
        long newFielddataLimitBytes = newFielddataMax == null ?
            HierarchyCircuitBreakerService.this.fielddataSettings.getLimit() : newFielddataMax.getBytes();
        newFielddataOverhead = newFielddataOverhead == null ?
            HierarchyCircuitBreakerService.this.fielddataSettings.getOverhead() : newFielddataOverhead;
        BreakerSettings newFielddataSettings = new BreakerSettings(CircuitBreaker.FIELDDATA, newFielddataLimitBytes,
            newFielddataOverhead, HierarchyCircuitBreakerService.this.fielddataSettings.getType());
        registerBreaker(newFielddataSettings);
        HierarchyCircuitBreakerService.this.fielddataSettings = newFielddataSettings;
        logger.info("Updated breaker settings field data: {}", newFielddataSettings);
    }

    /** Settings-update validator: rejects an invalid total limit before it is applied. */
    private boolean validateTotalCircuitBreakerLimit(ByteSizeValue byteSizeValue) {
        BreakerSettings newParentSettings =
            new BreakerSettings(CircuitBreaker.PARENT, byteSizeValue.getBytes(), 1.0, CircuitBreaker.Type.PARENT);
        validateSettings(new BreakerSettings[]{newParentSettings});
        return true;
    }

    /** Dynamic-settings callback: swaps in the new parent (total) limit. */
    private void setTotalCircuitBreakerLimit(ByteSizeValue byteSizeValue) {
        BreakerSettings newParentSettings =
            new BreakerSettings(CircuitBreaker.PARENT, byteSizeValue.getBytes(), 1.0, CircuitBreaker.Type.PARENT);
        this.parentSettings = newParentSettings;
    }

    /**
     * Validate that child settings are valid
     * <p>
     * A limit of -1 marks a disabled breaker and is skipped; otherwise the overhead
     * multiplier must be non-negative.
     *
     * @throws IllegalStateException if any enabled child has a negative overhead
     */
    public static void validateSettings(BreakerSettings[] childrenSettings) throws IllegalStateException {
        for (BreakerSettings childSettings : childrenSettings) {
            // If the child is disabled, ignore it
            if (childSettings.getLimit() == -1) {
                continue;
            }
            if (childSettings.getOverhead() < 0) {
                throw new IllegalStateException("Child breaker overhead " + childSettings + " must be non-negative");
            }
        }
    }

    @Override
    public CircuitBreaker getBreaker(String name) {
        return this.breakers.get(name);
    }

    @Override
    public AllCircuitBreakerStats stats() {
        long parentEstimated = 0;
        List<CircuitBreakerStats> allStats = new ArrayList<>(this.breakers.size());
        // Gather the "estimated" count for the parent breaker by adding the
        // estimations for each individual breaker
        for (CircuitBreaker breaker : this.breakers.values()) {
            allStats.add(stats(breaker.getName()));
            parentEstimated += breaker.getUsed();
        }
        // Manually add the parent breaker settings since they aren't part of the breaker map
        allStats.add(new CircuitBreakerStats(CircuitBreaker.PARENT,
            parentSettings.getLimit(), parentEstimated, 1.0, parentTripCount.get()));
        return new AllCircuitBreakerStats(allStats.toArray(new CircuitBreakerStats[allStats.size()]));
    }

    // NOTE(review): this dereferences the map lookup without a null check, so an
    // unknown breaker name would NPE — callers appear to pass only registered names.
    @Override
    public CircuitBreakerStats stats(String name) {
        CircuitBreaker breaker = this.breakers.get(name);
        return new CircuitBreakerStats(breaker.getName(), breaker.getLimit(), breaker.getUsed(), breaker.getOverhead(),
            breaker.getTrippedCount());
    }

    /**
     * Checks whether the parent breaker has been tripped
     * <p>
     * Sums {@code used * overhead} across all child breakers and throws when the total
     * exceeds the parent limit, also incrementing the parent trip counter.
     *
     * @param label description of the allocation included in the exception message
     * @throws CircuitBreakingException if the combined usage exceeds the parent limit
     */
    public void checkParentLimit(String label) throws CircuitBreakingException {
        long totalUsed = 0;
        for (CircuitBreaker breaker : this.breakers.values()) {
            totalUsed += (breaker.getUsed() * breaker.getOverhead());
        }

        long parentLimit = this.parentSettings.getLimit();
        if (totalUsed > parentLimit) {
            this.parentTripCount.incrementAndGet();
            final String message = "[parent] Data too large, data for [" + label + "]" +
                " would be [" + totalUsed + "/" + new ByteSizeValue(totalUsed) + "]" +
                ", which is larger than the limit of [" +
                parentLimit + "/" + new ByteSizeValue(parentLimit) + "]";
            throw new CircuitBreakingException(message, totalUsed, parentLimit);
        }
    }

    /**
     * Allows to register a custom circuit breaker.
     * Warning: Will overwrite any existing custom breaker with the same name.
     */
    @Override
    public void registerBreaker(BreakerSettings breakerSettings) {
        // Validate the settings
        validateSettings(new BreakerSettings[] {breakerSettings});

        if (breakerSettings.getType() == CircuitBreaker.Type.NOOP) {
            // NOOP breakers can simply replace whatever was there.
            CircuitBreaker breaker = new NoopCircuitBreaker(breakerSettings.getName());
            breakers.put(breakerSettings.getName(), breaker);
        } else {
            CircuitBreaker oldBreaker;
            CircuitBreaker breaker = new ChildMemoryCircuitBreaker(breakerSettings,
                Loggers.getLogger(CHILD_LOGGER_PREFIX + breakerSettings.getName()),
                this, breakerSettings.getName());

            // Lock-free retry loop: if a breaker with this name already exists, build a
            // replacement from the old breaker (the ChildMemoryCircuitBreaker constructor
            // takes it to carry over its accounting) and atomically swap it in via
            // replace(); retry if another thread swapped concurrently.
            for (;;) {
                oldBreaker = breakers.putIfAbsent(breakerSettings.getName(), breaker);
                if (oldBreaker == null) {
                    return;
                }
                breaker = new ChildMemoryCircuitBreaker(breakerSettings,
                    (ChildMemoryCircuitBreaker)oldBreaker,
                    Loggers.getLogger(CHILD_LOGGER_PREFIX + breakerSettings.getName()),
                    this, breakerSettings.getName());

                if (breakers.replace(breakerSettings.getName(), oldBreaker, breaker)) {
                    return;
                }
            }
        }

    }
}
/**
 * Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.kaazing.gateway.util.ssl;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Tests for {@link SslCipherSuites#resolve}, which expands OpenSSL-style
 * cipher strings (strength/key-exchange/authentication/encryption/MAC
 * aliases, nicknames, and the {@code !}/{@code -}/{@code +}/{@code @STRENGTH}
 * operators) into Java (JSSE) cipher-suite names.
 */
public class SslCipherSuitesTest {

    // Java 6 JSSE cipher-suite names, index-aligned with
    // OPENSSL_CIPHER_NICKNAMES below (entry i of one list is the
    // same suite as entry i of the other).
    private static final String[] JAVA6_CIPHERSUITES = new String[] {
        "SSL_RSA_WITH_RC4_128_MD5",
        "SSL_RSA_WITH_RC4_128_SHA",
        "TLS_RSA_WITH_AES_128_CBC_SHA",
        "TLS_RSA_WITH_AES_256_CBC_SHA",
        "TLS_ECDH_ECDSA_WITH_RC4_128_SHA",
        "TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA",
        "TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA",
        "TLS_ECDH_RSA_WITH_RC4_128_SHA",
        "TLS_ECDH_RSA_WITH_AES_128_CBC_SHA",
        "TLS_ECDH_RSA_WITH_AES_256_CBC_SHA",
        "TLS_ECDHE_ECDSA_WITH_RC4_128_SHA",
        "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA",
        "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA",
        "TLS_ECDHE_RSA_WITH_RC4_128_SHA",
        "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA",
        "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA",
        "TLS_DHE_RSA_WITH_AES_128_CBC_SHA",
        "TLS_DHE_RSA_WITH_AES_256_CBC_SHA",
        "TLS_DHE_DSS_WITH_AES_128_CBC_SHA",
        "TLS_DHE_DSS_WITH_AES_256_CBC_SHA",
        "SSL_RSA_WITH_3DES_EDE_CBC_SHA",
        "TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA",
        "TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA",
        "TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA",
        "TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA",
        "SSL_DHE_RSA_WITH_3DES_EDE_CBC_SHA",
        "SSL_DHE_DSS_WITH_3DES_EDE_CBC_SHA",
        "SSL_RSA_WITH_DES_CBC_SHA",
        "SSL_DHE_RSA_WITH_DES_CBC_SHA",
        "SSL_DHE_DSS_WITH_DES_CBC_SHA",
        "SSL_RSA_EXPORT_WITH_RC4_40_MD5",
        "SSL_RSA_EXPORT_WITH_DES40_CBC_SHA",
        "SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA",
        "SSL_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA",
        "SSL_RSA_WITH_NULL_MD5",
        "SSL_RSA_WITH_NULL_SHA",
        "TLS_ECDH_ECDSA_WITH_NULL_SHA",
        "TLS_ECDH_RSA_WITH_NULL_SHA",
        "TLS_ECDHE_ECDSA_WITH_NULL_SHA",
        "TLS_ECDHE_RSA_WITH_NULL_SHA",
        "SSL_DH_anon_WITH_RC4_128_MD5",
        "TLS_DH_anon_WITH_AES_128_CBC_SHA",
        "TLS_DH_anon_WITH_AES_256_CBC_SHA",
        "SSL_DH_anon_WITH_3DES_EDE_CBC_SHA",
        "SSL_DH_anon_WITH_DES_CBC_SHA",
        "TLS_ECDH_anon_WITH_RC4_128_SHA",
        "TLS_ECDH_anon_WITH_AES_128_CBC_SHA",
        "TLS_ECDH_anon_WITH_AES_256_CBC_SHA",
        "TLS_ECDH_anon_WITH_3DES_EDE_CBC_SHA",
        "SSL_DH_anon_EXPORT_WITH_RC4_40_MD5",
        "SSL_DH_anon_EXPORT_WITH_DES40_CBC_SHA",
        "TLS_ECDH_anon_WITH_NULL_SHA",
        "TLS_KRB5_WITH_RC4_128_SHA",
        "TLS_KRB5_WITH_RC4_128_MD5",
        "TLS_KRB5_WITH_3DES_EDE_CBC_SHA",
        "TLS_KRB5_WITH_3DES_EDE_CBC_MD5",
        "TLS_KRB5_WITH_DES_CBC_SHA",
        "TLS_KRB5_WITH_DES_CBC_MD5",
        "TLS_KRB5_EXPORT_WITH_RC4_40_SHA",
        "TLS_KRB5_EXPORT_WITH_RC4_40_MD5"
    };

    // OpenSSL nicknames for the suites above, in the same order.
    private static final String[] OPENSSL_CIPHER_NICKNAMES = new String[] {
        "RC4-MD5",
        "RC4-SHA",
        "AES128-SHA",
        "AES256-SHA",
        "ECDH-ECDSA-RC4-SHA",
        "ECDH-ECDSA-AES128-SHA",
        "ECDH-ECDSA-AES256-SHA",
        "ECDH-RSA-RC4-SHA",
        "ECDH-RSA-AES128-SHA",
        "ECDH-RSA-AES256-SHA",
        "ECDHE-ECDSA-RC4-SHA",
        "ECDHE-ECDSA-AES128-SHA",
        "ECDHE-ECDSA-AES256-SHA",
        "ECDHE-RSA-RC4-SHA",
        "ECDHE-RSA-AES128-SHA",
        "ECDHE-RSA-AES256-SHA",
        "DHE-RSA-AES128-SHA",
        "DHE-RSA-AES256-SHA",
        "DHE-DSS-AES128-SHA",
        "DHE-DSS-AES256-SHA",
        "DES-CBC3-SHA",
        "ECDH-ECDSA-DES-CBC3-SHA",
        "ECDH-RSA-DES-CBC3-SHA",
        "ECDHE-ECDSA-DES-CBC3-SHA",
        "ECDHE-RSA-DES-CBC3-SHA",
        "EDH-RSA-DES-CBC3-SHA",
        "EDH-DSS-DES-CBC3-SHA",
        "DES-CBC-SHA",
        "EDH-RSA-DES-CBC-SHA",
        "EDH-DSS-DES-CBC-SHA",
        "EXP-RC4-MD5",
        "EXP-DES-CBC-SHA",
        "EXP-EDH-RSA-DES-CBC-SHA",
        "EXP-EDH-DSS-DES-CBC-SHA",
        "NULL-MD5",
        "NULL-SHA",
        "ECDH-ECDSA-NULL-SHA",
        "ECDH-RSA-NULL-SHA",
        "ECDHE-ECDSA-NULL-SHA",
        "ECDHE-RSA-NULL-SHA",
        "ADH-RC4-MD5",
        "ADH-AES128-SHA",
        "ADH-AES256-SHA",
        "ADH-DES-CBC3-SHA",
        "ADH-DES-CBC-SHA",
        "AECDH-RC4-SHA",
        "AECDH-AES128-SHA",
        "AECDH-AES256-SHA",
        "AECDH-DES-CBC3-SHA",
        "EXP-ADH-RC4-MD5",
        "EXP-ADH-DES-CBC-SHA",
        "AECDH-NULL-SHA",          // TLS1_TXT_ECDH_anon_WITH_NULL_SHA
        "KRB5-RC4-SHA",
        "KRB5-RC4-MD5",
        "KRB5-DES-CBC3-SHA",
        "KRB5-DES-CBC3-MD5",
        "KRB5-DES-CBC-SHA",
        "KRB5-DES-CBC-MD5",
        "EXP-KRB5-RC4-SHA",
        "EXP-KRB5-RC4-MD5"
    };

    @BeforeClass
    public static void init() throws Exception {
        SslCipherSuites.reset();
    }

    // ------------------------------------------------------------------
    // Helpers (extracted from the formerly duplicated per-test boilerplate)
    // ------------------------------------------------------------------

    /** Builds a configured list from the given tokens and resolves it. */
    private static List<String> resolveTokens(String... tokens) {
        List<String> configured = new ArrayList<String>(tokens.length);
        for (String token : tokens) {
            configured.add(token);
        }
        return SslCipherSuites.resolve(configured);
    }

    /**
     * Resolves the given tokens and asserts a non-null, non-empty result.
     *
     * @return the resolved list, for further assertions
     */
    private static List<String> assertResolvesNonEmpty(String... tokens) {
        List<String> resolved = resolveTokens(tokens);
        Assert.assertTrue(String.format("Expected %s cipher suites, got none",
            Arrays.toString(tokens)),
            resolved != null && resolved.size() != 0);
        return resolved;
    }

    /** Asserts the resolved list has exactly the expected number of suites. */
    private static void assertCount(List<String> resolved, int expected) {
        Assert.assertTrue(String.format("Expected %d matching ciphers, got %d",
            expected, resolved.size()),
            resolved.size() == expected);
    }

    /** True when running on a Java 7 runtime (some suites are 1.7-only). */
    private static boolean isJava7() {
        return System.getProperty("java.version").startsWith("1.7.");
    }

    // ------------------------------------------------------------------
    // Exact Java names resolve to themselves
    // ------------------------------------------------------------------

    @Test
    public void shouldResolveJava6CipherSuites() throws Exception {
        for (String ciphersuite : JAVA6_CIPHERSUITES) {
            List<String> resolved = resolveTokens(ciphersuite);
            Assert.assertTrue("Expected ciphersuite, got null", resolved != null);
            Assert.assertTrue(String.format("Expected 1 ciphersuite, got %d",
                resolved.size()), resolved.size() == 1);
            Assert.assertTrue(String.format("Expected ciphersuite %s, got %s",
                ciphersuite, resolved.get(0)),
                resolved.get(0).equals(ciphersuite));
        }
    }

    // Strengths

    @Test
    public void shouldResolveHighStrength() throws Exception {
        assertResolvesNonEmpty("HIGH");
    }

    @Test
    public void shouldResolveMediumStrength() throws Exception {
        assertResolvesNonEmpty("MEDIUM");
    }

    @Test
    public void shouldResolveLowStrength() throws Exception {
        assertResolvesNonEmpty("LOW");
    }

    @Test
    public void shouldResolveExport40Strength() throws Exception {
        assertResolvesNonEmpty("EXPORT40");
    }

    // EXPORT56 is not supported; resolution must fail.
    @Test(expected = IllegalArgumentException.class)
    public void shouldResolveExport56Strength() throws Exception {
        resolveTokens("EXPORT56");
    }

    // Key exchanges

    @Test
    public void shouldResolveRSAKeyExchange() throws Exception {
        assertResolvesNonEmpty("kRSA");
    }

    // There are no DH key exchanges supported by Java currently
    @Test(expected = IllegalArgumentException.class)
    public void shouldResolveDHKeyExchange() throws Exception {
        resolveTokens("kDH");
    }

    @Test
    public void shouldResolveECDHECDSAKeyExchange() throws Exception {
        assertResolvesNonEmpty("kECDHe");
    }

    @Test
    public void shouldResolveECDHRSAKeyExchange() throws Exception {
        assertResolvesNonEmpty("kECDHr");
    }

    @Test
    public void shouldResolveECDHEKeyExchange() throws Exception {
        assertResolvesNonEmpty("kEECDHe");
    }

    @Test
    public void shouldResolveKRB5KeyExchange() throws Exception {
        assertResolvesNonEmpty("kKRB5");
    }

    // Authentications

    @Test
    public void shouldResolveRSAAuthentication() throws Exception {
        assertResolvesNonEmpty("aRSA");
    }

    @Test
    public void shouldResolveDSSAuthentication() throws Exception {
        assertResolvesNonEmpty("aDSS");
    }

    @Test
    public void shouldResolveECDHAuthentication() throws Exception {
        assertResolvesNonEmpty("aECDH");
    }

    @Test
    public void shouldResolveECDSAAuthentication() throws Exception {
        assertResolvesNonEmpty("aECDSA");
    }

    @Test
    public void shouldResolveKRB5Authentication() throws Exception {
        assertResolvesNonEmpty("aKRB5");
    }

    @Test
    public void shouldResolveNullAuthentication() throws Exception {
        assertResolvesNonEmpty("aNULL");
    }

    // Encryptions

    @Test
    public void shouldResolveRC4Encryption() throws Exception {
        assertResolvesNonEmpty("RC4");
    }

    @Test
    public void shouldResolveAES128Encryption() throws Exception {
        assertResolvesNonEmpty("AES128");
    }

    @Test
    public void shouldResolveAES256Encryption() throws Exception {
        assertResolvesNonEmpty("AES256");
    }

    @Test
    public void shouldResolve3DESEncryption() throws Exception {
        assertResolvesNonEmpty("3DES");
    }

    @Test
    public void shouldResolveDESEncryption() throws Exception {
        assertResolvesNonEmpty("DES");
    }

    @Test
    public void shouldResolveNullEncryption() throws Exception {
        assertResolvesNonEmpty("eNULL");
    }

    // MACs

    @Test
    public void shouldResolveMD5MAC() throws Exception {
        assertResolvesNonEmpty("MD5");
    }

    @Test
    public void shouldResolveSHA1MAC() throws Exception {
        assertResolvesNonEmpty("SHA1");
    }

    @Test
    public void shouldResolveSHA256MAC() throws Exception {
        // SHA-256 suites are only available on Java 7
        if (!isJava7()) {
            return;
        }
        List<String> resolved = assertResolvesNonEmpty("SHA256");
        assertCount(resolved, 11);
    }

    @Test
    public void shouldResolveSHA384MAC() throws Exception {
        // SHA-384 suites are only available on Java 7
        if (!isJava7()) {
            return;
        }
        List<String> resolved = assertResolvesNonEmpty("SHA384");
        assertCount(resolved, 4);
    }

    // Protocol versions

    @Test
    public void shouldResolveSSLv3() throws Exception {
        List<String> resolved = assertResolvesNonEmpty("SSLv3");
        assertCount(resolved, 19);
    }

    @Test
    public void shouldResolveTLSv1() throws Exception {
        List<String> resolved = assertResolvesNonEmpty("TLSv1");
        assertCount(resolved, 43);
    }

    // NB: This is expected to fail, since there aren't any ciphersuites
    // specific to TLSv1.1
    @Test(expected = IllegalArgumentException.class)
    public void shouldResolveTLSv11() throws Exception {
        resolveTokens("TLSv1.1");
    }

    @Test
    public void shouldResolveTLSv12() throws Exception {
        if (isJava7()) {
            List<String> resolved = assertResolvesNonEmpty("TLSv1.2");
            assertCount(resolved, 15);
        } else {
            // Java 6 does not support any of the TLSv1.2 ciphers
        }
    }

    // FIPS

    @Test
    public void shouldResolveFIPS() throws Exception {
        List<String> resolved = assertResolvesNonEmpty("FIPS");
        assertCount(resolved, 49);
    }

    // Cipher nicknames

    @Test
    public void shouldResolveCipherNicknames() throws Exception {
        for (int i = 0; i < OPENSSL_CIPHER_NICKNAMES.length; i++) {
            List<String> resolved = resolveTokens(OPENSSL_CIPHER_NICKNAMES[i]);
            Assert.assertTrue("Expected ciphersuite, got null", resolved != null);
            Assert.assertTrue(String.format("Expected 1 ciphersuite, got %d",
                resolved.size()), resolved.size() == 1);

            // Each nickname must map to the index-aligned Java name
            String expected = JAVA6_CIPHERSUITES[i];
            Assert.assertTrue(String.format("Expected ciphersuite %s, got %s",
                expected, resolved.get(0)),
                resolved.get(0).equals(expected));
        }
    }

    // Group aliases

    @Test(expected = IllegalArgumentException.class)
    public void shouldResolveDHAlias() throws Exception {
        resolveTokens("DH");
    }

    @Test
    public void shouldResolvekECDHAlias() throws Exception {
        assertResolvesNonEmpty("kECDH");
    }

    @Test
    public void shouldResolveECDHAlias() throws Exception {
        assertResolvesNonEmpty("ECDH");
    }

    @Test
    public void shouldResolveDSSAlias() throws Exception {
        assertResolvesNonEmpty("DSS");
    }

    @Test
    public void shouldResolveECDSAAlias() throws Exception {
        assertResolvesNonEmpty("ECDSA");
    }

    @Test
    public void shouldResolveNullAlias() throws Exception {
        assertResolvesNonEmpty("NULL");
    }

    @Test
    public void shouldResolveKRB5Alias() throws Exception {
        assertResolvesNonEmpty("KRB5");
    }

    @Test
    public void shouldResolveRSAAlias() throws Exception {
        assertResolvesNonEmpty("RSA");
    }

    @Test
    public void shouldResolveAESAlias() throws Exception {
        assertResolvesNonEmpty("AES");
    }

    @Test
    public void shouldResolveRC4Alias() throws Exception {
        assertResolvesNonEmpty("RC4");
    }

    @Test
    public void shouldResolveExpAlias() throws Exception {
        assertResolvesNonEmpty("EXP");
    }

    @Test
    public void shouldResolveExportAlias() throws Exception {
        assertResolvesNonEmpty("EXPORT");
    }

    @Test
    public void shouldResolveEDHAlias() throws Exception {
        List<String> resolved = assertResolvesNonEmpty("EDH");
        assertCount(resolved, 14);
    }

    @Test
    public void shouldResolveADHAlias() throws Exception {
        List<String> resolved = assertResolvesNonEmpty("ADH");
        assertCount(resolved, 52);
    }

    @Test
    public void shouldResolveAllAlias() throws Exception {
        List<String> resolved = assertResolvesNonEmpty("ALL");
        assertCount(resolved, 69);
    }

    @Test
    public void shouldResolveEECDHAlias() throws Exception {
        List<String> resolved = assertResolvesNonEmpty("EECDH");
        assertCount(resolved, 7);
    }

    @Test
    public void shouldResolveAECDHAlias() throws Exception {
        List<String> resolved = assertResolvesNonEmpty("AECDH");
        assertCount(resolved, 5);
    }

    @Test
    public void shouldResolveSHAAlias() throws Exception {
        List<String> resolved = assertResolvesNonEmpty("SHA");
        assertCount(resolved, 52);
    }

    @Test
    public void shouldResolveComplementofdefaultAlias() throws Exception {
        List<String> resolved = assertResolvesNonEmpty("COMPLEMENTOFDEFAULT");
        assertCount(resolved, 21);
    }

    @Test
    public void shouldResolveComplementofallAlias() throws Exception {
        List<String> resolved = assertResolvesNonEmpty("COMPLEMENTOFALL");
        assertCount(resolved, 8);
    }

    // Logical AND

    @Test
    public void shouldResolveLogicalAnd() throws Exception {
        List<String> resolved = resolveTokens("SHA1+RC4");
        Assert.assertTrue("Expected SHA1+RC4 cipher suites, got null",
            resolved != null);

        // The resolved list should include all ciphersuites that use an SHA1
        // MAC AND use RC4 encryption.
        assertCount(resolved, 8);
    }

    // Logical OR

    @Test
    public void shouldResolveLogicalOr() throws Exception {
        List<String> resolved = resolveTokens("SHA1", "RC4");
        Assert.assertTrue("Expected SHA1 and RC4 cipher suites, got null",
            resolved != null);

        // The resolved list should include all ciphersuites that use an SHA1
        // MAC OR use RC4 encryption.
        assertCount(resolved, 66);
    }

    @Test
    public void shouldResolveSortedCiphers() throws Exception {
        List<String> resolved = resolveTokens("SHA1+RC4", "@STRENGTH");
        Assert.assertTrue("Expected sorted SHA1+RC4 cipher suites, got null",
            resolved != null);

        // The resolved list should include all ciphersuites that use an SHA1
        // MAC AND use RC4 encryption, sorted strongest-first.
        assertCount(resolved, 8);

        String expected = "TLS_ECDHE_ECDSA_WITH_RC4_128_SHA";
        Assert.assertTrue(String.format("Expected %s at index 0, got %s",
            expected, resolved.get(0)),
            resolved.get(0).equals(expected));

        expected = "TLS_KRB5_EXPORT_WITH_RC4_40_SHA";
        // Fixed: the failure message previously reported resolved.get(0)
        // while the condition checked resolved.get(7).
        Assert.assertTrue(String.format("Expected %s at index 7, got %s",
            expected, resolved.get(7)),
            resolved.get(7).equals(expected));
    }

    @Test
    public void shouldResolveKilledCiphers() throws Exception {
        List<String> resolved = resolveTokens("SHA1+RC4", "!ECDSA");
        Assert.assertTrue("Expected SHA1+RC4 (and NOT ECDSA) cipher suites, got null",
            resolved != null);

        // SHA1 AND RC4, minus every ECDSA-authenticated suite.
        assertCount(resolved, 7);
    }

    @Test
    public void shouldResolveKilledExportCiphers() throws Exception {
        List<String> resolved = resolveTokens("RC4", "!EXPORT");
        Assert.assertTrue("Expected RC4 (and NOT EXPORT) cipher suites, got null",
            resolved != null);

        // The resolved list should include all ciphersuites that use RC4
        // encryption which are NOT export-grade.
        assertCount(resolved, 10);
    }

    @Test
    public void shouldResolveRightShiftedCiphers() throws Exception {
        List<String> resolved = resolveTokens("SHA1+RC4", "+RC4");
        Assert.assertTrue("Expected SHA1+RC4 (right-shifted RC4) cipher suites, got null",
            resolved != null);
        assertCount(resolved, 8);
    }

    @Test
    public void shouldResolveRemovedCiphers() throws Exception {
        List<String> resolved = resolveTokens("SHA1", "-RC4");
        Assert.assertTrue("Expected SHA1 (removed RC4) cipher suites, got null",
            resolved != null);
        assertCount(resolved, 44);
    }

    @Test
    public void shouldResolveUnknownName() throws Exception {
        // Unrecognized names pass through unchanged.
        List<String> configured = new ArrayList<String>(1);
        configured.add("CUSTOM");

        List<String> resolved = SslCipherSuites.resolve(configured);
        Assert.assertTrue("Expected cipher suites, got null", resolved != null);
        Assert.assertTrue(String.format("Expected %s, got %s",
            configured, resolved),
            resolved.equals(configured));
    }

    @Test(expected = IllegalArgumentException.class)
    public void shouldFailNoMatchingCiphers() throws Exception {
        resolveTokens("!FIPS");
    }

    @Test
    public void shouldResolveDefaults() throws Exception {
        // A null configuration resolves to the default suites.
        List<String> resolved = SslCipherSuites.resolve(null);
        Assert.assertTrue("Expected default cipher suites, got null",
            resolved != null);
        assertCount(resolved, 19);
    }

    @Test
    public void shouldResolveExplicitDefault() throws Exception {
        List<String> resolved = assertResolvesNonEmpty("DEFAULT");
        assertCount(resolved, 19);
    }

    @Test
    public void shouldResolveExplicitDefaultAndNullAlias() throws Exception {
        List<String> resolved = resolveTokens("DEFAULT", "NULL");
        Assert.assertTrue("Expected default cipher suites, got null",
            resolved != null);
        assertCount(resolved, 24);
    }

    // DEFAULT is only valid as the first entry.
    @Test(expected = IllegalArgumentException.class)
    public void shouldNotResolveNullAliasAndExplicitDefault() throws Exception {
        resolveTokens("NULL", "DEFAULT");
    }

    @Test
    public void shouldResolveSortedUnknownCipher() throws Exception {
        List<String> resolved = resolveTokens(
            "TLS_ECDH_RSA_WITH_AES_256_CBC_SHA",
            "CUSTOM",
            "SSL_RSA_WITH_NULL_MD5",
            "@STRENGTH");
        Assert.assertTrue("Expected cipher suites, got null", resolved != null);
        assertCount(resolved, 3);

        // Unknown suites keep their position relative to strength sorting.
        String expected = "TLS_ECDH_RSA_WITH_AES_256_CBC_SHA";
        Assert.assertTrue(String.format("Expected cipher #1 to be %s, got %s",
            expected, resolved.get(0)),
            resolved.get(0).equals(expected));

        expected = "CUSTOM";
        Assert.assertTrue(String.format("Expected cipher #2 to be %s, got %s",
            expected, resolved.get(1)),
            resolved.get(1).equals(expected));

        expected = "SSL_RSA_WITH_NULL_MD5";
        Assert.assertTrue(String.format("Expected cipher #3 to be %s, got %s",
            expected, resolved.get(2)),
            resolved.get(2).equals(expected));
    }

    @Test
    public void shouldResolveCSV() throws Exception {
        String configured = "HIGH, MEDIUM, !ADH";
        String[] resolved = SslCipherSuites.resolveCSV(configured);
        Assert.assertTrue("Expected cipher suites, got null", resolved != null);

        int expected = 23;
        Assert.assertTrue(String.format("Expected %d matching ciphers, got %d",
            expected, resolved.length),
            resolved.length == expected);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.sparql.engine.main;

import java.util.ArrayDeque ;
import java.util.Deque ;

import org.apache.jena.atlas.logging.Log ;
import org.apache.jena.sparql.algebra.Op ;
import org.apache.jena.sparql.algebra.OpVisitor ;
import org.apache.jena.sparql.algebra.op.* ;
import org.apache.jena.sparql.engine.QueryIterator ;

/** Class to provide type-safe execution dispatch using the visitor support of Op.
 *  Each visit pops the current input iterator, hands it with the op to the
 *  {@link OpExecutor}, and pushes the resulting iterator back on the stack. */
class ExecutionDispatch implements OpVisitor
{
    private final Deque<QueryIterator> stack = new ArrayDeque<>() ;
    private final OpExecutor opExecutor ;

    ExecutionDispatch(OpExecutor exec)
    {
        opExecutor = exec ;
    }

    /** Execute one op against the given input, checking that the
     *  stack depth is unchanged by the visit (one pop, one push). */
    QueryIterator exec(Op op, QueryIterator input)
    {
        push(input) ;
        int depthBefore = stack.size() ;
        op.visit(this) ;
        if ( depthBefore != stack.size() )
            Log.warn(this, "Possible stack misalignment") ;
        return pop() ;
    }

    // Every visit follows the same shape: pop() supplies the input,
    // the executor produces the output, push() records it.

    @Override
    public void visit(OpBGP opBGP)                  { push(opExecutor.execute(opBGP, pop())) ; }

    @Override
    public void visit(OpQuadPattern quadPattern)    { push(opExecutor.execute(quadPattern, pop())) ; }

    @Override
    public void visit(OpQuadBlock quadBlock)        { push(opExecutor.execute(quadBlock, pop())) ; }

    @Override
    public void visit(OpTriple opTriple)            { push(opExecutor.execute(opTriple, pop())) ; }

    @Override
    public void visit(OpQuad opQuad)                { push(opExecutor.execute(opQuad, pop())) ; }

    @Override
    public void visit(OpPath opPath)                { push(opExecutor.execute(opPath, pop())) ; }

    @Override
    public void visit(OpProcedure opProc)           { push(opExecutor.execute(opProc, pop())) ; }

    @Override
    public void visit(OpPropFunc opPropFunc)        { push(opExecutor.execute(opPropFunc, pop())) ; }

    @Override
    public void visit(OpJoin opJoin)                { push(opExecutor.execute(opJoin, pop())) ; }

    @Override
    public void visit(OpSequence opSequence)        { push(opExecutor.execute(opSequence, pop())) ; }

    @Override
    public void visit(OpDisjunction opDisjunction)  { push(opExecutor.execute(opDisjunction, pop())) ; }

    @Override
    public void visit(OpLeftJoin opLeftJoin)        { push(opExecutor.execute(opLeftJoin, pop())) ; }

    @Override
    public void visit(OpDiff opDiff)                { push(opExecutor.execute(opDiff, pop())) ; }

    @Override
    public void visit(OpMinus opMinus)              { push(opExecutor.execute(opMinus, pop())) ; }

    @Override
    public void visit(OpUnion opUnion)              { push(opExecutor.execute(opUnion, pop())) ; }

    @Override
    public void visit(OpConditional opCondition)    { push(opExecutor.execute(opCondition, pop())) ; }

    @Override
    public void visit(OpFilter opFilter)            { push(opExecutor.execute(opFilter, pop())) ; }

    @Override
    public void visit(OpGraph opGraph)              { push(opExecutor.execute(opGraph, pop())) ; }

    @Override
    public void visit(OpService opService)          { push(opExecutor.execute(opService, pop())) ; }

    @Override
    public void visit(OpDatasetNames dsNames)       { push(opExecutor.execute(dsNames, pop())) ; }

    @Override
    public void visit(OpTable opTable)              { push(opExecutor.execute(opTable, pop())) ; }

    @Override
    public void visit(OpExt opExt)                  { push(opExecutor.execute(opExt, pop())) ; }

    @Override
    public void visit(OpNull opNull)                { push(opExecutor.execute(opNull, pop())) ; }

    @Override
    public void visit(OpLabel opLabel)              { push(opExecutor.execute(opLabel, pop())) ; }

    @Override
    public void visit(OpList opList)                { push(opExecutor.execute(opList, pop())) ; }

    @Override
    public void visit(OpOrder opOrder)              { push(opExecutor.execute(opOrder, pop())) ; }

    @Override
    public void visit(OpProject opProject)          { push(opExecutor.execute(opProject, pop())) ; }

    @Override
    public void visit(OpDistinct opDistinct)        { push(opExecutor.execute(opDistinct, pop())) ; }

    @Override
    public void visit(OpReduced opReduced)          { push(opExecutor.execute(opReduced, pop())) ; }

    @Override
    public void visit(OpAssign opAssign)            { push(opExecutor.execute(opAssign, pop())) ; }

    @Override
    public void visit(OpExtend opExtend)            { push(opExecutor.execute(opExtend, pop())) ; }

    @Override
    public void visit(OpSlice opSlice)              { push(opExecutor.execute(opSlice, pop())) ; }

    @Override
    public void visit(OpGroup opGroup)              { push(opExecutor.execute(opGroup, pop())) ; }

    @Override
    public void visit(OpTopN opTop)                 { push(opExecutor.execute(opTop, pop())) ; }

    private void push(QueryIterator qIter)
    {
        stack.push(qIter) ;
    }

    private QueryIterator pop()
    {
        if ( stack.isEmpty() )
            Log.warn(this, "Warning: pop: empty stack") ;
        return stack.pop() ;
    }
}
package org.luaj.vm2;

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

/**
 * Debug helper class to pretty-print lua bytecodes.
 * @see Prototype
 * @see LuaClosure
 */
public class Print extends Lua {

    /** Placeholder printed for a null stack slot in {@link #printState}. */
    private static final String STRING_FOR_NULL = "null";

    /** Opcode names, indexed by opcode number (Lua 5.1 instruction set). */
    private static final String[] OPNAMES = {
        "MOVE", "LOADK", "LOADBOOL", "LOADNIL", "GETUPVAL", "GETGLOBAL",
        "GETTABLE", "SETGLOBAL", "SETUPVAL", "SETTABLE", "NEWTABLE", "SELF",
        "ADD", "SUB", "MUL", "DIV", "MOD", "POW", "UNM", "NOT", "LEN",
        "CONCAT", "JMP", "EQ", "LT", "LE", "TEST", "TESTSET", "CALL",
        "TAILCALL", "RETURN", "FORLOOP", "FORPREP", "TFORLOOP", "SETLIST",
        "CLOSE", "CLOSURE", "VARARG",
    };

    /**
     * Print a lua string as a double-quoted literal, escaping non-printable
     * bytes and quote/backslash characters.
     *
     * @param ps the stream to print to
     * @param s  the string whose raw bytes are rendered
     */
    static void printString(PrintStream ps, LuaString s) {
        ps.print('"');
        for(int i = 0, n = s._length; i < n; i++) {
            int c = s._bytes[s._offset + i];
            if(c >= ' ' && c <= '~' && c != '\"' && c != '\\')
                ps.print((char)c);
            else {
                switch(c) {
                    case '"':
                        ps.print("\\\"");
                        break;
                    case '\\':
                        ps.print("\\\\");
                        break;
                    case 0x0007: /* bell */
                        ps.print("\\a");
                        break;
                    case '\b': /* backspace */
                        ps.print("\\b");
                        break;
                    case '\f': /* form feed */
                        ps.print("\\f");
                        break;
                    case '\t': /* tab */
                        ps.print("\\t");
                        break;
                    case '\r': /* carriage return */
                        ps.print("\\r");
                        break;
                    case '\n': /* newline */
                        ps.print("\\n");
                        break;
                    case 0x000B: /* vertical tab */
                        ps.print("\\v");
                        break;
                    default:
                        // FIX: '+' binds tighter than '&' in Java, so the
                        // original "1000 + 0xff & c" evaluated as
                        // (1000 + 0xff) & c == 1075 & c, yielding a wrong
                        // escape value. The intended expression adds 1000 to
                        // the unsigned byte so substring(1) leaves a 3-digit
                        // decimal escape (e.g. \127, \255).
                        ps.print('\\');
                        ps.print(Integer.toString(1000 + (0xff & c)).substring(1));
                        break;
                }
            }
        }
        ps.print('"');
    }

    /**
     * Print a lua value: strings are rendered as escaped literals via
     * {@link #printString}; anything else uses its tojstring() form.
     */
    static void printValue(PrintStream ps, LuaValue v) {
        switch(v.type()) {
            case LuaValue.TSTRING:
                printString(ps, (LuaString)v);
                break;
            default:
                ps.print(v.tojstring());
        }
    }

    /** Print constant number {@code i} of prototype {@code f}. */
    static void printConstant(PrintStream ps, Prototype f, int i) {
        printValue(ps, f.k[i]);
    }

    /**
     * Print the code in a prototype
     * @param f the {@link Prototype}
     */
    public static void printCode(PrintStream ps, Prototype f) {
        int[] code = f.code;
        int pc, n = code.length;
        for(pc = 0; pc < n; pc++) {
            printOpCode(ps, f, pc);
            ps.println();
        }
    }

    /**
     * Print an opcode in a prototype
     * @param ps the {@link PrintStream} to print to
     * @param f the {@link Prototype}
     * @param pc the program counter to look up and print
     */
    public static void printOpCode(PrintStream ps, Prototype f, int pc) {
        int[] code = f.code;
        int i = code[pc];
        // Decode the instruction fields once up front.
        int o = GET_OPCODE(i);
        int a = GETARG_A(i);
        int b = GETARG_B(i);
        int c = GETARG_C(i);
        int bx = GETARG_Bx(i);
        int sbx = GETARG_sBx(i);
        int line = getline(f, pc);
        ps.print(" " + (pc + 1) + " ");
        if(line > 0)
            ps.print("[" + line + "] ");
        else
            ps.print("[-] ");
        ps.print(OPNAMES[o] + " ");
        // Operand layout depends on the instruction format (iABC/iABx/iAsBx).
        // Constant-table operands are printed as negative indices (-1 - k).
        switch(getOpMode(o)) {
            case iABC:
                ps.print(a);
                if(getBMode(o) != OpArgN)
                    ps.print(" " + (ISK(b) ? (-1 - INDEXK(b)) : b));
                if(getCMode(o) != OpArgN)
                    ps.print(" " + (ISK(c) ? (-1 - INDEXK(c)) : c));
                break;
            case iABx:
                if(getBMode(o) == OpArgK) {
                    ps.print(a + " " + (-1 - bx));
                } else {
                    ps.print(a + " " + (bx));
                }
                break;
            case iAsBx:
                if(o == OP_JMP)
                    ps.print(sbx);
                else
                    ps.print(a + " " + sbx);
                break;
        }
        // Append a human-readable annotation for opcodes that reference
        // constants, upvalues, jump targets or nested prototypes.
        switch(o) {
            case OP_LOADK:
                ps.print(" ; ");
                printConstant(ps, f, bx);
                break;
            case OP_GETUPVAL:
            case OP_SETUPVAL:
                ps.print(" ; ");
                if(f.upvalues.length > b)
                    printValue(ps, f.upvalues[b]);
                else
                    ps.print("-");
                break;
            case OP_GETGLOBAL:
            case OP_SETGLOBAL:
                ps.print(" ; ");
                printConstant(ps, f, bx);
                break;
            case OP_GETTABLE:
            case OP_SELF:
                if(ISK(c)) {
                    ps.print(" ; ");
                    printConstant(ps, f, INDEXK(c));
                }
                break;
            case OP_SETTABLE:
            case OP_ADD:
            case OP_SUB:
            case OP_MUL:
            case OP_DIV:
            case OP_POW:
            case OP_EQ:
            case OP_LT:
            case OP_LE:
                if(ISK(b) || ISK(c)) {
                    ps.print(" ; ");
                    if(ISK(b))
                        printConstant(ps, f, INDEXK(b));
                    else
                        ps.print("-");
                    ps.print(" ");
                    if(ISK(c))
                        printConstant(ps, f, INDEXK(c));
                    else
                        ps.print("-");
                }
                break;
            case OP_JMP:
            case OP_FORLOOP:
            case OP_FORPREP:
                ps.print(" ; to " + (sbx + pc + 2));
                break;
            case OP_CLOSURE:
                ps.print(" ; " + f.p[bx].getClass().getName());
                break;
            case OP_SETLIST:
                // c == 0 means the real count is stored in the next word.
                if(c == 0)
                    ps.print(" ; " + code[++pc]);
                else
                    ps.print(" ; " + c);
                break;
            case OP_VARARG:
                ps.print(" ; is_vararg=" + f.is_vararg);
                break;
            default:
                break;
        }
    }

    // Source line for pc, or -1 when unavailable.
    // NOTE(review): the pc > 0 guard also suppresses the line of the very
    // first instruction — presumably intentional for listing style; confirm.
    private static int getline(Prototype f, int pc) {
        return pc > 0 && f.lineinfo != null && pc < f.lineinfo.length ? f.lineinfo[pc] : -1;
    }

    /** Print the luac-style header line plus size/slot summary for {@code f}. */
    static void printHeader(PrintStream ps, Prototype f) {
        String s = String.valueOf(f.source);
        if(s.startsWith("@") || s.startsWith("="))
            s = s.substring(1);
        else if("\033Lua".equals(s))
            s = "(bstring)";
        else
            s = "(string)";
        String a = (f.linedefined == 0) ? "main" : "function";
        ps.print("\n%" + a + " <" + s + ":" + f.linedefined + "," + f.lastlinedefined + "> ("
                + f.code.length + " instructions, " + f.code.length * 4 + " bytes at " + id() + ")\n");
        ps.print(f.numparams + " param, " + f.maxstacksize + " slot, " + f.upvalues.length + " upvalue, ");
        ps.print(f.locvars.length + " local, " + f.k.length + " constant, " + f.p.length + " function\n");
    }

    /** List the constant table of {@code f}, one constant per line. */
    static void printConstants(PrintStream ps, Prototype f) {
        int i, n = f.k.length;
        ps.print("constants (" + n + ") for " + id() + ":\n");
        for(i = 0; i < n; i++) {
            ps.print(" " + (i + 1) + " ");
            printValue(ps, f.k[i]);
            ps.print("\n");
        }
    }

    /** List local variables of {@code f} with their 1-based live pc ranges. */
    static void printLocals(PrintStream ps, Prototype f) {
        int i, n = f.locvars.length;
        ps.print("locals (" + n + ") for " + id() + ":\n");
        for(i = 0; i < n; i++) {
            ps.println("  " + i + " " + f.locvars[i]._varname + " " + (f.locvars[i]._startpc + 1)
                    + " " + (f.locvars[i]._endpc + 1));
        }
    }

    /** List the upvalue names of {@code f}. */
    static void printUpValues(PrintStream ps, Prototype f) {
        int i, n = f.upvalues.length;
        ps.print("upvalues (" + n + ") for " + id() + ":\n");
        for(i = 0; i < n; i++) {
            ps.print(" " + i + " " + f.upvalues[i] + "\n");
        }
    }

    /** Fully print prototype {@code p} (header, code, constants, locals, upvalues). */
    public static void print(PrintStream ps, Prototype p) {
        printFunction(ps, p, true);
    }

    /**
     * Print a prototype and, recursively, every nested prototype.
     *
     * @param full when true, also print constants, locals and upvalues
     */
    public static void printFunction(PrintStream ps, Prototype f, boolean full) {
        int i, n = f.p.length;
        printHeader(ps, f);
        printCode(ps, f);
        if(full) {
            printConstants(ps, f);
            printLocals(ps, f);
            printUpValues(ps, f);
        }
        for(i = 0; i < n; i++)
            printFunction(ps, f.p[i], full);
    }

    // Print s left-justified in a field of exactly maxcols columns
    // (truncating when too long, space-padding when too short).
    private static void format(PrintStream ps, String s, int maxcols) {
        int n = s.length();
        if(n > maxcols)
            ps.print(s.substring(0, maxcols));
        else {
            ps.print(s);
            for(int i = maxcols - n; --i >= 0;)
                ps.print(' ');
        }
    }

    // Identifier used in listing headers in place of the prototype address.
    private static String id() {
        return "Proto";
    }

    /**
     * Print the state of a {@link LuaClosure} that is being executed
     * @param cl the {@link LuaClosure}
     * @param pc the program counter
     * @param stack the stack of {@link LuaValue}
     * @param top the top of the stack
     * @param varargs any {@link Varargs} value that may apply
     */
    public static void printState(LuaClosure cl, int pc, LuaValue[] stack, int top, Varargs varargs) {
        // Render the current opcode into a buffer so it can be padded to a
        // fixed width before the stack dump.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        PrintStream ps = new PrintStream(baos);
        printOpCode(ps, cl._p, pc);
        ps.flush();
        ps.close();
        ps = System.out;
        format(ps, baos.toString(), 50);
        // print stack
        ps.print('[');
        for(int i = 0; i < stack.length; i++) {
            LuaValue v = stack[i];
            if(v == null)
                ps.print(STRING_FOR_NULL);
            else
                switch(v.type()) {
                    case LuaValue.TSTRING:
                        // Long strings are elided to keep the dump on one line.
                        LuaString s = v.checkstring();
                        ps.print(s.length() < 48 ? s.tojstring()
                                : s.substring(0, 32).tojstring() + "...+" + (s.length() - 32) + "b");
                        break;
                    case LuaValue.TFUNCTION:
                        ps.print((v instanceof LuaClosure) ? ((LuaClosure)v)._p.toString() : v.tojstring());
                        break;
                    case LuaValue.TUSERDATA:
                        Object o = v.touserdata();
                        if(o != null) {
                            String n = o.getClass().getName();
                            n = n.substring(n.lastIndexOf('.') + 1);
                            ps.print(n + ": " + Integer.toHexString(o.hashCode()));
                        } else {
                            ps.print(v.toString());
                        }
                        break;
                    default:
                        ps.print(v.tojstring());
                }
            // Mark the logical top-of-stack position inside the dump.
            if(i + 1 == top)
                ps.print(']');
            ps.print(" | ");
        }
        ps.print(varargs);
        ps.println();
    }
}
/* * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.client.impl.proxy; import com.hazelcast.client.impl.protocol.ClientMessage; import com.hazelcast.client.impl.protocol.codec.QueueAddAllCodec; import com.hazelcast.client.impl.protocol.codec.QueueAddListenerCodec; import com.hazelcast.client.impl.protocol.codec.QueueClearCodec; import com.hazelcast.client.impl.protocol.codec.QueueCompareAndRemoveAllCodec; import com.hazelcast.client.impl.protocol.codec.QueueCompareAndRetainAllCodec; import com.hazelcast.client.impl.protocol.codec.QueueContainsAllCodec; import com.hazelcast.client.impl.protocol.codec.QueueContainsCodec; import com.hazelcast.client.impl.protocol.codec.QueueDrainToCodec; import com.hazelcast.client.impl.protocol.codec.QueueDrainToMaxSizeCodec; import com.hazelcast.client.impl.protocol.codec.QueueIsEmptyCodec; import com.hazelcast.client.impl.protocol.codec.QueueIteratorCodec; import com.hazelcast.client.impl.protocol.codec.QueueOfferCodec; import com.hazelcast.client.impl.protocol.codec.QueuePeekCodec; import com.hazelcast.client.impl.protocol.codec.QueuePollCodec; import com.hazelcast.client.impl.protocol.codec.QueuePutCodec; import com.hazelcast.client.impl.protocol.codec.QueueRemainingCapacityCodec; import com.hazelcast.client.impl.protocol.codec.QueueRemoveCodec; import com.hazelcast.client.impl.protocol.codec.QueueRemoveListenerCodec; import 
com.hazelcast.client.impl.protocol.codec.QueueSizeCodec; import com.hazelcast.client.impl.protocol.codec.QueueTakeCodec; import com.hazelcast.client.impl.spi.ClientContext; import com.hazelcast.client.impl.spi.EventHandler; import com.hazelcast.client.impl.spi.impl.ListenerMessageCodec; import com.hazelcast.cluster.Member; import com.hazelcast.collection.IQueue; import com.hazelcast.collection.ItemEvent; import com.hazelcast.collection.ItemListener; import com.hazelcast.collection.LocalQueueStats; import com.hazelcast.collection.impl.common.DataAwareItemEvent; import com.hazelcast.collection.impl.queue.QueueIterator; import com.hazelcast.core.HazelcastException; import com.hazelcast.core.ItemEventType; import com.hazelcast.internal.serialization.Data; import javax.annotation.Nonnull; import java.util.Collection; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.UUID; import java.util.concurrent.TimeUnit; import static com.hazelcast.internal.util.CollectionUtil.objectToDataCollection; import static com.hazelcast.internal.util.Preconditions.checkNotNull; import static java.lang.Thread.currentThread; /** * Proxy implementation of {@link IQueue}. 
 *
 * @param <E> the type of elements in this queue
 */
public final class ClientQueueProxy<E> extends PartitionSpecificClientProxy implements IQueue<E> {

    public ClientQueueProxy(String serviceName, String name, ClientContext context) {
        super(serviceName, name, context);
    }

    // Registers a cluster-side item listener; events are delivered through
    // ItemEventHandler below. Returns the registration id used for removal.
    @Nonnull
    @Override
    public UUID addItemListener(@Nonnull ItemListener<E> listener, boolean includeValue) {
        checkNotNull(listener, "Null listener is not allowed!");
        EventHandler<ClientMessage> eventHandler = new ItemEventHandler(includeValue, listener);
        return registerListener(createItemListenerCodec(includeValue), eventHandler);
    }

    // Builds the codec pair that encodes add/remove-listener requests and
    // decodes their responses for this queue's name.
    private ListenerMessageCodec createItemListenerCodec(final boolean includeValue) {
        return new ListenerMessageCodec() {
            @Override
            public ClientMessage encodeAddRequest(boolean localOnly) {
                return QueueAddListenerCodec.encodeRequest(name, includeValue, localOnly);
            }

            @Override
            public UUID decodeAddResponse(ClientMessage clientMessage) {
                return QueueAddListenerCodec.decodeResponse(clientMessage);
            }

            @Override
            public ClientMessage encodeRemoveRequest(UUID realRegistrationId) {
                return QueueRemoveListenerCodec.encodeRequest(name, realRegistrationId);
            }

            @Override
            public boolean decodeRemoveResponse(ClientMessage clientMessage) {
                return QueueRemoveListenerCodec.decodeResponse(clientMessage);
            }
        };
    }

    // Translates raw item-event protocol messages into ItemListener callbacks.
    private class ItemEventHandler extends QueueAddListenerCodec.AbstractEventHandler
            implements EventHandler<ClientMessage> {

        private final boolean includeValue;
        private final ItemListener<E> listener;

        ItemEventHandler(boolean includeValue, ItemListener<E> listener) {
            this.includeValue = includeValue;
            this.listener = listener;
        }

        @Override
        public void handleItemEvent(Data dataItem, UUID uuid, int eventType) {
            Member member = getContext().getClusterService().getMember(uuid);
            ItemEvent<E> itemEvent = new DataAwareItemEvent(name, ItemEventType.getByType(eventType),
                    dataItem, member, getSerializationService());
            // Only ADDED and (implicitly) REMOVED event types are expected here.
            if (eventType == ItemEventType.ADDED.getType()) {
                listener.itemAdded(itemEvent);
            } else {
                listener.itemRemoved(itemEvent);
            }
        }
    }

    @Override
    public boolean removeItemListener(@Nonnull UUID registrationId) {
        checkNotNull(registrationId, "Null registrationId is not allowed!");
        return deregisterListener(registrationId);
    }

    // Local statistics are meaningless from a client: there is no single
    // "local" member to attribute them to.
    @Override
    public LocalQueueStats getLocalQueueStats() {
        throw new UnsupportedOperationException("Locality is ambiguous for client!");
    }

    // Collection contract: add() throws when the bounded queue is full,
    // whereas offer() returns false.
    @Override
    public boolean add(@Nonnull E e) {
        if (offer(e)) {
            return true;
        }
        throw new IllegalStateException("Queue is full!");
    }

    /**
     * It is advised to use this method in a try-catch block to take the offer operation
     * full lifecycle control, in a "lost node" scenario you can not be sure
     * offer is succeeded or not so you may want to retry.
     *
     * @param e the element to add
     * @return <tt>true</tt> if the element was added to this queue.
     * <tt>false</tt> if there is not enough capacity to insert the element.
     * @throws HazelcastException if client loses the connected node.
     */
    @Override
    public boolean offer(@Nonnull E e) {
        try {
            // Zero timeout: non-blocking attempt.
            return offer(e, 0, TimeUnit.SECONDS);
        } catch (InterruptedException ex) {
            // Restore the interrupt flag instead of propagating the checked
            // exception, since offer() cannot declare it.
            currentThread().interrupt();
            return false;
        }
    }

    // Blocking insert; the interruptible invocation lets a blocked put be
    // cancelled via thread interruption.
    @Override
    public void put(@Nonnull E e) throws InterruptedException {
        checkNotNull(e, "Null item is not allowed!");
        Data data = toData(e);
        ClientMessage request = QueuePutCodec.encodeRequest(name, data);
        invokeOnPartitionInterruptibly(request);
    }

    @Override
    public boolean offer(@Nonnull E e, long timeout, @Nonnull TimeUnit unit) throws InterruptedException {
        checkNotNull(e, "Null item is not allowed!");
        checkNotNull(unit, "Null timeUnit is not allowed!");
        Data data = toData(e);
        ClientMessage request = QueueOfferCodec.encodeRequest(name, data, unit.toMillis(timeout));
        ClientMessage response = invokeOnPartitionInterruptibly(request);
        return QueueOfferCodec.decodeResponse(response);
    }

    // Blocking removal of the head element.
    @Nonnull
    @Override
    public E take() throws InterruptedException {
        ClientMessage request = QueueTakeCodec.encodeRequest(name);
        ClientMessage response = invokeOnPartitionInterruptibly(request);
        return toObject(QueueTakeCodec.decodeResponse(response));
    }

    @Override
    public E poll(long timeout, @Nonnull TimeUnit unit) throws InterruptedException {
        checkNotNull(unit, "Null timeUnit is not allowed!");
        ClientMessage request = QueuePollCodec.encodeRequest(name, unit.toMillis(timeout));
        ClientMessage response = invokeOnPartitionInterruptibly(request);
        return toObject(QueuePollCodec.decodeResponse(response));
    }

    @Override
    public int remainingCapacity() {
        ClientMessage request = QueueRemainingCapacityCodec.encodeRequest(name);
        ClientMessage response = invokeOnPartition(request);
        return QueueRemainingCapacityCodec.decodeResponse(response);
    }

    @Override
    public boolean remove(@Nonnull Object o) {
        checkNotNull(o, "Null item is not allowed!");
        Data data = toData(o);
        ClientMessage request = QueueRemoveCodec.encodeRequest(name, data);
        ClientMessage response = invokeOnPartition(request);
        return QueueRemoveCodec.decodeResponse(response);
    }

    @Override
    public boolean contains(@Nonnull Object o) {
        checkNotNull(o, "Null item is not allowed!");
        Data data = toData(o);
        ClientMessage request = QueueContainsCodec.encodeRequest(name, data);
        ClientMessage response = invokeOnPartition(request);
        return QueueContainsCodec.decodeResponse(response);
    }

    // Drains every available element into the given collection, deserializing
    // each one; returns the number of elements transferred.
    @Override
    public int drainTo(@Nonnull Collection<? super E> objects) {
        checkNotNull(objects, "Null objects parameter is not allowed!");
        ClientMessage request = QueueDrainToCodec.encodeRequest(name);
        ClientMessage response = invokeOnPartition(request);
        Collection<Data> resultCollection = QueueDrainToCodec.decodeResponse(response);
        for (Data data : resultCollection) {
            E e = toObject(data);
            objects.add(e);
        }
        return resultCollection.size();
    }

    @Override
    public int drainTo(@Nonnull Collection<? super E> c, int maxElements) {
        checkNotNull(c, "Null collection parameter is not allowed!");
        ClientMessage request = QueueDrainToMaxSizeCodec.encodeRequest(name, maxElements);
        ClientMessage response = invokeOnPartition(request);
        Collection<Data> resultCollection = QueueDrainToMaxSizeCodec.decodeResponse(response);
        for (Data data : resultCollection) {
            E e = toObject(data);
            c.add(e);
        }
        return resultCollection.size();
    }

    // Queue contract: remove() throws on empty, poll() returns null.
    @Override
    public E remove() {
        final E res = poll();
        if (res == null) {
            throw new NoSuchElementException("Queue is empty!");
        }
        return res;
    }

    @Override
    public E poll() {
        try {
            // Zero timeout: non-blocking attempt.
            return poll(0, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            // Restore the interrupt flag; poll() cannot declare the checked
            // exception, so null is returned as "nothing retrieved".
            currentThread().interrupt();
            return null;
        }
    }

    // Queue contract: element() throws on empty, peek() returns null.
    @Override
    public E element() {
        final E res = peek();
        if (res == null) {
            throw new NoSuchElementException("Queue is empty!");
        }
        return res;
    }

    @Override
    public E peek() {
        ClientMessage request = QueuePeekCodec.encodeRequest(name);
        ClientMessage response = invokeOnPartition(request);
        return toObject(QueuePeekCodec.decodeResponse(response));
    }

    @Override
    public int size() {
        ClientMessage request = QueueSizeCodec.encodeRequest(name);
        ClientMessage response = invokeOnPartition(request);
        return QueueSizeCodec.decodeResponse(response);
    }

    @Override
    public boolean isEmpty() {
        ClientMessage request = QueueIsEmptyCodec.encodeRequest(name);
        ClientMessage response = invokeOnPartition(request);
        return QueueIsEmptyCodec.decodeResponse(response);
    }

    // Fetches a snapshot of the queue's contents and iterates over that
    // snapshot; the iterator does not reflect later modifications.
    @Override
    public Iterator<E> iterator() {
        ClientMessage request = QueueIteratorCodec.encodeRequest(name);
        ClientMessage response = invokeOnPartition(request);
        Collection<Data> resultCollection = QueueIteratorCodec.decodeResponse(response);
        return new QueueIterator<E>(resultCollection.iterator(), getSerializationService(), false);
    }

    @Override
    public Object[] toArray() {
        ClientMessage request = QueueIteratorCodec.encodeRequest(name);
        ClientMessage response = invokeOnPartition(request);
        Collection<Data> resultCollection = QueueIteratorCodec.decodeResponse(response);
        int i = 0;
        Object[] array = new Object[resultCollection.size()];
        for (Data data : resultCollection) {
            array[i++] = toObject(data);
        }
        return array;
    }

    // Standard Collection.toArray(T[]) semantics: reuse the given array when
    // large enough, otherwise allocate one of the same runtime component type.
    @Nonnull
    @Override
    public <T> T[] toArray(@Nonnull T[] ts) {
        checkNotNull(ts, "Null array parameter is not allowed!");
        ClientMessage request = QueueIteratorCodec.encodeRequest(name);
        ClientMessage response = invokeOnPartition(request);
        Collection<Data> resultCollection = QueueIteratorCodec.decodeResponse(response);
        int size = resultCollection.size();
        if (ts.length < size) {
            ts = (T[]) java.lang.reflect.Array.newInstance(ts.getClass().getComponentType(), size);
        }
        int i = 0;
        for (Data data : resultCollection) {
            // Unchecked cast: caller-supplied array type must match the
            // deserialized element type, per Collection.toArray contract.
            ts[i++] = (T) toObject(data);
        }
        return ts;
    }

    @Override
    public boolean containsAll(@Nonnull Collection<?> c) {
        checkNotNull(c, "Null collection is not allowed!");
        Collection<Data> dataCollection = objectToDataCollection(c, getSerializationService());
        ClientMessage request = QueueContainsAllCodec.encodeRequest(name, dataCollection);
        ClientMessage response = invokeOnPartition(request);
        return QueueContainsAllCodec.decodeResponse(response);
    }

    @Override
    public boolean addAll(@Nonnull Collection<? extends E> c) {
        checkNotNull(c, "Null collection is not allowed!");
        Collection<Data> dataCollection = objectToDataCollection(c, getSerializationService());
        ClientMessage request = QueueAddAllCodec.encodeRequest(name, dataCollection);
        ClientMessage response = invokeOnPartition(request);
        return QueueAddAllCodec.decodeResponse(response);
    }

    @Override
    public boolean removeAll(@Nonnull Collection<?> c) {
        checkNotNull(c, "Null collection is not allowed!");
        Collection<Data> dataCollection = objectToDataCollection(c, getSerializationService());
        ClientMessage request = QueueCompareAndRemoveAllCodec.encodeRequest(name, dataCollection);
        ClientMessage response = invokeOnPartition(request);
        return QueueCompareAndRemoveAllCodec.decodeResponse(response);
    }

    @Override
    public boolean retainAll(@Nonnull Collection<?> c) {
        checkNotNull(c, "Null collection is not allowed!");
        Collection<Data> dataCollection = objectToDataCollection(c, getSerializationService());
        ClientMessage request = QueueCompareAndRetainAllCodec.encodeRequest(name, dataCollection);
        ClientMessage response = invokeOnPartition(request);
        return QueueCompareAndRetainAllCodec.decodeResponse(response);
    }

    @Override
    public void clear() {
        ClientMessage request = QueueClearCodec.encodeRequest(name);
        invokeOnPartition(request);
    }

    @Override
    public String toString() {
        return "IQueue{" + "name='" + name + '\'' + '}';
    }
}
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.elasticbeanstalk.model;

import java.io.Serializable;

/**
 * <p>
 * A list of available AWS Elastic Beanstalk solution stacks.
 * </p>
 */
// NOTE(review): generator-style AWS SDK model class — the lazy-init getters,
// defensive-copy setters and with* fluent chaining follow the SDK's generated
// pattern and are kept verbatim for consistency with sibling model classes.
public class ListAvailableSolutionStacksResult implements Serializable, Cloneable {

    /**
     * A list of available solution stacks.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<String> solutionStacks;

    /**
     * A list of available solution stacks and their
     * <a>SolutionStackDescription</a>.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<SolutionStackDescription> solutionStackDetails;

    /**
     * A list of available solution stacks.
     *
     * @return A list of available solution stacks.
     */
    public java.util.List<String> getSolutionStacks() {
        // Lazily auto-construct so callers never observe null.
        if (solutionStacks == null) {
            solutionStacks = new com.amazonaws.internal.ListWithAutoConstructFlag<String>();
            solutionStacks.setAutoConstruct(true);
        }
        return solutionStacks;
    }

    /**
     * A list of available solution stacks.
     *
     * @param solutionStacks A list of available solution stacks.
     */
    public void setSolutionStacks(java.util.Collection<String> solutionStacks) {
        if (solutionStacks == null) {
            this.solutionStacks = null;
            return;
        }
        // Defensive copy so later mutation of the argument does not leak in.
        com.amazonaws.internal.ListWithAutoConstructFlag<String> solutionStacksCopy =
                new com.amazonaws.internal.ListWithAutoConstructFlag<String>(solutionStacks.size());
        solutionStacksCopy.addAll(solutionStacks);
        this.solutionStacks = solutionStacksCopy;
    }

    /**
     * A list of available solution stacks.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setSolutionStacks(java.util.Collection)} or {@link
     * #withSolutionStacks(java.util.Collection)} if you want to override the
     * existing values.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param solutionStacks A list of available solution stacks.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ListAvailableSolutionStacksResult withSolutionStacks(String... solutionStacks) {
        // NOTE(review): getSolutionStacks() lazily initializes and never
        // returns null, so this presizing branch appears unreachable; kept
        // as-is to match the generated-code pattern.
        if (getSolutionStacks() == null)
            setSolutionStacks(new java.util.ArrayList<String>(solutionStacks.length));
        for (String value : solutionStacks) {
            getSolutionStacks().add(value);
        }
        return this;
    }

    /**
     * A list of available solution stacks.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param solutionStacks A list of available solution stacks.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ListAvailableSolutionStacksResult withSolutionStacks(java.util.Collection<String> solutionStacks) {
        if (solutionStacks == null) {
            this.solutionStacks = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<String> solutionStacksCopy =
                    new com.amazonaws.internal.ListWithAutoConstructFlag<String>(solutionStacks.size());
            solutionStacksCopy.addAll(solutionStacks);
            this.solutionStacks = solutionStacksCopy;
        }
        return this;
    }

    /**
     * A list of available solution stacks and their
     * <a>SolutionStackDescription</a>.
     *
     * @return A list of available solution stacks and their
     *         <a>SolutionStackDescription</a>.
     */
    public java.util.List<SolutionStackDescription> getSolutionStackDetails() {
        // Lazily auto-construct so callers never observe null.
        if (solutionStackDetails == null) {
            solutionStackDetails = new com.amazonaws.internal.ListWithAutoConstructFlag<SolutionStackDescription>();
            solutionStackDetails.setAutoConstruct(true);
        }
        return solutionStackDetails;
    }

    /**
     * A list of available solution stacks and their
     * <a>SolutionStackDescription</a>.
     *
     * @param solutionStackDetails A list of available solution stacks and their
     *        <a>SolutionStackDescription</a>.
     */
    public void setSolutionStackDetails(java.util.Collection<SolutionStackDescription> solutionStackDetails) {
        if (solutionStackDetails == null) {
            this.solutionStackDetails = null;
            return;
        }
        // Defensive copy so later mutation of the argument does not leak in.
        com.amazonaws.internal.ListWithAutoConstructFlag<SolutionStackDescription> solutionStackDetailsCopy =
                new com.amazonaws.internal.ListWithAutoConstructFlag<SolutionStackDescription>(solutionStackDetails.size());
        solutionStackDetailsCopy.addAll(solutionStackDetails);
        this.solutionStackDetails = solutionStackDetailsCopy;
    }

    /**
     * A list of available solution stacks and their
     * <a>SolutionStackDescription</a>.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setSolutionStackDetails(java.util.Collection)} or
     * {@link #withSolutionStackDetails(java.util.Collection)} if you want to
     * override the existing values.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param solutionStackDetails A list of available solution stacks and their
     *        <a>SolutionStackDescription</a>.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ListAvailableSolutionStacksResult withSolutionStackDetails(SolutionStackDescription... solutionStackDetails) {
        // NOTE(review): same apparently-unreachable presizing branch as in
        // withSolutionStacks(String...); kept verbatim.
        if (getSolutionStackDetails() == null)
            setSolutionStackDetails(new java.util.ArrayList<SolutionStackDescription>(solutionStackDetails.length));
        for (SolutionStackDescription value : solutionStackDetails) {
            getSolutionStackDetails().add(value);
        }
        return this;
    }

    /**
     * A list of available solution stacks and their
     * <a>SolutionStackDescription</a>.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param solutionStackDetails A list of available solution stacks and their
     *        <a>SolutionStackDescription</a>.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ListAvailableSolutionStacksResult withSolutionStackDetails(java.util.Collection<SolutionStackDescription> solutionStackDetails) {
        if (solutionStackDetails == null) {
            this.solutionStackDetails = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<SolutionStackDescription> solutionStackDetailsCopy =
                    new com.amazonaws.internal.ListWithAutoConstructFlag<SolutionStackDescription>(solutionStackDetails.size());
            solutionStackDetailsCopy.addAll(solutionStackDetails);
            this.solutionStackDetails = solutionStackDetailsCopy;
        }
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getSolutionStacks() != null)
            sb.append("SolutionStacks: " + getSolutionStacks() + ",");
        if (getSolutionStackDetails() != null)
            sb.append("SolutionStackDetails: " + getSolutionStackDetails() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getSolutionStacks() == null) ? 0 : getSolutionStacks().hashCode());
        hashCode = prime * hashCode + ((getSolutionStackDetails() == null) ? 0 : getSolutionStackDetails().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        // instanceof-style check (generated pattern) also handles subclasses.
        if (obj instanceof ListAvailableSolutionStacksResult == false)
            return false;
        ListAvailableSolutionStacksResult other = (ListAvailableSolutionStacksResult)obj;

        // XOR detects "exactly one side null" for each field.
        if (other.getSolutionStacks() == null ^ this.getSolutionStacks() == null)
            return false;
        if (other.getSolutionStacks() != null && other.getSolutionStacks().equals(this.getSolutionStacks()) == false)
            return false;
        if (other.getSolutionStackDetails() == null ^ this.getSolutionStackDetails() == null)
            return false;
        if (other.getSolutionStackDetails() != null && other.getSolutionStackDetails().equals(this.getSolutionStackDetails()) == false)
            return false;
        return true;
    }

    @Override
    public ListAvailableSolutionStacksResult clone() {
        try {
            return (ListAvailableSolutionStacksResult) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: the class implements Cloneable, so treat it as
            // an internal invariant violation.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!", e);
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermRangeQuery;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.chrono.ISOChronology;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.sameInstance;

/**
 * Randomized tests for {@link RangeQueryBuilder}: serialization round-trips,
 * translation to Lucene queries (term range, legacy numeric range), date
 * math/format/timezone handling, and shard-relation based rewriting.
 */
public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuilder> {

    @Override
    protected RangeQueryBuilder doCreateTestQueryBuilder() {
        RangeQueryBuilder query;
        // switch between numeric and date ranges
        switch (randomIntBetween(0, 2)) {
            case 0:
                // use mapped integer field for numeric range queries
                query = new RangeQueryBuilder(INT_FIELD_NAME);
                query.from(randomIntBetween(1, 100));
                query.to(randomIntBetween(101, 200));
                break;
            case 1:
                // use mapped date field, using date string representation
                query = new RangeQueryBuilder(DATE_FIELD_NAME);
                query.from(new DateTime(System.currentTimeMillis() - randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
                query.to(new DateTime(System.currentTimeMillis() + randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
                // Create timestamp option only then we have a date mapper,
                // otherwise we could trigger exception.
                if (createShardContext().getMapperService().fullName(DATE_FIELD_NAME) != null) {
                    if (randomBoolean()) {
                        query.timeZone(randomTimeZone());
                    }
                    if (randomBoolean()) {
                        query.format("yyyy-MM-dd'T'HH:mm:ss.SSSZZ");
                    }
                }
                break;
            case 2:
            default:
                // string field: "a..." < "z..." guarantees a non-empty range
                query = new RangeQueryBuilder(STRING_FIELD_NAME);
                query.from("a" + randomAsciiOfLengthBetween(1, 10));
                query.to("z" + randomAsciiOfLengthBetween(1, 10));
                break;
        }
        query.includeLower(randomBoolean()).includeUpper(randomBoolean());
        // randomly drop one or both bounds to exercise open-ended ranges
        if (randomBoolean()) {
            query.from(null);
        }
        if (randomBoolean()) {
            query.to(null);
        }
        return query;
    }

    @Override
    protected Map<String, RangeQueryBuilder> getAlternateVersions() {
        // gt/gte and lt/lte are alternate JSON spellings of from/to + include flags
        Map<String, RangeQueryBuilder> alternateVersions = new HashMap<>();
        RangeQueryBuilder rangeQueryBuilder = new RangeQueryBuilder(INT_FIELD_NAME);
        rangeQueryBuilder.from(randomIntBetween(1, 100)).to(randomIntBetween(101, 200));
        rangeQueryBuilder.includeLower(randomBoolean());
        rangeQueryBuilder.includeUpper(randomBoolean());
        String query =
                "{\n" +
                "    \"range\":{\n" +
                "        \"" + INT_FIELD_NAME + "\": {\n" +
                "            \"" + (rangeQueryBuilder.includeLower() ? "gte" : "gt") + "\": " + rangeQueryBuilder.from() + ",\n" +
                "            \"" + (rangeQueryBuilder.includeUpper() ? "lte" : "lt") + "\": " + rangeQueryBuilder.to() + "\n" +
                "        }\n" +
                "    }\n" +
                "}";
        alternateVersions.put(query, rangeQueryBuilder);
        return alternateVersions;
    }

    @Override
    protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        // Unmapped fields and plain string fields produce a TermRangeQuery;
        // mapped int fields produce LegacyNumericRangeQuery; mapped date
        // fields produce a package-private LateParsingQuery we cannot inspect.
        if (getCurrentTypes().length == 0 || (queryBuilder.fieldName().equals(DATE_FIELD_NAME) == false && queryBuilder.fieldName().equals(INT_FIELD_NAME) == false)) {
            assertThat(query, instanceOf(TermRangeQuery.class));
            TermRangeQuery termRangeQuery = (TermRangeQuery) query;
            assertThat(termRangeQuery.getField(), equalTo(queryBuilder.fieldName()));
            assertThat(termRangeQuery.getLowerTerm(), equalTo(BytesRefs.toBytesRef(queryBuilder.from())));
            assertThat(termRangeQuery.getUpperTerm(), equalTo(BytesRefs.toBytesRef(queryBuilder.to())));
            assertThat(termRangeQuery.includesLower(), equalTo(queryBuilder.includeLower()));
            assertThat(termRangeQuery.includesUpper(), equalTo(queryBuilder.includeUpper()));
        } else if (queryBuilder.fieldName().equals(DATE_FIELD_NAME)) {
            //we can't properly test unmapped dates because LateParsingQuery is package private
        } else if (queryBuilder.fieldName().equals(INT_FIELD_NAME)) {
            assertThat(query, instanceOf(LegacyNumericRangeQuery.class));
            LegacyNumericRangeQuery numericRangeQuery = (LegacyNumericRangeQuery) query;
            assertThat(numericRangeQuery.getField(), equalTo(queryBuilder.fieldName()));
            assertThat(numericRangeQuery.getMin(), equalTo(queryBuilder.from()));
            assertThat(numericRangeQuery.getMax(), equalTo(queryBuilder.to()));
            assertThat(numericRangeQuery.includesMin(), equalTo(queryBuilder.includeLower()));
            assertThat(numericRangeQuery.includesMax(), equalTo(queryBuilder.includeUpper()));
        } else {
            throw new UnsupportedOperationException();
        }
    }

    public void testIllegalArguments() {
        // field name must be non-null and non-empty
        try {
            if (randomBoolean()) {
                new RangeQueryBuilder(null);
            } else {
                new RangeQueryBuilder("");
            }
            fail("cannot be null or empty");
        } catch (IllegalArgumentException e) {
            // expected
        }

        RangeQueryBuilder rangeQueryBuilder = new RangeQueryBuilder("test");
        // time zone must be a known, non-null ID
        try {
            if (randomBoolean()) {
                rangeQueryBuilder.timeZone(null);
            } else {
                rangeQueryBuilder.timeZone("badID");
            }
            fail("cannot be null or unknown id");
        } catch (IllegalArgumentException e) {
            // expected
        }

        // format must be a valid, non-null date format
        try {
            if (randomBoolean()) {
                rangeQueryBuilder.format(null);
            } else {
                rangeQueryBuilder.format("badFormat");
            }
            fail("cannot be null or bad format");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    /**
     * Specifying a timezone together with a numeric range query should throw an exception.
     */
    public void testToQueryNonDateWithTimezone() throws QueryShardException, IOException {
        RangeQueryBuilder query = new RangeQueryBuilder(INT_FIELD_NAME);
        query.from(1).to(10).timeZone("UTC");
        try {
            query.toQuery(createShardContext());
            fail("Expected QueryShardException");
        } catch (QueryShardException e) {
            assertThat(e.getMessage(), containsString("[range] time_zone can not be applied"));
        }
    }

    /**
     * Specifying a timezone together with an unmapped field should throw an exception.
     */
    public void testToQueryUnmappedWithTimezone() throws QueryShardException, IOException {
        RangeQueryBuilder query = new RangeQueryBuilder("bogus_field");
        query.from(1).to(10).timeZone("UTC");
        try {
            query.toQuery(createShardContext());
            fail("Expected QueryShardException");
        } catch (QueryShardException e) {
            assertThat(e.getMessage(), containsString("[range] time_zone can not be applied"));
        }
    }

    public void testToQueryNumericField() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        Query parsedQuery = rangeQuery(INT_FIELD_NAME).from(23).to(54).includeLower(true).includeUpper(false).toQuery(createShardContext());
        // since age is automatically registered in data, we encode it as numeric
        assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class));
        LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery;
        assertThat(rangeQuery.getField(), equalTo(INT_FIELD_NAME));
        assertThat(rangeQuery.getMin().intValue(), equalTo(23));
        assertThat(rangeQuery.getMax().intValue(), equalTo(54));
        assertThat(rangeQuery.includesMin(), equalTo(true));
        assertThat(rangeQuery.includesMax(), equalTo(false));
    }

    public void testDateRangeQueryFormat() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        // We test 01/01/2012 from gte and 2030 for lt
        String query = "{\n" +
                "    \"range\" : {\n" +
                "        \"" + DATE_FIELD_NAME + "\" : {\n" +
                "            \"gte\": \"01/01/2012\",\n" +
                "            \"lt\": \"2030\",\n" +
                "            \"format\": \"dd/MM/yyyy||yyyy\"\n" +
                "        }\n" +
                "    }\n" +
                "}";
        Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
        assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class));

        // Min value was 01/01/2012 (dd/MM/yyyy)
        DateTime min = DateTime.parse("2012-01-01T00:00:00.000+00");
        assertThat(((LegacyNumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis()));

        // Max value was 2030 (yyyy)
        DateTime max = DateTime.parse("2030-01-01T00:00:00.000+00");
        assertThat(((LegacyNumericRangeQuery) parsedQuery).getMax().longValue(), is(max.getMillis()));

        // Test Invalid format
        query = "{\n" +
                "    \"range\" : {\n" +
                "        \"" + DATE_FIELD_NAME + "\" : {\n" +
                "            \"gte\": \"01/01/2012\",\n" +
                "            \"lt\": \"2030\",\n" +
                "            \"format\": \"yyyy\"\n" +
                "        }\n" +
                "    }\n" +
                "}";
        try {
            parseQuery(query).toQuery(createShardContext()).rewrite(null);
            fail("A Range Query with a specific format but with an unexpected date should raise a ParsingException");
        } catch (ElasticsearchParseException e) {
            // We expect it
        }
    }

    public void testDateRangeBoundaries() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        // Inclusive bounds: ||/M rounds the lower bound down to the start of
        // the month, ||/d rounds the upper bound up to the end of the day.
        String query = "{\n" +
                "    \"range\" : {\n" +
                "        \"" + DATE_FIELD_NAME + "\" : {\n" +
                "            \"gte\": \"2014-11-05||/M\",\n" +
                "            \"lte\": \"2014-12-08||/d\"\n" +
                "        }\n" +
                "    }\n" +
                "}\n";
        Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
        assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class));
        LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery;

        DateTime min = DateTime.parse("2014-11-01T00:00:00.000+00");
        assertThat(rangeQuery.getMin().longValue(), is(min.getMillis()));
        assertTrue(rangeQuery.includesMin());

        DateTime max = DateTime.parse("2014-12-08T23:59:59.999+00");
        assertThat(rangeQuery.getMax().longValue(), is(max.getMillis()));
        assertTrue(rangeQuery.includesMax());

        // Exclusive bounds: rounding flips direction (gt rounds up, lt rounds down).
        query = "{\n" +
                "    \"range\" : {\n" +
                "        \"" + DATE_FIELD_NAME + "\" : {\n" +
                "            \"gt\": \"2014-11-05||/M\",\n" +
                "            \"lt\": \"2014-12-08||/d\"\n" +
                "        }\n" +
                "    }\n" +
                "}";
        parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
        assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class));
        rangeQuery = (LegacyNumericRangeQuery) parsedQuery;

        min = DateTime.parse("2014-11-30T23:59:59.999+00");
        assertThat(rangeQuery.getMin().longValue(), is(min.getMillis()));
        assertFalse(rangeQuery.includesMin());

        max = DateTime.parse("2014-12-08T00:00:00.000+00");
        assertThat(rangeQuery.getMax().longValue(), is(max.getMillis()));
        assertFalse(rangeQuery.includesMax());
    }

    public void testDateRangeQueryTimezone() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        long startDate = System.currentTimeMillis();
        String query = "{\n" +
                "    \"range\" : {\n" +
                "        \"" + DATE_FIELD_NAME + "\" : {\n" +
                "            \"gte\": \"2012-01-01\",\n" +
                "            \"lte\": \"now\",\n" +
                "            \"time_zone\": \"+01:00\"\n" +
                "        }\n" +
                "    }\n" +
                "}";
        Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
        assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class));

        // Min value was 2012-01-01 (UTC) so we need to remove one hour
        DateTime min = DateTime.parse("2012-01-01T00:00:00.000+01:00");
        // Max value is when we started the test. So it should be some ms from now
        DateTime max = new DateTime(startDate, DateTimeZone.UTC);

        assertThat(((LegacyNumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis()));

        // We should not have a big difference here (should be some ms)
        assertThat(((LegacyNumericRangeQuery) parsedQuery).getMax().longValue() - max.getMillis(), lessThanOrEqualTo(60000L));

        // A time_zone on a numeric field is rejected at toQuery time.
        query = "{\n" +
                "    \"range\" : {\n" +
                "        \"" + INT_FIELD_NAME + "\" : {\n" +
                "            \"gte\": \"0\",\n" +
                "            \"lte\": \"100\",\n" +
                "            \"time_zone\": \"-01:00\"\n" +
                "        }\n" +
                "    }\n" +
                "}";
        try {
            parseQuery(query).toQuery(createShardContext());
            fail("A Range Query on a numeric field with a TimeZone should raise a ParsingException");
        } catch (QueryShardException e) {
            // We expect it
        }
    }

    public void testFromJson() throws IOException {
        String json =
                "{\n" +
                "  \"range\" : {\n" +
                "    \"timestamp\" : {\n" +
                "      \"from\" : \"2015-01-01 00:00:00\",\n" +
                "      \"to\" : \"now\",\n" +
                "      \"include_lower\" : true,\n" +
                "      \"include_upper\" : true,\n" +
                "      \"time_zone\" : \"+01:00\",\n" +
                "      \"boost\" : 1.0\n" +
                "    }\n" +
                "  }\n" +
                "}";

        RangeQueryBuilder parsed = (RangeQueryBuilder) parseQuery(json);
        checkGeneratedJson(json, parsed);

        assertEquals(json, "2015-01-01 00:00:00", parsed.from());
        assertEquals(json, "now", parsed.to());
    }

    public void testNamedQueryParsing() throws IOException {
        // "_name" nested inside the field object is always accepted
        String json =
                "{\n" +
                "  \"range\" : {\n" +
                "    \"timestamp\" : {\n" +
                "      \"from\" : \"2015-01-01 00:00:00\",\n" +
                "      \"to\" : \"now\",\n" +
                "      \"boost\" : 1.0,\n" +
                "      \"_name\" : \"my_range\"\n" +
                "    }\n" +
                "  }\n" +
                "}";
        assertNotNull(parseQuery(json));

        json =
                "{\n" +
                "  \"range\" : {\n" +
                "    \"timestamp\" : {\n" +
                "      \"from\" : \"2015-01-01 00:00:00\",\n" +
                "      \"to\" : \"now\",\n" +
                "      \"boost\" : 1.0\n" +
                "    },\n" +
                "    \"_name\" : \"my_range\"\n" +
                "  }\n" +
                "}";

        // non strict parsing should accept "_name" on top level
        assertNotNull(parseQuery(json, ParseFieldMatcher.EMPTY));

        // with strict parsing, ParseField will throw exception
        try {
            parseQuery(json, ParseFieldMatcher.STRICT);
            fail("Strict parsing should trigger exception for '_name' on top level");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), equalTo("Deprecated field [_name] used, replaced by [query name is not supported in short version of range query]"));
        }
    }

    public void testRewriteDateToMatchAll() throws IOException {
        // WITHIN: the shard's data lies entirely inside the query range, so the
        // rewrite drops both bounds (open-ended range matching everything).
        String fieldName = randomAsciiOfLengthBetween(1, 20);
        RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
            protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException {
                return Relation.WITHIN;
            }
        };
        DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        DateTime shardMinValue = new DateTime(2015, 3, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        DateTime shardMaxValue = new DateTime(2015, 9, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        query.from(queryFromValue);
        query.to(queryToValue);
        QueryShardContext queryShardContext = queryShardContext();
        QueryBuilder<?> rewritten = query.rewrite(queryShardContext);
        assertThat(rewritten, instanceOf(RangeQueryBuilder.class));
        RangeQueryBuilder rewrittenRange = (RangeQueryBuilder) rewritten;
        assertThat(rewrittenRange.fieldName(), equalTo(fieldName));
        assertThat(rewrittenRange.from(), equalTo(null));
        assertThat(rewrittenRange.to(), equalTo(null));
    }

    public void testRewriteDateToMatchNone() throws IOException {
        // DISJOINT: no overlap between shard data and query range -> match_none
        String fieldName = randomAsciiOfLengthBetween(1, 20);
        RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
            protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException {
                return Relation.DISJOINT;
            }
        };
        DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        query.from(queryFromValue);
        query.to(queryToValue);
        QueryShardContext queryShardContext = queryShardContext();
        QueryBuilder<?> rewritten = query.rewrite(queryShardContext);
        assertThat(rewritten, instanceOf(MatchNoneQueryBuilder.class));
    }

    public void testRewriteDateToSame() throws IOException {
        // INTERSECTS: partial overlap -> the query is returned unchanged
        String fieldName = randomAsciiOfLengthBetween(1, 20);
        RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
            protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException {
                return Relation.INTERSECTS;
            }
        };
        DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        query.from(queryFromValue);
        query.to(queryToValue);
        QueryShardContext queryShardContext = queryShardContext();
        QueryBuilder<?> rewritten = query.rewrite(queryShardContext);
        assertThat(rewritten, sameInstance(query));
    }

    public void testRewriteOpenBoundsToSame() throws IOException {
        // A fully open range (no from/to) must also survive rewriting untouched.
        String fieldName = randomAsciiOfLengthBetween(1, 20);
        RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
            protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException {
                return Relation.INTERSECTS;
            }
        };
        QueryShardContext queryShardContext = queryShardContext();
        QueryBuilder<?> rewritten = query.rewrite(queryShardContext);
        assertThat(rewritten, sameInstance(query));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.utils;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.function.Supplier;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Suppliers;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;

/**
 * Implements versioning used in Cassandra and CQL.
 * <p>
 * Note: The following code uses a slight variation from the semver document (http://semver.org).
 * </p>
 */
public class CassandraVersion implements Comparable<CassandraVersion>
{
    /**
     * note: 3rd/4th groups matches to words but only allows number and checked after regexp test.
     * this is because 3rd and the last can be identical.
     **/
    private static final String VERSION_REGEXP = "(\\d+)\\.(\\d+)(?:\\.(\\w+))?(?:\\.(\\w+))?(\\-[-.\\w]+)?([.+][.\\w]+)?";
    private static final Pattern PATTERN_WORDS = Pattern.compile("\\w+");

    // Sentinel meaning "no 4th (hotfix) component present" (e.g. plain "3.11.2").
    @VisibleForTesting
    static final int NO_HOTFIX = -1;

    private static final Pattern PATTERN = Pattern.compile(VERSION_REGEXP);

    public static final CassandraVersion CASSANDRA_4_0 = new CassandraVersion("4.0").familyLowerBound.get();
    public static final CassandraVersion CASSANDRA_4_0_RC2 = new CassandraVersion(4, 0, 0, NO_HOTFIX, new String[] {"rc2"}, null);
    public static final CassandraVersion CASSANDRA_3_4 = new CassandraVersion("3.4").familyLowerBound.get();

    public final int major;
    public final int minor;
    public final int patch;
    public final int hotfix;

    // Lazily-computed "X.Y.0 with empty pre-release" version that sorts below
    // every release of the same major.minor family (including pre-releases).
    public final Supplier<CassandraVersion> familyLowerBound = Suppliers.memoize(this::getFamilyLowerBound);

    // null means "absent"; an empty array is the special family-lower-bound marker.
    private final String[] preRelease;
    private final String[] build;

    @VisibleForTesting
    CassandraVersion(int major, int minor, int patch, int hotfix, String[] preRelease, String[] build)
    {
        this.major = major;
        this.minor = minor;
        this.patch = patch;
        this.hotfix = hotfix;
        this.preRelease = preRelease;
        this.build = build;
    }

    /**
     * Parse a version from a string.
     *
     * @param version the string to parse
     * @throws IllegalArgumentException if the provided string does not
     *                                  represent a version
     */
    public CassandraVersion(String version)
    {
        Matcher matcher = PATTERN.matcher(version);
        if (!matcher.matches())
            throw new IllegalArgumentException("Invalid version value: " + version);

        try
        {
            this.major = Integer.parseInt(matcher.group(1));
            this.minor = Integer.parseInt(matcher.group(2));
            // patch/hotfix groups match \w+ so they may be non-numeric;
            // the NumberFormatException below converts that into an IAE.
            this.patch = matcher.group(3) != null ? Integer.parseInt(matcher.group(3)) : 0;
            this.hotfix = matcher.group(4) != null ? Integer.parseInt(matcher.group(4)) : NO_HOTFIX;

            String pr = matcher.group(5);
            String bld = matcher.group(6);

            this.preRelease = pr == null || pr.isEmpty() ? null : parseIdentifiers(version, pr);
            this.build = bld == null || bld.isEmpty() ? null : parseIdentifiers(version, bld);
        }
        catch (NumberFormatException e)
        {
            throw new IllegalArgumentException("Invalid version value: " + version, e);
        }
    }

    private CassandraVersion getFamilyLowerBound()
    {
        // Reuse this instance when it already IS the family lower bound
        // (patch 0, no hotfix, empty-but-present pre-release, no build metadata).
        return patch == 0 && hotfix == NO_HOTFIX && preRelease != null && preRelease.length == 0 && build == null
               ? this
               : new CassandraVersion(major, minor, 0, NO_HOTFIX, ArrayUtils.EMPTY_STRING_ARRAY, null);
    }

    private static String[] parseIdentifiers(String version, String str)
    {
        // Drop initial - or +
        str = str.substring(1);
        String[] parts = StringUtils.split(str, ".-");
        for (String part : parts)
        {
            if (!PATTERN_WORDS.matcher(part).matches())
                throw new IllegalArgumentException("Invalid version value: " + version + "; " + part + " not a valid identifier");
        }
        return parts;
    }

    public List<String> getPreRelease()
    {
        return preRelease != null ? Arrays.asList(preRelease) : Collections.emptyList();
    }

    public List<String> getBuild()
    {
        return build != null ? Arrays.asList(build) : Collections.emptyList();
    }

    public int compareTo(CassandraVersion other)
    {
        return compareTo(other, false);
    }

    /**
     * Compares two versions, optionally stopping after the patch component.
     *
     * @param other              version to compare against
     * @param compareToPatchOnly when true, hotfix/pre-release/build are ignored
     */
    public int compareTo(CassandraVersion other, boolean compareToPatchOnly)
    {
        if (major < other.major)
            return -1;
        if (major > other.major)
            return 1;

        if (minor < other.minor)
            return -1;
        if (minor > other.minor)
            return 1;

        if (patch < other.patch)
            return -1;
        if (patch > other.patch)
            return 1;

        if (compareToPatchOnly)
            return 0;

        int c = Integer.compare(hotfix, other.hotfix);
        if (c != 0)
            return c;

        // Absent pre-release (null) sorts AFTER a present one (defaultPred = 1),
        // absent build sorts BEFORE a present one (defaultPred = -1).
        c = compareIdentifiers(preRelease, other.preRelease, 1);
        if (c != 0)
            return c;

        return compareIdentifiers(build, other.build, -1);
    }

    private static int compareIdentifiers(String[] ids1, String[] ids2, int defaultPred)
    {
        if (ids1 == null)
            return ids2 == null ? 0 : defaultPred;
        else if (ids2 == null)
            return -defaultPred;

        int min = Math.min(ids1.length, ids2.length);
        for (int i = 0; i < min; i++)
        {
            Integer i1 = tryParseInt(ids1[i]);
            Integer i2 = tryParseInt(ids2[i]);

            if (i1 != null)
            {
                // integer have precedence
                if (i2 == null || i1 < i2)
                    return -1;
                else if (i1 > i2)
                    return 1;
            }
            else
            {
                // integer have precedence
                if (i2 != null)
                    return 1;

                int c = ids1[i].compareToIgnoreCase(ids2[i]);
                if (c != 0)
                    return c;
            }
        }

        if (ids1.length < ids2.length)
        {
            // If the preRelease is empty it means that it is a family lower bound and that the first identifier is smaller than the second one
            // (e.g. 4.0.0- < 4.0.0-beta1)
            if (ids1.length == 0)
                return -1;

            // If the difference in length is only due to SNAPSHOT we know that the second identifier is smaller than the first one.
            // (e.g. 4.0.0-rc1 > 4.0.0-rc1-SNAPSHOT)
            return (ids2.length - ids1.length) == 1 && ids2[ids2.length - 1].equalsIgnoreCase("SNAPSHOT") ? 1 : -1;
        }
        if (ids1.length > ids2.length)
        {
            // If the preRelease is empty it means that it is a family lower bound and that the second identifier is smaller than the first one
            // (e.g. 4.0.0-beta1 > 4.0.0-)
            if (ids2.length == 0)
                return 1;

            // If the difference in length is only due to SNAPSHOT we know that the first identifier is smaller than the second one.
            // (e.g. 4.0.0-rc1-SNAPSHOT < 4.0.0-rc1)
            return (ids1.length - ids2.length) == 1 && ids1[ids1.length - 1].equalsIgnoreCase("SNAPSHOT") ? -1 : 1;
        }
        return 0;
    }

    // Returns null (rather than throwing) when str is not a valid integer.
    private static Integer tryParseInt(String str)
    {
        try
        {
            return Integer.valueOf(str);
        }
        catch (NumberFormatException e)
        {
            return null;
        }
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        CassandraVersion that = (CassandraVersion) o;
        return major == that.major &&
               minor == that.minor &&
               patch == that.patch &&
               hotfix == that.hotfix &&
               Arrays.equals(preRelease, that.preRelease) &&
               Arrays.equals(build, that.build);
    }

    @Override
    public int hashCode()
    {
        int result = Objects.hash(major, minor, patch, hotfix);
        result = 31 * result + Arrays.hashCode(preRelease);
        result = 31 * result + Arrays.hashCode(build);
        return result;
    }

    @Override
    public String toString()
    {
        StringBuilder sb = new StringBuilder();
        sb.append(major).append('.').append(minor).append('.').append(patch);
        if (hotfix != NO_HOTFIX)
            sb.append('.').append(hotfix);
        if (preRelease != null)
            sb.append('-').append(StringUtils.join(preRelease, "."));
        if (build != null)
            sb.append('+').append(StringUtils.join(build, "."));
        return sb.toString();
    }
}
/* * Copyright (c) 2014 VMware, Inc. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); you may not use * this file except in compliance with the License. You may obtain a copy of * the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software distributed * under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.vmware.vchs.api.samples.services; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import com.vmware.vchs.api.samples.SampleConstants; import com.vmware.vchs.api.samples.services.helper.HttpUtils; import com.vmware.vchs.vms.billabledata.v1.BillableCostsType; import com.vmware.vchs.vms.billabledata.v1.BillableUsageType; /** * This helper class implements API calls to the metering and billing APIs. This particular class * focuses on the metering API calls. */ public class Metering { /** * Gets billable/current usage for the specified L1; Usage is shown only for the duration for * which bill is not yet generated. 
It will include details like entity details, metric name, * usage, unit, rate, currency and cost If query params are not specified, then it will default * to 'duration=BillToDate' Query params start and end are mutually exclusive with duration * * @param url * the base API url * @param authToken * OAUTH2 token * @param version * version of the API to invoke * @param serviceInstanceId * the service instance ID * @param l1Id * the L1 id * @return an instance of BillableUsageType or null */ public static BillableUsageType getL1BillableUsage(String url, String authToken, String version, String serviceInstanceId, String l1id) { StringBuilder sb = new StringBuilder(url); sb.append(SampleConstants.API_METERING_SERVICE_INSTANCE); sb.append("/"); sb.append(serviceInstanceId); sb.append("/l1"); sb.append(l1id); sb.append("/billable-usage"); HttpResponse response = HttpUtils.httpGet(sb.toString(), authToken, SampleConstants.APPLICATION_XML, null, SampleConstants.CLASS_METERING_BILLABLE_USAGE, version); if (null != response) { // If the response status is 400 - 599 if (response.getStatusLine().getStatusCode() >= HttpStatus.SC_BAD_REQUEST) { // This is here to show when an error occurs, the response should always be // an Error instance Error error = HttpUtils.unmarshal(response.getEntity(), Error.class); // Do something with Error, possibly using Error.getCode() value to // determine the specific reason for the error. } else { if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { BillableUsageType bill = HttpUtils.unmarshal(response.getEntity(), BillableUsageType.class); return bill; } } } if (null != response) { } return null; } /** * Gets billable/current usage for the specified L2; Usage is shown only for durations for which * bill is not yet generated. 
It will include details like entity details, metric name, usage, * unit, rate, currency and cost If query params are not specified, then it will default to * 'duration=BillToDate' Query params start and end are mutually exclusive with duration * * @param url * the base API url * @param authToken * OAUTH 2 token * @param version * version of the API to invoke * @param serviceInstanceId * the service instance ID * @param l2Id * the L2 id * @return instance of BillableUsageType or null */ public static BillableUsageType getL2BillableUsage(String url, String authToken, String version, String serviceInstanceId, String l2id) { StringBuilder sb = new StringBuilder(url); sb.append(SampleConstants.API_METERING_SERVICE_INSTANCE); sb.append("/"); sb.append(serviceInstanceId); sb.append("/l2"); sb.append(l2id); sb.append("/billable-usage"); HttpResponse response = HttpUtils.httpGet(sb.toString(), authToken, SampleConstants.APPLICATION_XML, null, SampleConstants.CLASS_METERING_BILLABLE_USAGE, version); if (null != response) { // If the response status is 400 - 599 if (response.getStatusLine().getStatusCode() >= HttpStatus.SC_BAD_REQUEST) { // This is here to show when an error occurs, the response should always be // an Error instance Error error = HttpUtils.unmarshal(response.getEntity(), Error.class); // Do something with Error, possibly using Error.getCode() value to // determine the specific reason for the error. } else { if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { BillableUsageType bill = HttpUtils.unmarshal(response.getEntity(), BillableUsageType.class); return bill; } } } return null; } /** * Gets billable/current usage for the specified service instance; Usage is shown only for * durations for which bill is not yet generated. 
It will include details like entity details, * metric name, usage, unit, rate, currency and cost If query params are not specified, then it * will default to 'duration=BillToDate' Query params start and end are mutually exclusive with * duration * * @param url * the base API url * @param authToken * OAUTH 2 token * @param version * version of the API to invoke * @param serviceInstanceId * the service instance id * @return instance of BillableUsageType or null */ public static BillableUsageType getBillableUsage(String url, String authToken, String version, String serviceInstanceId) { StringBuilder sb = new StringBuilder(url); sb.append(SampleConstants.API_METERING_SERVICE_INSTANCE); sb.append("/"); sb.append(serviceInstanceId); sb.append("/billable-usage"); HttpResponse response = HttpUtils.httpGet(sb.toString(), authToken, SampleConstants.APPLICATION_XML, null, SampleConstants.CLASS_METERING_BILLABLE_USAGE, version); if (null != response) { // If the response status is 400 - 599 if (response.getStatusLine().getStatusCode() >= HttpStatus.SC_BAD_REQUEST) { // This is here to show when an error occurs, the response should always be // an Error instance Error error = HttpUtils.unmarshal(response.getEntity(), Error.class); // Do something with Error, possibly using Error.getCode() value to // determine the specific reason for the error. } else { if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { return HttpUtils.unmarshal(response.getEntity(), BillableUsageType.class); } } } return null; } /** * Represent billable/current value of cost items associated with the specified service group; * Only those cost items are listed which are available after last bill cut/generation date. 
It * will support the following cost items - Support Cost and Service Credit * * @param url * the base API url * @param authToken * OAUTH 2 token * @param version * version of the API to invoke * @param serviceGroupId * the service group id * @return instance of BillableCostsType or null */ public static BillableCostsType getBillableCosts(String url, String authToken, String version, String serviceGroupId) { StringBuilder sb = new StringBuilder(url); sb.append(SampleConstants.API_METERING_SERVICE_GROUP); sb.append("/"); sb.append(serviceGroupId); sb.append("/billable-costs"); HttpResponse response = HttpUtils.httpGet(sb.toString(), authToken, SampleConstants.APPLICATION_XML, null, SampleConstants.CLASS_METERING_BILLABLE_COSTS, version); if (null != response) { // If the response status is 400 - 599 if (response.getStatusLine().getStatusCode() >= HttpStatus.SC_BAD_REQUEST) { // This is here to show when an error occurs, the response should always be // an Error instance Error error = HttpUtils.unmarshal(response.getEntity(), Error.class); // Do something with Error, possibly using Error.getCode() value to // determine the specific reason for the error. } else { if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { BillableCostsType bill = HttpUtils.unmarshal(response.getEntity(), BillableCostsType.class); return bill; } } } return null; } }
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/cx/v3beta1/test_case.proto package com.google.cloud.dialogflow.cx.v3beta1; /** * * * <pre> * Represents a result from running a test case in an agent environment. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.TestCaseResult} */ public final class TestCaseResult extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.TestCaseResult) TestCaseResultOrBuilder { private static final long serialVersionUID = 0L; // Use TestCaseResult.newBuilder() to construct. 
// Builder-based constructor used by Builder.buildPartial().
private TestCaseResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default-instance state: empty strings, immutable empty repeated field,
// first (unspecified) enum value.
private TestCaseResult() {
  name_ = "";
  environment_ = "";
  conversationTurns_ = java.util.Collections.emptyList();
  testResult_ = 0;
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new TestCaseResult();
}

@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}

// Wire-format parsing constructor: reads tag/value pairs from the stream and
// populates the message fields; unrecognized tags are preserved in
// unknownFields so round-tripping does not lose data.
private TestCaseResult(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  // Bit 0 tracks whether conversationTurns_ has been switched from the shared
  // immutable empty list to a private mutable ArrayList.
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // tag 0 signals end of stream
          done = true;
          break;
        case 10: // field 1 (name), wire type 2 (length-delimited)
          {
            java.lang.String s = input.readStringRequireUtf8();

            name_ = s;
            break;
          }
        case 18: // field 2 (environment), length-delimited
          {
            java.lang.String s = input.readStringRequireUtf8();

            environment_ = s;
            break;
          }
        case 26: // field 3 (conversation_turns), repeated message
          {
            if (!((mutable_bitField0_ & 0x00000001) != 0)) {
              // Lazily allocate the mutable list on the first element.
              conversationTurns_ =
                  new java.util.ArrayList<
                      com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn>();
              mutable_bitField0_ |= 0x00000001;
            }
            conversationTurns_.add(
                input.readMessage(
                    com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn.parser(),
                    extensionRegistry));
            break;
          }
        case 32: // field 4 (test_result), enum stored as its raw int value
          {
            int rawValue = input.readEnum();

            testResult_ = rawValue;
            break;
          }
        case 42: // field 5 (test_time), message; merged if already present
          {
            com.google.protobuf.Timestamp.Builder subBuilder = null;
            if (testTime_ != null) {
              subBuilder = testTime_.toBuilder();
            }
            testTime_ =
                input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(testTime_);
              testTime_ = subBuilder.buildPartial();
            }

            break;
          }
        default:
          {
            // Unknown field: retain its bytes, or stop if it marks end-group.
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    // Runs even on parse failure: freeze the repeated field and attach
    // whatever unknown fields were read before the error.
    if (((mutable_bitField0_ & 0x00000001) != 0)) {
      conversationTurns_ = java.util.Collections.unmodifiableList(conversationTurns_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
      .internal_static_google_cloud_dialogflow_cx_v3beta1_TestCaseResult_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
      .internal_static_google_cloud_dialogflow_cx_v3beta1_TestCaseResult_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.class,
          com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.Builder.class);
}

public static final int NAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; decoded lazily to String on access.
private volatile java.lang.Object name_;
/**
 *
 *
 * <pre>
 * The resource name for the test case result. Format:
 * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;/testCases/
 * &lt;TestCase ID&gt;/results/&lt;TestCaseResult ID&gt;`.
 * </pre>
 *
 * <code>string name = 1;</code>
 *
 * @return The name.
 */
@java.lang.Override
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded form so later calls return the String directly.
    name_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * The resource name for the test case result.
Format: * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;/testCases/ * &lt;TestCase ID&gt;/results/&lt;TestCaseResult ID&gt;`. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. */ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ENVIRONMENT_FIELD_NUMBER = 2; private volatile java.lang.Object environment_; /** * * * <pre> * Environment where the test was run. If not set, it indicates the draft * environment. * </pre> * * <code>string environment = 2 [(.google.api.resource_reference) = { ... }</code> * * @return The environment. */ @java.lang.Override public java.lang.String getEnvironment() { java.lang.Object ref = environment_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); environment_ = s; return s; } } /** * * * <pre> * Environment where the test was run. If not set, it indicates the draft * environment. * </pre> * * <code>string environment = 2 [(.google.api.resource_reference) = { ... }</code> * * @return The bytes for environment. 
*/ @java.lang.Override public com.google.protobuf.ByteString getEnvironmentBytes() { java.lang.Object ref = environment_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); environment_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CONVERSATION_TURNS_FIELD_NUMBER = 3; private java.util.List<com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn> conversationTurns_; /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn> getConversationTurnsList() { return conversationTurns_; } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.dialogflow.cx.v3beta1.ConversationTurnOrBuilder> getConversationTurnsOrBuilderList() { return conversationTurns_; } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ @java.lang.Override public int getConversationTurnsCount() { return conversationTurns_.size(); } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. 
* </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn getConversationTurns(int index) { return conversationTurns_.get(index); } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.ConversationTurnOrBuilder getConversationTurnsOrBuilder(int index) { return conversationTurns_.get(index); } public static final int TEST_RESULT_FIELD_NUMBER = 4; private int testResult_; /** * * * <pre> * Whether the test case passed in the agent environment. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.TestResult test_result = 4;</code> * * @return The enum numeric value on the wire for testResult. */ @java.lang.Override public int getTestResultValue() { return testResult_; } /** * * * <pre> * Whether the test case passed in the agent environment. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.TestResult test_result = 4;</code> * * @return The testResult. */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.TestResult getTestResult() { @SuppressWarnings("deprecation") com.google.cloud.dialogflow.cx.v3beta1.TestResult result = com.google.cloud.dialogflow.cx.v3beta1.TestResult.valueOf(testResult_); return result == null ? com.google.cloud.dialogflow.cx.v3beta1.TestResult.UNRECOGNIZED : result; } public static final int TEST_TIME_FIELD_NUMBER = 5; private com.google.protobuf.Timestamp testTime_; /** * * * <pre> * The time that the test was run. * </pre> * * <code>.google.protobuf.Timestamp test_time = 5;</code> * * @return Whether the testTime field is set. 
*/ @java.lang.Override public boolean hasTestTime() { return testTime_ != null; } /** * * * <pre> * The time that the test was run. * </pre> * * <code>.google.protobuf.Timestamp test_time = 5;</code> * * @return The testTime. */ @java.lang.Override public com.google.protobuf.Timestamp getTestTime() { return testTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : testTime_; } /** * * * <pre> * The time that the test was run. * </pre> * * <code>.google.protobuf.Timestamp test_time = 5;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getTestTimeOrBuilder() { return getTestTime(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(environment_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, environment_); } for (int i = 0; i < conversationTurns_.size(); i++) { output.writeMessage(3, conversationTurns_.get(i)); } if (testResult_ != com.google.cloud.dialogflow.cx.v3beta1.TestResult.TEST_RESULT_UNSPECIFIED.getNumber()) { output.writeEnum(4, testResult_); } if (testTime_ != null) { output.writeMessage(5, getTestTime()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(environment_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(2, environment_); } for (int i = 0; i < conversationTurns_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, conversationTurns_.get(i)); } if (testResult_ != com.google.cloud.dialogflow.cx.v3beta1.TestResult.TEST_RESULT_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, testResult_); } if (testTime_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(5, getTestTime()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult)) { return super.equals(obj); } com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult other = (com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult) obj; if (!getName().equals(other.getName())) return false; if (!getEnvironment().equals(other.getEnvironment())) return false; if (!getConversationTurnsList().equals(other.getConversationTurnsList())) return false; if (testResult_ != other.testResult_) return false; if (hasTestTime() != other.hasTestTime()) return false; if (hasTestTime()) { if (!getTestTime().equals(other.getTestTime())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + ENVIRONMENT_FIELD_NUMBER; hash = (53 * hash) + getEnvironment().hashCode(); if (getConversationTurnsCount() > 0) { hash = (37 * hash) + CONVERSATION_TURNS_FIELD_NUMBER; hash = (53 * hash) + getConversationTurnsList().hashCode(); } hash = (37 * hash) + TEST_RESULT_FIELD_NUMBER; hash = (53 * hash) + 
testResult_; if (hasTestTime()) { hash = (37 * hash) + TEST_TIME_FIELD_NUMBER; hash = (53 * hash) + getTestTime().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Represents a result from running a test case in an agent environment. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.TestCaseResult} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.TestCaseResult) com.google.cloud.dialogflow.cx.v3beta1.TestCaseResultOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto .internal_static_google_cloud_dialogflow_cx_v3beta1_TestCaseResult_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto .internal_static_google_cloud_dialogflow_cx_v3beta1_TestCaseResult_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.class, com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.Builder.class); } // Construct using com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getConversationTurnsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); name_ = ""; environment_ = ""; if (conversationTurnsBuilder_ == null) { conversationTurns_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { 
conversationTurnsBuilder_.clear(); } testResult_ = 0; if (testTimeBuilder_ == null) { testTime_ = null; } else { testTime_ = null; testTimeBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto .internal_static_google_cloud_dialogflow_cx_v3beta1_TestCaseResult_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult getDefaultInstanceForType() { return com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult build() { com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult buildPartial() { com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult result = new com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult(this); int from_bitField0_ = bitField0_; result.name_ = name_; result.environment_ = environment_; if (conversationTurnsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { conversationTurns_ = java.util.Collections.unmodifiableList(conversationTurns_); bitField0_ = (bitField0_ & ~0x00000001); } result.conversationTurns_ = conversationTurns_; } else { result.conversationTurns_ = conversationTurnsBuilder_.build(); } result.testResult_ = testResult_; if (testTimeBuilder_ == null) { result.testTime_ = testTime_; } else { result.testTime_ = testTimeBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult) { return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult other) { if (other == com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; onChanged(); } if (!other.getEnvironment().isEmpty()) { environment_ = other.environment_; onChanged(); } if (conversationTurnsBuilder_ == null) { if (!other.conversationTurns_.isEmpty()) { if (conversationTurns_.isEmpty()) { conversationTurns_ = other.conversationTurns_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureConversationTurnsIsMutable(); conversationTurns_.addAll(other.conversationTurns_); } onChanged(); } } else { if (!other.conversationTurns_.isEmpty()) { if (conversationTurnsBuilder_.isEmpty()) { conversationTurnsBuilder_.dispose(); conversationTurnsBuilder_ = null; conversationTurns_ = other.conversationTurns_; bitField0_ = (bitField0_ & ~0x00000001); conversationTurnsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getConversationTurnsFieldBuilder() : null; } else { conversationTurnsBuilder_.addAllMessages(other.conversationTurns_); } } } if (other.testResult_ != 0) { setTestResultValue(other.getTestResultValue()); } if (other.hasTestTime()) { mergeTestTime(other.getTestTime()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * The resource name for the test case result. Format: * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;/testCases/ * &lt;TestCase ID&gt;/results/&lt;TestCaseResult ID&gt;`. * </pre> * * <code>string name = 1;</code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The resource name for the test case result. Format: * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;/testCases/ * &lt;TestCase ID&gt;/results/&lt;TestCaseResult ID&gt;`. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. 
*/ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The resource name for the test case result. Format: * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;/testCases/ * &lt;TestCase ID&gt;/results/&lt;TestCaseResult ID&gt;`. * </pre> * * <code>string name = 1;</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; onChanged(); return this; } /** * * * <pre> * The resource name for the test case result. Format: * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;/testCases/ * &lt;TestCase ID&gt;/results/&lt;TestCaseResult ID&gt;`. * </pre> * * <code>string name = 1;</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * * * <pre> * The resource name for the test case result. Format: * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;/testCases/ * &lt;TestCase ID&gt;/results/&lt;TestCaseResult ID&gt;`. * </pre> * * <code>string name = 1;</code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; onChanged(); return this; } private java.lang.Object environment_ = ""; /** * * * <pre> * Environment where the test was run. If not set, it indicates the draft * environment. * </pre> * * <code>string environment = 2 [(.google.api.resource_reference) = { ... 
}</code> * * @return The environment. */ public java.lang.String getEnvironment() { java.lang.Object ref = environment_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); environment_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Environment where the test was run. If not set, it indicates the draft * environment. * </pre> * * <code>string environment = 2 [(.google.api.resource_reference) = { ... }</code> * * @return The bytes for environment. */ public com.google.protobuf.ByteString getEnvironmentBytes() { java.lang.Object ref = environment_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); environment_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Environment where the test was run. If not set, it indicates the draft * environment. * </pre> * * <code>string environment = 2 [(.google.api.resource_reference) = { ... }</code> * * @param value The environment to set. * @return This builder for chaining. */ public Builder setEnvironment(java.lang.String value) { if (value == null) { throw new NullPointerException(); } environment_ = value; onChanged(); return this; } /** * * * <pre> * Environment where the test was run. If not set, it indicates the draft * environment. * </pre> * * <code>string environment = 2 [(.google.api.resource_reference) = { ... }</code> * * @return This builder for chaining. */ public Builder clearEnvironment() { environment_ = getDefaultInstance().getEnvironment(); onChanged(); return this; } /** * * * <pre> * Environment where the test was run. If not set, it indicates the draft * environment. * </pre> * * <code>string environment = 2 [(.google.api.resource_reference) = { ... }</code> * * @param value The bytes for environment to set. * @return This builder for chaining. 
*/ public Builder setEnvironmentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); environment_ = value; onChanged(); return this; } private java.util.List<com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn> conversationTurns_ = java.util.Collections.emptyList(); private void ensureConversationTurnsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { conversationTurns_ = new java.util.ArrayList<com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn>( conversationTurns_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn, com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn.Builder, com.google.cloud.dialogflow.cx.v3beta1.ConversationTurnOrBuilder> conversationTurnsBuilder_; /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public java.util.List<com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn> getConversationTurnsList() { if (conversationTurnsBuilder_ == null) { return java.util.Collections.unmodifiableList(conversationTurns_); } else { return conversationTurnsBuilder_.getMessageList(); } } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public int getConversationTurnsCount() { if (conversationTurnsBuilder_ == null) { return conversationTurns_.size(); } else { return conversationTurnsBuilder_.getCount(); } } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. 
* </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn getConversationTurns(int index) { if (conversationTurnsBuilder_ == null) { return conversationTurns_.get(index); } else { return conversationTurnsBuilder_.getMessage(index); } } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public Builder setConversationTurns( int index, com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn value) { if (conversationTurnsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConversationTurnsIsMutable(); conversationTurns_.set(index, value); onChanged(); } else { conversationTurnsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public Builder setConversationTurns( int index, com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn.Builder builderForValue) { if (conversationTurnsBuilder_ == null) { ensureConversationTurnsIsMutable(); conversationTurns_.set(index, builderForValue.build()); onChanged(); } else { conversationTurnsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. 
* </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public Builder addConversationTurns( com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn value) { if (conversationTurnsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConversationTurnsIsMutable(); conversationTurns_.add(value); onChanged(); } else { conversationTurnsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public Builder addConversationTurns( int index, com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn value) { if (conversationTurnsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConversationTurnsIsMutable(); conversationTurns_.add(index, value); onChanged(); } else { conversationTurnsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public Builder addConversationTurns( com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn.Builder builderForValue) { if (conversationTurnsBuilder_ == null) { ensureConversationTurnsIsMutable(); conversationTurns_.add(builderForValue.build()); onChanged(); } else { conversationTurnsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. 
* </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public Builder addConversationTurns( int index, com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn.Builder builderForValue) { if (conversationTurnsBuilder_ == null) { ensureConversationTurnsIsMutable(); conversationTurns_.add(index, builderForValue.build()); onChanged(); } else { conversationTurnsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public Builder addAllConversationTurns( java.lang.Iterable<? extends com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn> values) { if (conversationTurnsBuilder_ == null) { ensureConversationTurnsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, conversationTurns_); onChanged(); } else { conversationTurnsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public Builder clearConversationTurns() { if (conversationTurnsBuilder_ == null) { conversationTurns_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { conversationTurnsBuilder_.clear(); } return this; } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. 
* </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public Builder removeConversationTurns(int index) { if (conversationTurnsBuilder_ == null) { ensureConversationTurnsIsMutable(); conversationTurns_.remove(index); onChanged(); } else { conversationTurnsBuilder_.remove(index); } return this; } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn.Builder getConversationTurnsBuilder(int index) { return getConversationTurnsFieldBuilder().getBuilder(index); } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public com.google.cloud.dialogflow.cx.v3beta1.ConversationTurnOrBuilder getConversationTurnsOrBuilder(int index) { if (conversationTurnsBuilder_ == null) { return conversationTurns_.get(index); } else { return conversationTurnsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public java.util.List< ? extends com.google.cloud.dialogflow.cx.v3beta1.ConversationTurnOrBuilder> getConversationTurnsOrBuilderList() { if (conversationTurnsBuilder_ != null) { return conversationTurnsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(conversationTurns_); } } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. 
* </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn.Builder addConversationTurnsBuilder() { return getConversationTurnsFieldBuilder() .addBuilder(com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn.getDefaultInstance()); } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn.Builder addConversationTurnsBuilder(int index) { return getConversationTurnsFieldBuilder() .addBuilder( index, com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn.getDefaultInstance()); } /** * * * <pre> * The conversation turns uttered during the test case replay in chronological * order. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.ConversationTurn conversation_turns = 3; * </code> */ public java.util.List<com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn.Builder> getConversationTurnsBuilderList() { return getConversationTurnsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn, com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn.Builder, com.google.cloud.dialogflow.cx.v3beta1.ConversationTurnOrBuilder> getConversationTurnsFieldBuilder() { if (conversationTurnsBuilder_ == null) { conversationTurnsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn, com.google.cloud.dialogflow.cx.v3beta1.ConversationTurn.Builder, com.google.cloud.dialogflow.cx.v3beta1.ConversationTurnOrBuilder>( conversationTurns_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); conversationTurns_ = null; } return conversationTurnsBuilder_; } private int testResult_ 
= 0; /** * * * <pre> * Whether the test case passed in the agent environment. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.TestResult test_result = 4;</code> * * @return The enum numeric value on the wire for testResult. */ @java.lang.Override public int getTestResultValue() { return testResult_; } /** * * * <pre> * Whether the test case passed in the agent environment. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.TestResult test_result = 4;</code> * * @param value The enum numeric value on the wire for testResult to set. * @return This builder for chaining. */ public Builder setTestResultValue(int value) { testResult_ = value; onChanged(); return this; } /** * * * <pre> * Whether the test case passed in the agent environment. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.TestResult test_result = 4;</code> * * @return The testResult. */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.TestResult getTestResult() { @SuppressWarnings("deprecation") com.google.cloud.dialogflow.cx.v3beta1.TestResult result = com.google.cloud.dialogflow.cx.v3beta1.TestResult.valueOf(testResult_); return result == null ? com.google.cloud.dialogflow.cx.v3beta1.TestResult.UNRECOGNIZED : result; } /** * * * <pre> * Whether the test case passed in the agent environment. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.TestResult test_result = 4;</code> * * @param value The testResult to set. * @return This builder for chaining. */ public Builder setTestResult(com.google.cloud.dialogflow.cx.v3beta1.TestResult value) { if (value == null) { throw new NullPointerException(); } testResult_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Whether the test case passed in the agent environment. * </pre> * * <code>.google.cloud.dialogflow.cx.v3beta1.TestResult test_result = 4;</code> * * @return This builder for chaining. 
*/ public Builder clearTestResult() { testResult_ = 0; onChanged(); return this; } private com.google.protobuf.Timestamp testTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> testTimeBuilder_; /** * * * <pre> * The time that the test was run. * </pre> * * <code>.google.protobuf.Timestamp test_time = 5;</code> * * @return Whether the testTime field is set. */ public boolean hasTestTime() { return testTimeBuilder_ != null || testTime_ != null; } /** * * * <pre> * The time that the test was run. * </pre> * * <code>.google.protobuf.Timestamp test_time = 5;</code> * * @return The testTime. */ public com.google.protobuf.Timestamp getTestTime() { if (testTimeBuilder_ == null) { return testTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : testTime_; } else { return testTimeBuilder_.getMessage(); } } /** * * * <pre> * The time that the test was run. * </pre> * * <code>.google.protobuf.Timestamp test_time = 5;</code> */ public Builder setTestTime(com.google.protobuf.Timestamp value) { if (testTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } testTime_ = value; onChanged(); } else { testTimeBuilder_.setMessage(value); } return this; } /** * * * <pre> * The time that the test was run. * </pre> * * <code>.google.protobuf.Timestamp test_time = 5;</code> */ public Builder setTestTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (testTimeBuilder_ == null) { testTime_ = builderForValue.build(); onChanged(); } else { testTimeBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * The time that the test was run. 
* </pre> * * <code>.google.protobuf.Timestamp test_time = 5;</code> */ public Builder mergeTestTime(com.google.protobuf.Timestamp value) { if (testTimeBuilder_ == null) { if (testTime_ != null) { testTime_ = com.google.protobuf.Timestamp.newBuilder(testTime_).mergeFrom(value).buildPartial(); } else { testTime_ = value; } onChanged(); } else { testTimeBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * The time that the test was run. * </pre> * * <code>.google.protobuf.Timestamp test_time = 5;</code> */ public Builder clearTestTime() { if (testTimeBuilder_ == null) { testTime_ = null; onChanged(); } else { testTime_ = null; testTimeBuilder_ = null; } return this; } /** * * * <pre> * The time that the test was run. * </pre> * * <code>.google.protobuf.Timestamp test_time = 5;</code> */ public com.google.protobuf.Timestamp.Builder getTestTimeBuilder() { onChanged(); return getTestTimeFieldBuilder().getBuilder(); } /** * * * <pre> * The time that the test was run. * </pre> * * <code>.google.protobuf.Timestamp test_time = 5;</code> */ public com.google.protobuf.TimestampOrBuilder getTestTimeOrBuilder() { if (testTimeBuilder_ != null) { return testTimeBuilder_.getMessageOrBuilder(); } else { return testTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : testTime_; } } /** * * * <pre> * The time that the test was run. 
* </pre> * * <code>.google.protobuf.Timestamp test_time = 5;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getTestTimeFieldBuilder() { if (testTimeBuilder_ == null) { testTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getTestTime(), getParentForChildren(), isClean()); testTime_ = null; } return testTimeBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.TestCaseResult) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.TestCaseResult) private static final com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult(); } public static com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<TestCaseResult> PARSER = new com.google.protobuf.AbstractParser<TestCaseResult>() { @java.lang.Override public TestCaseResult parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new TestCaseResult(input, extensionRegistry); } }; public static com.google.protobuf.Parser<TestCaseResult> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<TestCaseResult> getParserForType() { return PARSER; } 
@java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.elasticloadbalancing.model;

import java.io.Serializable;

/**
 * Information about the configuration of a back-end server.
 *
 * <p>NOTE(review): AWS-SDK-generated model class; follows the SDK's
 * getter/setter/{@code withX}-chaining convention. Hand-edits will be lost on
 * regeneration.
 */
public class BackendServerDescription implements Serializable, Cloneable {

    /** The port on which the back-end server is listening. Range: 1 - 65535. */
    private Integer instancePort;

    /** The names of the policies enabled for the back-end server. */
    private com.amazonaws.internal.ListWithAutoConstructFlag<String> policyNames;

    /**
     * Returns the port on which the back-end server is listening (1 - 65535).
     */
    public Integer getInstancePort() {
        return instancePort;
    }

    /**
     * Sets the port on which the back-end server is listening (1 - 65535).
     */
    public void setInstancePort(Integer instancePort) {
        this.instancePort = instancePort;
    }

    /**
     * Sets the listening port and returns {@code this} for call chaining.
     */
    public BackendServerDescription withInstancePort(Integer instancePort) {
        this.instancePort = instancePort;
        return this;
    }

    /**
     * Returns the names of the policies enabled for the back-end server.
     * Lazily auto-constructs an empty, auto-construct-flagged list when unset,
     * so this never returns {@code null}.
     */
    public java.util.List<String> getPolicyNames() {
        if (policyNames == null) {
              policyNames = new com.amazonaws.internal.ListWithAutoConstructFlag<String>();
              policyNames.setAutoConstruct(true);
        }
        return policyNames;
    }

    /**
     * Sets the policy names. A defensive copy of the given collection is
     * stored; passing {@code null} clears the field.
     */
    public void setPolicyNames(java.util.Collection<String> policyNames) {
        if (policyNames == null) {
            this.policyNames = null;
            return;
        }
        com.amazonaws.internal.ListWithAutoConstructFlag<String> policyNamesCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(policyNames.size());
        policyNamesCopy.addAll(policyNames);
        this.policyNames = policyNamesCopy;
    }

    /**
     * Appends the given policy names to the existing list (does NOT replace it
     * — use {@link #setPolicyNames(java.util.Collection)} or
     * {@link #withPolicyNames(java.util.Collection)} to override) and returns
     * {@code this} for call chaining.
     */
    public BackendServerDescription withPolicyNames(String... policyNames) {
        // getPolicyNames() auto-constructs, so this branch seeds capacity only
        // when the field was explicitly nulled is impossible; kept as generated.
        if (getPolicyNames() == null) setPolicyNames(new java.util.ArrayList<String>(policyNames.length));
        for (String value : policyNames) {
            getPolicyNames().add(value);
        }
        return this;
    }

    /**
     * Replaces the policy names with a defensive copy of the given collection
     * ({@code null} clears the field) and returns {@code this} for chaining.
     */
    public BackendServerDescription withPolicyNames(java.util.Collection<String> policyNames) {
        if (policyNames == null) {
            this.policyNames = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<String> policyNamesCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(policyNames.size());
            policyNamesCopy.addAll(policyNames);
            this.policyNames = policyNamesCopy;
        }
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getInstancePort() != null) sb.append("InstancePort: " + getInstancePort() + ",");
        if (getPolicyNames() != null) sb.append("PolicyNames: " + getPolicyNames() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getInstancePort() == null) ? 0 : getInstancePort().hashCode());
        hashCode = prime * hashCode + ((getPolicyNames() == null) ? 0 : getPolicyNames().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;

        if (obj instanceof BackendServerDescription == false) return false;
        BackendServerDescription other = (BackendServerDescription)obj;

        // XOR: exactly one side null => not equal (SDK-generated null-safe compare).
        if (other.getInstancePort() == null ^ this.getInstancePort() == null) return false;
        if (other.getInstancePort() != null && other.getInstancePort().equals(this.getInstancePort()) == false) return false;
        if (other.getPolicyNames() == null ^ this.getPolicyNames() == null) return false;
        if (other.getPolicyNames() != null && other.getPolicyNames().equals(this.getPolicyNames()) == false) return false;
        return true;
    }

    /**
     * Shallow clone via {@link Object#clone()}; the unchecked exception should
     * be impossible since the class implements {@link Cloneable}.
     */
    @Override
    public BackendServerDescription clone() {
        try {
            return (BackendServerDescription) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() " +
                    "even though we're Cloneable!",
                    e);
        }
    }
}
package sma.pacman.game.character;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import sma.pacman.game.Direction;
import sma.pacman.game.Board;
import sma.pacman.game.graphics.Animation;
import sma.pacman.util.PointUtils;
import sma.pacman.util.ResourceUtils;

import java.awt.*;
import java.io.IOException;
import java.util.*;

/**
 * A game character (hero or ghost): board position, facing direction,
 * per-direction sprite animations, score, and the per-round event set that
 * observers read via {@link #getEvents()}.
 */
public class Character {

    /** Observer notified whenever a next move is assigned to this character. */
    public interface Listener {
        void nextMoveSet(Direction move);
    }

    private static final Logger logger = LogManager.getLogger(Character.class.getName());

    // Blink period (in the same time unit as update()'s elapsedTime) used to
    // flash the sprite while spawn protection is active.
    private static final Float SPAWN_BLINK_INTERVAL = 0.02f;

    private Point position;                 // tile coordinates on the board
    private Direction orientation;          // direction the sprite is facing
    private Animation currentAnimation;     // animation for the current orientation; may be null if loading failed

    private Boolean alive;
    private Float lifeTime;                 // time since spawn, accumulated by update()
    private Integer aliveRounds;            // rounds survived since spawn
    private Boolean spawnProtection = false;
    protected Boolean heroBoost = false;

    // Events raised since the start of the current round; cleared in doNextMove().
    protected Set<CharacterEvent> events = new HashSet<CharacterEvent>();

    private Map<Direction, Animation> animations = new HashMap<Direction, Animation>();

    private Direction nextMove;
    private Integer score = 0;
    private String name;

    private java.util.List<Listener> listeners = new ArrayList<Listener>();

    /** Creates a dead (not yet spawned) character with the given display name. */
    public Character(String name) {
        alive = false;
        this.name = name;
    }

    public void addListener(Listener listener) {
        listeners.add(listener);
    }

    /**
     * Brings the character to life at tile {@code p}, facing RIGHT, with spawn
     * protection enabled and a SPAWN event recorded.
     */
    public void spawn(Point p) {
        alive = true;
        lifeTime = 0f;
        aliveRounds = 0;

        position = p;
        orientation = Direction.RIGHT;
        currentAnimation = animations.get(orientation);

        setSpawnProtection(true);

        events.add(CharacterEvent.SPAWN);
    }

    /**
     * Loads one animation per {@link Direction} from
     * {@code /images/character/<name>/<direction>/<frame>.png} (frames are
     * 1-based on disk). On failure the error is logged and the character is
     * left without animations (draw() and update() tolerate that).
     */
    protected void loadAnimations(String name, int frameCount, float frameDelay) {
        logger.trace("Loading character animations resource {}", name);

        animations.clear();
        currentAnimation = null;

        try {
            Image[] images = new Image[frameCount];
            for(Direction direction: Direction.values()) {
                for(int i = 0; i < frameCount; ++i) {
                    String resourceName =
                            String.format("/images/character/%s/%s/%d.png", name, direction, (i+ 1));
                    images[i] = ResourceUtils.getImage(resourceName);
                }
                Animation animation = new Animation(images, frameDelay);
                animation.start();
                animations.put(direction, animation);
            }
            currentAnimation = animations.get(orientation);
        } catch (IllegalArgumentException | IOException e) {
            // Was two identical catch blocks; collapsed into a multi-catch.
            logger.error("Failed to set tile image from resource {}", name, e);
        }
    }

    /**
     * Advances the animation and lifetime while alive. {@code currentAnimation}
     * is guarded: it can be null when loadAnimations() failed or when the
     * current orientation has no loaded animation.
     */
    public void update(float elapsedTime) {
        if(alive) {
            if(currentAnimation != null) {
                currentAnimation.update(elapsedTime);
            }
            lifeTime += elapsedTime;
        }
    }

    /**
     * Draws the sprite centered on its tile. While spawn protection is active
     * the sprite blinks at SPAWN_BLINK_INTERVAL; dead characters are not drawn.
     */
    public void draw(Graphics g) {
        Boolean visible = false;

        if(alive) {
            if(hasSpawnProtection()) {
                // Alternate visibility every SPAWN_BLINK_INTERVAL of lifetime.
                visible = (((int)(lifeTime / SPAWN_BLINK_INTERVAL) % 2) != 0);
            }
            else {
                visible = true;
            }
        }

        if(visible) {
            int tile_width = Board.TILE_WIDTH;
            int tile_height = Board.TILE_HEIGHT;
            int character_width = Board.CHARACTER_WIDTH;
            int character_height = Board.CHARACTER_HEIGHT;

            // Offset so the (possibly larger) sprite is centered on the tile.
            int draw_x = position.x * tile_width - (character_width - tile_width)/2;
            int draw_y = position.y * tile_height - (character_height - tile_height)/2;

            if(currentAnimation != null) {
                Image image = currentAnimation.getSprite();
                g.drawImage(image, draw_x, draw_y, character_width, character_height, null);
            }
        }
    }

    public Integer getAliveRounds() {
        return aliveRounds;
    }

    /** Toggles spawn protection and records the matching PROTECTION_* event. */
    public void setSpawnProtection(Boolean state) {
        if(state) {
            events.add(CharacterEvent.PROTECTION_ON);
        }
        else {
            events.add(CharacterEvent.PROTECTION_OFF);
        }

        spawnProtection = state;
    }

    public Boolean hasSpawnProtection() {
        return spawnProtection;
    }

    public Point getPosition() {
        return position;
    }

    public void setPosition(Point position) {
        this.position = position;
    }

    /** Updates the facing direction and switches to its animation (may be null). */
    public void setOrientation(Direction orientation) {
        this.orientation = orientation;
        currentAnimation = animations.get(orientation);
    }

    /** Stores the move for the next round and notifies listeners. */
    public void setNextMove(Direction nextMove) {
        this.nextMove = nextMove;
        fireNextMoveSet();
    }

    private void fireNextMoveSet() {
        for (Listener listener: listeners) {
            listener.nextMoveSet(nextMove);
        }
    }

    public Direction getNextMove() {
        return nextMove;
    }

    public Boolean hasNextMove() {
        return (nextMove != null);
    }

    /**
     * Executes the pending move: translates the position unless {@code blocked},
     * always turns to face the attempted direction, then clears the pending move
     * and the previous round's events and counts the round.
     */
    public void doNextMove(Boolean blocked) {
        if(!blocked) {
            Point newPosition = new Point(position);
            PointUtils.translateTo(newPosition, nextMove);
            setPosition(newPosition);
        }
        setOrientation(nextMove);
        nextMove = null;
        events.clear();
        aliveRounds++;
    }

    /** Toggles the hero boost and records the matching BOOST_* event. */
    public void setHeroBoost(Boolean state) {
        if(state) {
            events.add(CharacterEvent.BOOST_ON);
        }
        else {
            events.add(CharacterEvent.BOOST_OFF);
        }

        heroBoost = state;
    }

    public Boolean isAlive() {
        return alive;
    }

    /** Marks the character dead and records a DEATH event. */
    public void die() {
        events.add(CharacterEvent.DEATH);
        alive = false;
    }

    /** Adds {@code amount} to the score and records a SCORE_UP event. */
    public void addScore(Integer amount) {
        score += amount;
        events.add(CharacterEvent.SCORE_UP);
    }

    public Integer getScore() {
        return score;
    }

    public String getName() {
        return name;
    }

    /** Returns the live (mutable) event set for the current round. */
    public Set<CharacterEvent> getEvents() {
        return events;
    }
}
/*
 * Copyright 2014 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.navercorp.pinpoint.web.cluster;

import com.navercorp.pinpoint.common.util.NetUtils;
import com.navercorp.pinpoint.rpc.client.PinpointClient;
import com.navercorp.pinpoint.rpc.client.PinpointClientFactory;
import com.navercorp.pinpoint.rpc.client.SimpleMessageListener;
import com.navercorp.pinpoint.test.client.TestPinpointClient;
import com.navercorp.pinpoint.test.utils.TestAwaitTaskUtils;
import com.navercorp.pinpoint.test.utils.TestAwaitUtils;
import com.navercorp.pinpoint.web.cluster.connection.ClusterConnectionManager;
import com.navercorp.pinpoint.web.cluster.zookeeper.ZookeeperClusterDataManager;
import com.navercorp.pinpoint.web.config.WebConfig;
import com.navercorp.pinpoint.web.util.PinpointWebTestUtils;
import org.apache.curator.test.TestingServer;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooKeeper;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.SocketUtils;

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.util.List;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Integration test for the web cluster: registers this web instance in an
 * embedded ZooKeeper and checks node contents survive restarts and that a
 * pinpoint client connection is picked up by the connection manager.
 *
 * @author Taejin Koo
 */
public class ClusterTest {

    private static final Logger LOGGER = LoggerFactory.getLogger(ClusterTest.class);

    private static final Charset UTF_8_CHARSET = StandardCharsets.UTF_8;

    // some tests may fail when executed in local environment
    // when failures happen, you have to copy pinpoint-web.properties of resource-test to resource-local. Tests will succeed.

    private static TestAwaitUtils awaitUtils = new TestAwaitUtils(100, 10000);

    private static final String DEFAULT_IP = PinpointWebTestUtils.getRepresentationLocalV4Ip();

    static ClusterConnectionManager clusterConnectionManager;
    static ZookeeperClusterDataManager clusterDataManager;

    private static String CLUSTER_NODE_PATH;

    private static int acceptorPort;
    private static int zookeeperPort;

    private static String acceptorAddress;
    private static String zookeeperAddress;

    private static TestingServer ts = null;

    /**
     * Boots an embedded ZooKeeper on a free port, starts the cluster managers,
     * and registers this web instance's local IPv4 addresses under
     * {@code /pinpoint-cluster/web/<acceptorAddress>}.
     */
    @BeforeClass
    public static void setUp() throws Exception {
        acceptorPort = SocketUtils.findAvailableTcpPort(28000);
        acceptorAddress = DEFAULT_IP + ":" + acceptorPort;

        zookeeperPort = SocketUtils.findAvailableTcpPort(acceptorPort + 1);
        zookeeperAddress = DEFAULT_IP + ":" + zookeeperPort;

        ts = createZookeeperServer(zookeeperPort);

        CLUSTER_NODE_PATH = "/pinpoint-cluster/web/" + acceptorAddress;
        LOGGER.debug("CLUSTER_NODE_PATH:{}", CLUSTER_NODE_PATH);

        WebConfig config = mock(WebConfig.class);
        when(config.isClusterEnable()).thenReturn(true);
        when(config.getClusterTcpPort()).thenReturn(acceptorPort);
        when(config.getClusterZookeeperAddress()).thenReturn(zookeeperAddress);
        when(config.getClusterZookeeperRetryInterval()).thenReturn(60000);
        when(config.getClusterZookeeperSessionTimeout()).thenReturn(3000);

        clusterConnectionManager = new ClusterConnectionManager(config);
        clusterConnectionManager.start();

        clusterDataManager = new ZookeeperClusterDataManager(config);
        clusterDataManager.start();

        List<String> localV4IpList = NetUtils.getLocalV4IpList();
        clusterDataManager.registerWebCluster(acceptorAddress, convertIpListToBytes(localV4IpList, "\r\n"));
    }

    /** Shuts everything down; each stop is best-effort so teardown always completes. */
    @AfterClass
    public static void tearDown() throws Exception {
        closeZookeeperServer(ts);

        try {
            clusterDataManager.stop();
        } catch (Exception ignored) {
            // best-effort shutdown
        }

        try {
            clusterConnectionManager.stop();
        } catch (Exception ignored) {
            // best-effort shutdown
        }
    }

    private static TestingServer createZookeeperServer(int port) throws Exception {
        TestingServer mockZookeeperServer = new TestingServer(port);
        mockZookeeperServer.start();
        return mockZookeeperServer;
    }

    private static void closeZookeeperServer(TestingServer mockZookeeperServer) throws Exception {
        try {
            if (mockZookeeperServer != null) {
                mockZookeeperServer.close();
            }
        } catch (Exception e) {
            // Log instead of printStackTrace so failures show up in the test log.
            LOGGER.warn(e.getMessage(), e);
        }
    }

    /** Joins the IP list with {@code delimiter} and encodes it as UTF-8 bytes. */
    private static byte[] convertIpListToBytes(List<String> ipList, String delimiter) {
        // Replaced manual Iterator/StringBuilder join with String.join.
        return String.join(delimiter, ipList).getBytes(UTF_8_CHARSET);
    }

    @After
    public void after() throws Exception {
        ts.restart();
    }

    /** A fresh client can connect and see this instance's registered node. */
    @Test
    public void clusterTest1() throws Exception {
        ZooKeeper zookeeper = new ZooKeeper(zookeeperAddress, 5000, null);
        try {
            awaitZookeeperConnected(zookeeper);
        } finally {
            // try/finally so the handle is released even when an assertion fails.
            zookeeper.close();
        }
    }

    /** Node reads fail with CONNECTIONLOSS while the server is down, and recover after restart. */
    @Test
    public void clusterTest2() throws Exception {
        ZooKeeper zookeeper = new ZooKeeper(zookeeperAddress, 5000, null);
        try {
            awaitZookeeperConnected(zookeeper);

            ts.stop();
            awaitZookeeperDisconnected(zookeeper);

            try {
                zookeeper.getData(CLUSTER_NODE_PATH, null, null);
                Assert.fail();
            } catch (KeeperException e) {
                Assert.assertEquals(KeeperException.Code.CONNECTIONLOSS, e.code());
            }

            ts.restart();
            getNodeAndCompareContents(zookeeper);
        } finally {
            zookeeper.close();
        }
    }

    /** A pinpoint client connection shows up in the cluster connection manager. */
    @Test
    public void clusterTest3() throws Exception {
        ZooKeeper zookeeper = null;
        TestPinpointClient testPinpointClient = new TestPinpointClient(SimpleMessageListener.INSTANCE);
        try {
            zookeeper = new ZooKeeper(zookeeperAddress, 5000, null);
            awaitZookeeperConnected(zookeeper);

            Assert.assertEquals(0, clusterConnectionManager.getClusterList().size());

            testPinpointClient.connect(DEFAULT_IP, acceptorPort);
            awaitPinpointClientConnected(clusterConnectionManager);

            Assert.assertEquals(1, clusterConnectionManager.getClusterList().size());
        } finally {
            testPinpointClient.closeAll();

            if (zookeeper != null) {
                zookeeper.close();
            }
        }
    }

    private void awaitZookeeperConnected(final ZooKeeper zookeeper) {
        boolean pass = awaitUtils.await(new TestAwaitTaskUtils() {
            @Override
            public boolean checkCompleted() {
                return getNodeAndCompareContents0(zookeeper);
            }
        });
        Assert.assertTrue(pass);
    }

    private void awaitZookeeperDisconnected(final ZooKeeper zookeeper) {
        boolean pass = awaitUtils.await(new TestAwaitTaskUtils() {
            @Override
            public boolean checkCompleted() {
                return !getNodeAndCompareContents0(zookeeper);
            }
        });
        Assert.assertTrue(pass);
    }

    private void awaitPinpointClientConnected(final ClusterConnectionManager connectionManager) {
        boolean pass = awaitUtils.await(new TestAwaitTaskUtils() {
            @Override
            public boolean checkCompleted() {
                return !connectionManager.getClusterList().isEmpty();
            }
        });
        Assert.assertTrue(pass);
    }

    /** Asserts the registered node contains exactly the local IPv4 addresses. */
    private void getNodeAndCompareContents(ZooKeeper zookeeper) throws KeeperException, InterruptedException {
        LOGGER.debug("getNodeAndCompareContents() {}", CLUSTER_NODE_PATH);

        byte[] contents = zookeeper.getData(CLUSTER_NODE_PATH, null, null);

        String[] registeredIpList = new String(contents).split("\r\n");

        List<String> ipList = NetUtils.getLocalV4IpList();

        Assert.assertEquals(registeredIpList.length, ipList.size());

        for (String ip : registeredIpList) {
            Assert.assertTrue(ipList.contains(ip));
        }
    }

    /** Non-throwing variant used by the await loops; any failure means "not ready". */
    private boolean getNodeAndCompareContents0(ZooKeeper zookeeper) {
        try {
            LOGGER.debug("getNodeAndCompareContents() {}", CLUSTER_NODE_PATH);

            byte[] contents = zookeeper.getData(CLUSTER_NODE_PATH, null, null);
            if (contents == null) {
                contents = new byte[0];
            }

            String[] registeredIplist = new String(contents).split("\r\n");

            List<String> ipList = NetUtils.getLocalV4IpList();

            if (registeredIplist.length != ipList.size()) {
                return false;
            }

            for (String ip : registeredIplist) {
                if (!ipList.contains(ip)) {
                    return false;
                }
            }
            return true;
        } catch (Exception e) {
            LOGGER.warn(e.getMessage(), e);
        }
        return false;
    }

    // NOTE(review): currently unused helper; kept for parity with similar cluster tests.
    private void closePinpointSocket(PinpointClientFactory clientFactory, PinpointClient client) {
        if (client != null) {
            client.close();
        }

        if (clientFactory != null) {
            clientFactory.release();
        }
    }

}
// Copyright (c) 2006 Damien Miller <djm@mindrot.org> // // Permission to use, copy, modify, and distribute this software for any // purpose with or without fee is hereby granted, provided that the above // copyright notice and this permission notice appear in all copies. // // THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES // WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF // MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR // ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES // WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN // ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF // OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. package jbcrypt; import java.io.UnsupportedEncodingException; import java.security.SecureRandom; /** * BCrypt implements OpenBSD-style Blowfish password hashing using * the scheme described in "A Future-Adaptable Password Scheme" by * Niels Provos and David Mazieres. * <p> * This password hashing system tries to thwart off-line password * cracking using a computationally-intensive hashing algorithm, * based on Bruce Schneier's Blowfish cipher. The work factor of * the algorithm is parameterised, so it can be increased as * computers get faster. * <p> * Usage is really simple. 
To hash a password for the first time, * call the hashpw method with a random salt, like this: * <p> * <code> * String pw_hash = BCrypt.hashpw(plain_password, BCrypt.gensalt()); <br /> * </code> * <p> * To check whether a plaintext password matches one that has been * hashed previously, use the checkpw method: * <p> * <code> * if (BCrypt.checkpw(candidate_password, stored_hash))<br /> * &nbsp;&nbsp;&nbsp;&nbsp;System.out.println("It matches");<br /> * else<br /> * &nbsp;&nbsp;&nbsp;&nbsp;System.out.println("It does not match");<br /> * </code> * <p> * The gensalt() method takes an optional parameter (log_rounds) * that determines the computational complexity of the hashing: * <p> * <code> * String strong_salt = BCrypt.gensalt(10)<br /> * String stronger_salt = BCrypt.gensalt(12)<br /> * </code> * <p> * The amount of work increases exponentially (2**log_rounds), so * each increment is twice as much work. The default log_rounds is * 10, and the valid range is 4 to 30. * * @author Damien Miller * @version 0.2 */ public class BCrypt { // BCrypt parameters private static final int GENSALT_DEFAULT_LOG2_ROUNDS = 10; private static final int BCRYPT_SALT_LEN = 16; // Blowfish parameters private static final int BLOWFISH_NUM_ROUNDS = 16; // Initial contents of key schedule private static final int P_orig[] = { 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, 0x9216d5d9, 0x8979fb1b }; private static final int S_orig[] = { 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, 0xd71577c1, 0xbd314b27, 0x78af2fda, 
0x55605c60, 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b, 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, 0xe5a0cc0f, 
0xb56f74e8, 0x18acf3d6, 0xce89e299, 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a, 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, 0x86e34570, 0xeae96fb1, 0x860e5e0a, 
0x5a3e2ab3, 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7, 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7, 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, 0xbfbc09ec, 
0x03bd9785, 0x7fac6dd0, 0x31cb8504, 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc, 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 
0x915f95e2, 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0, 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59, 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, 0x4de81751, 
0x3830dc8e, 0x379d5862, 0x9320f991, 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, 0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f, 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6 }; // bcrypt IV: "OrpheanBeholderScryDoubt". 
The C implementation calls // this "ciphertext", but it is really plaintext or an IV. We keep // the name to make code comparison easier. static private final int bf_crypt_ciphertext[] = { 0x4f727068, 0x65616e42, 0x65686f6c, 0x64657253, 0x63727944, 0x6f756274 }; // Table for Base64 encoding static private final char base64_code[] = { '.', '/', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' }; // Table for Base64 decoding static private final byte index_64[] = { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 1, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, -1, -1, -1, -1, -1, -1, -1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, -1, -1, -1, -1, -1, -1, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, -1, -1, -1, -1, -1 }; // Expanded Blowfish key private int P[]; private int S[]; /** * Encode a byte array using bcrypt's slightly-modified base64 * encoding scheme. Note that this is *not* compatible with * the standard MIME-base64 encoding. 
* * @param d the byte array to encode * @param len the number of bytes to encode * @return base64-encoded string * @exception IllegalArgumentException if the length is invalid */ private static String encode_base64(byte d[], int len) throws IllegalArgumentException { int off = 0; StringBuffer rs = new StringBuffer(); int c1, c2; if (len <= 0 || len > d.length) throw new IllegalArgumentException ("Invalid len"); while (off < len) { c1 = d[off++] & 0xff; rs.append(base64_code[(c1 >> 2) & 0x3f]); c1 = (c1 & 0x03) << 4; if (off >= len) { rs.append(base64_code[c1 & 0x3f]); break; } c2 = d[off++] & 0xff; c1 |= (c2 >> 4) & 0x0f; rs.append(base64_code[c1 & 0x3f]); c1 = (c2 & 0x0f) << 2; if (off >= len) { rs.append(base64_code[c1 & 0x3f]); break; } c2 = d[off++] & 0xff; c1 |= (c2 >> 6) & 0x03; rs.append(base64_code[c1 & 0x3f]); rs.append(base64_code[c2 & 0x3f]); } return rs.toString(); } /** * Look up the 3 bits base64-encoded by the specified character, * range-checking againt conversion table * @param x the base64-encoded value * @return the decoded value of x */ private static byte char64(char x) { if ((int)x < 0 || (int)x > index_64.length) return -1; return index_64[(int)x]; } /** * Decode a string encoded using bcrypt's base64 scheme to a * byte array. Note that this is *not* compatible with * the standard MIME-base64 encoding. 
* @param s the string to decode
 * @param maxolen the maximum number of bytes to decode
 * @return an array containing the decoded bytes
 * @throws IllegalArgumentException if maxolen is invalid
 */
private static byte[] decode_base64(String s, int maxolen)
    throws IllegalArgumentException {
    StringBuffer rs = new StringBuffer();
    int off = 0, slen = s.length(), olen = 0;
    byte ret[];
    byte c1, c2, c3, c4, o;

    if (maxolen <= 0)
        throw new IllegalArgumentException ("Invalid maxolen");

    // Consume up to 4 input characters per iteration, producing up to 3
    // output bytes. Decoded bytes are staged in a StringBuffer (one char
    // per byte) and copied into ret[] once the final length is known.
    while (off < slen - 1 && olen < maxolen) {
        c1 = char64(s.charAt(off++));
        c2 = char64(s.charAt(off++));
        if (c1 == -1 || c2 == -1)
            break;
        o = (byte)(c1 << 2);
        o |= (c2 & 0x30) >> 4;
        rs.append((char)o);
        if (++olen >= maxolen || off >= slen)
            break;
        c3 = char64(s.charAt(off++));
        if (c3 == -1)
            break;
        o = (byte)((c2 & 0x0f) << 4);
        o |= (c3 & 0x3c) >> 2;
        rs.append((char)o);
        if (++olen >= maxolen || off >= slen)
            break;
        // NOTE(review): c4 is not checked for -1 here; an invalid 4th
        // character ORs -1 into the output byte rather than stopping.
        c4 = char64(s.charAt(off++));
        o = (byte)((c3 & 0x03) << 6);
        o |= c4;
        rs.append((char)o);
        ++olen;
    }

    ret = new byte[olen];
    for (off = 0; off < olen; off++)
        ret[off] = (byte)rs.charAt(off);
    return ret;
}

/**
 * Blowfish encipher a single 64-bit block encoded as
 * two 32-bit halves
 * @param lr an array containing the two 32-bit half blocks
 * @param off the position in the array of the blocks
 */
private final void encipher(int lr[], int off) {
    int i, n, l = lr[off], r = lr[off + 1];

    l ^= P[0];
    // 16 Feistel rounds, two per loop iteration; P[1..16] are round keys.
    for (i = 0; i <= BLOWFISH_NUM_ROUNDS - 2;) {
        // Feistel substitution on left word
        n = S[(l >> 24) & 0xff];
        n += S[0x100 | ((l >> 16) & 0xff)];
        n ^= S[0x200 | ((l >> 8) & 0xff)];
        n += S[0x300 | (l & 0xff)];
        r ^= n ^ P[++i];

        // Feistel substitution on right word
        n = S[(r >> 24) & 0xff];
        n += S[0x100 | ((r >> 16) & 0xff)];
        n ^= S[0x200 | ((r >> 8) & 0xff)];
        n += S[0x300 | (r & 0xff)];
        l ^= n ^ P[++i];
    }
    // Final swap plus whitening with the last two subkeys.
    lr[off] = r ^ P[BLOWFISH_NUM_ROUNDS + 1];
    lr[off + 1] = l;
}

/**
 * Cyclically extract a word of key material
 * @param data the string to extract the data from
 * @param offp a "pointer" (as a one-entry array) to
the
 * current offset into data
 * @return the next word of material from data
 */
private static int streamtoword(byte data[], int offp[]) {
    int i;
    int word = 0;
    int off = offp[0];

    // Big-endian packing of 4 bytes, wrapping around the end of data so
    // that short keys/salts are reused cyclically.
    for (i = 0; i < 4; i++) {
        word = (word << 8) | (data[off] & 0xff);
        off = (off + 1) % data.length;
    }

    offp[0] = off;
    return word;
}

/**
 * Initialise the Blowfish key schedule
 */
private void init_key() {
    // Fresh copies: P and S are mutated during keying; the *_orig tables
    // stay pristine for the next hash.
    P = (int[])P_orig.clone();
    S = (int[])S_orig.clone();
}

/**
 * Key the Blowfish cipher
 * @param key an array containing the key
 */
private void key(byte key[]) {
    int i;
    int koffp[] = { 0 };
    int lr[] = { 0, 0 };
    int plen = P.length, slen = S.length;

    // XOR key material into the subkeys...
    for (i = 0; i < plen; i++)
        P[i] = P[i] ^ streamtoword(key, koffp);

    // ...then repeatedly encrypt the running block, replacing P and S with
    // the successive ciphertexts (standard Blowfish key schedule).
    for (i = 0; i < plen; i += 2) {
        encipher(lr, 0);
        P[i] = lr[0];
        P[i + 1] = lr[1];
    }

    for (i = 0; i < slen; i += 2) {
        encipher(lr, 0);
        S[i] = lr[0];
        S[i + 1] = lr[1];
    }
}

/**
 * Perform the "enhanced key schedule" step described by
 * Provos and Mazieres in "A Future-Adaptable Password Scheme"
 * http://www.openbsd.org/papers/bcrypt-paper.ps
 * @param data salt information
 * @param key password information
 */
private void ekskey(byte data[], byte key[]) {
    int i;
    int koffp[] = { 0 }, doffp[] = { 0 };
    int lr[] = { 0, 0 };
    int plen = P.length, slen = S.length;

    for (i = 0; i < plen; i++)
        P[i] = P[i] ^ streamtoword(key, koffp);

    // Unlike key(), the salt is XORed into the running block between
    // encryptions — this is bcrypt's "expensive key schedule" variation.
    for (i = 0; i < plen; i += 2) {
        lr[0] ^= streamtoword(data, doffp);
        lr[1] ^= streamtoword(data, doffp);
        encipher(lr, 0);
        P[i] = lr[0];
        P[i + 1] = lr[1];
    }

    for (i = 0; i < slen; i += 2) {
        lr[0] ^= streamtoword(data, doffp);
        lr[1] ^= streamtoword(data, doffp);
        encipher(lr, 0);
        S[i] = lr[0];
        S[i + 1] = lr[1];
    }
}

/**
 * Perform the central password hashing step in the
 * bcrypt scheme
 * @param password the password to hash
 * @param salt the binary salt to hash with the password
 * @param log_rounds the binary logarithm of the number
 * of rounds of hashing to apply
 * @param cdata the plaintext to encrypt
 * @return an array containing the binary hashed password
 */
public byte[]
crypt_raw(byte password[], byte salt[], int log_rounds, int cdata[]) {
    int rounds, i, j;
    int clen = cdata.length;
    byte ret[];

    if (log_rounds < 4 || log_rounds > 30)
        throw new IllegalArgumentException ("Bad number of rounds");
    // 2**log_rounds strengthening iterations; log_rounds is capped at 30
    // above, so this shift cannot overflow into the sign bit.
    rounds = 1 << log_rounds;
    if (salt.length != BCRYPT_SALT_LEN)
        throw new IllegalArgumentException ("Bad salt length");

    init_key();
    ekskey(salt, password);
    // Alternately re-key with the password and the salt, 2**log_rounds times.
    for (i = 0; i != rounds; i++) {
        key(password);
        key(salt);
    }

    // Encrypt the magic IV ("OrpheanBeholderScryDoubt") 64 times.
    for (i = 0; i < 64; i++) {
        for (j = 0; j < (clen >> 1); j++)
            encipher(cdata, j << 1);
    }

    // Serialize the ciphertext words big-endian into the result bytes.
    ret = new byte[clen * 4];
    for (i = 0, j = 0; i < clen; i++) {
        ret[j++] = (byte)((cdata[i] >> 24) & 0xff);
        ret[j++] = (byte)((cdata[i] >> 16) & 0xff);
        ret[j++] = (byte)((cdata[i] >> 8) & 0xff);
        ret[j++] = (byte)(cdata[i] & 0xff);
    }
    return ret;
}

/**
 * Hash a password using the OpenBSD bcrypt scheme
 * @param password the password to hash
 * @param salt the salt to hash with (perhaps generated
 * using BCrypt.gensalt)
 * @return the hashed password
 */
public static String hashpw(String password, String salt) {
    BCrypt B;
    String real_salt;
    byte passwordb[], saltb[], hashed[];
    char minor = (char)0;
    int rounds, off = 0;
    StringBuffer rs = new StringBuffer();

    // Parse the salt prefix: "$2$..." (no minor) or "$2a$..." (minor 'a').
    if (salt.charAt(0) != '$' || salt.charAt(1) != '2')
        throw new IllegalArgumentException ("Invalid salt version");
    if (salt.charAt(2) == '$')
        off = 3;
    else {
        minor = salt.charAt(2);
        if (minor != 'a' || salt.charAt(3) != '$')
            throw new IllegalArgumentException ("Invalid salt revision");
        off = 4;
    }

    // Extract number of rounds
    if (salt.charAt(off + 2) > '$')
        throw new IllegalArgumentException ("Missing salt rounds");
    rounds = Integer.parseInt(salt.substring(off, off + 2));

    // 22 base64 characters encode the 16-byte binary salt.
    real_salt = salt.substring(off + 3, off + 25);
    try {
        // Revision 'a' and later append a trailing NUL byte to the
        // password, matching the OpenBSD implementation.
        passwordb = (password + (minor >= 'a' ? "\000" : "")).getBytes("UTF-8");
    } catch (UnsupportedEncodingException uee) {
        // UTF-8 is mandated by the JLS; this path is effectively unreachable.
        throw new AssertionError("UTF-8 is not supported");
    }

    saltb = decode_base64(real_salt, BCRYPT_SALT_LEN);

    B = new BCrypt();
    hashed = B.crypt_raw(passwordb, saltb, rounds,
        (int[])bf_crypt_ciphertext.clone());

    // Re-assemble the modular-crypt string: $2[minor]$rr$<salt><hash>.
    rs.append("$2");
    if (minor >= 'a')
        rs.append(minor);
    rs.append("$");
    if (rounds < 10)
        rs.append("0");
    if (rounds > 30) {
        throw new IllegalArgumentException(
            "rounds exceeds maximum (30)");
    }
    rs.append(Integer.toString(rounds));
    rs.append("$");
    rs.append(encode_base64(saltb, saltb.length));
    // Only 23 of the 24 hash bytes are encoded, as in the C original.
    rs.append(encode_base64(hashed,
        bf_crypt_ciphertext.length * 4 - 1));
    return rs.toString();
}

/**
 * Generate a salt for use with the BCrypt.hashpw() method
 * @param log_rounds the log2 of the number of rounds of
 * hashing to apply - the work factor therefore increases as
 * 2**log_rounds.
 * @param random an instance of SecureRandom to use
 * @return an encoded salt value
 */
public static String gensalt(int log_rounds, SecureRandom random) {
    StringBuffer rs = new StringBuffer();
    byte rnd[] = new byte[BCRYPT_SALT_LEN];

    random.nextBytes(rnd);

    rs.append("$2a$");
    if (log_rounds < 10)
        rs.append("0");
    if (log_rounds > 30) {
        throw new IllegalArgumentException(
            "log_rounds exceeds maximum (30)");
    }
    rs.append(Integer.toString(log_rounds));
    rs.append("$");
    rs.append(encode_base64(rnd, rnd.length));
    return rs.toString();
}

/**
 * Generate a salt for use with the BCrypt.hashpw() method
 * @param log_rounds the log2 of the number of rounds of
 * hashing to apply - the work factor therefore increases as
 * 2**log_rounds.
* @return an encoded salt value
 */
public static String gensalt(int log_rounds) {
    // Convenience overload: draw entropy from a fresh SecureRandom.
    return gensalt(log_rounds, new SecureRandom());
}

/**
 * Generate a salt for use with the BCrypt.hashpw() method,
 * selecting a reasonable default for the number of hashing
 * rounds to apply
 * @return an encoded salt value
 */
public static String gensalt() {
    // Use the library default work factor (2**10 rounds).
    return gensalt(GENSALT_DEFAULT_LOG2_ROUNDS);
}

/**
 * Check that a plaintext password matches a previously hashed
 * one
 * @param plaintext the plaintext password to verify
 * @param hashed the previously-hashed password
 * @return true if the passwords match, false otherwise
 */
public static boolean checkpw(String plaintext, String hashed) {
    final byte[] expected;
    final byte[] computed;
    try {
        // Re-hash the candidate using the salt embedded in the stored
        // hash; matching passwords produce byte-identical strings.
        final String rehashed = hashpw(plaintext, hashed);
        expected = hashed.getBytes("UTF-8");
        computed = rehashed.getBytes("UTF-8");
    } catch (UnsupportedEncodingException uee) {
        return false;
    }

    if (expected.length != computed.length) {
        return false;
    }

    // Constant-time comparison: accumulate differences rather than
    // returning at the first mismatching byte.
    int diff = 0;
    for (int i = 0; i < expected.length; i++) {
        diff |= expected[i] ^ computed[i];
    }
    return diff == 0;
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.streaming.runtime.tasks.mailbox; import org.apache.flink.streaming.runtime.tasks.mailbox.TaskMailbox.MailboxClosedException; import org.apache.flink.util.function.FunctionWithException; import org.apache.flink.util.function.RunnableWithException; import org.apache.flink.util.function.ThrowingRunnable; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import java.util.Arrays; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Optional; import java.util.Queue; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.IntStream; import static org.apache.flink.streaming.runtime.tasks.mailbox.TaskMailbox.MAX_PRIORITY; import static org.junit.Assert.assertEquals; /** Unit tests for {@link TaskMailboxImpl}. */ public class TaskMailboxImplTest { private static final RunnableWithException NO_OP = () -> {}; private static final int DEFAULT_PRIORITY = 0; /** Object under test. 
*/
private TaskMailbox taskMailbox;

@Before
public void setUp() {
    taskMailbox = new TaskMailboxImpl();
}

@After
public void tearDown() {
    taskMailbox.close();
}

// putFirst must prepend: head-inserted mails come out before FIFO-appended ones,
// and the most recent putFirst wins the head.
@Test
public void testPutAsHead() throws InterruptedException {
    Mail mailA = new Mail(() -> {}, MAX_PRIORITY, "mailA");
    Mail mailB = new Mail(() -> {}, MAX_PRIORITY, "mailB");
    Mail mailC = new Mail(() -> {}, DEFAULT_PRIORITY, "mailC, DEFAULT_PRIORITY");
    Mail mailD = new Mail(() -> {}, DEFAULT_PRIORITY, "mailD, DEFAULT_PRIORITY");

    taskMailbox.put(mailC);
    taskMailbox.putFirst(mailB);
    taskMailbox.put(mailD);
    taskMailbox.putFirst(mailA);

    // Expected order: A (last putFirst), B (earlier putFirst), then C, D (FIFO).
    Assert.assertSame(mailA, taskMailbox.take(DEFAULT_PRIORITY));
    Assert.assertSame(mailB, taskMailbox.take(DEFAULT_PRIORITY));
    Assert.assertSame(mailC, taskMailbox.take(DEFAULT_PRIORITY));
    Assert.assertSame(mailD, taskMailbox.take(DEFAULT_PRIORITY));

    Assert.assertFalse(taskMailbox.tryTake(DEFAULT_PRIORITY).isPresent());
}

// FIFO ordering plus hasMail() consistency while filling and then draining.
@Test
public void testContracts() throws InterruptedException {
    final Queue<Mail> testObjects = new LinkedList<>();
    Assert.assertFalse(taskMailbox.hasMail());

    for (int i = 0; i < 10; ++i) {
        final Mail mail = new Mail(NO_OP, DEFAULT_PRIORITY, "mail, DEFAULT_PRIORITY");
        testObjects.add(mail);
        taskMailbox.put(mail);
        Assert.assertTrue(taskMailbox.hasMail());
    }

    while (!testObjects.isEmpty()) {
        assertEquals(testObjects.remove(), taskMailbox.take(DEFAULT_PRIORITY));
        assertEquals(!testObjects.isEmpty(), taskMailbox.hasMail());
    }
}

/** Test the producer-consumer pattern using the blocking methods on the mailbox. */
@Test
public void testConcurrentPutTakeBlocking() throws Exception {
    testPutTake(mailbox -> mailbox.take(DEFAULT_PRIORITY));
}

/** Test the producer-consumer pattern using the non-blocking methods & waits on the mailbox.
*/
@Test
public void testConcurrentPutTakeNonBlockingAndWait() throws Exception {
    testPutTake(
            (mailbox -> {
                // Busy-poll until a mail becomes available.
                Optional<Mail> optionalMail = mailbox.tryTake(DEFAULT_PRIORITY);
                while (!optionalMail.isPresent()) {
                    optionalMail = mailbox.tryTake(DEFAULT_PRIORITY);
                }
                return optionalMail.get();
            }));
}

/** Test that closing the mailbox unblocks pending accesses with correct exceptions. */
@Test
public void testCloseUnblocks() throws InterruptedException {
    testAllPuttingUnblocksInternal(TaskMailbox::close);
}

/** Test that silencing the mailbox unblocks pending accesses with correct exceptions. */
@Test
public void testQuiesceUnblocks() throws InterruptedException {
    testAllPuttingUnblocksInternal(TaskMailbox::quiesce);
}

// After quiesce(): new puts are rejected (checked via testLifecyclePuttingInternal)
// but mails already enqueued can still be taken.
@Test
public void testLifeCycleQuiesce() throws InterruptedException {
    taskMailbox.put(new Mail(NO_OP, DEFAULT_PRIORITY, "NO_OP, DEFAULT_PRIORITY"));
    taskMailbox.put(new Mail(NO_OP, DEFAULT_PRIORITY, "NO_OP, DEFAULT_PRIORITY"));
    taskMailbox.quiesce();
    testLifecyclePuttingInternal();

    taskMailbox.take(DEFAULT_PRIORITY);
    Assert.assertTrue(taskMailbox.tryTake(DEFAULT_PRIORITY).isPresent());
    Assert.assertFalse(taskMailbox.tryTake(DEFAULT_PRIORITY).isPresent());
}

// After close(): both puts and takes must fail with MailboxClosedException.
@Test
public void testLifeCycleClose() throws InterruptedException {
    taskMailbox.close();
    testLifecyclePuttingInternal();

    try {
        taskMailbox.take(DEFAULT_PRIORITY);
        Assert.fail();
    } catch (MailboxClosedException ignore) {
    }

    try {
        taskMailbox.tryTake(DEFAULT_PRIORITY);
        Assert.fail();
    } catch (MailboxClosedException ignore) {
    }
}

// Shared check: put and putFirst must both throw MailboxClosedException once the
// mailbox no longer accepts writes.
private void testLifecyclePuttingInternal() {
    try {
        taskMailbox.put(new Mail(NO_OP, DEFAULT_PRIORITY, "NO_OP, DEFAULT_PRIORITY"));
        Assert.fail();
    } catch (MailboxClosedException ignore) {
    }
    try {
        taskMailbox.putFirst(new Mail(NO_OP, MAX_PRIORITY, "NO_OP"));
        Assert.fail();
    } catch (MailboxClosedException ignore) {
    }
}

// Runs the unblocking scenario for put(), then (on a fresh mailbox) for putFirst().
private void testAllPuttingUnblocksInternal(Consumer<TaskMailbox> unblockMethod)
        throws InterruptedException {
    testUnblocksInternal(
            () -> taskMailbox.put(new Mail(NO_OP, DEFAULT_PRIORITY, "NO_OP, DEFAULT_PRIORITY")),
            unblockMethod);
    // Re-create the mailbox: the previous one was closed/quiesced by unblockMethod.
    setUp();
    testUnblocksInternal(
            () -> taskMailbox.putFirst(new Mail(NO_OP, MAX_PRIORITY, "NO_OP")),
            unblockMethod);
}

// Spawns 8 threads that hammer testMethod, applies unblockMethod, and verifies that
// every thread terminated with MailboxClosedException.
private void testUnblocksInternal(
        RunnableWithException testMethod, Consumer<TaskMailbox> unblockMethod)
        throws InterruptedException {
    final Thread[] blockedThreads = new Thread[8];
    final Exception[] exceptions = new Exception[blockedThreads.length];
    CountDownLatch countDownLatch = new CountDownLatch(blockedThreads.length);

    for (int i = 0; i < blockedThreads.length; ++i) {
        final int id = i;
        Thread blocked =
                new Thread(
                        () -> {
                            try {
                                countDownLatch.countDown();
                                while (true) {
                                    testMethod.run();
                                }
                            } catch (Exception ex) {
                                exceptions[id] = ex;
                            }
                        });
        blockedThreads[i] = blocked;
        blocked.start();
    }
    countDownLatch.await();
    unblockMethod.accept(taskMailbox);

    for (Thread blockedThread : blockedThreads) {
        blockedThread.join();
    }

    for (Exception exception : exceptions) {
        assertEquals(MailboxClosedException.class, exception.getClass());
    }
}

/**
 * Test producer-consumer pattern through the mailbox in a concurrent setting (n-writer /
 * 1-reader).
*/ private void testPutTake( FunctionWithException<TaskMailbox, Mail, InterruptedException> takeMethod) throws Exception { final int numThreads = 10; final int numMailsPerThread = 1000; final int[] results = new int[numThreads]; Thread[] writerThreads = new Thread[numThreads]; for (int i = 0; i < writerThreads.length; ++i) { final int threadId = i; writerThreads[i] = new Thread( ThrowingRunnable.unchecked( () -> { for (int k = 0; k < numMailsPerThread; ++k) { taskMailbox.put( new Mail( () -> ++results[threadId], DEFAULT_PRIORITY, "result " + k)); } })); } for (Thread writerThread : writerThreads) { writerThread.start(); } for (Thread writerThread : writerThreads) { writerThread.join(); } AtomicBoolean isRunning = new AtomicBoolean(true); taskMailbox.put( new Mail( () -> isRunning.set(false), DEFAULT_PRIORITY, "POISON_MAIL, DEFAULT_PRIORITY")); while (isRunning.get()) { takeMethod.apply(taskMailbox).run(); } for (int perThreadResult : results) { assertEquals(numMailsPerThread, perThreadResult); } } @Test public void testPutAsHeadWithPriority() throws InterruptedException { Mail mailA = new Mail(() -> {}, 2, "mailA"); Mail mailB = new Mail(() -> {}, 2, "mailB"); Mail mailC = new Mail(() -> {}, 1, "mailC"); Mail mailD = new Mail(() -> {}, 1, "mailD"); taskMailbox.put(mailC); taskMailbox.put(mailB); taskMailbox.put(mailD); taskMailbox.putFirst(mailA); Assert.assertSame(mailA, taskMailbox.take(2)); Assert.assertSame(mailB, taskMailbox.take(2)); Assert.assertFalse(taskMailbox.tryTake(2).isPresent()); Assert.assertSame(mailC, taskMailbox.take(1)); Assert.assertSame(mailD, taskMailbox.take(1)); Assert.assertFalse(taskMailbox.tryTake(1).isPresent()); } @Test public void testPutWithPriorityAndReadingFromMainMailbox() throws InterruptedException { Mail mailA = new Mail(() -> {}, 2, "mailA"); Mail mailB = new Mail(() -> {}, 2, "mailB"); Mail mailC = new Mail(() -> {}, 1, "mailC"); Mail mailD = new Mail(() -> {}, 1, "mailD"); taskMailbox.put(mailC); taskMailbox.put(mailB); 
taskMailbox.put(mailD); taskMailbox.putFirst(mailA); // same order for non-priority and priority on top Assert.assertSame(mailA, taskMailbox.take(TaskMailbox.MIN_PRIORITY)); Assert.assertSame(mailC, taskMailbox.take(TaskMailbox.MIN_PRIORITY)); Assert.assertSame(mailB, taskMailbox.take(TaskMailbox.MIN_PRIORITY)); Assert.assertSame(mailD, taskMailbox.take(TaskMailbox.MIN_PRIORITY)); } /** * Tests the interaction of batch and non-batch methods. * * <p>Both {@link TaskMailbox#take(int)} and {@link TaskMailbox#tryTake(int)} consume the batch * but once drained will fetch elements from the remaining mails. * * <p>In contrast, {@link TaskMailbox#tryTakeFromBatch()} will not return any mail once the * batch is drained. */ @Test public void testBatchAndNonBatchTake() throws InterruptedException { final List<Mail> mails = IntStream.range(0, 6) .mapToObj(i -> new Mail(NO_OP, DEFAULT_PRIORITY, String.valueOf(i))) .collect(Collectors.toList()); // create a batch with 3 mails mails.subList(0, 3).forEach(taskMailbox::put); Assert.assertTrue(taskMailbox.createBatch()); // add 3 more mails after the batch mails.subList(3, 6).forEach(taskMailbox::put); // now take all mails in the batch with all available methods assertEquals(Optional.ofNullable(mails.get(0)), taskMailbox.tryTakeFromBatch()); assertEquals(Optional.ofNullable(mails.get(1)), taskMailbox.tryTake(DEFAULT_PRIORITY)); assertEquals(mails.get(2), taskMailbox.take(DEFAULT_PRIORITY)); // batch empty, so only regular methods work assertEquals(Optional.empty(), taskMailbox.tryTakeFromBatch()); assertEquals(Optional.ofNullable(mails.get(3)), taskMailbox.tryTake(DEFAULT_PRIORITY)); assertEquals(mails.get(4), taskMailbox.take(DEFAULT_PRIORITY)); // one unprocessed mail left assertEquals(Collections.singletonList(mails.get(5)), taskMailbox.close()); } @Test public void testBatchDrain() throws Exception { Mail mailA = new Mail(() -> {}, MAX_PRIORITY, "mailA"); Mail mailB = new Mail(() -> {}, MAX_PRIORITY, "mailB"); 
taskMailbox.put(mailA); Assert.assertTrue(taskMailbox.createBatch()); taskMailbox.put(mailB); assertEquals(Arrays.asList(mailA, mailB), taskMailbox.drain()); } @Test public void testBatchPriority() throws Exception { Mail mailA = new Mail(() -> {}, 1, "mailA"); Mail mailB = new Mail(() -> {}, 2, "mailB"); taskMailbox.put(mailA); Assert.assertTrue(taskMailbox.createBatch()); taskMailbox.put(mailB); assertEquals(mailB, taskMailbox.take(2)); assertEquals(Optional.of(mailA), taskMailbox.tryTakeFromBatch()); } /** Testing that we cannot close while running exclusively. */ @Test public void testRunExclusively() throws InterruptedException { CountDownLatch exclusiveCodeStarted = new CountDownLatch(1); final int numMails = 10; // send 10 mails in an atomic operation new Thread( () -> taskMailbox.runExclusively( () -> { exclusiveCodeStarted.countDown(); for (int index = 0; index < numMails; index++) { try { taskMailbox.put(new Mail(() -> {}, 1, "mailD")); Thread.sleep(1); } catch (Exception e) { } } })) .start(); exclusiveCodeStarted.await(); // make sure that all 10 messages have been actually enqueued. assertEquals(numMails, taskMailbox.close().size()); } }
/*
 * Copyright (c) 2007, PostgreSQL Global Development Group
 * See the LICENSE file in the project root for more information.
 */

package org.postgresql.test.hostchooser;

import static java.lang.Integer.parseInt;
import static java.util.Arrays.asList;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.postgresql.hostchooser.HostRequirement.any;
import static org.postgresql.hostchooser.HostRequirement.master;
import static org.postgresql.hostchooser.HostRequirement.preferSlave;
import static org.postgresql.hostchooser.HostRequirement.slave;
import static org.postgresql.hostchooser.HostStatus.ConnectFail;
import static org.postgresql.hostchooser.HostStatus.Slave;
import static org.postgresql.test.TestUtil.closeDB;

import org.postgresql.hostchooser.GlobalHostStatusTracker;
import org.postgresql.hostchooser.HostRequirement;
import org.postgresql.test.TestUtil;
import org.postgresql.util.HostSpec;
import org.postgresql.util.PSQLException;

import junit.framework.TestCase;

import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

/**
 * Integration tests for multi-host JDBC URLs: target-server-type selection
 * (any/master/slave/preferSlave), load balancing, and host-status caching/rechecking.
 *
 * <p>NOTE(review): requires a live master and slave database; tests share the driver's global
 * host-status cache, so ordering between assertions within a test matters.
 */
public class MultiHostsConnectionTest extends TestCase {

  static final String user = TestUtil.getUser();
  static final String password = TestUtil.getPassword();
  // Host specs in "host:port" form; fake1 points at a non-routable address so it must fail.
  static final String master1 = TestUtil.getServer() + ":" + TestUtil.getPort();
  static final String slave1 =
      MultiHostTestSuite.getSlaveServer() + ":" + MultiHostTestSuite.getSlavePort();
  static final String fake1 = "127.127.217.217:1";

  static String masterIp;
  static String slaveIp;
  static String fakeIp = fake1;
  static Connection con;

  // Live view of the driver's global host-status cache, obtained via reflection below.
  private static Map<HostSpec, Object> hostStatusMap;

  static {
    try {
      Field field = GlobalHostStatusTracker.class.getDeclaredField("hostStatusMap");
      field.setAccessible(true);
      hostStatusMap = (Map<HostSpec, Object>) field.get(null);

      // Resolve the server-reported "ip:port" for the master and the slave once, up front.
      con = TestUtil.openDB();
      masterIp = getRemoteHostSpec();
      closeDB(con);

      con = MultiHostTestSuite.openSlaveDB();
      slaveIp = getRemoteHostSpec();
      closeDB(con);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /** Connects with the given target-server requirement, resetting global state first. */
  private static Connection getConnection(HostRequirement hostType, String... targets)
      throws SQLException {
    return getConnection(hostType, true, targets);
  }

  /** Parses a "host:port" string into a {@link HostSpec}. */
  private static HostSpec hostSpec(String host) {
    int split = host.indexOf(':');
    return new HostSpec(host.substring(0, split), parseInt(host.substring(split + 1)));
  }

  private static Connection getConnection(HostRequirement hostType, boolean reset,
      String... targets) throws SQLException {
    return getConnection(hostType, reset, false, targets);
  }

  /**
   * Opens a connection to a multi-host URL built from {@code targets}.
   *
   * @param hostType required target server type (any/master/slave/preferSlave)
   * @param reset whether to clear the global host-status cache first
   * @param lb whether to enable loadBalanceHosts
   * @param targets "host:port" entries, joined into one jdbc:postgresql:// URL
   */
  private static Connection getConnection(HostRequirement hostType, boolean reset, boolean lb,
      String... targets) throws SQLException {
    TestUtil.closeDB(con);

    if (reset) {
      resetGlobalState();
    }

    Properties props = new Properties();
    props.setProperty("user", user);
    props.setProperty("password", password);
    props.setProperty("targetServerType", hostType.name());
    // Short recheck interval so testHostRechecks can expire cached statuses quickly.
    props.setProperty("hostRecheckSeconds", "2");
    if (lb) {
      props.setProperty("loadBalanceHosts", "true");
    }

    StringBuilder sb = new StringBuilder();
    sb.append("jdbc:postgresql://");
    for (String target : targets) {
      sb.append(target).append(',');
    }
    sb.setLength(sb.length() - 1); // drop the trailing comma
    sb.append("/test");

    return con = DriverManager.getConnection(sb.toString(), props);
  }

  /** Asserts that the current connection terminates at the expected "ip:port". */
  private static void assertRemote(String expectedHost) throws SQLException {
    assertEquals(expectedHost, getRemoteHostSpec());
  }

  /** Returns "ip:port" as reported by the server for the current connection. */
  private static String getRemoteHostSpec() throws SQLException {
    ResultSet rs = con.createStatement()
        .executeQuery("select inet_server_addr() || ':' || inet_server_port()");
    rs.next();
    return rs.getString(1);
  }

  /** A master reports {@code transaction_read_only = off}. */
  public static boolean isMaster(Connection con) throws SQLException {
    ResultSet rs = con.createStatement().executeQuery("show transaction_read_only");
    rs.next();
    return "off".equals(rs.getString(1));
  }

  /**
   * Asserts the cached status for {@code host}; a {@code null} status means the host was
   * never probed / the cache entry expired.
   */
  private static void assertGlobalState(String host, String status) {
    HostSpec spec = hostSpec(host);
    if (status == null) {
      assertNull(hostStatusMap.get(spec));
    } else {
      assertEquals(host + "=" + status, hostStatusMap.get(spec).toString());
    }
  }

  /** Clears the driver-global host-status cache. */
  private static void resetGlobalState() {
    hostStatusMap.clear();
  }

  /** targetServerType=any connects to whichever listed host is reachable. */
  public static void testConnectToAny() throws SQLException {
    getConnection(any, fake1, master1);
    assertRemote(masterIp);
    assertGlobalState(master1, "ConnectOK");
    assertGlobalState(fake1, "ConnectFail");

    getConnection(any, fake1, slave1);
    assertRemote(slaveIp);
    assertGlobalState(slave1, "ConnectOK");

    getConnection(any, fake1, master1);
    assertRemote(masterIp);
    assertGlobalState(master1, "ConnectOK");
    assertGlobalState(fake1, "ConnectFail");
  }

  /** targetServerType=master must land on the master regardless of host order. */
  public static void testConnectToMaster() throws SQLException {
    getConnection(master, true, fake1, master1, slave1);
    assertRemote(masterIp);
    assertGlobalState(fake1, "ConnectFail");
    assertGlobalState(master1, "Master");
    assertGlobalState(slave1, null); // master found first, slave never probed

    getConnection(master, false, fake1, slave1, master1);
    assertRemote(masterIp);
    assertGlobalState(fake1, "ConnectFail");
    assertGlobalState(master1, "Master");
    assertGlobalState(slave1, "Slave"); // probed on the way to the master this time
  }

  /** targetServerType=slave must land on the slave regardless of host order. */
  public static void testConnectToSlave() throws SQLException {
    getConnection(slave, true, fake1, slave1, master1);
    assertRemote(slaveIp);
    assertGlobalState(fake1, "ConnectFail");
    assertGlobalState(slave1, "Slave");
    assertGlobalState(master1, null);

    getConnection(slave, false, fake1, master1, slave1);
    assertRemote(slaveIp);
    assertGlobalState(fake1, "ConnectFail");
    assertGlobalState(slave1, "Slave");
    assertGlobalState(master1, "Master");
  }

  /** preferSlave picks a slave when its status is unknown/known, else falls back to master. */
  public static void testConnectToSlaveFirst() throws SQLException {
    getConnection(preferSlave, true, fake1, slave1, master1);
    assertRemote(slaveIp);
    assertGlobalState(fake1, "ConnectFail");
    assertGlobalState(slave1, "Slave");
    assertGlobalState(master1, null);

    getConnection(preferSlave, false, fake1, master1, slave1);
    assertRemote(masterIp);
    assertGlobalState(fake1, "ConnectFail");
    assertGlobalState(slave1, "Slave");
    assertGlobalState(master1, "Master");

    getConnection(preferSlave, false, fake1, master1, slave1);
    assertRemote(slaveIp); // with statuses known, the slave is now preferred
    assertGlobalState(fake1, "ConnectFail");
    assertGlobalState(slave1, "Slave");
    assertGlobalState(master1, "Master");
  }

  /** A URL listing only unreachable hosts must fail with a PSQLException. */
  public static void testFailedConnection() throws SQLException {
    try {
      getConnection(any, true, fake1);
      fail();
    } catch (PSQLException ex) {
      // expected: no host is reachable
    }
  }

  /** With loadBalanceHosts, repeated connects should eventually hit every live host. */
  public static void testLoadBalancing() throws SQLException {
    Set<String> connectedHosts = new HashSet<String>();
    boolean fake1FoundTried = false;
    for (int i = 0; i < 20; ++i) {
      getConnection(any, true, true, fake1, master1, slave1);
      connectedHosts.add(getRemoteHostSpec());
      fake1FoundTried |= hostStatusMap.containsKey(hostSpec(fake1));
      if (connectedHosts.size() == 2 && fake1FoundTried) {
        break;
      }
    }
    assertEquals("Never connected to all hosts", new HashSet<String>(asList(masterIp, slaveIp)),
        connectedHosts);
    assertTrue("Never tried to connect to fake node", fake1FoundTried);
  }

  /** Cached ConnectFail status expires after hostRecheckSeconds and the host is retried. */
  public static void testHostRechecks() throws SQLException, InterruptedException {
    getConnection(master, true, fake1, master1, slave1);
    assertRemote(masterIp);
    assertGlobalState(fake1, "ConnectFail");
    assertGlobalState(slave1, null);

    GlobalHostStatusTracker.reportHostStatus(hostSpec(master1), ConnectFail);
    assertGlobalState(master1, "ConnectFail");

    try {
      getConnection(master, false, fake1, slave1, master1);
      fail();
    } catch (SQLException ex) {
      // expected: the (forged) ConnectFail status excludes the master from candidates
    }

    SECONDS.sleep(3); // exceed hostRecheckSeconds=2 so the cached status expires

    getConnection(master, false, slave1, fake1, master1);
    assertRemote(masterIp);
  }

  /** When every cached status disqualifies all hosts, the driver re-probes everything. */
  public static void testNoGoodHostsRechecksEverything() throws SQLException,
      InterruptedException {
    GlobalHostStatusTracker.reportHostStatus(hostSpec(master1), Slave);
    GlobalHostStatusTracker.reportHostStatus(hostSpec(slave1), Slave);
    GlobalHostStatusTracker.reportHostStatus(hostSpec(fake1), Slave);

    getConnection(master, false, slave1, fake1, master1);
    assertRemote(masterIp);
  }

}
/* Copyright 2012 Harri Smatt Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package fi.harism.wallpaper.botz; import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.FloatBuffer; import java.util.Vector; import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.opengles.GL10; import android.content.Context; import android.graphics.Matrix; import android.graphics.RectF; import android.opengl.GLES20; import android.opengl.GLSurfaceView; import android.os.Handler; import android.os.Looper; import android.os.SystemClock; import android.widget.Toast; /** * Renderer class. 
*/ public final class BotzRenderer implements GLSurfaceView.Renderer { private static final float[] COLOR_BG = { .2f, .2f, .2f }; private static final float[] COLOR_BORDER = { .8f, .3f, .2f }; private static final float[] COLOR_BULLET = { .7f, .7f, .7f }; private static final float[] COLOR_ENERGY1 = { .3f, .8f, .2f }; private static final float[] COLOR_ENERGY2 = { .8f, .3f, .2f }; private static final float[] COLOR_EXPLODE = { .7f, .6f, .1f }; private static final float[] COLOR_SHIP = { .2f, .4f, .9f }; private static final int NUM_BULLETS = 40; private static final int NUM_SHIPS = 30; private static final float RADIUS_BULLET = .01f; private static final float RADIUS_SHIP = .1f; private final Vector<Bullet> mArrBullets = new Vector<Bullet>(); private final Vector<BotzParticle> mArrParticles = new Vector<BotzParticle>(); private final Vector<Ship> mArrShips = new Vector<Ship>(); private final float[] mAspectRatio = new float[2]; private ByteBuffer mBufferQuad; private FloatBuffer mBufferShipLines; private Context mContext; private final float[] mMatrixM = new float[9]; private final Matrix mMatrixModel = new Matrix(); private final Matrix mMatrixModelView = new Matrix(); private final Matrix mMatrixView = new Matrix(); private final BotzShader mShaderCircle = new BotzShader(); private final boolean[] mShaderCompilerSupport = new boolean[1]; private final BotzShader mShaderEnergy = new BotzShader(); private final BotzShader mShaderLine = new BotzShader(); private final BotzSolver mSolver = new BotzSolver(); private int mWidth, mHeight; /** * Default constructor. */ public BotzRenderer(Context context) { mContext = context; // Full view quad buffer. final byte[] QUAD = { -1, 1, -1, -1, 1, 1, 1, -1 }; mBufferQuad = ByteBuffer.allocateDirect(8); mBufferQuad.put(QUAD).position(0); // Ship triangle vertices buffer. 
final float[] SHIP_LINES = { -.4f, -.5f, 0, .7f, .4f, -.5f }; ByteBuffer buf = ByteBuffer.allocateDirect(4 * 2 * 3); mBufferShipLines = buf.order(ByteOrder.nativeOrder()).asFloatBuffer(); mBufferShipLines.put(SHIP_LINES).position(0); // Particle and ship instance generation. for (int i = 0; i < NUM_SHIPS; ++i) { BotzParticle p = new BotzParticle(); p.mRadius = RADIUS_SHIP; mArrParticles.add(p); Ship s = new Ship(p); mArrShips.add(s); } // Bullets generation. for (int i = 0; i < NUM_BULLETS; ++i) { mArrBullets.add(new Bullet()); } } /** * Adds 'gravity' to particles. Used for touch event handling. */ public void addGravity(float dx, float dy) { float t = Math.min(mWidth, mHeight) * .8f; dx /= t; dy /= t; for (BotzParticle p : mArrParticles) { p.mVelocity[0] += dx; p.mVelocity[1] += dy; } } /** * Loads String from raw resources with given id. */ private String loadRawString(int rawId) throws Exception { InputStream is = mContext.getResources().openRawResource(rawId); ByteArrayOutputStream baos = new ByteArrayOutputStream(); byte[] buf = new byte[1024]; int len; while ((len = is.read(buf)) != -1) { baos.write(buf, 0, len); } return baos.toString(); } @Override public void onDrawFrame(GL10 unused) { // Clear view buffer. GLES20.glClearColor(COLOR_BG[0], COLOR_BG[1], COLOR_BG[2], 1f); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); // If shader compiler not supported return immediately. if (!mShaderCompilerSupport[0]) { return; } // Disable unnecessary OpenGL flags. GLES20.glDisable(GLES20.GL_DEPTH_TEST); GLES20.glDisable(GLES20.GL_CULL_FACE); long time = SystemClock.uptimeMillis(); // Scale factor. float scale = 1f; // Scaling happens in 20sec cycles. long timeScale = time % 20000; // Calculate smooth transitions between [1f, 2f]. 
if (timeScale > 17000) { float t = (timeScale - 17000) / 3000f; scale = 2f - t * t * (3 - 2 * t); } else if (timeScale > 10000) { scale = 2f; } else if (timeScale > 7000) { float t = (timeScale - 7000) / 3000f; scale = 1f + t * t * (3 - 2 * t); } // Calculate line width. float lineWidth = Math.max(1f, Math.min(mWidth, mHeight) * 0.008f); GLES20.glLineWidth(lineWidth * scale); // Set up view matrix. mMatrixView.setScale(mAspectRatio[0], mAspectRatio[1]); mMatrixView.postScale(scale, scale); // Animate ships. mSolver.animate(); // Handle bullet movement. for (Bullet b : mArrBullets) { // How long bullet lives after shot. final float BULLET_LIVE_TIME = 700; // If lifetime exceeded generate new shot. if (time - b.mShootTime > BULLET_LIVE_TIME) { // Find random enabled ship particle. BotzParticle p = mArrParticles .get((int) (Math.random() * mArrParticles.size())); while (!p.mEnabled) { p = mArrParticles.get((int) (Math.random() * mArrParticles .size())); } // Calculate velocity normal. float len = (float) Math.sqrt(p.mVelocity[0] * p.mVelocity[0] + p.mVelocity[1] * p.mVelocity[1]); float nx = p.mVelocity[0] / len; float ny = p.mVelocity[1] / len; // Set bullet start position in front of selected ship. Add some // border to avoid collision with itself. b.mPosStart[0] = p.mPosition[0] + nx * (RADIUS_SHIP + .01f); b.mPosStart[1] = p.mPosition[1] + ny * (RADIUS_SHIP + .01f); // Max length for shoot is of length 1. b.mPosEnd[0] = p.mPosition[0] + nx; b.mPosEnd[1] = p.mPosition[1] + ny; b.mShootTime = time; } // Move bullet. float t = (time - b.mShootTime) / 700f; BotzParticle p = b.mParticle; p.mPosition[0] = b.mPosStart[0] + (b.mPosEnd[0] - b.mPosStart[0]) * t; p.mPosition[1] = b.mPosStart[1] + (b.mPosEnd[1] - b.mPosStart[1]) * t; } // Check bullet collisions against all ships. for (Ship s : mArrShips) { // Skip disabled ships / particles. 
if (!s.mParticle.mEnabled) continue; for (Bullet b : mArrBullets) { if (mSolver.collide(s.mParticle, b.mParticle)) { // This will trigger new bullet shot. b.mShootTime = -1; // Mark ship as colliding. s.mParticle.mCollisionTime = time; } } } // Iterate over ships to see if there are collisions, explosions and // after certain amount of time restore ship back to enabled. for (Ship ship : mArrShips) { // If there was a collision during this render iteration decrease // ship's energy. if (ship.mParticle.mCollisionTime >= time) { ship.mEnergy -= .01f; } // If ship isn't exploding already and energy goes to zero, mark // ship as exploding. if (!ship.mExplode && ship.mEnergy <= 0f) { ship.mExplodeTime = time; ship.mParticle.mEnabled = false; ship.mExplode = true; } // If ship is exploding and certain amount of time has passed mark // ship back to enabled. if (ship.mExplode && time - ship.mExplodeTime > 5000) { ship.mEnergy = 1.0f; ship.mParticle.mEnabled = true; ship.mExplode = false; ship.mVisible = true; } } // Rendering calls. renderBullets(mShaderCircle); renderShipBorders(mShaderCircle, time); renderShipEnergies(mShaderEnergy, time); renderShipLines(mShaderLine); renderShipExplosions(mShaderCircle, time); } @Override public void onSurfaceChanged(GL10 unused, int width, int height) { mWidth = width; mHeight = height; GLES20.glViewport(0, 0, mWidth, mHeight); // Initialize solver with particles and view rectangle. float dx = (float) Math.max(mWidth, mHeight) / mHeight; float dy = (float) Math.max(mWidth, mHeight) / mWidth; mSolver.init(mArrParticles, new RectF(-dx, dy, dx, -dy)); // Store view aspect ratio. mAspectRatio[0] = 1f / dx; mAspectRatio[1] = 1f / dy; // Initialize ships to initial state. for (Ship ship : mArrShips) { ship.mEnergy = 1f; ship.mVisible = true; ship.mExplode = false; } } @Override public void onSurfaceCreated(GL10 unused, EGLConfig config) { // Check if shader compiler is supported. 
GLES20.glGetBooleanv(GLES20.GL_SHADER_COMPILER, mShaderCompilerSupport, 0); // If not, show user an error message and return immediately. if (mShaderCompilerSupport[0] == false) { String msg = mContext.getString(R.string.error_shader_compiler); showError(msg); return; } // Load vertex and fragment shaders. try { String vertexSource, fragmentSource; vertexSource = loadRawString(R.raw.line_vs); fragmentSource = loadRawString(R.raw.line_fs); mShaderLine.setProgram(vertexSource, fragmentSource); vertexSource = loadRawString(R.raw.energy_vs); fragmentSource = loadRawString(R.raw.energy_fs); mShaderEnergy.setProgram(vertexSource, fragmentSource); vertexSource = loadRawString(R.raw.circle_vs); fragmentSource = loadRawString(R.raw.circle_fs); mShaderCircle.setProgram(vertexSource, fragmentSource); } catch (Exception ex) { showError(ex.getMessage()); } } /** * Renders bullets on current FBO. */ private void renderBullets(BotzShader shader) { shader.useProgram(); int uModelViewM = shader.getHandle("uModelViewM"); int uColor = shader.getHandle("uColor"); int uLimits = shader.getHandle("uLimits"); int aPosition = shader.getHandle("aPosition"); GLES20.glUniform3fv(uColor, 1, COLOR_BULLET, 0); GLES20.glUniform2f(uLimits, 0, 2); GLES20.glVertexAttribPointer(aPosition, 2, GLES20.GL_BYTE, false, 0, mBufferQuad); GLES20.glEnableVertexAttribArray(aPosition); for (Bullet b : mArrBullets) { BotzParticle p = b.mParticle; mMatrixModel.setScale(RADIUS_BULLET, RADIUS_BULLET); mMatrixModel.postTranslate(p.mPosition[0], p.mPosition[1]); mMatrixModelView.set(mMatrixModel); mMatrixModelView.postConcat(mMatrixView); mMatrixModelView.getValues(mMatrixM); GLES20.glUniformMatrix3fv(uModelViewM, 1, false, mMatrixM, 0); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); } } /** * Renders ship borders into current FBO. 
*/ private void renderShipBorders(BotzShader shader, long time) { shader.useProgram(); int uModelViewM = shader.getHandle("uModelViewM"); int uColor = shader.getHandle("uColor"); int uLimits = shader.getHandle("uLimits"); int aPosition = shader.getHandle("aPosition"); GLES20.glUniform2f(uLimits, 0.85f, 1.0f); GLES20.glVertexAttribPointer(aPosition, 2, GLES20.GL_BYTE, false, 0, mBufferQuad); GLES20.glEnableVertexAttribArray(aPosition); final float[] color = new float[3]; for (Ship ship : mArrShips) { if (!ship.mVisible) continue; BotzParticle p = ship.mParticle; // Borders show for certain amount of time only. float ct = (time - p.mCollisionTime) / 200f; if (ct < 1f) { mMatrixModel.setScale(RADIUS_SHIP, RADIUS_SHIP); mMatrixModel.postTranslate(p.mPosition[0], p.mPosition[1]); mMatrixModelView.set(mMatrixModel); mMatrixModelView.postConcat(mMatrixView); mMatrixModelView.getValues(mMatrixM); for (int i = 0; i < 3; ++i) { color[i] = COLOR_BORDER[i] + (COLOR_BG[i] - COLOR_BORDER[i]) * ct; } GLES20.glUniformMatrix3fv(uModelViewM, 1, false, mMatrixM, 0); GLES20.glUniform3fv(uColor, 1, color, 0); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); } } } /** * Renders ship energy indicators. */ private void renderShipEnergies(BotzShader shader, long time) { shader.useProgram(); int uModelViewM = shader.getHandle("uModelViewM"); int uColor1 = shader.getHandle("uColor1"); int uColor2 = shader.getHandle("uColor2"); int uEnergy = shader.getHandle("uEnergy"); int aPosition = shader.getHandle("aPosition"); GLES20.glVertexAttribPointer(aPosition, 2, GLES20.GL_BYTE, false, 0, mBufferQuad); GLES20.glEnableVertexAttribArray(aPosition); final float[] color1 = new float[3]; final float[] color2 = new float[3]; for (Ship ship : mArrShips) { if (!ship.mVisible) continue; BotzParticle p = ship.mParticle; // Energy shows only for certain amount of time. 
float ct = (time - p.mCollisionTime) / 400f; if (ct < 1f) { mMatrixModel.setScale(1f, .1f); mMatrixModel.postTranslate(0f, -.9f); mMatrixModel.postScale(RADIUS_SHIP, RADIUS_SHIP); mMatrixModel.postTranslate(p.mPosition[0], p.mPosition[1]); mMatrixModelView.set(mMatrixModel); mMatrixModelView.postConcat(mMatrixView); mMatrixModelView.getValues(mMatrixM); for (int i = 0; i < 3; ++i) { color1[i] = COLOR_ENERGY1[i] + (COLOR_BG[i] - COLOR_ENERGY1[i]) * ct; color2[i] = COLOR_ENERGY2[i] + (COLOR_BG[i] - COLOR_ENERGY2[i]) * ct; } GLES20.glUniformMatrix3fv(uModelViewM, 1, false, mMatrixM, 0); GLES20.glUniform1f(uEnergy, ship.mEnergy); GLES20.glUniform3fv(uColor1, 1, color1, 0); GLES20.glUniform3fv(uColor2, 1, color2, 0); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); } } } /** * Renders ship explosions into current FBO. */ private void renderShipExplosions(BotzShader shader, long time) { shader.useProgram(); int uModelViewM = shader.getHandle("uModelViewM"); int uColor = shader.getHandle("uColor"); int uLimits = shader.getHandle("uLimits"); int aPosition = shader.getHandle("aPosition"); GLES20.glUniform3fv(uColor, 1, COLOR_EXPLODE, 0); GLES20.glVertexAttribPointer(aPosition, 2, GLES20.GL_BYTE, false, 0, mBufferQuad); GLES20.glEnableVertexAttribArray(aPosition); for (Ship ship : mArrShips) { if (!ship.mVisible || !ship.mExplode) continue; BotzParticle p = ship.mParticle; // Explosion lasts only for certain amount of time. float ct = (time - ship.mExplodeTime) / 800f; if (ct < 1f) { mMatrixModel.setScale(RADIUS_SHIP * 1.5f, RADIUS_SHIP * 1.5f); mMatrixModel.postTranslate(p.mPosition[0], p.mPosition[1]); mMatrixModelView.set(mMatrixModel); mMatrixModelView.postConcat(mMatrixView); mMatrixModelView.getValues(mMatrixM); GLES20.glUniformMatrix3fv(uModelViewM, 1, false, mMatrixM, 0); GLES20.glUniform2f(uLimits, 0, ct); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); } // Hide ship after explosion is done. 
else { ship.mVisible = false; } } } /** * Renders actual ship into current FBO. */ private void renderShipLines(BotzShader shader) { shader.useProgram(); int uModelViewM = shader.getHandle("uModelViewM"); int uColor = shader.getHandle("uColor"); int aPosition = shader.getHandle("aPosition"); GLES20.glUniform3fv(uColor, 1, COLOR_SHIP, 0); GLES20.glVertexAttribPointer(aPosition, 2, GLES20.GL_FLOAT, false, 0, mBufferShipLines); GLES20.glEnableVertexAttribArray(aPosition); for (Ship ship : mArrShips) { if (!ship.mVisible) continue; BotzParticle p = ship.mParticle; double tan = Math.atan2(-p.mVelocity[0], p.mVelocity[1]); mMatrixModel.setScale(RADIUS_SHIP, RADIUS_SHIP); mMatrixModel.postRotate((float) (tan * 180 / Math.PI)); mMatrixModel.postTranslate(p.mPosition[0], p.mPosition[1]); mMatrixModelView.set(mMatrixModel); mMatrixModelView.postConcat(mMatrixView); mMatrixModelView.getValues(mMatrixM); GLES20.glUniformMatrix3fv(uModelViewM, 1, false, mMatrixM, 0); GLES20.glDrawArrays(GLES20.GL_LINE_LOOP, 0, 3); } } /** * Shows Toast on screen with given message. */ private void showError(final String errorMsg) { new Handler(Looper.getMainLooper()).post(new Runnable() { @Override public void run() { Toast.makeText(mContext, errorMsg, Toast.LENGTH_LONG).show(); } }); } /** * Private bullet info holder class. */ private final class Bullet { public final BotzParticle mParticle = new BotzParticle(); public final float[] mPosEnd = new float[2]; public final float[] mPosStart = new float[2]; public long mShootTime; } /** * Private ship info holder class. */ private final class Ship { public float mEnergy; public boolean mExplode; public long mExplodeTime; public BotzParticle mParticle; public boolean mVisible; public Ship(BotzParticle particle) { mParticle = particle; } } }
package com.torch2424.broquest; import java.io.File; import java.io.IOException; import java.io.PrintStream; import android.app.Activity; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.content.ServiceConnection; import android.content.SharedPreferences; import android.content.SharedPreferences.Editor; import android.graphics.Typeface; import android.graphics.drawable.AnimationDrawable; import android.media.AudioManager; import android.os.Bundle; import android.os.IBinder; import android.view.View; import android.view.WindowManager; import android.widget.Button; import android.widget.EditText; import android.widget.ImageView; import android.widget.RadioButton; import android.widget.ScrollView; import android.widget.TextView; import android.widget.Toast; import com.torch2424.battlequest.BGMusic; import com.torch2424.battlequest.BGMusic.MusicBinder; import com.torch2424.battlequest.FontCache; import com.torch2424.battlequest.SaveFileSelect; import com.torch2424.battlequest.Unbind; import com.torch2424.trustinheartdemo.R; public class Creator extends Activity { EditText nameText; RadioButton dude; RadioButton lady; RadioButton dark; RadioButton tan; RadioButton light; RadioButton tank; RadioButton warrior; RadioButton mage; RadioButton rouge; ImageView player; Button saveButton; BGMusic bgMusic; boolean musicBound; boolean musicPaused; //to fix double pause requests boolean noPause; //character models array int[] characters = new int[]{R.drawable.dmm, R.drawable.dmr, R.drawable.dmt, R.drawable.dmw, R.drawable.tmm, R.drawable.tmr, R.drawable.tmt, R.drawable.tmw, R.drawable.lmm, R.drawable.lmr, R.drawable.lmt, R.drawable.lmw, R.drawable.dfm, R.drawable.dfr, R.drawable.dft, R.drawable.dfw, R.drawable.tfm, R.drawable.tfr, R.drawable.tft, R.drawable.tfw, R.drawable.lfm, R.drawable.lfr, R.drawable.lft, R.drawable.lfw}; //ints to add together to get desired model int gender; int playerClass; int skin; //to 
get player animation working AnimationDrawable monsteranim; //to close app Intent playIntent; //our toast Toast toasty; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_creator); nameText = (EditText) findViewById(R.id.nameText); dude = (RadioButton) findViewById(R.id.dude); lady = (RadioButton) findViewById(R.id.lady); dark = (RadioButton) findViewById(R.id.dark); tan = (RadioButton) findViewById(R.id.tan); light = (RadioButton) findViewById(R.id.light); tank = (RadioButton) findViewById(R.id.tank); warrior = (RadioButton) findViewById(R.id.warrior); mage = (RadioButton) findViewById(R.id.mage); rouge = (RadioButton) findViewById(R.id.rouge); saveButton = (Button) findViewById(R.id.saveButton); player = (ImageView) findViewById(R.id.player); //initializing model ints gender = 0; playerClass = 0; skin = 0; //setting up music playMusic(); noPause = false; setFont(); //aquire wakelock getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); //set up toast toasty = Toast.makeText(getApplicationContext(), "", Toast.LENGTH_SHORT); } //connect to the service ServiceConnection musicConnection = new ServiceConnection() { @Override public void onServiceConnected(ComponentName name, IBinder service) { MusicBinder binder = (MusicBinder)service; //get service bgMusic = binder.getService(); musicBound = true; if(musicBound) { //already playing //bgMusic.playSong(R.raw.character); } } @Override public void onServiceDisconnected(ComponentName name) { musicBound = false; } }; public void playMusic() { //make sure only affect media playback not ringer setVolumeControlStream(AudioManager.STREAM_MUSIC); musicPaused = false; playIntent = new Intent(this, BGMusic.class); bindService(playIntent, musicConnection, Context.BIND_AUTO_CREATE); } public void setFont() { //get font Typeface tf = FontCache.get(getApplicationContext(), "font"); //get text views TextView nameQuestion = (TextView) 
findViewById(R.id.nameQuestion); TextView genderQuestion = (TextView) findViewById(R.id.genderQuestion); TextView skinQuestion = (TextView) findViewById(R.id.skinQuestion); TextView statQuestion = (TextView) findViewById(R.id.statQuestion); TextView finish = (TextView) findViewById(R.id.finish); //set the text nameQuestion.setTypeface(tf); genderQuestion.setTypeface(tf); skinQuestion.setTypeface(tf); statQuestion.setTypeface(tf); finish.setTypeface(tf); nameText.setTypeface(tf); dude.setTypeface(tf); lady.setTypeface(tf); dark.setTypeface(tf); tan.setTypeface(tf); light.setTypeface(tf); tank.setTypeface(tf); warrior.setTypeface(tf); rouge.setTypeface(tf); mage.setTypeface(tf); saveButton.setTypeface(tf); } //when save button is clicked, write things into methods public void save (View view) throws IOException { //checking to see if they filled out the whole form, and putting entered text into string String name = nameText.getText().toString(); //maybe add return or enter check here if (name.contentEquals("")) { //toasty.cancel(); toasty.setText("You have to finish everything!"); toasty.show(); } else if(dude.isChecked() == false && lady.isChecked() == false) { //toasty.cancel(); toasty.setText("You have to finish everything!"); toasty.show(); } else if (tank.isChecked() == false && warrior.isChecked() == false && mage.isChecked() == false && rouge.isChecked() == false) { //toasty.cancel(); toasty.setText("You have to finish everything!"); toasty.show(); } else { //getting internal sd File savePath = this.getFilesDir(); //set up our preferences //set up our preferences SharedPreferences prefs = this.getSharedPreferences("TrustInHeartPrefs", 0); Editor editor = prefs.edit(); editor.putString("SAVEFILE", name); editor.commit(); //start wrting text into there, print stream superior to file writer, adds new line after each entered text //write name PrintStream fileStream = new PrintStream(new File(savePath.getAbsolutePath() + "/Trust In Heart-" + name + ".txt")); 
fileStream.println(name); //writing gender if(dude.isChecked()) { fileStream.println("Boy"); } else if (lady.isChecked()) { fileStream.println("Girl"); } //writing class if (tank.isChecked()) { fileStream.println("Tank"); } else if(warrior.isChecked()) { fileStream.println("Warrior"); } else if (mage.isChecked()) { fileStream.println("Mage"); } else if (rouge.isChecked()) { fileStream.println("Rogue"); } /* * FORMAT * name * gender * class * money * items (all in one line) * purchased equip (all in one line) * head * torso * leg * shoes * weapon * HP * strength * intelligence * dexterity * level * exp to next level * skillpoints * unlockes levels(worlds) * character model * wins * losses * score */ //giving money (guaps) fileStream.println("20"); //giving potion fileStream.println("Water"); //purchased equip giving nothing fileStream.println(""); //giving noob equipment fileStream.println("Baby Hat"); fileStream.println("Baby Shirt"); fileStream.println("Baby Pants"); fileStream.println("Baby Shoes"); fileStream.println("Baby Stick"); //giving stats depending on class //hp if(tank.isChecked()) { fileStream.println("10"); } else { fileStream.println("5"); } //strength if(warrior.isChecked()) { fileStream.println("10"); } else { fileStream.println("5"); } //intelligence if (mage.isChecked()) { fileStream.println("10"); } else { fileStream.println("5"); } //dexterity if(rouge.isChecked()) { fileStream.println("10"); } else { fileStream.println("5"); } //print level fileStream.println("1"); //print exp to next level fileStream.println("10"); //print skillpoints fileStream.println("0"); //print the unlocked levels fileStream.println("1"); //print the character model fileStream.println(Integer.toString(gender + playerClass + skin)); //print the wins, losses, and final score (zero until you beat game) fileStream.println("0"); fileStream.println("0"); fileStream.println("0"); //print yes to blink the jounrnal fileStream.println("1"); fileStream.close(); //renaming file to 
.sav, so no one easily hacks their save file File broFile = new File(this.getFilesDir().getAbsolutePath() + "/Trust In Heart-" + prefs.getString("SAVEFILE", "character") + ".txt"); File finalFile = new File(this.getFilesDir().getAbsolutePath() + "/Trust In Heart-" + prefs.getString("SAVEFILE", "character") + ".sav"); broFile.renameTo(finalFile); //finish confirmation //toasty.cancel(); toasty.setText("Welcome to Trust In Heart!"); toasty.show(); //stopping music bgMusic.stopSong(); noPause = true; //restarting start screen Intent intent = new Intent(this, StartScreen.class); //clear top since we dont want any history intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP); startActivity(intent); //finish here because you aren't coming back to here //hopfully not finishing helps with closing entire app on new character create //finish(); } } //function call that easily starts the playermodel animation public void animationStart() { //to get animation working monsteranim = (AnimationDrawable) player.getBackground(); monsteranim.start(); } //on pause and on resume to pause and play music @Override public void onPause() { super.onPause(); if(noPause == false) { if(bgMusic != null) { bgMusic.pauseSong(); musicPaused = true; } } } @Override public void onResume() { super.onResume(); if(musicPaused) { bgMusic.resumeSong(); } animationStart(); } //don't ovveride on back pressed, just go to last activity //for radio button clicks public void genderClick(View view) { //get the button id int id = view.getId(); //depending on the button clicked, change the values to appropriate sections if(id == R.id.dude) { gender = 0; player.setBackgroundResource(characters[gender + playerClass + skin]); } else { gender = 12; player.setBackgroundResource(characters[gender + playerClass + skin]); } animationStart(); } public void skinClick(View view) { //get the button id int id = view.getId(); //depending on the button clicked, change the values to appropriate sections if(id == R.id.dark) { skin = 
0; player.setBackgroundResource(characters[gender + playerClass + skin]); } else if(id == R.id.tan) { skin = 4; player.setBackgroundResource(characters[gender + playerClass + skin]); } else { skin = 8; player.setBackgroundResource(characters[gender + playerClass + skin]); } animationStart(); } public void classClick(View view) { //get the button id int id = view.getId(); //depending on the button clicked, change the values to appropriate sections if(id == R.id.mage) { playerClass = 0; player.setBackgroundResource(characters[gender + playerClass + skin]); } else if(id == R.id.rouge) { playerClass = 1; player.setBackgroundResource(characters[gender + playerClass + skin]); } else if(id == R.id.tank) { playerClass = 2; player.setBackgroundResource(characters[gender + playerClass + skin]); } else { playerClass = 3; player.setBackgroundResource(characters[gender + playerClass + skin]); } animationStart(); } @Override public void onBackPressed() { super.onBackPressed(); Intent fight = new Intent(this, SaveFileSelect.class); //add this flag to remove all previous activities fight.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP); startActivity(fight); finish(); noPause = true; } //need to add this to avoid service connection leaks @Override public void onDestroy() { super.onDestroy(); unbindService(musicConnection); Unbind.unbindDrawables((ScrollView) findViewById(R.id.container)); System.gc(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db; import java.io.IOException; import java.nio.ByteBuffer; import java.util.*; import java.util.function.BiFunction; import java.util.function.LongPredicate; import javax.annotation.Nullable; import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.common.hash.Hasher; import com.google.common.hash.Hashing; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.config.*; import org.apache.cassandra.db.filter.*; import org.apache.cassandra.db.monitoring.ApproximateTime; import org.apache.cassandra.db.partitions.*; import org.apache.cassandra.db.rows.*; import org.apache.cassandra.db.transform.RTBoundCloser; import org.apache.cassandra.db.transform.RTBoundValidator; import org.apache.cassandra.db.transform.RTBoundValidator.Stage; import org.apache.cassandra.db.transform.StoppingTransformation; import org.apache.cassandra.db.transform.Transformation; import org.apache.cassandra.exceptions.UnknownIndexException; import org.apache.cassandra.index.Index; import org.apache.cassandra.index.IndexNotAvailableException; import 
org.apache.cassandra.index.IndexRegistry; import org.apache.cassandra.io.IVersionedSerializer; import org.apache.cassandra.io.sstable.format.SSTableReader; import org.apache.cassandra.io.util.DataInputPlus; import org.apache.cassandra.io.util.DataOutputPlus; import org.apache.cassandra.locator.Replica; import org.apache.cassandra.locator.ReplicaCollection; import org.apache.cassandra.metrics.TableMetrics; import org.apache.cassandra.net.MessageOut; import org.apache.cassandra.schema.IndexMetadata; import org.apache.cassandra.schema.Schema; import org.apache.cassandra.schema.SchemaConstants; import org.apache.cassandra.schema.TableId; import org.apache.cassandra.schema.TableMetadata; import org.apache.cassandra.service.ActiveRepairService; import org.apache.cassandra.service.ClientWarn; import org.apache.cassandra.tracing.Tracing; import org.apache.cassandra.utils.ByteBufferUtil; import org.apache.cassandra.utils.FBUtilities; import org.apache.cassandra.utils.HashingUtils; import static com.google.common.collect.Iterables.any; import static com.google.common.collect.Iterables.filter; /** * General interface for storage-engine read commands (common to both range and * single partition commands). * <p> * This contains all the informations needed to do a local read. */ public abstract class ReadCommand extends AbstractReadQuery { private static final int TEST_ITERATION_DELAY_MILLIS = Integer.parseInt(System.getProperty("cassandra.test.read_iteration_delay_ms", "0")); protected static final Logger logger = LoggerFactory.getLogger(ReadCommand.class); public static final IVersionedSerializer<ReadCommand> serializer = new Serializer(); private final Kind kind; private final boolean isDigestQuery; private final boolean acceptsTransient; // if a digest query, the version for which the digest is expected. Ignored if not a digest. 
private int digestVersion; // for data queries, coordinators may request information on the repaired data used in constructing the response private boolean trackRepairedStatus = false; // tracker for repaired data, initialized to singelton null object private static final RepairedDataInfo NULL_REPAIRED_DATA_INFO = new RepairedDataInfo() { void trackPartitionKey(DecoratedKey key){} void trackDeletion(DeletionTime deletion){} void trackRangeTombstoneMarker(RangeTombstoneMarker marker){} void trackRow(Row row){} boolean isConclusive(){ return true; } ByteBuffer getDigest(){ return ByteBufferUtil.EMPTY_BYTE_BUFFER; } }; private RepairedDataInfo repairedDataInfo = NULL_REPAIRED_DATA_INFO; int oldestUnrepairedTombstone = Integer.MAX_VALUE; @Nullable private final IndexMetadata index; protected static abstract class SelectionDeserializer { public abstract ReadCommand deserialize(DataInputPlus in, int version, boolean isDigest, int digestVersion, boolean acceptsTransient, TableMetadata metadata, int nowInSec, ColumnFilter columnFilter, RowFilter rowFilter, DataLimits limits, IndexMetadata index) throws IOException; } protected enum Kind { SINGLE_PARTITION (SinglePartitionReadCommand.selectionDeserializer), PARTITION_RANGE (PartitionRangeReadCommand.selectionDeserializer); private final SelectionDeserializer selectionDeserializer; Kind(SelectionDeserializer selectionDeserializer) { this.selectionDeserializer = selectionDeserializer; } } protected ReadCommand(Kind kind, boolean isDigestQuery, int digestVersion, boolean acceptsTransient, TableMetadata metadata, int nowInSec, ColumnFilter columnFilter, RowFilter rowFilter, DataLimits limits, IndexMetadata index) { super(metadata, nowInSec, columnFilter, rowFilter, limits); if (acceptsTransient && isDigestQuery) throw new IllegalArgumentException("Attempted to issue a digest response to transient replica"); this.kind = kind; this.isDigestQuery = isDigestQuery; this.digestVersion = digestVersion; this.acceptsTransient = 
acceptsTransient; this.index = index; } protected abstract void serializeSelection(DataOutputPlus out, int version) throws IOException; protected abstract long selectionSerializedSize(int version); public abstract boolean isLimitedToOnePartition(); /** * Creates a new <code>ReadCommand</code> instance with new limits. * * @param newLimits the new limits * @return a new <code>ReadCommand</code> with the updated limits */ public abstract ReadCommand withUpdatedLimit(DataLimits newLimits); /** * The configured timeout for this command. * * @return the configured timeout for this command. */ public abstract long getTimeout(); /** * Whether this query is a digest one or not. * * @return Whether this query is a digest query. */ public boolean isDigestQuery() { return isDigestQuery; } /** * If the query is a digest one, the requested digest version. * * @return the requested digest version if the query is a digest. Otherwise, this can return * anything. */ public int digestVersion() { return digestVersion; } /** * Sets the digest version, for when digest for that command is requested. * <p> * Note that we allow setting this independently of setting the command as a digest query as * this allows us to use the command as a carrier of the digest version even if we only call * setIsDigestQuery on some copy of it. * * @param digestVersion the version for the digest is this command is used for digest query.. * @return this read command. */ public ReadCommand setDigestVersion(int digestVersion) { this.digestVersion = digestVersion; return this; } /** * @return Whether this query expects only a transient data response, or a full response */ public boolean acceptsTransient() { return acceptsTransient; } /** * Activates repaired data tracking for this command. * * When active, a digest will be created from data read from repaired SSTables. The digests * from each replica can then be compared on the coordinator to detect any divergence in their * repaired datasets. 
In this context, an sstable is considered repaired if it is marked * repaired or has a pending repair session which has been committed. * In addition to the digest, a set of ids for any pending but as yet uncommitted repair sessions * is recorded and returned to the coordinator. This is to help reduce false positives caused * by compaction lagging which can leave sstables from committed sessions in the pending state * for a time. */ public void trackRepairedStatus() { trackRepairedStatus = true; } /** * Whether or not repaired status of any data read is being tracked or not * * @return Whether repaired status tracking is active for this command */ public boolean isTrackingRepairedStatus() { return trackRepairedStatus; } /** * Returns a digest of the repaired data read in the execution of this command. * * If either repaired status tracking is not active or the command has not yet been * executed, then this digest will be an empty buffer. * Otherwise, it will contain a digest* of the repaired data read, or empty buffer * if no repaired data was read. * @return digest of the repaired data read in the execution of the command */ public ByteBuffer getRepairedDataDigest() { return repairedDataInfo.getDigest(); } /** * Returns a boolean indicating whether any relevant sstables were skipped during the read * that produced the repaired data digest. * * If true, then no pending repair sessions or partition deletes have influenced the extent * of the repaired sstables that went into generating the digest. * This indicates whether or not the digest can reliably be used to infer consistency * issues between the repaired sets across replicas. * * If either repaired status tracking is not active or the command has not yet been * executed, then this will always return true. * * @return boolean to indicate confidence in the dwhether or not the digest of the repaired data can be * reliably be used to infer inconsistency issues between the repaired sets across * replicas. 
*/ public boolean isRepairedDataDigestConclusive() { return repairedDataInfo.isConclusive(); } /** * Index (metadata) chosen for this query. Can be null. * * @return index (metadata) chosen for this query */ @Nullable public IndexMetadata indexMetadata() { return index; } /** * The clustering index filter this command to use for the provided key. * <p> * Note that that method should only be called on a key actually queried by this command * and in practice, this will almost always return the same filter, but for the sake of * paging, the filter on the first key of a range command might be slightly different. * * @param key a partition key queried by this command. * * @return the {@code ClusteringIndexFilter} to use for the partition of key {@code key}. */ public abstract ClusteringIndexFilter clusteringIndexFilter(DecoratedKey key); /** * Returns a copy of this command. * * @return a copy of this command. */ public abstract ReadCommand copy(); /** * Returns a copy of this command with acceptsTransient set to true. */ public ReadCommand copyAsTransientQuery(Replica replica) { Preconditions.checkArgument(replica.isTransient(), "Can't make a transient request on a full replica: " + replica); return copyAsTransientQuery(); } /** * Returns a copy of this command with acceptsTransient set to true. */ public ReadCommand copyAsTransientQuery(Iterable<Replica> replicas) { if (any(replicas, Replica::isFull)) throw new IllegalArgumentException("Can't make a transient request on full replicas: " + Iterables.toString(filter(replicas, Replica::isFull))); return copyAsTransientQuery(); } protected abstract ReadCommand copyAsTransientQuery(); /** * Returns a copy of this command with isDigestQuery set to true. */ public ReadCommand copyAsDigestQuery(Replica replica) { Preconditions.checkArgument(replica.isFull(), "Can't make a digest request on a transient replica " + replica); return copyAsDigestQuery(); } /** * Returns a copy of this command with isDigestQuery set to true. 
*/ public ReadCommand copyAsDigestQuery(Iterable<Replica> replicas) { if (any(replicas, Replica::isTransient)) throw new IllegalArgumentException("Can't make a digest request on a transient replica " + Iterables.toString(filter(replicas, Replica::isTransient))); return copyAsDigestQuery(); } protected abstract ReadCommand copyAsDigestQuery(); protected abstract UnfilteredPartitionIterator queryStorage(ColumnFamilyStore cfs, ReadExecutionController executionController); protected int oldestUnrepairedTombstone() { return oldestUnrepairedTombstone; } @SuppressWarnings("resource") public ReadResponse createResponse(UnfilteredPartitionIterator iterator) { // validate that the sequence of RT markers is correct: open is followed by close, deletion times for both // ends equal, and there are no dangling RT bound in any partition. iterator = RTBoundValidator.validate(iterator, Stage.PROCESSED, true); return isDigestQuery() ? ReadResponse.createDigestResponse(iterator, this) : ReadResponse.createDataResponse(iterator, this); } long indexSerializedSize(int version) { return null != index ? IndexMetadata.serializer.serializedSize(index, version) : 0; } public Index getIndex(ColumnFamilyStore cfs) { return null != index ? cfs.indexManager.getIndex(index) : null; } static IndexMetadata findIndex(TableMetadata table, RowFilter rowFilter) { if (table.indexes.isEmpty() || rowFilter.isEmpty()) return null; ColumnFamilyStore cfs = Keyspace.openAndGetStore(table); Index index = cfs.indexManager.getBestIndexFor(rowFilter); return null != index ? index.getIndexMetadata() : null; } /** * If the index manager for the CFS determines that there's an applicable * 2i that can be used to execute this command, call its (optional) * validation method to check that nothing in this command's parameters * violates the implementation specific validation rules. 
*/ public void maybeValidateIndex() { if (null != index) IndexRegistry.obtain(metadata()).getIndex(index).validate(this); } /** * Executes this command on the local host. * * @param executionController the execution controller spanning this command * * @return an iterator over the result of executing this command locally. */ @SuppressWarnings("resource") // The result iterator is closed upon exceptions (we know it's fine to potentially not close the intermediary // iterators created inside the try as long as we do close the original resultIterator), or by closing the result. public UnfilteredPartitionIterator executeLocally(ReadExecutionController executionController) { long startTimeNanos = System.nanoTime(); ColumnFamilyStore cfs = Keyspace.openAndGetStore(metadata()); Index index = getIndex(cfs); Index.Searcher searcher = null; if (index != null) { if (!cfs.indexManager.isIndexQueryable(index)) throw new IndexNotAvailableException(index); searcher = index.searcherFor(this); Tracing.trace("Executing read on {}.{} using index {}", cfs.metadata.keyspace, cfs.metadata.name, index.getIndexMetadata().name); } if (isTrackingRepairedStatus()) repairedDataInfo = new RepairedDataInfo(); UnfilteredPartitionIterator iterator = (null == searcher) ? queryStorage(cfs, executionController) : searcher.search(executionController); iterator = RTBoundValidator.validate(iterator, Stage.MERGED, false); try { iterator = withStateTracking(iterator); iterator = RTBoundValidator.validate(withoutPurgeableTombstones(iterator, cfs), Stage.PURGED, false); iterator = withMetricsRecording(iterator, cfs.metric, startTimeNanos); // If we've used a 2ndary index, we know the result already satisfy the primary expression used, so // no point in checking it again. RowFilter filter = (null == searcher) ? rowFilter() : index.getPostIndexQueryFilter(rowFilter()); /* * TODO: We'll currently do filtering by the rowFilter here because it's convenient. 
However, * we'll probably want to optimize by pushing it down the layer (like for dropped columns) as it * would be more efficient (the sooner we discard stuff we know we don't care, the less useless * processing we do on it). */ iterator = filter.filter(iterator, nowInSec()); // apply the limits/row counter; this transformation is stopping and would close the iterator as soon // as the count is observed; if that happens in the middle of an open RT, its end bound will not be included. iterator = limits().filter(iterator, nowInSec(), selectsFullPartition()); // because of the above, we need to append an aritifical end bound if the source iterator was stopped short by a counter. return RTBoundCloser.close(iterator); } catch (RuntimeException | Error e) { iterator.close(); throw e; } } protected abstract void recordLatency(TableMetrics metric, long latencyNanos); public ReadExecutionController executionController() { return ReadExecutionController.forCommand(this); } /** * Wraps the provided iterator so that metrics on what is scanned by the command are recorded. * This also log warning/trow TombstoneOverwhelmingException if appropriate. 
*/ private UnfilteredPartitionIterator withMetricsRecording(UnfilteredPartitionIterator iter, final TableMetrics metric, final long startTimeNanos) { class MetricRecording extends Transformation<UnfilteredRowIterator> { private final int failureThreshold = DatabaseDescriptor.getTombstoneFailureThreshold(); private final int warningThreshold = DatabaseDescriptor.getTombstoneWarnThreshold(); private final boolean respectTombstoneThresholds = !SchemaConstants.isLocalSystemKeyspace(ReadCommand.this.metadata().keyspace); private final boolean enforceStrictLiveness = metadata().enforceStrictLiveness(); private int liveRows = 0; private int tombstones = 0; private DecoratedKey currentKey; @Override public UnfilteredRowIterator applyToPartition(UnfilteredRowIterator iter) { currentKey = iter.partitionKey(); return Transformation.apply(iter, this); } @Override public Row applyToStatic(Row row) { return applyToRow(row); } @Override public Row applyToRow(Row row) { boolean hasTombstones = false; for (Cell cell : row.cells()) { if (!cell.isLive(ReadCommand.this.nowInSec())) { countTombstone(row.clustering()); hasTombstones = true; // allows to avoid counting an extra tombstone if the whole row expired } } if (row.hasLiveData(ReadCommand.this.nowInSec(), enforceStrictLiveness)) ++liveRows; else if (!row.primaryKeyLivenessInfo().isLive(ReadCommand.this.nowInSec()) && row.hasDeletion(ReadCommand.this.nowInSec()) && !hasTombstones) { // We're counting primary key deletions only here. 
countTombstone(row.clustering()); } return row; } @Override public RangeTombstoneMarker applyToMarker(RangeTombstoneMarker marker) { countTombstone(marker.clustering()); return marker; } private void countTombstone(ClusteringPrefix clustering) { ++tombstones; if (tombstones > failureThreshold && respectTombstoneThresholds) { String query = ReadCommand.this.toCQLString(); Tracing.trace("Scanned over {} tombstones for query {}; query aborted (see tombstone_failure_threshold)", failureThreshold, query); metric.tombstoneFailures.inc(); throw new TombstoneOverwhelmingException(tombstones, query, ReadCommand.this.metadata(), currentKey, clustering); } } @Override public void onClose() { recordLatency(metric, System.nanoTime() - startTimeNanos); metric.tombstoneScannedHistogram.update(tombstones); metric.liveScannedHistogram.update(liveRows); boolean warnTombstones = tombstones > warningThreshold && respectTombstoneThresholds; if (warnTombstones) { String msg = String.format( "Read %d live rows and %d tombstone cells for query %1.512s (see tombstone_warn_threshold)", liveRows, tombstones, ReadCommand.this.toCQLString()); ClientWarn.instance.warn(msg); if (tombstones < failureThreshold) { metric.tombstoneWarnings.inc(); } logger.warn(msg); } Tracing.trace("Read {} live rows and {} tombstone cells{}", liveRows, tombstones, (warnTombstones ? " (see tombstone_warn_threshold)" : "")); } }; return Transformation.apply(iter, new MetricRecording()); } protected class CheckForAbort extends StoppingTransformation<UnfilteredRowIterator> { long lastChecked = 0; protected UnfilteredRowIterator applyToPartition(UnfilteredRowIterator partition) { if (maybeAbort()) { partition.close(); return null; } return Transformation.apply(partition, this); } protected Row applyToRow(Row row) { if (TEST_ITERATION_DELAY_MILLIS > 0) maybeDelayForTesting(); return maybeAbort() ? 
null : row; } private boolean maybeAbort() { /** * The value returned by ApproximateTime.currentTimeMillis() is updated only every * {@link ApproximateTime.CHECK_INTERVAL_MS}, by default 10 millis. Since MonitorableImpl * relies on ApproximateTime, we don't need to check unless the approximate time has elapsed. */ if (lastChecked == ApproximateTime.currentTimeMillis()) return false; lastChecked = ApproximateTime.currentTimeMillis(); if (isAborted()) { stop(); return true; } return false; } private void maybeDelayForTesting() { if (!metadata().keyspace.startsWith("system")) FBUtilities.sleepQuietly(TEST_ITERATION_DELAY_MILLIS); } } protected UnfilteredPartitionIterator withStateTracking(UnfilteredPartitionIterator iter) { return Transformation.apply(iter, new CheckForAbort()); } /** * Creates a message for this command. */ public abstract MessageOut<ReadCommand> createMessage(); protected abstract void appendCQLWhereClause(StringBuilder sb); // Skip purgeable tombstones. We do this because it's safe to do (post-merge of the memtable and sstable at least), it // can save us some bandwith, and avoid making us throw a TombstoneOverwhelmingException for purgeable tombstones (which // are to some extend an artefact of compaction lagging behind and hence counting them is somewhat unintuitive). protected UnfilteredPartitionIterator withoutPurgeableTombstones(UnfilteredPartitionIterator iterator, ColumnFamilyStore cfs) { class WithoutPurgeableTombstones extends PurgeFunction { public WithoutPurgeableTombstones() { super(nowInSec(), cfs.gcBefore(nowInSec()), oldestUnrepairedTombstone(), cfs.getCompactionStrategyManager().onlyPurgeRepairedTombstones(), iterator.metadata().enforceStrictLiveness()); } protected LongPredicate getPurgeEvaluator() { return time -> true; } } return Transformation.apply(iterator, new WithoutPurgeableTombstones()); } /** * Recreate the CQL string corresponding to this query. 
* <p> * Note that in general the returned string will not be exactly the original user string, first * because there isn't always a single syntax for a given query, but also because we don't have * all the information needed (we know the non-PK columns queried but not the PK ones as internally * we query them all). So this shouldn't be relied too strongly, but this should be good enough for * debugging purpose which is what this is for. */ public String toCQLString() { StringBuilder sb = new StringBuilder(); sb.append("SELECT ").append(columnFilter()); sb.append(" FROM ").append(metadata().keyspace).append('.').append(metadata().name); appendCQLWhereClause(sb); if (limits() != DataLimits.NONE) sb.append(' ').append(limits()); return sb.toString(); } // Monitorable interface public String name() { return toCQLString(); } private static UnfilteredPartitionIterator withRepairedDataInfo(final UnfilteredPartitionIterator iterator, final RepairedDataInfo repairedDataInfo) { class WithRepairedDataTracking extends Transformation<UnfilteredRowIterator> { protected UnfilteredRowIterator applyToPartition(UnfilteredRowIterator partition) { return withRepairedDataInfo(partition, repairedDataInfo); } } return Transformation.apply(iterator, new WithRepairedDataTracking()); } private static UnfilteredRowIterator withRepairedDataInfo(final UnfilteredRowIterator iterator, final RepairedDataInfo repairedDataInfo) { class WithTracking extends Transformation { protected DecoratedKey applyToPartitionKey(DecoratedKey key) { repairedDataInfo.trackPartitionKey(key); return key; } protected DeletionTime applyToDeletion(DeletionTime deletionTime) { repairedDataInfo.trackDeletion(deletionTime); return deletionTime; } protected RangeTombstoneMarker applyToMarker(RangeTombstoneMarker marker) { repairedDataInfo.trackRangeTombstoneMarker(marker); return marker; } protected Row applyToStatic(Row row) { repairedDataInfo.trackRow(row); return row; } protected Row applyToRow(Row row) { 
repairedDataInfo.trackRow(row); return row; } } return Transformation.apply(iterator, new WithTracking()); } private static class RepairedDataInfo { private Hasher hasher; private boolean isConclusive = true; ByteBuffer getDigest() { return hasher == null ? ByteBufferUtil.EMPTY_BYTE_BUFFER : ByteBuffer.wrap(getHasher().hash().asBytes()); } boolean isConclusive() { return isConclusive; } void markInconclusive() { isConclusive = false; } void trackPartitionKey(DecoratedKey key) { HashingUtils.updateBytes(getHasher(), key.getKey().duplicate()); } void trackDeletion(DeletionTime deletion) { deletion.digest(getHasher()); } void trackRangeTombstoneMarker(RangeTombstoneMarker marker) { marker.digest(getHasher()); } void trackRow(Row row) { row.digest(getHasher()); } private Hasher getHasher() { if (hasher == null) hasher = Hashing.crc32c().newHasher(); return hasher; } } @SuppressWarnings("resource") // resultant iterators are closed by their callers InputCollector<UnfilteredRowIterator> iteratorsForPartition(ColumnFamilyStore.ViewFragment view) { BiFunction<List<UnfilteredRowIterator>, RepairedDataInfo, UnfilteredRowIterator> merge = (unfilteredRowIterators, repairedDataInfo) -> withRepairedDataInfo(UnfilteredRowIterators.merge(unfilteredRowIterators), repairedDataInfo); return new InputCollector<>(view, repairedDataInfo, merge, isTrackingRepairedStatus()); } @SuppressWarnings("resource") // resultant iterators are closed by their callers InputCollector<UnfilteredPartitionIterator> iteratorsForRange(ColumnFamilyStore.ViewFragment view) { BiFunction<List<UnfilteredPartitionIterator>, RepairedDataInfo, UnfilteredPartitionIterator> merge = (unfilteredPartitionIterators, repairedDataInfo) -> withRepairedDataInfo(UnfilteredPartitionIterators.merge(unfilteredPartitionIterators, UnfilteredPartitionIterators.MergeListener.NOOP), repairedDataInfo); return new InputCollector<>(view, repairedDataInfo, merge, isTrackingRepairedStatus()); } /** * Handles the collation of unfiltered 
row or partition iterators that comprise the * input for a query. Separates them according to repaired status and of repaired * status is being tracked, handles the merge and wrapping in a digest generator of * the repaired iterators. * * Intentionally not AutoCloseable so we don't mistakenly use this in ARM blocks * as this prematurely closes the underlying iterators */ static class InputCollector<T extends AutoCloseable> { final RepairedDataInfo repairedDataInfo; private final boolean isTrackingRepairedStatus; Set<SSTableReader> repairedSSTables; BiFunction<List<T>, RepairedDataInfo, T> repairedMerger; List<T> repairedIters; List<T> unrepairedIters; InputCollector(ColumnFamilyStore.ViewFragment view, RepairedDataInfo repairedDataInfo, BiFunction<List<T>, RepairedDataInfo, T> repairedMerger, boolean isTrackingRepairedStatus) { this.repairedDataInfo = repairedDataInfo; this.isTrackingRepairedStatus = isTrackingRepairedStatus; if (isTrackingRepairedStatus) { for (SSTableReader sstable : view.sstables) { if (considerRepairedForTracking(sstable)) { if (repairedSSTables == null) repairedSSTables = Sets.newHashSetWithExpectedSize(view.sstables.size()); repairedSSTables.add(sstable); } } } if (repairedSSTables == null) { repairedIters = Collections.emptyList(); unrepairedIters = new ArrayList<>(view.sstables.size()); } else { repairedIters = new ArrayList<>(repairedSSTables.size()); // when we're done collating, we'll merge the repaired iters and add the // result to the unrepaired list, so size that list accordingly unrepairedIters = new ArrayList<>((view.sstables.size() - repairedSSTables.size()) + Iterables.size(view.memtables) + 1); } this.repairedMerger = repairedMerger; } void addMemtableIterator(T iter) { unrepairedIters.add(iter); } void addSSTableIterator(SSTableReader sstable, T iter) { if (repairedSSTables != null && repairedSSTables.contains(sstable)) repairedIters.add(iter); else unrepairedIters.add(iter); } List<T> finalizeIterators() { if 
(repairedIters.isEmpty()) return unrepairedIters; // merge the repaired data before returning, wrapping in a digest generator unrepairedIters.add(repairedMerger.apply(repairedIters, repairedDataInfo)); return unrepairedIters; } boolean isEmpty() { return repairedIters.isEmpty() && unrepairedIters.isEmpty(); } // For tracking purposes we consider data repaired if the sstable is either: // * marked repaired // * marked pending, but the local session has been committed. This reduces the window // whereby the tracking is affected by compaction backlog causing repaired sstables to // remain in the pending state // If an sstable is involved in a pending repair which is not yet committed, we mark the // repaired data info inconclusive, as the same data on other replicas may be in a // slightly different state. private boolean considerRepairedForTracking(SSTableReader sstable) { if (!isTrackingRepairedStatus) return false; UUID pendingRepair = sstable.getPendingRepair(); if (pendingRepair != ActiveRepairService.NO_PENDING_REPAIR) { if (ActiveRepairService.instance.consistent.local.isSessionFinalized(pendingRepair)) return true; // In the edge case where compaction is backed up long enough for the session to // timeout and be purged by LocalSessions::cleanup, consider the sstable unrepaired // as it will be marked unrepaired when compaction catches up if (!ActiveRepairService.instance.consistent.local.sessionExists(pendingRepair)) return false; repairedDataInfo.markInconclusive(); } return sstable.isRepaired(); } void markInconclusive() { repairedDataInfo.markInconclusive(); } public void close() throws Exception { FBUtilities.closeAll(unrepairedIters); FBUtilities.closeAll(repairedIters); } } private static class Serializer implements IVersionedSerializer<ReadCommand> { private static int digestFlag(boolean isDigest) { return isDigest ? 
0x01 : 0; } private static boolean isDigest(int flags) { return (flags & 0x01) != 0; } private static boolean acceptsTransient(int flags) { return (flags & 0x08) != 0; } private static int acceptsTransientFlag(boolean acceptsTransient) { return acceptsTransient ? 0x08 : 0; } // We don't set this flag anymore, but still look if we receive a // command with it set in case someone is using thrift a mixed 3.0/4.0+ // cluster (which is unsupported). This is also a reminder for not // re-using this flag until we drop 3.0/3.X compatibility (since it's // used by these release for thrift and would thus confuse things) private static boolean isForThrift(int flags) { return (flags & 0x02) != 0; } private static int indexFlag(boolean hasIndex) { return hasIndex ? 0x04 : 0; } private static boolean hasIndex(int flags) { return (flags & 0x04) != 0; } public void serialize(ReadCommand command, DataOutputPlus out, int version) throws IOException { out.writeByte(command.kind.ordinal()); out.writeByte( digestFlag(command.isDigestQuery()) | indexFlag(null != command.indexMetadata()) | acceptsTransientFlag(command.acceptsTransient()) ); if (command.isDigestQuery()) out.writeUnsignedVInt(command.digestVersion()); command.metadata().id.serialize(out); out.writeInt(command.nowInSec()); ColumnFilter.serializer.serialize(command.columnFilter(), out, version); RowFilter.serializer.serialize(command.rowFilter(), out, version); DataLimits.serializer.serialize(command.limits(), out, version, command.metadata().comparator); if (null != command.index) IndexMetadata.serializer.serialize(command.index, out, version); command.serializeSelection(out, version); } public ReadCommand deserialize(DataInputPlus in, int version) throws IOException { Kind kind = Kind.values()[in.readByte()]; int flags = in.readByte(); boolean isDigest = isDigest(flags); boolean acceptsTransient = acceptsTransient(flags); // Shouldn't happen or it's a user error (see comment above) but // better complain loudly than doing 
the wrong thing. if (isForThrift(flags)) throw new IllegalStateException("Received a command with the thrift flag set. " + "This means thrift is in use in a mixed 3.0/3.X and 4.0+ cluster, " + "which is unsupported. Make sure to stop using thrift before " + "upgrading to 4.0"); boolean hasIndex = hasIndex(flags); int digestVersion = isDigest ? (int)in.readUnsignedVInt() : 0; TableMetadata metadata = Schema.instance.getExistingTableMetadata(TableId.deserialize(in)); int nowInSec = in.readInt(); ColumnFilter columnFilter = ColumnFilter.serializer.deserialize(in, version, metadata); RowFilter rowFilter = RowFilter.serializer.deserialize(in, version, metadata); DataLimits limits = DataLimits.serializer.deserialize(in, version, metadata.comparator); IndexMetadata index = hasIndex ? deserializeIndexMetadata(in, version, metadata) : null; return kind.selectionDeserializer.deserialize(in, version, isDigest, digestVersion, acceptsTransient, metadata, nowInSec, columnFilter, rowFilter, limits, index); } private IndexMetadata deserializeIndexMetadata(DataInputPlus in, int version, TableMetadata metadata) throws IOException { try { return IndexMetadata.serializer.deserialize(in, version, metadata); } catch (UnknownIndexException e) { logger.info("Couldn't find a defined index on {}.{} with the id {}. " + "If an index was just created, this is likely due to the schema not " + "being fully propagated. Local read will proceed without using the " + "index. Please wait for schema agreement after index creation.", metadata.keyspace, metadata.name, e.indexId); return null; } } public long serializedSize(ReadCommand command, int version) { return 2 // kind + flags + (command.isDigestQuery() ? 
TypeSizes.sizeofUnsignedVInt(command.digestVersion()) : 0) + command.metadata().id.serializedSize() + TypeSizes.sizeof(command.nowInSec()) + ColumnFilter.serializer.serializedSize(command.columnFilter(), version) + RowFilter.serializer.serializedSize(command.rowFilter(), version) + DataLimits.serializer.serializedSize(command.limits(), version, command.metadata().comparator) + command.selectionSerializedSize(version) + command.indexSerializedSize(version); } } }
/**
 * Copyright 2005-2015 Red Hat, Inc.
 *
 * Red Hat licenses this file to you under the Apache License, version
 * 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package io.fabric8.tooling.archetype;

import java.io.File;
import java.io.FileFilter;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;

import io.fabric8.utils.Files;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

/**
 * Helper routines for generating Maven archetypes from example projects:
 * path manipulation, source-tree walking, and XML read/write utilities.
 */
public class ArchetypeUtils {

    public static Logger LOG = LoggerFactory.getLogger(ArchetypeUtils.class);

    /** Directory names that may hold source code under {@code src/main} / {@code src/test}. */
    public static final String[] sourceCodeDirNames = new String[] { "java", "groovy", "kotlin", "scala" };

    // IDE project files (IntelliJ) that must never be copied into an archetype
    private static final Set<String> excludeExtensions = new HashSet<String>(Arrays.asList("iml", "iws", "ipr"));

    // Relative paths (source dirs, build output, POM and metadata) skipped by isValidFileToCopy()
    private static final Set<String> sourceCodeDirPaths = new HashSet<String>();

    private DocumentBuilder documentBuilder;
    private TransformerFactory transformerFactory;

    static {
        for (String scdn : sourceCodeDirNames) {
            sourceCodeDirPaths.add(Files.normalizePath("src/main/" + scdn));
            sourceCodeDirPaths.add(Files.normalizePath("src/test/" + scdn));
        }
        sourceCodeDirPaths.addAll(Arrays.asList("target", "build", "pom.xml", "archetype-metadata.xml"));
    }

    public ArchetypeUtils() {
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        dbf.setNamespaceAware(true);
        try {
            this.documentBuilder = dbf.newDocumentBuilder();
            this.transformerFactory = TransformerFactory.newInstance();
        } catch (ParserConfigurationException e) {
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    /**
     * Returns relative path (without leading '/') if <code>nested</code> is inside <code>base</code>.
     * Returns <code>nested</code> (as absolute path) otherwise.
     *
     * @param base base directory
     * @param nested candidate child of {@code base}
     * @return relative path, empty string when equal, or {@code nested}'s absolute path
     * @throws IOException if canonical paths cannot be resolved
     */
    public String relativePath(File base, File nested) throws IOException {
        String basePath = base.getCanonicalPath();
        String nestedPath = nested.getCanonicalPath();
        if (nestedPath.equals(basePath)) {
            return "";
        } else if (nestedPath.startsWith(basePath)) {
            return nestedPath.substring(basePath.length() + 1);
        } else {
            return nestedPath;
        }
    }

    /**
     * Recursively looks for first nested directory which contains at least one source file.
     *
     * @param directory directory to scan
     * @return the root package directory, or {@code null} when no valid children exist
     * @throws IOException on I/O failure while recursing
     * @throws IllegalArgumentException when {@code directory} is not a directory
     */
    public File findRootPackage(File directory) throws IOException {
        if (!directory.isDirectory()) {
            throw new IllegalArgumentException("Can't find package inside file. Argument should be valid directory.");
        }
        File[] children = directory.listFiles(new FileFilter() {
            @Override
            public boolean accept(File pathname) {
                return isValidSourceFileOrDir(pathname);
            }
        });
        if (children != null) {
            List<File> results = new LinkedList<File>();
            for (File it : children) {
                if (!it.isDirectory()) {
                    // we have file - let's assume we have main project's package
                    results.add(directory);
                    break;
                } else {
                    File pkg = findRootPackage(it);
                    if (pkg != null) {
                        results.add(pkg);
                    }
                }
            }
            // exactly one candidate means the package root is unambiguous; otherwise
            // fall back to the directory itself
            if (results.size() == 1) {
                return results.get(0);
            } else {
                return directory;
            }
        }
        return null;
    }

    /**
     * Returns true if this file is a valid source file; so
     * excluding things like .svn directories and whatnot
     */
    public boolean isValidSourceFileOrDir(File file) {
        String name = file.getName();
        return !isExcludedDotFile(name)
                && !excludeExtensions.contains(Files.getExtension(name));
    }

    /**
     * Lets allow files like .maven-docker-include or .gitignore but lets ignore other files starting with "."
     */
    protected boolean isExcludedDotFile(String name) {
        return name.startsWith(".") && !name.startsWith(".maven") && !name.equals(".gitignore");
    }

    /**
     * Is the file a valid file to copy (excludes files starting with a dot, build output
     * or java/groovy/kotlin/scala source code
     *
     * @param projectDir the project root
     * @param src the candidate file
     * @param ignoreFileSet additional relative paths to skip
     * @throws IOException if the relative path cannot be computed
     */
    public boolean isValidFileToCopy(File projectDir, File src, Set<String> ignoreFileSet) throws IOException {
        if (isValidSourceFileOrDir(src)) {
            if (src.equals(projectDir)) {
                return true;
            }
            String relative = relativePath(projectDir, src);
            return !sourceCodeDirPaths.contains(relative) && !ignoreFileSet.contains(relative);
        }
        return false;
    }

    /**
     * Checks if the passed POM file describes project with packaging other than <code>pom</code>.
     *
     * @param pom the pom.xml file to inspect
     * @return {@code true} unless the POM declares {@code <packaging>pom</packaging>}
     */
    public boolean isValidProjectPom(File pom) {
        Document doc;
        // try-with-resources: the original leaked the FileReader
        try (FileReader reader = new FileReader(pom)) {
            doc = parseXml(new InputSource(reader));
        } catch (IOException e) {
            throw new RuntimeException(e.getMessage(), e);
        }
        Element root = doc.getDocumentElement();
        String packaging = firstElementText(root, "packaging", "");
        return packaging == null || !packaging.equals("pom");
    }

    /**
     * Parses the given XML input, wrapping any parse failure in a RuntimeException.
     */
    public Document parseXml(InputSource inputSource) {
        try {
            return documentBuilder.parse(inputSource);
        } catch (Exception e) {
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    /**
     * Serializes the Document to a File (UTF-8, indented).
     */
    public void writeXmlDocument(Document document, File file) throws IOException {
        // try-with-resources: the original left the stream open when transform() failed
        try (FileOutputStream fileOutputStream = new FileOutputStream(file)) {
            Transformer tr = transformerFactory.newTransformer();
            tr.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
            tr.setOutputProperty(OutputKeys.INDENT, "yes");
            tr.transform(new DOMSource(document), new StreamResult(fileOutputStream));
        } catch (Exception e) {
            throw new IOException(e.getMessage(), e);
        }
    }

    /**
     * Serializes the Document to a String (UTF-8, indented).
     */
    public String writeXmlDocumentAsString(Document document) throws IOException {
        try {
            Transformer tr = transformerFactory.newTransformer();
            tr.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
            tr.setOutputProperty(OutputKeys.INDENT, "yes");
            StringWriter writer = new StringWriter();
            // (removed an unused second TransformerFactory the original created here)
            tr.transform(new DOMSource(document), new StreamResult(writer));
            return writer.toString();
        } catch (Exception e) {
            throw new IOException(e.getMessage(), e);
        }
    }

    /**
     * Returns the text of the first element named {@code elementName}, preferring
     * direct children of {@code root} before falling back to any descendant.
     *
     * @param root element to search under
     * @param elementName tag name to look for
     * @param defaultValue returned when no such element (or no text) is found
     */
    public String firstElementText(Element root, String elementName, String defaultValue) {
        // prefer direct children first
        String answer = null;
        NodeList children = root.getChildNodes();
        for (int cn = 0; cn < children.getLength(); cn++) {
            if (elementName.equals(children.item(cn).getNodeName())) {
                answer = children.item(cn).getTextContent();
                break;
            }
        }
        if (answer == null) {
            // fallback to getElementsByTagName
            children = root.getElementsByTagName(elementName);
            if (children.getLength() == 0) {
                answer = defaultValue;
            } else {
                Node first = children.item(0);
                answer = first.getTextContent();
            }
        }
        return answer == null ? defaultValue : answer;
    }

    /** Writes a .gitignore that hides the generated {@code src} directory. */
    public static void writeGitIgnore(File gitIgnore) {
        writeFile(gitIgnore, "src\n", false);
    }

    /**
     * Best-effort file write: failures are intentionally swallowed
     * (callers treat this output as optional).
     */
    public static void writeFile(File file, String data, boolean append) {
        try (FileOutputStream fos = new FileOutputStream(file, append)) {
            fos.write(data.getBytes());
        } catch (Exception ignored) {
            // deliberately ignored - best-effort write
        }
    }

    /** Returns true when the string is null or contains only whitespace. */
    public static boolean isEmpty(String s) {
        return s == null || s.trim().length() == 0;
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.cosmos.models;

import java.util.Map;

/**
 * Specifies the options associated with query methods (enumeration operations)
 * in the Azure Cosmos DB database service.
 */
public class CosmosQueryRequestOptions {
    private String sessionToken;
    private String partitionKeyRangeId;
    private Boolean scanInQueryEnabled;
    private Boolean emitVerboseTracesInQuery;
    private int maxDegreeOfParallelism;
    private int maxBufferedItemCount;
    private int responseContinuationTokenLimitInKb;
    private Integer maxItemCount;
    private String requestContinuation;
    private PartitionKey partitionkey;
    private boolean queryMetricsEnabled;
    private Map<String, Object> properties;
    private boolean emptyPagesAllowed;

    /**
     * Instantiates a new query request options.
     */
    public CosmosQueryRequestOptions() {
        // query metrics are collected by default
        this.queryMetricsEnabled = true;
    }

    /**
     * Instantiates a new query request options as a copy of {@code options}.
     *
     * @param options the options
     */
    CosmosQueryRequestOptions(CosmosQueryRequestOptions options) {
        this.sessionToken = options.sessionToken;
        this.partitionKeyRangeId = options.partitionKeyRangeId;
        this.scanInQueryEnabled = options.scanInQueryEnabled;
        this.emitVerboseTracesInQuery = options.emitVerboseTracesInQuery;
        this.maxDegreeOfParallelism = options.maxDegreeOfParallelism;
        this.maxBufferedItemCount = options.maxBufferedItemCount;
        this.responseContinuationTokenLimitInKb = options.responseContinuationTokenLimitInKb;
        this.maxItemCount = options.maxItemCount;
        this.requestContinuation = options.requestContinuation;
        this.partitionkey = options.partitionkey;
        this.queryMetricsEnabled = options.queryMetricsEnabled;
        this.emptyPagesAllowed = options.emptyPagesAllowed;
        // BUGFIX: properties was the only field not copied by this constructor,
        // silently dropping request properties when options were cloned
        this.properties = options.properties;
    }

    /**
     * Gets the partitionKeyRangeId.
     *
     * @return the partitionKeyRangeId.
     */
    String getPartitionKeyRangeIdInternal() {
        return this.partitionKeyRangeId;
    }

    /**
     * Sets the partitionKeyRangeId.
     *
     * @param partitionKeyRangeId the partitionKeyRangeId.
     * @return the CosmosQueryRequestOptions.
     */
    CosmosQueryRequestOptions setPartitionKeyRangeIdInternal(String partitionKeyRangeId) {
        this.partitionKeyRangeId = partitionKeyRangeId;
        return this;
    }

    /**
     * Gets the session token for use with session consistency.
     *
     * @return the session token.
     */
    public String getSessionToken() {
        return this.sessionToken;
    }

    /**
     * Sets the session token for use with session consistency.
     *
     * @param sessionToken the session token.
     * @return the CosmosQueryRequestOptions.
     */
    public CosmosQueryRequestOptions setSessionToken(String sessionToken) {
        this.sessionToken = sessionToken;
        return this;
    }

    /**
     * Gets the option to allow scan on the queries which couldn't be served as
     * indexing was opted out on the requested paths.
     *
     * @return the option of enable scan in query.
     */
    public Boolean isScanInQueryEnabled() {
        return this.scanInQueryEnabled;
    }

    /**
     * Sets the option to allow scan on the queries which couldn't be served as
     * indexing was opted out on the requested paths.
     *
     * @param scanInQueryEnabled the option of enable scan in query.
     * @return the CosmosQueryRequestOptions.
     */
    public CosmosQueryRequestOptions setScanInQueryEnabled(Boolean scanInQueryEnabled) {
        this.scanInQueryEnabled = scanInQueryEnabled;
        return this;
    }

    /**
     * Gets the option to allow queries to emit out verbose traces for
     * investigation.
     *
     * @return the emit verbose traces in query.
     */
    Boolean isEmitVerboseTracesInQuery() {
        return this.emitVerboseTracesInQuery;
    }

    /**
     * Sets the option to allow queries to emit out verbose traces for
     * investigation.
     *
     * @param emitVerboseTracesInQuery the emit verbose traces in query.
     * @return the CosmosQueryRequestOptions.
     */
    CosmosQueryRequestOptions setEmitVerboseTracesInQuery(Boolean emitVerboseTracesInQuery) {
        this.emitVerboseTracesInQuery = emitVerboseTracesInQuery;
        return this;
    }

    /**
     * Gets the number of concurrent operations run client side during parallel
     * query execution.
     *
     * @return number of concurrent operations run client side during parallel query
     * execution.
     */
    public int getMaxDegreeOfParallelism() {
        return maxDegreeOfParallelism;
    }

    /**
     * Sets the number of concurrent operations run client side during parallel
     * query execution.
     *
     * @param maxDegreeOfParallelism number of concurrent operations.
     * @return the CosmosQueryRequestOptions.
     */
    public CosmosQueryRequestOptions setMaxDegreeOfParallelism(int maxDegreeOfParallelism) {
        this.maxDegreeOfParallelism = maxDegreeOfParallelism;
        return this;
    }

    /**
     * Gets the maximum number of items that can be buffered client side during
     * parallel query execution.
     *
     * @return maximum number of items that can be buffered client side during
     * parallel query execution.
     */
    public int getMaxBufferedItemCount() {
        return maxBufferedItemCount;
    }

    /**
     * Sets the maximum number of items that can be buffered client side during
     * parallel query execution.
     *
     * @param maxBufferedItemCount maximum number of items.
     * @return the CosmosQueryRequestOptions.
     */
    public CosmosQueryRequestOptions setMaxBufferedItemCount(int maxBufferedItemCount) {
        this.maxBufferedItemCount = maxBufferedItemCount;
        return this;
    }

    /**
     * Sets the ResponseContinuationTokenLimitInKb request option for item query
     * requests in the Azure Cosmos DB service.
     * <p>
     * ResponseContinuationTokenLimitInKb is used to limit the length of
     * continuation token in the query response. Valid values are &gt;= 1.
     * <p>
     * The continuation token contains both required and optional fields. The
     * required fields are necessary for resuming the execution from where it was
     * stopped. The optional fields may contain serialized index lookup work that
     * was done but not yet utilized. This avoids redoing the work again in
     * subsequent continuations and hence improve the query performance. Setting the
     * maximum continuation size to 1KB, the Azure Cosmos DB service will only
     * serialize required fields. Starting from 2KB, the Azure Cosmos DB service
     * would serialize as much as it could fit till it reaches the maximum specified
     * size.
     *
     * @param limitInKb continuation token size limit.
     * @return the CosmosQueryRequestOptions.
     */
    public CosmosQueryRequestOptions setResponseContinuationTokenLimitInKb(int limitInKb) {
        this.responseContinuationTokenLimitInKb = limitInKb;
        return this;
    }

    /**
     * Gets the ResponseContinuationTokenLimitInKb request option for item query
     * requests in the Azure Cosmos DB service. If not already set returns 0.
     * <p>
     * ResponseContinuationTokenLimitInKb is used to limit the length of
     * continuation token in the query response. Valid values are &gt;= 1.
     *
     * @return return set ResponseContinuationTokenLimitInKb, or 0 if not set
     */
    public int getResponseContinuationTokenLimitInKb() {
        return responseContinuationTokenLimitInKb;
    }

    /**
     * Gets the maximum number of items to be returned in the enumeration
     * operation.
     *
     * @return the max number of items.
     */
    Integer getMaxItemCount() {
        return this.maxItemCount;
    }

    /**
     * Sets the maximum number of items to be returned in the enumeration
     * operation.
     *
     * @param maxItemCount the max number of items.
     * @return the CosmosQueryRequestOptions.
     */
    CosmosQueryRequestOptions setMaxItemCount(Integer maxItemCount) {
        this.maxItemCount = maxItemCount;
        return this;
    }

    /**
     * Gets the request continuation token.
     *
     * @return the request continuation.
     */
    String getRequestContinuation() {
        return this.requestContinuation;
    }

    /**
     * Sets the request continuation token.
     *
     * @param requestContinuation the request continuation.
     * @return the CosmosQueryRequestOptions.
     */
    CosmosQueryRequestOptions setRequestContinuation(String requestContinuation) {
        this.requestContinuation = requestContinuation;
        return this;
    }

    /**
     * Gets the partition key used to identify the current request's target
     * partition.
     *
     * @return the partition key.
     */
    public PartitionKey getPartitionKey() {
        return this.partitionkey;
    }

    /**
     * Sets the partition key used to identify the current request's target
     * partition.
     *
     * @param partitionkey the partition key value.
     * @return the CosmosQueryRequestOptions.
     */
    public CosmosQueryRequestOptions setPartitionKey(PartitionKey partitionkey) {
        this.partitionkey = partitionkey;
        return this;
    }

    /**
     * Gets the option to enable populate query metrics. By default query metrics are enabled.
     *
     * @return whether to enable populate query metrics (default: true)
     */
    public boolean isQueryMetricsEnabled() {
        return queryMetricsEnabled;
    }

    /**
     * Sets the option to enable/disable getting metrics relating to query execution on item query requests.
     * By default query metrics are enabled.
     *
     * @param queryMetricsEnabled whether to enable or disable query metrics
     * @return the CosmosQueryRequestOptions.
     */
    public CosmosQueryRequestOptions setQueryMetricsEnabled(boolean queryMetricsEnabled) {
        this.queryMetricsEnabled = queryMetricsEnabled;
        return this;
    }

    /**
     * Gets the properties
     *
     * @return Map of request options properties
     */
    Map<String, Object> getProperties() {
        return properties;
    }

    /**
     * Sets the properties used to identify the request token.
     *
     * @param properties the properties.
     * @return the CosmosQueryRequestOptions.
     */
    CosmosQueryRequestOptions setProperties(Map<String, Object> properties) {
        this.properties = properties;
        return this;
    }

    /**
     * Gets the option to allow empty result pages in feed response.
     *
     * @return whether to enable allow empty pages or not
     */
    boolean isEmptyPagesAllowed() {
        return emptyPagesAllowed;
    }

    /**
     * Sets the option to allow empty result pages in feed response. Defaults to false
     *
     * @param emptyPagesAllowed whether to allow empty pages in feed response
     * @return the CosmosQueryRequestOptions.
     */
    CosmosQueryRequestOptions setEmptyPagesAllowed(boolean emptyPagesAllowed) {
        this.emptyPagesAllowed = emptyPagesAllowed;
        return this;
    }
}
/* * #%L * BroadleafCommerce Common Libraries * %% * Copyright (C) 2009 - 2013 Broadleaf Commerce * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.broadleafcommerce.common.config.service; import net.sf.ehcache.Cache; import net.sf.ehcache.CacheManager; import net.sf.ehcache.Element; import org.apache.commons.lang3.StringUtils; import org.broadleafcommerce.common.config.RuntimeEnvironmentPropertiesManager; import org.broadleafcommerce.common.config.dao.SystemPropertiesDao; import org.broadleafcommerce.common.config.domain.SystemProperty; import org.broadleafcommerce.common.config.service.type.SystemPropertyFieldType; import org.broadleafcommerce.common.extensibility.jpa.SiteDiscriminator; import org.broadleafcommerce.common.extension.ExtensionResultHolder; import org.broadleafcommerce.common.web.BroadleafRequestContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; import javax.annotation.Resource; /** * Service that retrieves property settings from the database. If not set in * the DB then returns the value from property files. 
* * @author bpolster * */ @Service("blSystemPropertiesService") public class SystemPropertiesServiceImpl implements SystemPropertiesService{ protected Cache systemPropertyCache; @Resource(name="blSystemPropertiesDao") protected SystemPropertiesDao systemPropertiesDao; @Resource(name = "blSystemPropertyServiceExtensionManager") protected SystemPropertyServiceExtensionManager extensionManager; @Value("${system.property.cache.timeout}") protected int systemPropertyCacheTimeout; @Autowired protected RuntimeEnvironmentPropertiesManager propMgr; @Override public String resolveSystemProperty(String name, String defaultValue) { String result = resolveSystemProperty(name); if (StringUtils.isBlank(result)) { return defaultValue; } return result; } @Override public String resolveSystemProperty(String name) { if (extensionManager != null) { ExtensionResultHolder holder = new ExtensionResultHolder(); extensionManager.getProxy().resolveProperty(name, holder); if (holder.getResult() != null) { return holder.getResult().toString(); } } String result; // We don't want to utilize this cache for sandboxes if (BroadleafRequestContext.getBroadleafRequestContext().getSandBox() == null) { result = getPropertyFromCache(name); } else { result = null; } if (result != null) { return result; } SystemProperty property = systemPropertiesDao.readSystemPropertyByName(name); if (property == null || StringUtils.isEmpty(property.getValue())) { result = propMgr.getProperty(name); } else { if ("_blank_".equals(property.getValue())) { result = ""; } else { result = property.getValue(); } } if (result != null) { addPropertyToCache(name, result); } return result; } protected void addPropertyToCache(String propertyName, String propertyValue) { String key = buildKey(propertyName); if (systemPropertyCacheTimeout < 0) { getSystemPropertyCache().put(new Element(key, propertyValue)); } else { getSystemPropertyCache().put(new Element(key, propertyValue, systemPropertyCacheTimeout, systemPropertyCacheTimeout)); 
} } protected String getPropertyFromCache(String propertyName) { String key = buildKey(propertyName); Element cacheElement = getSystemPropertyCache().get(key); if (cacheElement != null && cacheElement.getObjectValue() != null) { return (String) cacheElement.getObjectValue(); } return null; } /** * Properties can vary by site. If a site is found on the request, use the site id as part of the * cache-key. * * @param propertyName * @return */ protected String buildKey(String propertyName) { String key = propertyName; BroadleafRequestContext brc = BroadleafRequestContext.getBroadleafRequestContext(); if (brc != null) { if (brc.getSite() != null) { key = brc.getSite().getId() + "-" + key; } } return key; } /** * Properties can vary by site. If a site is found on the request, use the site id as part of the * cache-key. * * @param systemProperty * @return */ protected String buildKey(SystemProperty systemProperty) { String key = systemProperty.getName(); if (systemProperty instanceof SiteDiscriminator && ((SiteDiscriminator) systemProperty).getSiteDiscriminator() != null) { key = ((SiteDiscriminator) systemProperty).getSiteDiscriminator() + "-" + key; } return key; } protected Cache getSystemPropertyCache() { if (systemPropertyCache == null) { systemPropertyCache = CacheManager.getInstance().getCache("blSystemPropertyElements"); } return systemPropertyCache; } @Override public SystemProperty findById(Long id) { return systemPropertiesDao.readById(id); } @Override public void removeFromCache(SystemProperty systemProperty) { //Could have come from a cache invalidation service that does not //include the site on the thread, so we should build the key //including the site (if applicable) from the systemProperty itself String key = buildKey(systemProperty); getSystemPropertyCache().remove(key); systemPropertiesDao.removeFromCache(systemProperty); } @Override public int resolveIntSystemProperty(String name) { String systemProperty = resolveSystemProperty(name, "0"); return 
Integer.valueOf(systemProperty).intValue(); } @Override public int resolveIntSystemProperty(String name, int defaultValue) { String systemProperty = resolveSystemProperty(name, Integer.toString(defaultValue)); return Integer.valueOf(systemProperty).intValue(); } @Override public boolean resolveBooleanSystemProperty(String name) { String systemProperty = resolveSystemProperty(name, "false"); return Boolean.valueOf(systemProperty).booleanValue(); } @Override public boolean resolveBooleanSystemProperty(String name, boolean defaultValue) { String systemProperty = resolveSystemProperty(name, Boolean.toString(defaultValue)); return Boolean.valueOf(systemProperty).booleanValue(); } @Override public long resolveLongSystemProperty(String name) { String systemProperty = resolveSystemProperty(name, "0"); return Long.valueOf(systemProperty).longValue(); } @Override public long resolveLongSystemProperty(String name, long defaultValue) { String systemProperty = resolveSystemProperty(name, Long.toString(defaultValue)); return Long.valueOf(systemProperty).longValue(); } @Override public boolean isValueValidForType(String value, SystemPropertyFieldType type) { if (type.equals(SystemPropertyFieldType.BOOLEAN_TYPE)) { value = value.toUpperCase(); if (value != null && (value.equals("TRUE") || value.equals("FALSE"))) { return true; } } else if (type.equals(SystemPropertyFieldType.INT_TYPE)) { try { Integer.parseInt(value); return true; } catch (Exception e) { // Do nothing - we will fail on validation } } else if (type.equals(SystemPropertyFieldType.LONG_TYPE)) { try { Long.parseLong(value); return true; } catch (Exception e) { // Do nothing - we will fail on validation } } else if (type.equals(SystemPropertyFieldType.DOUBLE_TYPE)) { try { Double.parseDouble(value); return true; } catch (Exception e) { // Do nothing - we will fail on validation } } else if (type.equals(SystemPropertyFieldType.STRING_TYPE)) { return true; } return false; } }
package com.allo.nyt.ui.search; import android.graphics.drawable.Drawable; import android.support.v7.widget.RecyclerView; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ProgressBar; import android.widget.TextView; import com.allo.nyt.R; import com.allo.nyt.model.Article; import com.allo.nyt.model.Multimedia; import com.allo.nyt.ui.utils.DynamicHeightImageView; import com.allo.nyt.utils.Utils; import com.bumptech.glide.Glide; import com.bumptech.glide.load.resource.drawable.GlideDrawable; import com.bumptech.glide.request.animation.GlideAnimation; import com.bumptech.glide.request.target.SimpleTarget; import java.util.ArrayList; import butterknife.BindView; import butterknife.ButterKnife; /** * SearchAdapter * <p/> * Created by ALLO on 24/7/16. */ public class SearchAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> { private static final String TAG_LOG = SearchAdapter.class.getCanonicalName(); public static int ARTICLE_THUMBNAIL = 1; public static int ARTICLE_HEADLINE = 2; public interface OnArticlesAdapterListener { void didSelectArticle(Article article); } private OnArticlesAdapterListener mListener; private ArrayList<Article> mArticles; public SearchAdapter(ArrayList<Article> articles, OnArticlesAdapterListener listener) { this.mArticles = articles; this.mListener = listener; } @Override public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { if (viewType == ARTICLE_THUMBNAIL) { View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.item_articles_multimedia, parent, false); return new ArticleMultimediaViewHolder(view); } else if (viewType == ARTICLE_HEADLINE) { View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.item_articles_headline, parent, false); return new ArticleHeadlineViewHolder(view); } return null; } @Override public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { if 
(holder != null) { if (holder instanceof ArticleMultimediaViewHolder) { ((ArticleMultimediaViewHolder) holder).configureViewWithArticle(mArticles.get(position)); } else if (holder instanceof ArticleHeadlineViewHolder) { ((ArticleHeadlineViewHolder) holder).configureViewWithArticle(mArticles.get(position)); } } } @Override public int getItemCount() { return this.mArticles != null ? this.mArticles.size() : 0; } @Override public int getItemViewType(int position) { Article article = mArticles.get(position); if (article.hasImages()) { return ARTICLE_THUMBNAIL; } else { return ARTICLE_HEADLINE; } } public void notifyDataSetChanged(ArrayList<Article> articles) { this.mArticles = new ArrayList<>(articles); notifyDataSetChanged(); } class ArticleMultimediaViewHolder extends RecyclerView.ViewHolder { private View view; private Article article; @BindView(R.id.iv_photo) DynamicHeightImageView ivPhoto; @BindView(R.id.pb_image) ProgressBar pbImage; @BindView(R.id.tv_headline) TextView tvHeadline; public ArticleMultimediaViewHolder(View view) { super(view); ButterKnife.bind(this, view); this.view = view; this.view.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { if (mListener != null) mListener.didSelectArticle(article); } }); } public void configureViewWithArticle(Article article) { this.article = article; tvHeadline.setText(article.getHeadline().getTitle()); pbImage.setVisibility(View.VISIBLE); ivPhoto.setVisibility(View.VISIBLE); ivPhoto.setImageDrawable(null); if (article.hasImages()) { Multimedia photo = article.getFirstImage(); ivPhoto.setHeightRatio(((double) photo.getHeight()) / photo.getWidth()); /* Picasso.with(view.getContext()).load(article.getFirstImage().getUrl()) .into(ivPhoto, new Callback() { @Override public void onSuccess() { pbImage.setVisibility(View.GONE); } @Override public void onError() { Log.d(TAG_LOG, "error"); } }); */ Glide.with(view.getContext()).load(article.getFirstImage().getUrl()) .into(new 
SimpleTarget<GlideDrawable>() { @Override public void onResourceReady(GlideDrawable resource, GlideAnimation<? super GlideDrawable> glideAnimation) { ivPhoto.setImageDrawable(resource); pbImage.setVisibility(View.GONE); } @Override public void onLoadFailed(Exception e, Drawable errorDrawable) { Log.d(TAG_LOG, e.getMessage()); } }); } else { pbImage.setVisibility(View.GONE); ivPhoto.setVisibility(View.GONE); } } } class ArticleHeadlineViewHolder extends RecyclerView.ViewHolder { private View view; private Article article; @BindView(R.id.tv_headline) TextView tvHeadline; @BindView(R.id.tv_date) TextView tvDate; @BindView(R.id.tv_author) TextView tvAuthor; public ArticleHeadlineViewHolder(View view) { super(view); ButterKnife.bind(this, view); this.view = view; this.view.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { if (mListener != null) mListener.didSelectArticle(article); } }); } public void configureViewWithArticle(Article article) { this.article = article; tvHeadline.setText(article.getHeadline().getTitle()); if (article.getPubDate() != null) { tvDate.setVisibility(View.VISIBLE); tvDate.setText(Utils.formatDateShort(article.getPubDate())); } else { tvDate.setVisibility(View.GONE); } if (article.getByLine() != null) { tvAuthor.setVisibility(View.VISIBLE); if (tvDate.getVisibility() == View.VISIBLE) { tvAuthor.setText(tvAuthor.getContext().getString(R.string.author_with_date, article.getByLine().getOriginal())); } else { tvAuthor.setText(article.getByLine().getOriginal()); } } else { tvAuthor.setVisibility(View.GONE); } } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.test.streaming.runtime; import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.api.common.functions.ReduceFunction; import org.apache.flink.api.common.functions.StoppableFunction; import org.apache.flink.api.common.typeinfo.BasicTypeInfo; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.client.program.ClusterClient; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.TaskManagerOptions; import org.apache.flink.core.testutils.MultiShotLatch; import org.apache.flink.runtime.client.JobStatusMessage; import org.apache.flink.streaming.api.TimeCharacteristic; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks; import org.apache.flink.streaming.api.functions.AssignerWithPunctuatedWatermarks; import org.apache.flink.streaming.api.functions.co.CoMapFunction; import org.apache.flink.streaming.api.functions.sink.DiscardingSink; import 
org.apache.flink.streaming.api.functions.source.SourceFunction; import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor; import org.apache.flink.streaming.api.operators.AbstractStreamOperator; import org.apache.flink.streaming.api.operators.ChainingStrategy; import org.apache.flink.streaming.api.operators.OneInputStreamOperator; import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows; import org.apache.flink.streaming.api.windowing.time.Time; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.test.util.MiniClusterResource; import org.apache.flink.test.util.MiniClusterResourceConfiguration; import org.apache.flink.util.TestLogger; import org.junit.Assert; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.stream.Collectors; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * Tests for timestamps, watermarks, and event-time sources. 
 */
@SuppressWarnings("serial")
public class TimestampITCase extends TestLogger {

	private static final int NUM_TASK_MANAGERS = 2;
	private static final int NUM_TASK_SLOTS = 3;
	private static final int PARALLELISM = NUM_TASK_MANAGERS * NUM_TASK_SLOTS;

	// this is used in some tests to synchronize: sources block on it and
	// CustomOperator.processWatermark() triggers it, so elements are emitted
	// one watermark at a time.
	static MultiShotLatch latch;

	// Shared mini-cluster for all tests in this class.
	@ClassRule
	public static final MiniClusterResource CLUSTER = new MiniClusterResource(
		new MiniClusterResourceConfiguration.Builder()
			.setConfiguration(getConfiguration())
			.setNumberTaskManagers(NUM_TASK_MANAGERS)
			.setNumberSlotsPerTaskManager(NUM_TASK_SLOTS)
			.build());

	/** Builds the cluster configuration (small managed-memory footprint for tests). */
	private static Configuration getConfiguration() {
		Configuration config = new Configuration();
		config.setString(TaskManagerOptions.MANAGED_MEMORY_SIZE, "12m");
		return config;
	}

	@Before
	public void setupLatch() {
		// ensure that we get a fresh latch for each test
		latch = new MultiShotLatch();
	}

	/**
	 * These check whether custom timestamp emission works at sources and also whether timestamps
	 * arrive at operators throughout a topology.
	 *
	 * <p>This also checks whether watermarks keep propagating if a source closes early.
	 *
	 * <p>This only uses map to test the workings of watermarks in a complete, running topology. All
	 * tasks and stream operators have dedicated tests that test the watermark propagation
	 * behaviour.
	 */
	@Test
	public void testWatermarkPropagation() throws Exception {
		final int numWatermarks = 10;

		long initialTime = 0L;

		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
		env.setParallelism(PARALLELISM);
		env.getConfig().disableSysoutLogging();

		// source2 emits only half as many watermarks as source1, so it finishes early.
		DataStream<Integer> source1 = env.addSource(new MyTimestampSource(initialTime, numWatermarks));
		DataStream<Integer> source2 = env.addSource(new MyTimestampSource(initialTime, numWatermarks / 2));

		source1.union(source2)
				.map(new IdentityMap())
				.connect(source2).map(new IdentityCoMap())
				.transform("Custom Operator", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true))
				.addSink(new DiscardingSink<Integer>());

		env.execute();

		// verify that all the watermarks arrived at the final custom operator
		for (int i = 0; i < PARALLELISM; i++) {
			// we are only guaranteed to see NUM_WATERMARKS / 2 watermarks because the
			// other source stops emitting after that
			for (int j = 0; j < numWatermarks / 2; j++) {
				if (!CustomOperator.finalWatermarks[i].get(j).equals(new Watermark(initialTime + j))) {
					System.err.println("All Watermarks: ");
					for (int k = 0; k <= numWatermarks / 2; k++) {
						System.err.println(CustomOperator.finalWatermarks[i].get(k));
					}

					fail("Wrong watermark.");
				}
			}

			// sources are finite, so the job ends with a MAX_WATERMARK
			assertEquals(Watermark.MAX_WATERMARK,
					CustomOperator.finalWatermarks[i].get(CustomOperator.finalWatermarks[i].size() - 1));
		}
	}

	@Test
	public void testWatermarkPropagationNoFinalWatermarkOnStop() throws Exception {

		// for this test to work, we need to be sure that no other jobs are being executed
		final ClusterClient<?> clusterClient = CLUSTER.getClusterClient();
		while (!getRunningJobs(clusterClient).isEmpty()) {
			Thread.sleep(100);
		}

		final int numWatermarks = 10;

		long initialTime = 0L;

		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
		env.setParallelism(PARALLELISM);
		env.getConfig().disableSysoutLogging();

		// infinite sources: the job only terminates via the "stopper" thread below
		DataStream<Integer> source1 = env.addSource(new MyTimestampSourceInfinite(initialTime, numWatermarks));
		DataStream<Integer> source2 = env.addSource(new MyTimestampSourceInfinite(initialTime, numWatermarks / 2));

		source1.union(source2)
				.map(new IdentityMap())
				.connect(source2).map(new IdentityCoMap())
				.transform("Custom Operator", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true))
				.addSink(new DiscardingSink<Integer>());

		// Background thread that stops the job once it shows up as running.
		Thread t = new Thread("stopper") {
			@Override
			public void run() {
				try {
					// try until we get the running jobs
					List<JobID> running = getRunningJobs(clusterClient);
					while (running.isEmpty()) {
						Thread.sleep(10);
						running = getRunningJobs(clusterClient);
					}

					JobID id = running.get(0);

					// send stop until the job is stopped
					do {
						try {
							clusterClient.stop(id);
						} catch (Exception e) {
							if (e.getCause() instanceof IllegalStateException) {
								// this means the job is not yet ready to be stopped,
								// for example because it is still in CREATED state
								// we ignore the exception
							} else {
								// other problem
								throw e;
							}
						}
						Thread.sleep(10);
					} while (!getRunningJobs(clusterClient).isEmpty());
				} catch (Throwable t) {
					t.printStackTrace();
				}
			}
		};
		t.start();

		env.execute();

		// verify that all the watermarks arrived at the final custom operator
		for (List<Watermark> subtaskWatermarks : CustomOperator.finalWatermarks) {

			// we are only guaranteed to see NUM_WATERMARKS / 2 watermarks because the
			// other source stops emitting after that
			for (int j = 0; j < subtaskWatermarks.size(); j++) {
				if (subtaskWatermarks.get(j).getTimestamp() != initialTime + j) {
					System.err.println("All Watermarks: ");
					for (int k = 0; k <= numWatermarks / 2; k++) {
						System.err.println(subtaskWatermarks.get(k));
					}

					fail("Wrong watermark.");
				}
			}

			// if there are watermarks, the final one must not be the MAX watermark
			// (a STOPPED job must not emit the final watermark, unlike a FINISHED one)
			if (subtaskWatermarks.size() > 0) {
				assertNotEquals(Watermark.MAX_WATERMARK,
						subtaskWatermarks.get(subtaskWatermarks.size() - 1));
			}
		}

		t.join();
	}

	/**
	 * These check whether timestamps are properly assigned at the sources and handled in
	 * network transmission and between chained operators when timestamps are enabled.
	 */
	@Test
	public void testTimestampHandling() throws Exception {
		final int numElements = 10;

		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
		env.setParallelism(PARALLELISM);
		env.getConfig().disableSysoutLogging();

		DataStream<Integer> source1 = env.addSource(new MyTimestampSource(0L, numElements));
		DataStream<Integer> source2 = env.addSource(new MyTimestampSource(0L, numElements));

		source1
				.map(new IdentityMap())
				.connect(source2).map(new IdentityCoMap())
				.transform("Custom Operator", BasicTypeInfo.INT_TYPE_INFO, new TimestampCheckingOperator())
				.addSink(new DiscardingSink<Integer>());

		env.execute();
	}

	/**
	 * These check whether timestamps are properly ignored when they are disabled.
	 */
	@Test
	public void testDisabledTimestamps() throws Exception {
		final int numElements = 10;

		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);
		env.setParallelism(PARALLELISM);
		env.getConfig().disableSysoutLogging();

		DataStream<Integer> source1 = env.addSource(new MyNonWatermarkingSource(numElements));
		DataStream<Integer> source2 = env.addSource(new MyNonWatermarkingSource(numElements));

		source1
				.map(new IdentityMap())
				.connect(source2).map(new IdentityCoMap())
				.transform("Custom Operator", BasicTypeInfo.INT_TYPE_INFO, new DisabledTimestampCheckingOperator())
				.addSink(new DiscardingSink<Integer>());

		env.execute();
	}

	/**
	 * This tests whether timestamps are properly extracted in the timestamp
	 * extractor and whether watermarks are also correctly forwarded from this with the auto watermark
	 * interval.
	 */
	@Test
	public void testTimestampExtractorWithAutoInterval() throws Exception {
		final int numElements = 10;

		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
		env.getConfig().setAutoWatermarkInterval(10);
		env.setParallelism(1);
		env.getConfig().disableSysoutLogging();

		DataStream<Integer> source1 = env.addSource(new SourceFunction<Integer>() {
			@Override
			public void run(SourceContext<Integer> ctx) throws Exception {
				int index = 1;
				while (index <= numElements) {
					ctx.collect(index);
					// wait until the downstream operator has seen the watermark
					// before emitting the next element
					latch.await();
					index++;
				}
			}

			@Override
			public void cancel() {}
		});

		DataStream<Integer> extractOp = source1.assignTimestampsAndWatermarks(
				new AscendingTimestampExtractor<Integer>() {
					@Override
					public long extractAscendingTimestamp(Integer element) {
						return element;
					}
				});

		extractOp
				.transform("Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true))
				.transform("Timestamp Check",
						BasicTypeInfo.INT_TYPE_INFO,
						new TimestampCheckingOperator());

		// verify that extractor picks up source parallelism
		Assert.assertEquals(extractOp.getTransformation().getParallelism(), source1.getTransformation().getParallelism());

		env.execute();

		// verify that we get NUM_ELEMENTS watermarks
		for (int j = 0; j < numElements; j++) {
			if (!CustomOperator.finalWatermarks[0].get(j).equals(new Watermark(j))) {
				long wm = CustomOperator.finalWatermarks[0].get(j).getTimestamp();
				Assert.fail("Wrong watermark. Expected: " + j + " Found: " + wm + " All: " + CustomOperator.finalWatermarks[0]);
			}
		}

		// the input is finite, so it should have a MAX Watermark
		assertEquals(Watermark.MAX_WATERMARK,
				CustomOperator.finalWatermarks[0].get(CustomOperator.finalWatermarks[0].size() - 1));
	}

	/**
	 * This tests whether timestamps are properly extracted in the timestamp
	 * extractor and whether watermark are correctly forwarded from the custom watermark emit
	 * function.
	 */
	@Test
	public void testTimestampExtractorWithCustomWatermarkEmit() throws Exception {
		final int numElements = 10;

		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
		env.getConfig().setAutoWatermarkInterval(10);
		env.setParallelism(1);
		env.getConfig().disableSysoutLogging();

		DataStream<Integer> source1 = env.addSource(new SourceFunction<Integer>() {
			@Override
			public void run(SourceContext<Integer> ctx) throws Exception {
				int index = 1;
				while (index <= numElements) {
					ctx.collect(index);
					// synchronize with the downstream watermark check
					latch.await();
					index++;
				}
			}

			@Override
			public void cancel() {}
		});

		source1
				.assignTimestampsAndWatermarks(new AssignerWithPunctuatedWatermarks<Integer>() {
					@Override
					public long extractTimestamp(Integer element, long currentTimestamp) {
						return element;
					}

					@Override
					public Watermark checkAndGetNextWatermark(Integer element, long extractedTimestamp) {
						// punctuated watermark trails the element by one
						return new Watermark(extractedTimestamp - 1);
					}
				})
				.transform("Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true))
				.transform("Timestamp Check", BasicTypeInfo.INT_TYPE_INFO, new TimestampCheckingOperator());

		env.execute();

		// verify that we get NUM_ELEMENTS watermarks
		for (int j = 0; j < numElements; j++) {
			if (!CustomOperator.finalWatermarks[0].get(j).equals(new Watermark(j))) {
				Assert.fail("Wrong watermark.");
			}
		}

		// the input is finite, so it should have a MAX Watermark
		assertEquals(Watermark.MAX_WATERMARK,
				CustomOperator.finalWatermarks[0].get(CustomOperator.finalWatermarks[0].size() - 1));
	}

	/**
	 * This test verifies that the timestamp extractor does not emit decreasing watermarks.
	 */
	@Test
	public void testTimestampExtractorWithDecreasingCustomWatermarkEmit() throws Exception {
		final int numElements = 10;

		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
		env.getConfig().setAutoWatermarkInterval(1);
		env.setParallelism(1);
		env.getConfig().disableSysoutLogging();

		DataStream<Integer> source1 = env.addSource(new SourceFunction<Integer>() {
			@Override
			public void run(SourceContext<Integer> ctx) throws Exception {
				int index = 1;
				while (index <= numElements) {
					ctx.collect(index);
					Thread.sleep(100);
					// deliberately emit an out-of-order (smaller) element; the extractor
					// must NOT emit a decreasing watermark for it
					ctx.collect(index - 1);
					latch.await();
					index++;
				}
			}

			@Override
			public void cancel() {}
		});

		source1
				.assignTimestampsAndWatermarks(new AssignerWithPunctuatedWatermarks<Integer>() {
					@Override
					public long extractTimestamp(Integer element, long previousTimestamp) {
						return element;
					}

					@Override
					public Watermark checkAndGetNextWatermark(Integer element, long extractedTimestamp) {
						return new Watermark(extractedTimestamp - 1);
					}
				})
				.transform("Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true))
				.transform("Timestamp Check", BasicTypeInfo.INT_TYPE_INFO, new TimestampCheckingOperator());

		env.execute();

		// verify that we get NUM_ELEMENTS watermarks
		for (int j = 0; j < numElements; j++) {
			if (!CustomOperator.finalWatermarks[0].get(j).equals(new Watermark(j))) {
				Assert.fail("Wrong watermark.");
			}
		}

		// the input is finite, so it should have a MAX Watermark
		assertEquals(Watermark.MAX_WATERMARK,
				CustomOperator.finalWatermarks[0].get(CustomOperator.finalWatermarks[0].size() - 1));
	}

	/**
	 * This test verifies that the timestamp extractor forwards Long.MAX_VALUE watermarks.
	 */
	@Test
	public void testTimestampExtractorWithLongMaxWatermarkFromSource() throws Exception {
		final int numElements = 10;

		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
		env.getConfig().setAutoWatermarkInterval(1);
		env.setParallelism(2);
		env.getConfig().disableSysoutLogging();

		DataStream<Integer> source1 = env.addSource(new SourceFunction<Integer>() {
			@Override
			public void run(SourceContext<Integer> ctx) throws Exception {
				int index = 1;
				while (index <= numElements) {
					ctx.collectWithTimestamp(index, index);
					ctx.collectWithTimestamp(index - 1, index - 1);
					index++;
					ctx.emitWatermark(new Watermark(index - 2));
				}

				// emit the final Long.MAX_VALUE watermark, do it twice and verify that
				// we only see one in the result
				ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
				ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
			}

			@Override
			public void cancel() {}
		});

		source1
				.assignTimestampsAndWatermarks(new AssignerWithPunctuatedWatermarks<Integer>() {
					@Override
					public long extractTimestamp(Integer element, long currentTimestamp) {
						return element;
					}

					@Override
					public Watermark checkAndGetNextWatermark(Integer element, long extractedTimestamp) {
						// never emit punctuated watermarks; only the source's MAX watermark
						// should be forwarded
						return null;
					}
				})
			.transform("Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true));

		env.execute();

		Assert.assertTrue(CustomOperator.finalWatermarks[0].size() == 1);
		Assert.assertTrue(CustomOperator.finalWatermarks[0].get(0).getTimestamp() == Long.MAX_VALUE);
	}

	/**
	 * This test verifies that the timestamp extractor forwards Long.MAX_VALUE watermarks.
	 *
	 * <p>Same test as before, but using a different timestamp extractor.
	 */
	@Test
	public void testTimestampExtractorWithLongMaxWatermarkFromSource2() throws Exception {
		final int numElements = 10;

		StreamExecutionEnvironment env = StreamExecutionEnvironment
				.getExecutionEnvironment();

		env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
		env.getConfig().setAutoWatermarkInterval(10);
		env.setParallelism(2);
		env.getConfig().disableSysoutLogging();

		DataStream<Integer> source1 = env.addSource(new SourceFunction<Integer>() {
			@Override
			public void run(SourceContext<Integer> ctx) throws Exception {
				int index = 1;
				while (index <= numElements) {
					ctx.collectWithTimestamp(index, index);
					ctx.collectWithTimestamp(index - 1, index - 1);
					index++;
					ctx.emitWatermark(new Watermark(index - 2));
				}

				// emit the final Long.MAX_VALUE watermark, do it twice and verify that
				// we only see one in the result
				ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
				ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
			}

			@Override
			public void cancel() {}
		});

		source1
				.assignTimestampsAndWatermarks(new AssignerWithPeriodicWatermarks<Integer>() {
					@Override
					public long extractTimestamp(Integer element, long currentTimestamp) {
						return element;
					}

					@Override
					public Watermark getCurrentWatermark() {
						// never emit periodic watermarks; only the source's MAX watermark
						// should be forwarded
						return null;
					}
				})
				.transform("Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true));

		env.execute();

		Assert.assertTrue(CustomOperator.finalWatermarks[0].size() == 1);
		Assert.assertTrue(CustomOperator.finalWatermarks[0].get(0).getTimestamp() == Long.MAX_VALUE);
	}

	/**
	 * This verifies that an event time source works when setting stream time characteristic to
	 * processing time. In this case, the watermarks should just be swallowed.
	 */
	@Test
	public void testEventTimeSourceWithProcessingTime() throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		env.setParallelism(2);
		env.getConfig().disableSysoutLogging();
		env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

		DataStream<Integer> source1 = env.addSource(new MyTimestampSource(0, 10));

		source1
			.map(new IdentityMap())
			.transform("Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(false));

		env.execute();

		// verify that we don't get any watermarks, the source is used as watermark source in
		// other tests, so it normally emits watermarks
		Assert.assertTrue(CustomOperator.finalWatermarks[0].size() == 0);
	}

	@Test
	public void testErrorOnEventTimeOverProcessingTime() {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		env.setParallelism(2);
		env.getConfig().disableSysoutLogging();
		env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

		DataStream<Tuple2<String, Integer>> source1 =
				env.fromElements(new Tuple2<>("a", 1), new Tuple2<>("b", 2));

		// event-time window on a processing-time stream: the job is expected to fail
		source1
				.keyBy(0)
				.window(TumblingEventTimeWindows.of(Time.seconds(5)))
				.reduce(new ReduceFunction<Tuple2<String, Integer>>() {
					@Override
					public Tuple2<String, Integer> reduce(Tuple2<String, Integer> value1, Tuple2<String, Integer> value2)  {
						return value1;
					}
				})
				.print();

		try {
			env.execute();
			fail("this should fail with an exception");
		} catch (Exception e) {
			// expected
		}
	}

	@Test
	public void testErrorOnEventTimeWithoutTimestamps() {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		env.setParallelism(2);
		env.getConfig().disableSysoutLogging();
		env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

		DataStream<Tuple2<String, Integer>> source1 =
				env.fromElements(new Tuple2<>("a", 1), new Tuple2<>("b", 2));

		// event-time window without any timestamp assignment: expected to fail
		source1
				.keyBy(0)
				.window(TumblingEventTimeWindows.of(Time.seconds(5)))
				.reduce(new ReduceFunction<Tuple2<String, Integer>>() {
					@Override
					public Tuple2<String, Integer> reduce(Tuple2<String, Integer> value1, Tuple2<String, Integer> value2)  {
						return value1;
					}
				})
				.print();

		try {
			env.execute();
			fail("this should fail with an exception");
		} catch (Exception e) {
			// expected
		}
	}

	// ------------------------------------------------------------------------
	//  Custom Operators and Functions
	// ------------------------------------------------------------------------

	/**
	 * Identity operator that records every watermark it sees (per subtask) into the
	 * static {@code finalWatermarks} array and verifies watermarks strictly increase.
	 * Optionally checks that each element's timestamp equals its value.
	 */
	@SuppressWarnings("unchecked")
	private static class CustomOperator extends AbstractStreamOperator<Integer> implements OneInputStreamOperator<Integer, Integer> {

		List<Watermark> watermarks;
		// indexed by subtask; written in close(), read by the test thread after execute()
		public static List<Watermark>[] finalWatermarks = new List[PARALLELISM];

		private final boolean timestampsEnabled;

		public CustomOperator(boolean timestampsEnabled) {
			setChainingStrategy(ChainingStrategy.ALWAYS);
			this.timestampsEnabled = timestampsEnabled;
		}

		@Override
		public void processElement(StreamRecord<Integer> element) throws Exception {
			if (timestampsEnabled) {
				// tests use "timestamp == value" as the invariant for correct handling
				if (element.getTimestamp() != element.getValue()) {
					Assert.fail("Timestamps are not properly handled.");
				}
			}
			output.collect(element);
		}

		@Override
		public void processWatermark(Watermark mark) throws Exception {
			super.processWatermark(mark);

			// watermarks must be strictly increasing
			for (Watermark previousMark: watermarks) {
				assertTrue(previousMark.getTimestamp() < mark.getTimestamp());
			}
			watermarks.add(mark);
			// release the source so it may emit the next element
			latch.trigger();
			output.emitWatermark(mark);
		}

		@Override
		public void open() throws Exception {
			super.open();
			watermarks = new ArrayList<>();
		}

		@Override
		public void close() throws Exception {
			super.close();
			finalWatermarks[getRuntimeContext().getIndexOfThisSubtask()] = watermarks;
		}
	}

	/** Identity operator that asserts each element's timestamp equals its value. */
	private static class TimestampCheckingOperator extends AbstractStreamOperator<Integer> implements OneInputStreamOperator<Integer, Integer> {

		public TimestampCheckingOperator() {
			setChainingStrategy(ChainingStrategy.ALWAYS);
		}

		@Override
		public void processElement(StreamRecord<Integer> element) throws Exception {
			if (element.getTimestamp() != element.getValue()) {
				Assert.fail("Timestamps are not properly handled.");
			}
			output.collect(element);
		}
	}

	/** Identity operator that asserts elements carry NO timestamp. */
	private static class DisabledTimestampCheckingOperator extends AbstractStreamOperator<Integer> implements OneInputStreamOperator<Integer, Integer> {

		@Override
		public void processElement(StreamRecord<Integer> element) throws Exception {
			if (element.hasTimestamp()) {
				Assert.fail("Timestamps are not properly handled.");
			}
			output.collect(element);
		}
	}

	/** Pass-through co-map used only to exercise two-input network transport. */
	private static class IdentityCoMap implements CoMapFunction<Integer, Integer, Integer> {
		@Override
		public Integer map1(Integer value) throws Exception {
			return value;
		}

		@Override
		public Integer map2(Integer value) throws Exception {
			return value;
		}
	}

	/** Pass-through map used only to exercise one-input network transport. */
	private static class IdentityMap implements MapFunction<Integer, Integer> {
		@Override
		public Integer map(Integer value) throws Exception {
			return value;
		}
	}

	/**
	 * Finite source that emits i with timestamp (initialTime + i) followed by a
	 * watermark at the same time, for i in [0, numWatermarks).
	 */
	private static class MyTimestampSource implements SourceFunction<Integer> {

		private final long initialTime;
		private final int numWatermarks;

		public MyTimestampSource(long initialTime, int numWatermarks) {
			this.initialTime = initialTime;
			this.numWatermarks = numWatermarks;
		}

		@Override
		public void run(SourceContext<Integer> ctx) throws Exception {
			for (int i = 0; i < numWatermarks; i++) {
				ctx.collectWithTimestamp(i, initialTime + i);
				ctx.emitWatermark(new Watermark(initialTime + i));
			}
		}

		@Override
		public void cancel() {}
	}

	/**
	 * Same emission pattern as {@link MyTimestampSource}, but afterwards idles
	 * forever until stopped or cancelled (for testing STOP behaviour).
	 */
	private static class MyTimestampSourceInfinite implements SourceFunction<Integer>, StoppableFunction {

		private final long initialTime;
		private final int numWatermarks;

		private volatile boolean running = true;

		public MyTimestampSourceInfinite(long initialTime, int numWatermarks) {
			this.initialTime = initialTime;
			this.numWatermarks = numWatermarks;
		}

		@Override
		public void run(SourceContext<Integer> ctx) throws Exception {
			for (int i = 0; i < numWatermarks; i++) {
				ctx.collectWithTimestamp(i, initialTime + i);
				ctx.emitWatermark(new Watermark(initialTime + i));
			}

			// keep the job alive until stop()/cancel()
			while (running) {
				Thread.sleep(20);
			}
		}

		@Override
		public void cancel() {
			running = false;
		}

		@Override
		public void stop() {
			running = false;
		}
	}

	/** Finite source that emits plain elements with neither timestamps nor watermarks. */
	private static class MyNonWatermarkingSource implements SourceFunction<Integer> {

		int numWatermarks;

		public MyNonWatermarkingSource(int numWatermarks) {
			this.numWatermarks = numWatermarks;
		}

		@Override
		public void run(SourceContext<Integer> ctx) throws Exception {
			for (int i = 0; i < numWatermarks; i++) {
				ctx.collect(i);
			}
		}

		@Override
		public void cancel() {}
	}

	/** Returns the ids of all jobs on the cluster that are not globally terminal. */
	private static List<JobID> getRunningJobs(ClusterClient<?> client) throws Exception {
		Collection<JobStatusMessage> statusMessages = client.listJobs().get();
		return statusMessages.stream()
				.filter(status -> !status.getJobState().isGloballyTerminalState())
				.map(JobStatusMessage::getJobId)
				.collect(Collectors.toList());
	}
}
/* * Copyright (c) Joachim Ansorg, mail@ansorg-it.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ansorgit.plugins.bash.lang.psi.impl.arithmetic; import com.ansorgit.plugins.bash.lang.lexer.BashTokenTypes; import com.ansorgit.plugins.bash.lang.psi.BashVisitor; import com.ansorgit.plugins.bash.lang.psi.api.arithmetic.ArithmeticExpression; import com.ansorgit.plugins.bash.lang.psi.impl.BashBaseElement; import com.ansorgit.plugins.bash.lang.psi.util.BashPsiUtils; import com.intellij.lang.ASTNode; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiElementVisitor; import com.intellij.psi.tree.IElementType; import com.intellij.psi.util.PsiUtilCore; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; /** * Base class for arithmetic expressions. 
 * <br>
 *
 * @author jansorg
 */
public abstract class AbstractExpression extends BashBaseElement implements ArithmeticExpression {
    private final Type type;
    // Guards the lazy computation and invalidation of the isStatic cache.
    private final Object stateLock = new Object();
    // Cached result of isStatic(); null means "not yet computed".
    // volatile so the unlocked first read in isStatic() is safe (double-checked locking).
    private volatile Boolean isStatic = null;

    public AbstractExpression(final ASTNode astNode, final String name, Type type) {
        super(astNode, name);
        this.type = type;
    }

    @Override
    public void accept(@NotNull PsiElementVisitor visitor) {
        // Dispatch to the Bash-specific visitor callback when available.
        if (visitor instanceof BashVisitor) {
            ((BashVisitor) visitor).visitArithmeticExpression(this);
        } else {
            visitor.visitElement(this);
        }
    }

    /**
     * Returns whether this expression is statically evaluable, i.e. whether all
     * of its subexpressions are static. An expression with no subexpressions is
     * NOT static (allStatic starts as hasNext()). The result is computed lazily
     * and cached until the PSI subtree changes.
     */
    public boolean isStatic() {
        if (isStatic == null) {
            //no other lock is used in the callees, it's safe to synchronize around the whole calculation
            synchronized (stateLock) {
                if (isStatic == null) {
                    Iterator<ArithmeticExpression> iterator = subexpressions().iterator();

                    boolean allStatic = iterator.hasNext();
                    while (allStatic && iterator.hasNext()) {
                        allStatic = iterator.next().isStatic();
                    }

                    isStatic = allStatic;
                }
            }
        }

        return isStatic;
    }

    @Override
    public void subtreeChanged() {
        super.subtreeChanged();

        // The PSI below this node changed; drop the cached staticness.
        synchronized (stateLock) {
            this.isStatic = null;
        }
    }

    //fixme cache this?
    /**
     * Returns the direct ArithmeticExpression children of this node, or an
     * empty list if this node has no children at all.
     */
    @NotNull
    public List<ArithmeticExpression> subexpressions() {
        if (getFirstChild() == null) {
            return Collections.emptyList();
        }

        return Arrays.asList(findChildrenByClass(ArithmeticExpression.class));
    }

    /**
     * Combines the current value with the next operand using the given operator.
     *
     * @param currentValue        the value accumulated so far
     * @param operator            the operator token between the operands
     * @param nextExpressionValue the next operand's value, or null for unary
     *                            (prefix/postfix) expressions
     * @return the combined value, or null if the operator is unsupported
     */
    @Nullable
    protected abstract Long compute(long currentValue, IElementType operator, Long nextExpressionValue);

    /**
     * Evaluates this expression numerically by folding compute() over the child
     * expressions, left to right. Unary (prefix/postfix) expressions apply the
     * single operator to the first child; binary chains combine each child with
     * the operator token found before it.
     *
     * @throws InvalidExpressionValue if a child expression cannot be evaluated
     */
    @Override
    public long computeNumericValue() throws InvalidExpressionValue {
        List<ArithmeticExpression> childExpressions = subexpressions();
        int childSize = childExpressions.size();
        if (childSize == 0) {
            throw new UnsupportedOperationException("unsupported, zero children are not supported");
        }

        ArithmeticExpression firstChild = childExpressions.get(0);
        long result = firstChild.computeNumericValue();

        if (type == Type.PostfixOperand || type == Type.PrefixOperand) {
            Long computed = compute(result, findOperator(), null);
            if (computed == null) {
                throw new UnsupportedOperationException("Can't calculate value for " + getText());
            }
            return computed;
        }

        if (type == Type.TwoOperands) {
            int i = 1;
            while (i < childSize) {
                ArithmeticExpression c = childExpressions.get(i);

                long nextValue = c.computeNumericValue();

                // The operator token sits between the previous child and c
                // (skipping whitespace). If none is found, c is skipped entirely.
                PsiElement opElement = BashPsiUtils.findPreviousSibling(c, BashTokenTypes.WHITESPACE);
                if (opElement != null) {
                    IElementType operator = PsiUtilCore.getElementType(opElement);
                    Long computed = compute(result, operator, nextValue);
                    if (computed == null) {
                        throw new UnsupportedOperationException("Can't calculate value for " + getText());
                    }
                    result = computed;
                }

                i++;
            }

            return result;
        }

        throw new UnsupportedOperationException("unsupported computation for expression " + getText());
    }

    /**
     * Returns the closest enclosing arithmetic expression, or null if the
     * parent is not an arithmetic expression.
     */
    public ArithmeticExpression findParentExpression() {
        PsiElement context = getParent();
        if (context instanceof ArithmeticExpression) {
            return (ArithmeticExpression) context;
        }

        return null;
    }

    /**
     * Find the first operator which belongs to this expression.
     *
     * @return The operator, if available. Null otherwise.
     */
    public IElementType findOperator() {
        return PsiUtilCore.getElementType(findOperatorElement());
    }

    /**
     * Locates the PSI element of this expression's operator token, depending on
     * the expression type: after the first child for postfix, before it for
     * prefix, or between the first pair of children for binary chains.
     */
    @Override
    public PsiElement findOperatorElement() {
        List<ArithmeticExpression> childs = subexpressions();
        int childSize = childs.size();
        if (childSize == 0) {
            return null;
        }

        ArithmeticExpression firstChild = childs.get(0);

        if (type == Type.PostfixOperand) {
            return BashPsiUtils.findNextSibling(firstChild, BashTokenTypes.WHITESPACE);
        }

        if (type == Type.PrefixOperand) {
            return BashPsiUtils.findPreviousSibling(firstChild, BashTokenTypes.WHITESPACE);
        }

        if (type == Type.TwoOperands) {
            int i = 1;
            while (i < childSize) {
                PsiElement opElement = BashPsiUtils.findPreviousSibling(childs.get(i), BashTokenTypes.WHITESPACE);
                if (opElement != null) {
                    //found
                    return opElement;
                }

                i++;
            }
        }

        return null;
    }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.psi.search;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.StubBasedPsiElement;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.ArrayUtil;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

/**
 * A search scope restricted to a fixed set of PSI elements (and, implicitly,
 * the virtual files containing them). Supports intersection and union with
 * other local and global scopes.
 */
public class LocalSearchScope extends SearchScope {
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.search.LocalSearchScope");

  // The scope elements; files are expanded to all files of their view provider.
  @NotNull
  private final PsiElement[] myScope;
  // Virtual files backing the scope elements, for fast isInScope() checks.
  private final VirtualFile[] myVirtualFiles;
  // When true, injected PSI inside the scope elements is not searched.
  private final boolean myIgnoreInjectedPsi;

  public static final LocalSearchScope EMPTY = new LocalSearchScope(PsiElement.EMPTY_ARRAY);

  private String myDisplayName;

  public LocalSearchScope(@NotNull PsiElement scope) {
    this(scope, null);
  }

  public LocalSearchScope(@NotNull PsiElement scope, @Nullable String displayName) {
    this(new PsiElement[]{scope});
    myDisplayName = displayName;
  }

  public LocalSearchScope(@NotNull PsiElement[] scope) {
    this(scope, null);
  }

  public LocalSearchScope(@NotNull PsiElement[] scope, @Nullable String displayName) {
    this(scope, displayName, false);
  }

  public LocalSearchScope(@NotNull PsiElement[] scope, @Nullable final String displayName, final boolean ignoreInjectedPsi) {
    myIgnoreInjectedPsi = ignoreInjectedPsi;
    myDisplayName = displayName;
    Set<PsiElement> localScope = new LinkedHashSet<PsiElement>(scope.length);
    Set<VirtualFile> virtualFiles = new THashSet<VirtualFile>(scope.length);

    for (final PsiElement element : scope) {
      LOG.assertTrue(element != null, "null element");
      PsiFile containingFile = element.getContainingFile();
      LOG.assertTrue(containingFile != null, element.getClass().getName());
      if (element instanceof PsiFile) {
        // A file stands for all files of its view provider (e.g. all languages
        // of a multi-language file).
        for (PsiFile file : ((PsiFile)element).getViewProvider().getAllFiles()) {
          if (file == null) throw new IllegalArgumentException("file "+element+" returned null in its getAllFiles()");
          localScope.add(file);
        }
      }
      else if (element instanceof StubBasedPsiElement || element.getTextRange() != null) {
        localScope.add(element);
      }
      VirtualFile virtualFile = PsiUtilCore.getVirtualFile(containingFile);
      if (virtualFile != null) {
        virtualFiles.add(virtualFile);
      }
    }
    myScope = PsiUtilCore.toPsiElementArray(localScope);
    myVirtualFiles = virtualFiles.isEmpty() ? VirtualFile.EMPTY_ARRAY : virtualFiles.toArray(VirtualFile.EMPTY_ARRAY);
  }

  public boolean isIgnoreInjectedPsi() {
    return myIgnoreInjectedPsi;
  }

  @NotNull
  @Override
  public String getDisplayName() {
    return myDisplayName == null ? super.getDisplayName() : myDisplayName;
  }

  @NotNull
  public PsiElement[] getScope() {
    return myScope;
  }

  @NotNull
  public VirtualFile[] getVirtualFiles() {
    return myVirtualFiles;
  }

  public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof LocalSearchScope)) return false;

    final LocalSearchScope localSearchScope = (LocalSearchScope)o;

    if (localSearchScope.myIgnoreInjectedPsi != myIgnoreInjectedPsi) return false;
    if (localSearchScope.myScope.length != myScope.length) return false;
    // NOTE(review): this requires every element of the other scope to equal
    // every element of this scope — kept as-is to preserve existing behavior.
    for (final PsiElement scopeElement : myScope) {
      final PsiElement[] thatScope = localSearchScope.myScope;
      for (final PsiElement thatScopeElement : thatScope) {
        if (!Comparing.equal(scopeElement, thatScopeElement)) return false;
      }
    }

    return true;
  }

  public int hashCode() {
    // Order-independent sum so equal scopes hash equally.
    int result = 0;
    result += myIgnoreInjectedPsi? 1 : 0;
    for (final PsiElement element : myScope) {
      result += element.hashCode();
    }
    return result;
  }

  /**
   * Intersects this scope with another local scope: the result contains, for
   * each pair of elements, the one nested inside the other (if any).
   */
  @NotNull
  public LocalSearchScope intersectWith(@NotNull LocalSearchScope scope2) {
    if (equals(scope2)) return this;
    return intersection(this, scope2);
  }

  private static LocalSearchScope intersection(LocalSearchScope scope1, LocalSearchScope scope2) {
    List<PsiElement> result = new ArrayList<PsiElement>();
    final PsiElement[] elements1 = scope1.myScope;
    final PsiElement[] elements2 = scope2.myScope;
    for (final PsiElement element1 : elements1) {
      for (final PsiElement element2 : elements2) {
        final PsiElement element = intersectScopeElements(element1, element2);
        if (element != null) {
          result.add(element);
        }
      }
    }
    return new LocalSearchScope(PsiUtilCore.toPsiElementArray(result), null,
                                scope1.myIgnoreInjectedPsi || scope2.myIgnoreInjectedPsi);
  }

  @NotNull
  @Override
  public SearchScope intersectWith(@NotNull SearchScope scope2) {
    if (scope2 instanceof LocalSearchScope) {
      return intersectWith((LocalSearchScope)scope2);
    }
    // Non-physical PSI is invisible to global scopes; handle it specially
    // before delegating to the global scope's intersection logic.
    LocalSearchScope nonPhysicalScope = tryIntersectNonPhysicalWith((GlobalSearchScope)scope2);
    if (nonPhysicalScope != null) return nonPhysicalScope;
    return ((GlobalSearchScope)scope2).intersectWith(this);
  }

  /**
   * If this scope consists only of non-physical files, a global scope cannot
   * restrict it: returns this scope (or EMPTY on a project mismatch).
   * Returns null when any element is physical, i.e. normal intersection applies.
   */
  @Nullable
  private LocalSearchScope tryIntersectNonPhysicalWith(@NotNull GlobalSearchScope scope) {
    Project project = scope.getProject();
    for (PsiElement element : myScope) {
      PsiFile containingFile = element.getContainingFile();
      if (containingFile == null) continue;
      if (containingFile.getViewProvider().isPhysical()) return null;
      if (project != null && project != containingFile.getProject()) {
        return EMPTY;
      }
    }
    return this;
  }

  @Nullable
  private static PsiElement intersectScopeElements(PsiElement element1, PsiElement element2) {
    // The intersection of two elements is the inner one, checked both by
    // context ancestry and by plain tree ancestry.
    if (PsiTreeUtil.isContextAncestor(element1, element2, false)) return element2;
    if (PsiTreeUtil.isContextAncestor(element2, element1, false)) return element1;
    if (PsiTreeUtil.isAncestor(element1, element2, false)) return element2;
    if (PsiTreeUtil.isAncestor(element2, element1, false)) return element1;
    return null;
  }

  public String toString() {
    StringBuilder result = new StringBuilder();
    for (int i = 0; i < myScope.length; i++) {
      final PsiElement element = myScope[i];
      if (i > 0) {
        result.append(",");
      }
      result.append(element);
    }
    //noinspection HardCodedStringLiteral
    return "LocalSearchScope:" + result;
  }

  @Override
  @NotNull
  public SearchScope union(@NotNull SearchScope scope) {
    if (scope instanceof LocalSearchScope) return union((LocalSearchScope)scope);
    return ((GlobalSearchScope)scope).union(this);
  }

  /**
   * Unites this scope with another local scope. Pairs of elements that share a
   * common ancestor are merged into that ancestor; all remaining elements of
   * both scopes are kept as-is.
   */
  public SearchScope union(LocalSearchScope scope2) {
    if (equals(scope2)) return this;

    PsiElement[] elements1 = getScope();
    PsiElement[] elements2 = scope2.getScope();
    boolean[] united = new boolean[elements2.length];
    List<PsiElement> result = new ArrayList<PsiElement>();
    loop1:
    for (final PsiElement element1 : elements1) {
      for (int j = 0; j < elements2.length; j++) {
        final PsiElement element2 = elements2[j];
        final PsiElement unionElement = scopeElementsUnion(element1, element2);
        if (unionElement != null && unionElement.getContainingFile() != null) {
          result.add(unionElement);
          united[j] = true;
          // The merged element already covers element1, so skip the separate
          // result.add(element1) below and move on to the next element.
          // (Must be "continue", not "break": breaking the labeled loop would
          // abort the merge and drop all remaining elements of this scope.)
          continue loop1;
        }
      }
      result.add(element1);
    }

    // Append the elements of scope2 that were not merged with anything.
    for (int i = 0; i < united.length; i++) {
      final boolean b = united[i];
      if (!b) {
        result.add(elements2[i]);
      }
    }
    return new LocalSearchScope(PsiUtilCore.toPsiElementArray(result));
  }

  @Nullable
  private static PsiElement scopeElementsUnion(PsiElement element1, PsiElement element2) {
    if (PsiTreeUtil.isAncestor(element1, element2, false)) return element1;
    if (PsiTreeUtil.isAncestor(element2, element1, false)) return element2;
    PsiElement commonParent = PsiTreeUtil.findCommonParent(element1, element2);
    if (commonParent == null) return null;
    return commonParent;
  }

  public boolean isInScope(VirtualFile file) {
    return ArrayUtil.indexOf(myVirtualFiles, file) != -1;
  }

  /**
   * Returns true if the given text range of the given file lies entirely
   * inside one of the scope elements.
   */
  public boolean containsRange(PsiFile file, @NotNull TextRange range) {
    for (PsiElement element : getScope()) {
      if (file == element.getContainingFile() && element.getTextRange().contains(range)) {
        return true;
      }
    }
    return false;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.cache.client.internal;

import static org.apache.geode.distributed.ConfigurationProperties.SECURITY_CLIENT_AUTH_INIT;

import java.util.Properties;

import org.apache.geode.DataSerializer;
import org.apache.geode.InternalGemFireError;
import org.apache.geode.annotations.VisibleForTesting;
import org.apache.geode.cache.client.ServerOperationException;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.distributed.internal.ServerLocation;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.HeapDataOutputStream;
import org.apache.geode.internal.cache.tier.MessageType;
import org.apache.geode.internal.cache.tier.sockets.Handshake;
import org.apache.geode.internal.cache.tier.sockets.Message;
import org.apache.geode.internal.cache.tier.sockets.Part;
import org.apache.geode.internal.cache.tier.sockets.command.PutUserCredentials;
import org.apache.geode.internal.serialization.ByteArrayDataInput;
import org.apache.geode.internal.serialization.KnownVersion;
import org.apache.geode.security.AuthenticationExpiredException;
import org.apache.geode.security.AuthenticationFailedException;
import org.apache.geode.security.AuthenticationRequiredException;
import org.apache.geode.security.NotAuthorizedException;

/**
 * Authenticates this client (or a user) on a server. This op ideally should get executed
 * once-per-server.
 *
 * When multiuser-authentication is set to false, this op gets executed immediately after a
 * client-to-server connection is established.
 *
 * When multiuser-authentication is set to true, this op gets executed before the user attempts to
 * perform an op whose {@link AbstractOp#needsUserId()} returns true.
 *
 * @see PutUserCredentials
 * @see ProxyCache
 * @since GemFire 6.5
 */
public class AuthenticateUserOp {

  // Sentinel user id sent when no id has been assigned yet (multi-user mode).
  public static final long NOT_A_USER_ID = -1L;

  /**
   * Sends the auth credentials to the server. Used in single user mode of authentication.
   *
   * @param con The connection to use for this operation.
   * @param pool The connection pool to use for this operation.
   * @return Object unique user-id.
   */
  public static Long executeOn(Connection con, ExecutablePool pool) {
    AbstractOp op = new AuthenticateUserOpImpl();
    return (Long) pool.executeOn(con, op);
  }

  /**
   * Sends the auth credentials to the server. Used in single user mode of authentication.
   *
   * @param location The ServerLocation instance whose connection instance will be used to perform
   *        the operation.
   * @param pool The connection pool to use for this operation.
   * @return Object unique user-id.
   */
  public static Long executeOn(ServerLocation location, ExecutablePool pool) {
    AbstractOp op = new AuthenticateUserOpImpl();
    return (Long) pool.executeOn(location, op);
  }

  /**
   * Sends the auth credentials to the server for a particular user. Used in multiple user mode of
   * authentication.
   *
   * @param location The ServerLocation instance whose connection instance will be used to perform
   *        the operation.
   * @param pool The connection pool to use for this operation.
   * @return Object unique user-id.
   */
  public static Long executeOn(ServerLocation location, ExecutablePool pool,
      Properties securityProps) {
    AbstractOp op = new AuthenticateUserOpImpl(securityProps);
    return (Long) pool.executeOn(location, op);
  }

  private AuthenticateUserOp() {
    // no instances allowed
  }

  /** The actual message op; sends encrypted credentials and reads back the assigned user id. */
  static class AuthenticateUserOpImpl extends AbstractOp {

    // Credentials source; when null, pulled from the connected distributed system.
    private Properties securityProperties = null;
    // When true, processResponse returns {server, userId} instead of just userId.
    private boolean needsServerLocation = false;

    AuthenticateUserOpImpl() {
      super(MessageType.USER_CREDENTIAL_MESSAGE, 1);
      getMessage().setMessageHasSecurePartFlag();
    }

    AuthenticateUserOpImpl(Properties securityProps) {
      this(securityProps, false);
    }

    AuthenticateUserOpImpl(Properties securityProps, boolean needsServer) {
      super(MessageType.USER_CREDENTIAL_MESSAGE, 1);
      securityProperties = securityProps;
      needsServerLocation = needsServer;
      getMessage().setMessageHasSecurePartFlag();
    }

    /**
     * Builds and sends the credential message: an encrypted credential part plus a
     * secure part containing the connection id and the current user id.
     */
    @Override
    protected void sendMessage(Connection connection) throws Exception {
      if (securityProperties == null) {
        securityProperties = getConnectedSystem().getSecurityProperties();
      }

      byte[] credentialBytes = getCredentialBytes(connection, securityProperties);
      getMessage().addBytesPart(credentialBytes);

      try (HeapDataOutputStream hdos = new HeapDataOutputStream(16, KnownVersion.CURRENT)) {
        hdos.writeLong(connection.getConnectionID());
        long userId = getUserId(connection);
        secureLogger.debug("AuthenticateUserOp with uniqueId {}", userId);
        hdos.writeLong(userId);
        getMessage().setSecurePart(((ConnectionImpl) connection).encryptBytes(hdos.toByteArray()));
      }
      getMessage().send(false);
    }

    /**
     * Returns the user id to send: the server's id in single-user mode, the
     * per-server id from the thread's UserAttributes in multi-user mode, or
     * NOT_A_USER_ID when none has been assigned yet.
     */
    protected long getUserId(Connection connection) {
      // single user mode
      if (UserAttributes.userAttributes.get() == null) {
        return connection.getServer().getUserId();
      }

      // multi user mode
      Long id = UserAttributes.userAttributes.get().getServerToId().get(connection.getServer());
      if (id == null) {
        return NOT_A_USER_ID;
      }
      return id;
    }

    protected InternalDistributedSystem getConnectedSystem() {
      return InternalDistributedSystem.getConnectedInstance();
    }

    /**
     * Obtains credentials from the configured auth-init method, serializes them
     * as Properties and encrypts them for this connection.
     */
    protected byte[] getCredentialBytes(Connection connection, Properties securityProperties)
        throws Exception {
      InternalDistributedSystem distributedSystem = getConnectedSystem();

      DistributedMember server =
          new InternalDistributedMember(connection.getSocket().getInetAddress(),
              connection.getSocket().getPort(), false);

      String authInitMethod =
          distributedSystem.getProperties().getProperty(SECURITY_CLIENT_AUTH_INIT);

      Properties credentials = Handshake.getCredentials(authInitMethod, securityProperties, server,
          false, distributedSystem.getLogWriter(), distributedSystem.getSecurityLogWriter());

      byte[] credentialBytes;
      try (HeapDataOutputStream heapdos = new HeapDataOutputStream(KnownVersion.CURRENT)) {
        DataSerializer.writeProperties(credentials, heapdos);
        credentialBytes = ((ConnectionImpl) connection).encryptBytes(heapdos.toByteArray());
      }
      return credentialBytes;
    }

    /**
     * Skips the attempt entirely when the server requires no credentials;
     * otherwise retries exactly once on an expired authentication before
     * converting a second expiry into an AuthenticationFailedException.
     */
    @Override
    public Object attempt(Connection connection) throws Exception {
      if (!connection.getServer().getRequiresCredentials()) {
        return null;
      }

      try {
        return parentAttempt(connection);
      }
      // if login failed for auth expired reason, try again once more
      catch (AuthenticationExpiredException first) {
        // reset the message for the retry
        getMessage().clear();
        try {
          return parentAttempt(connection);
        } catch (AuthenticationExpiredException second) {
          throw new AuthenticationFailedException(second.getMessage(), second);
        }
      }
    }

    @VisibleForTesting
    Object parentAttempt(Connection connection) throws Exception {
      return super.attempt(connection);
    }

    /**
     * Decodes the server's reply: on RESPONSE, an empty part means the server
     * needs no credentials; otherwise the decrypted part carries the assigned
     * user id. Exception replies are rethrown as the appropriate security or
     * server-operation exception.
     */
    @Override
    protected Object processResponse(Message msg, Connection connection) throws Exception {
      byte[] bytes;
      Part part = msg.getPart(0);
      final int msgType = msg.getMessageType();
      long userId = -1;
      if (msgType == MessageType.RESPONSE) {
        bytes = (byte[]) part.getObject();
        if (bytes.length == 0) {
          connection.getServer().setRequiresCredentials(false);
        } else {
          connection.getServer().setRequiresCredentials(true);
          byte[] decrypted = ((ConnectionImpl) connection).decryptBytes(bytes);
          try (ByteArrayDataInput dis = new ByteArrayDataInput(decrypted)) {
            userId = dis.readLong();
          }
        }
        if (needsServerLocation) {
          return new Object[] {connection.getServer(), userId};
        } else {
          return userId;
        }
      } else if (msgType == MessageType.EXCEPTION) {
        Object result = part.getObject();
        String s = "While performing a remote authenticate";
        if (result instanceof AuthenticationFailedException) {
          final AuthenticationFailedException afe = (AuthenticationFailedException) result;
          // REPLY_REFUSED carries no useful message of its own; rewrap its cause.
          if ("REPLY_REFUSED".equals(afe.getMessage())) {
            throw new AuthenticationFailedException(s, afe.getCause());
          } else {
            throw new AuthenticationFailedException(s, afe);
          }
        } else if (result instanceof AuthenticationExpiredException) {
          throw (AuthenticationExpiredException) result;
        } else if (result instanceof AuthenticationRequiredException) {
          throw new AuthenticationRequiredException(s, (AuthenticationRequiredException) result);
        } else if (result instanceof NotAuthorizedException) {
          throw new NotAuthorizedException(s, (NotAuthorizedException) result);
        } else {
          throw new ServerOperationException(s, (Throwable) result);
        }
        // Get the exception toString part.
        // This was added for c++ thin client and not used in java
      } else if (isErrorResponse(msgType)) {
        throw new ServerOperationException(part.getString());
      } else {
        throw new InternalGemFireError("Unexpected message type " + MessageType.getString(msgType));
      }
    }

    @Override
    protected boolean isErrorResponse(int msgType) {
      return msgType == MessageType.REQUESTDATAERROR;
    }

    @Override
    protected long startAttempt(ConnectionStats stats) {
      return stats.startGet();
    }

    @Override
    protected void endSendAttempt(ConnectionStats stats, long start) {
      stats.endGetSend(start, hasFailed());
    }

    @Override
    protected void endAttempt(ConnectionStats stats, long start) {
      stats.endGet(start, hasTimedOut(), hasFailed());
    }

    // Unused single-argument variant; the (Message, Connection) overload does the work.
    @Override
    protected Object processResponse(Message msg) throws Exception {
      return null;
    }

    // This op IS the authentication, so it must not itself require a user id.
    @Override
    protected boolean needsUserId() {
      return false;
    }
  }
}
/*
 * Copyright (c) 2014 Wael Chatila / Icegreen Technologies. All Rights Reserved.
 * This software is released under the Apache license 2.0
 * This file has been modified by the copyright holder.
 * Original file can be found at http://james.apache.org
 */
package com.icegreen.greenmail.imap.commands;

import com.icegreen.greenmail.imap.ImapConstants;
import com.icegreen.greenmail.imap.ImapRequestLineReader;
import com.icegreen.greenmail.imap.ProtocolException;
import com.icegreen.greenmail.store.MessageFlags;
import com.sun.mail.imap.protocol.BASE64MailboxDecoder;

import javax.mail.Flags;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;

/**
 * Parses argument values of the IMAP grammar (RFC 3501) from a request line:
 * atoms, tags, strings, mailbox names, dates, flag lists, numbers and
 * message sets.
 *
 * @author Darrell DeBoer <darrell@apache.org>
 * @version $Revision: 109034 $
 */
public class CommandParser {

    /**
     * Reads an argument of type "atom" from the request.
     */
    public String atom(ImapRequestLineReader request) throws ProtocolException {
        return consumeWord(request, new ATOM_CHARValidator());
    }

    /**
     * Reads a command "tag" from the request.
     */
    public String tag(ImapRequestLineReader request) throws ProtocolException {
        CharacterValidator validator = new TagCharValidator();
        return consumeWord(request, validator);
    }

    /**
     * Reads an argument of type "astring" from the request: a quoted string,
     * a literal ({n} syntax) or a bare atom, depending on the first character.
     */
    public String astring(ImapRequestLineReader request) throws ProtocolException {
        char next = request.nextWordChar();
        switch (next) {
            case '"':
                return consumeQuoted(request);
            case '{':
                return consumeLiteral(request);
            default:
                return atom(request);
        }
    }

    /**
     * Reads an argument of type "nstring" from the request.
*/ public String nstring(ImapRequestLineReader request) throws ProtocolException { char next = request.nextWordChar(); switch (next) { case '"': return consumeQuoted(request); case '{': return consumeLiteral(request); default: String value = atom(request); if ("NIL".equals(value)) { return null; } else { throw new ProtocolException("Invalid nstring value: valid values are '\"...\"', '{12} CRLF *CHAR8', and 'NIL'."); } } } /** * Reads a "mailbox" argument from the request. Not implemented *exactly* as per spec, * since a quoted or literal "inbox" still yeilds "INBOX" * (ie still case-insensitive if quoted or literal). I think this makes sense. * <p/> * mailbox ::= "INBOX" / astring * ;; INBOX is case-insensitive. All case variants of * ;; INBOX (e.g. "iNbOx") MUST be interpreted as INBOX * ;; not as an astring. */ public String mailbox(ImapRequestLineReader request) throws ProtocolException { String mailbox = astring(request); if (mailbox.equalsIgnoreCase(ImapConstants.INBOX_NAME)) { return ImapConstants.INBOX_NAME; } else { return BASE64MailboxDecoder.decode(mailbox); } } /** * Reads a "date-time" argument from the request. */ public Date dateTime(ImapRequestLineReader request) throws ProtocolException { char next = request.nextWordChar(); String dateString; // From https://tools.ietf.org/html/rfc3501 : // date-time = DQUOTE date-day-fixed "-" date-month "-" date-year // SP time SP zone DQUOTE // zone = ("+" / "-") 4DIGIT if (next == '"') { dateString = consumeQuoted(request); } else { throw new ProtocolException("DateTime values must be quoted."); } try { // You can use Z or zzzz return new SimpleDateFormat("dd-MMM-yyyy hh:mm:ss Z", Locale.US).parse(dateString); } catch (ParseException e) { throw new ProtocolException("Invalid date format <" + dateString + ">, should comply to dd-MMM-yyyy hh:mm:ss Z"); } } /** * Reads the next "word from the request, comprising all characters up to the next SPACE. 
* Characters are tested by the supplied CharacterValidator, and an exception is thrown * if invalid characters are encountered. */ protected String consumeWord(ImapRequestLineReader request, CharacterValidator validator) throws ProtocolException { StringBuilder atom = new StringBuilder(); char next = request.nextWordChar(); while (!isWhitespace(next)) { if (validator.isValid(next)) { atom.append(next); request.consume(); } else { throw new ProtocolException("Invalid character: '" + next + '\''); } next = request.nextChar(); } return atom.toString(); } private boolean isWhitespace(char next) { return next == ' ' || next == '\n' || next == '\r' || next == '\t'; } public long consumeLong(ImapRequestLineReader request) throws ProtocolException { StringBuilder atom = new StringBuilder(); char next = request.nextWordChar(); while (Character.isDigit(next)) { atom.append(next); request.consume(); next = request.nextChar(); } return Long.parseLong(atom.toString()); } /** * Reads an argument of type "literal" from the request, in the format: * "{" charCount "}" CRLF *CHAR8 * Note before calling, the request should be positioned so that nextChar * is '{'. Leading whitespace is not skipped in this method. */ protected String consumeLiteral(ImapRequestLineReader request) throws ProtocolException { return new String(consumeLiteralAsBytes(request)); } protected byte[] consumeLiteralAsBytes(ImapRequestLineReader request) throws ProtocolException { // The 1st character must be '{' consumeChar(request, '{'); StringBuilder digits = new StringBuilder(); char next = request.nextChar(); while (next != '}' && next != '+') { digits.append(next); request.consume(); next = request.nextChar(); } // If the number is *not* suffixed with a '+', we *are* using a synchronized literal, // and we need to send command continuation request before reading data. 
boolean synchronizedLiteral = true; // '+' indicates a non-synchronized literal (no command continuation request) if (next == '+') { synchronizedLiteral = false; consumeChar(request, '+'); } // Consume the '}' and the newline consumeChar(request, '}'); consumeCRLF(request); if (synchronizedLiteral) { request.commandContinuationRequest(); } int size = Integer.parseInt(digits.toString()); byte[] buffer = new byte[size]; request.read(buffer); return buffer; } /** * Consumes a CRLF from the request. * TODO we're being liberal, the spec insists on \r\n for new lines. * * @param request * @throws ProtocolException */ private void consumeCRLF(ImapRequestLineReader request) throws ProtocolException { char next = request.nextChar(); if (next != '\n') { consumeChar(request, '\r'); } consumeChar(request, '\n'); } /** * Consumes the next character in the request, checking that it matches the * expected one. This method should be used when the */ protected void consumeChar(ImapRequestLineReader request, char expected) throws ProtocolException { char consumed = request.consume(); if (consumed != expected) { throw new ProtocolException("Expected:'" + expected + "' found:'" + consumed + '\''); } } /** * Reads a quoted string value from the request. */ protected String consumeQuoted(ImapRequestLineReader request) throws ProtocolException { // The 1st character must be '"' consumeChar(request, '"'); StringBuilder quoted = new StringBuilder(); char next = request.nextChar(); while (next != '"') { if (next == '\\') { request.consume(); next = request.nextChar(); if (!isQuotedSpecial(next)) { throw new ProtocolException("Invalid escaped character in quote: '" + next + '\''); } } quoted.append(next); request.consume(); next = request.nextChar(); } consumeChar(request, '"'); return quoted.toString(); } /** * Reads a "flags" argument from the request. 
*/
public Flags flagList(ImapRequestLineReader request) throws ProtocolException {
    Flags flags = new Flags();
    request.nextWordChar();
    consumeChar(request, '(');
    CharacterValidator validator = new NoopCharValidator();
    String nextWord = consumeWord(request, validator);
    // Words are consumed until one arrives that ends with the closing ')'.
    while (!nextWord.endsWith(")")) {
        setFlag(nextWord, flags);
        nextWord = consumeWord(request, validator);
    }
    // Got the closing ")", may be attached to a word.
    if (nextWord.length() > 1) {
        setFlag(nextWord.substring(0, nextWord.length() - 1), flags);
    }
    return flags;
}

/**
 * Maps a flag token onto the corresponding system flag (case-insensitive);
 * anything unrecognised is stored as a user (keyword) flag.
 */
public void setFlag(String flagString, Flags flags) throws ProtocolException {
    if (flagString.equalsIgnoreCase(MessageFlags.ANSWERED)) {
        flags.add(Flags.Flag.ANSWERED);
    } else if (flagString.equalsIgnoreCase(MessageFlags.DELETED)) {
        flags.add(Flags.Flag.DELETED);
    } else if (flagString.equalsIgnoreCase(MessageFlags.DRAFT)) {
        flags.add(Flags.Flag.DRAFT);
    } else if (flagString.equalsIgnoreCase(MessageFlags.FLAGGED)) {
        flags.add(Flags.Flag.FLAGGED);
    } else if (flagString.equalsIgnoreCase(MessageFlags.SEEN)) {
        flags.add(Flags.Flag.SEEN);
    } else if (flagString.equalsIgnoreCase(MessageFlags.RECENT)) {
        flags.add(Flags.Flag.RECENT);
    } else {
        // User flag
        flags.add(flagString);
    }
}

/**
 * Reads an argument of type "number" from the request.
 */
public long number(ImapRequestLineReader request) throws ProtocolException {
    String digits = consumeWord(request, new DigitCharValidator());
    return Long.parseLong(digits);
}

/**
 * Reads an argument of type "nznumber" (a non-zero number)
 * (NOTE this isn't strictly as per the spec, since the spec disallows
 * numbers such as "0123" as nzNumbers (although it's ok as a "number".
 * I think the spec is a bit shonky.)
*/
public long nzNumber(ImapRequestLineReader request) throws ProtocolException {
    long number = number(request);
    if (number == 0) {
        throw new ProtocolException("Zero value not permitted.");
    }
    return number;
}

// CHAR as per RFC 3501: any 7-bit character except NUL.
private boolean isCHAR(char chr) {
    return chr >= 0x01 && chr <= 0x7f;
}

// List-wildcards: '*' and '%' (RFC 3501).
protected boolean isListWildcard(char chr) {
    return chr == '*' || chr == '%';
}

// Quoted-specials: the only characters that may be backslash-escaped in a quoted string.
private boolean isQuotedSpecial(char chr) {
    return chr == '"' || chr == '\\';
}

/**
 * Consumes the request up to and including the end-of-line.
 *
 * @param request The request
 * @throws ProtocolException If characters are encountered before the endLine.
 */
public void endLine(ImapRequestLineReader request) throws ProtocolException {
    request.eol();
}

/**
 * Reads a "message set" argument, and parses it into an array of IdRanges.
 * Supports a comma-separated list of single ids and lo:hi ranges,
 * where '*' stands for the maximum id.
 */
public IdRange[] parseIdRange(ImapRequestLineReader request) throws ProtocolException {
    CharacterValidator validator = new MessageSetCharValidator();
    String nextWord = consumeWord(request, validator);

    int commaPos = nextWord.indexOf(',');
    if (commaPos == -1) {
        // Single range or single id.
        return new IdRange[]{parseRange(nextWord)};
    }

    List<IdRange> rangeList = new ArrayList<IdRange>();
    int pos = 0;
    while (commaPos != -1) {
        String range = nextWord.substring(pos, commaPos);
        IdRange set = parseRange(range);
        rangeList.add(set);
        pos = commaPos + 1;
        commaPos = nextWord.indexOf(',', pos);
    }
    // Trailing segment after the last comma.
    String range = nextWord.substring(pos);
    rangeList.add(parseRange(range));
    return rangeList.toArray(new IdRange[rangeList.size()]);
}

/**
 * Parses a single "n" or "lo:hi" segment of a message set.
 *
 * @throws ProtocolException if either side is not a number or '*'.
 */
private IdRange parseRange(String range) throws ProtocolException {
    int pos = range.indexOf(':');
    try {
        if (pos == -1) {
            long value = parseLong(range);
            return new IdRange(value);
        } else {
            long lowVal = parseLong(range.substring(0, pos));
            long highVal = parseLong(range.substring(pos + 1));
            return new IdRange(lowVal, highVal);
        }
    } catch (NumberFormatException e) {
        // NOTE(review): the NumberFormatException cause is dropped here; if
        // ProtocolException has a (String, Throwable) constructor, chain it.
        throw new ProtocolException("Invalid message set.");
    }
}

// '*' denotes the largest id in use; otherwise parse as a plain long.
private long parseLong(String value) {
    if (value.length() == 1 && value.charAt(0) == '*') {
        return Long.MAX_VALUE;
    }
    return Long.parseLong(value);
}

/**
 * Provides the ability to ensure characters are part of a permitted set.
 */
protected interface CharacterValidator {
    /**
     * Validates the supplied character.
     *
     * @param chr The character to validate.
     * @return <code>true</code> if chr is valid, <code>false</code> if not.
     */
    boolean isValid(char chr);
}

// Accepts every character.
protected static class NoopCharValidator implements CharacterValidator {
    public boolean isValid(char chr) {
        return true;
    }
}

// Accepts only RFC 3501 ATOM-CHARs: CHARs that are not atom-specials,
// list-wildcards, or quoted-specials.
protected class ATOM_CHARValidator implements CharacterValidator {
    public boolean isValid(char chr) {
        return isCHAR(chr)
                && !isAtomSpecial(chr)
                && !isListWildcard(chr)
                && !isQuotedSpecial(chr);
    }

    private boolean isAtomSpecial(char chr) {
        // Bug fix: the previous code compared chr == Character.CONTROL, but
        // Character.CONTROL is the Unicode *category constant* (a byte with value 15),
        // so only '\u000F' was rejected. RFC 3501 atom-specials include ALL control
        // characters, which isISOControl covers (0x00-0x1F and 0x7F).
        return chr == '('
                || chr == ')'
                || chr == '{'
                || chr == ' '
                || Character.isISOControl(chr);
    }
}

// Accepts decimal digits plus '*' (used in message sets / sequence numbers).
protected static class DigitCharValidator implements CharacterValidator {
    public boolean isValid(char chr) {
        return (chr >= '0' && chr <= '9')
                || chr == '*';
    }
}

// Tag characters are ATOM-CHARs except '+' (which marks continuation requests).
private class TagCharValidator extends ATOM_CHARValidator {
    public boolean isValid(char chr) {
        return chr != '+' && super.isValid(chr);
    }
}

// Accepts the characters legal in a message-set word: digits, ':', '*', ','.
private static class MessageSetCharValidator implements CharacterValidator {
    public boolean isValid(char chr) {
        return isDigit(chr)
                || chr == ':'
                || chr == '*'
                || chr == ',';
    }

    private boolean isDigit(char chr) {
        return '0' <= chr && chr <= '9';
    }
}
}
package DataPatterns;

import DataModel.Attribute;
import DataModel.EntityProfile;
import DataModel.IdDuplicates;
import Utilities.LevenshteinSimilarity;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.jena.ext.com.google.common.collect.HashMultimap;
import org.apache.jena.ext.com.google.common.collect.HashMultiset;
import org.apache.jena.ext.com.google.common.collect.Multimap;
import org.apache.jena.ext.com.google.common.collect.Multiset;

/**
 * Computes support/discriminability weights for properties and relations of two
 * entity collections, used to score candidate matches.
 *
 * @author vefthym
 */
public class PropertyWeights extends WeightedJaccardSimilarities {

    // Lookup tables from an entity's URL to its index in profiles1 / profiles2.
    protected Map<String, Integer> urlToEntityIds1;
    protected Map<String, Integer> urlToEntityIds2;

    public PropertyWeights(String data1Path, String data2Path, String groundTruthPath) {
        super(data1Path, data2Path, groundTruthPath);
        urlToEntityIds1 = getEntityURLtoEntityID(profiles1);
        // The second collection is optional (single-dataset mode).
        if (profiles2 != null) {
            urlToEntityIds2 = getEntityURLtoEntityID(profiles2);
        }
    }

    /**
     * Fraction of profiles that carry at least one value for the given property.
     */
    public double getPropertySupport(String property, List<EntityProfile> profiles) {
        double frequency = 0;
        for (EntityProfile profile : profiles) {
            if (profile.getAllAttributeNames().contains(property)) {
                frequency++;
            }
        }
        return frequency / profiles.size();
    }

    /**
     * Precondition: relation is an object property
     * @param relation a property used to link entities
     * @param profiles
     * @return the number of relation instances normalized by |profiles|^2
     */
    public double getRelationSupport(String relation, List<EntityProfile> profiles) {
        double frequency = 0;
        for (EntityProfile profile : profiles) {
            for (Attribute att : profile.getAttributes()) {
                if (att.getName().equals(relation)) {
                    frequency++;
                }
            }
        }
        long profilesSize = profiles.size();
        return frequency / (profilesSize * profilesSize);
    }

    /**
     * Ratio of distinct values to total occurrences of the property.
     * NOTE(review): if the property never occurs, this returns 0/0 = NaN —
     * confirm callers guard against that.
     */
    public double getPropertyDiscriminability(String property, List<EntityProfile> profiles) {
        Set<String> distinctValues = new HashSet<>();
        int frequencyOfProperty = 0;
        for (EntityProfile profile : profiles) {
            for (Attribute attribute : profile.getAttributes()) {
                if (attribute.getName().equals(property)) {
                    distinctValues.add(attribute.getValue());
                    frequencyOfProperty++;
                }
            }
        }
        return (double) distinctValues.size() / frequencyOfProperty;
    }

    /**
     * Fraction of entities (from both collections) covered when property1 appears
     * in an entity of collection 1 and property2 in an entity of collection 2.
     */
    public double getPropertyPairSupport(PropertyPair propertyPair) {
        Set<String> profiles1Covered = new HashSet<>();
        Set<String> profiles2Covered = new HashSet<>();
        for (EntityProfile profile1 : profiles1) {
            if (!profile1.getAllAttributeNames().contains(propertyPair.getProperty1())) {
                continue;
            }
            for (EntityProfile profile2 : profiles2) {
                if (profile2.getAllAttributeNames().contains(propertyPair.getProperty2())) {
                    profiles1Covered.add(profile1.getEntityUrl());
                    profiles2Covered.add(profile2.getEntityUrl());
                }
            }
        }
        return (double)(profiles1Covered.size()+profiles2Covered.size()) / (profiles1.size()+profiles2.size());
    }

    /*
    public Map<String,Double> getPropertyPairSupportPerType(String property1, String property2) {
        Set<String> profiles1Covered = new HashSet<>();
        Set<String> profiles2Covered = new HashSet<>();
        Multiset<String> type1Counter = HashMultiset.create();
        Multiset<String> type2Counter = HashMultiset.create();
        Map<String,Double> results = new HashMap<>();
        for (EntityProfile profile1 : profiles1) {
            Set<String> typesOfEntity1 = profile1.getTypes();
            if (!profile1.getAllAttributeNames().contains(property1)) {
                continue;
            }
            for (EntityProfile profile2 : profiles2) {
                if (profile2.getAllAttributeNames().contains(property2)) {
                    profiles1Covered.add(profile1.getEntityUrl());
                    profiles2Covered.add(profile2.getEntityUrl());
                }
            }
        }
        return (double)(profiles1Covered.size()+profiles2Covered.size()) / (profiles1.size()+profiles2.size());
    }
    */

    /**
     * Returns the support of this property per type, as a Map with key: type, value: support of this property
     * @param property
     * @param profiles
     * @return a Map with key: type, value: support of this property
     */
    public Map<String, Double> getPropertySupportPerType(String property, List<EntityProfile> profiles) {
        Multiset<String> supportPerType = HashMultiset.create();
        Multiset<String> typeCounter = HashMultiset.create();
        for (EntityProfile profile : profiles) {
            Set<String> entityTypes = profile.getTypes();
            for (String type : entityTypes) {
                typeCounter.add(type);
                if (profile.getAllAttributeNames().contains(property)) {
                    supportPerType.add(type);
                }
            }
        }
        //normalization phase
        Map<String, Double> finalResults = new HashMap<>();
        for (String type : supportPerType.elementSet()) {
            finalResults.put(type, (double) supportPerType.count(type) / typeCounter.count(type));
        }
        return finalResults;
    }

    /**
     * Discriminability of a value-property pair: min distinct-entity count over the
     * number of candidate pairs induced by shared values.
     */
    public double getPropertyPairDiscriminability(String property1, String property2) {
        Multimap<String,String> links1 = HashMultimap.create(); //key: common value, value: entityURLs with this value
        Multimap<String,String> links2 = HashMultimap.create(); //key: common value, value: entityURLs with this value
        for (EntityProfile profile1 : profiles1) {
            for (Attribute attribute1 : profile1.getAttributes()) {
                if (attribute1.getName().equals(property1)) {
                    links1.put(attribute1.getValue(), profile1.getEntityUrl());
                }
            }
        }
        for (EntityProfile profile2 : profiles2) {
            for (Attribute attribute2 : profile2.getAttributes()) {
                if (attribute2.getName().equals(property2)) {
                    links2.put(attribute2.getValue(), profile2.getEntityUrl());
                }
            }
        }
        double candidatePairs = 0;
        Set<String> entities1Total = new HashSet<>();
        Set<String> entities2Total = new HashSet<>();
        for (String value : links1.keySet()) {
            Set<String> entities1 = (Set)links1.get(value);
            // NOTE(review): Guava Multimap.get never returns null (it returns an empty
            // collection), so this condition is always true; harmless here because an
            // empty entities2 contributes 0 pairs, but containsKey(value) was likely meant.
            if (links2.get(value) != null) {
                Set<String> entities2 = (Set)links2.get(value);
                entities1Total.addAll(entities1);
                entities2Total.addAll(entities2);
                candidatePairs += entities1.size() * entities2.size();
            }
        }
        //System.out.println("e1Size:"+entities1Total.size()+", e2Size:"+entities2Total.size()+", candidate pairs:"+candidatePairs);
        if (candidatePairs == 0) {
            return 0;
        }
return Math.min(entities1Total.size(), entities2Total.size()) / candidatePairs;
    }

    /**
     * Finds the discriminability of a relation pair, based on the similarity (exact match) of the value of their neighbors on a pair of label properties.
     * @param relationPair the pair of relations (one per collection) to evaluate
     * @param labelPropertyPerType1 label property to use per entity type, collection 1
     * @param labelPropertyPerType2 label property to use per entity type, collection 2
     * @return min distinct-entity count over the number of induced candidate pairs
     */
    public double getRelationPairDiscriminability(PropertyPair relationPair, Map<String,String> labelPropertyPerType1, Map<String,String> labelPropertyPerType2) {
        Multimap<String,String> links1 = HashMultimap.create(); //key: common value, value: entityURLs with this value
        Multimap<String,String> links2 = HashMultimap.create(); //key: common value, value: entityURLs with this value
        for (EntityProfile profile1 : profiles1) {
            for (Attribute attribute1 : profile1.getAttributes()) {
                if (attribute1.getName().equals(relationPair.getProperty1())) {
                    // NOTE(review): get() NPEs if the relation value is not a known
                    // entity URL — assumes object-property values always resolve.
                    int neighborId = urlToEntityIds1.get((attribute1.getValue()));
                    EntityProfile neighbor = profiles1.get(neighborId);
                    Set<String> neighborTypes = neighbor.getTypes(); //set, in case it's a multi-type entity
                    for (String type : neighborTypes) {
                        String neighborsLabel = neighbor.getValueOf(labelPropertyPerType1.get(type));
                        if (neighborsLabel != null) {
                            links1.put(neighborsLabel, profile1.getEntityUrl());
                        }
                    }
                }
            }
        }
        for (EntityProfile profile2 : profiles2) {
            for (Attribute attribute2 : profile2.getAttributes()) {
                if (attribute2.getName().equals(relationPair.getProperty2())) {
                    int neighborId = urlToEntityIds2.get((attribute2.getValue()));
                    EntityProfile neighbor = profiles2.get(neighborId);
                    Set<String> neighborTypes = neighbor.getTypes(); //set, in case it's a multi-type entity
                    for (String type : neighborTypes) {
                        String neighborsLabel = neighbor.getValueOf(labelPropertyPerType2.get(type));
                        if (neighborsLabel != null) {
                            links2.put(neighborsLabel, profile2.getEntityUrl());
                        }
                    }
                }
            }
        }
        double candidatePairs = 0;
        Set<String> entities1Total = new HashSet<>();
        Set<String> entities2Total = new HashSet<>();
        for (String value : links1.keySet()) {
            Set<String> entities1 = (Set)links1.get(value);
            entities1Total.addAll(entities1);
            // NOTE(review): Multimap.get never returns null — this is always true.
            if (links2.get(value) != null) {
                Set<String> entities2 = (Set)links2.get(value);
                entities2Total.addAll(entities2);
                candidatePairs += entities1.size() * entities2.size();
            }
        }
        //System.out.println("e1Size:"+entities1Total.size()+", e2Size:"+entities2Total.size()+", candidate pairs:"+candidatePairs);
        if (candidatePairs == 0) {
            return 0;
        }
        return Math.min(entities1Total.size(), entities2Total.size()) / candidatePairs;
    }

    /**
     * Finds the discriminability of a relation pair, based on the similarity (exact match) of the value of their neighbors on a pair of label properties.
     * @param relationPair
     * @param labelPairs
     * @return
     */
    public double getRelationPairDiscriminability(PropertyPair relationPair, PropertyPair[] labelPairs) {
        Multimap<String,String> links1 = HashMultimap.create(); //key: common value, value: entityURLs with this value
        Multimap<String,String> links2 = HashMultimap.create(); //key: common value, value: entityURLs with this value
        for (EntityProfile profile1 : profiles1) {
            for (Attribute attribute1 : profile1.getAttributes()) {
                if (attribute1.getName().equals(relationPair.getProperty1())) {
                    int neighborId = urlToEntityIds1.get((attribute1.getValue()));
                    EntityProfile neighbor = profiles1.get(neighborId);
                    for (int i = 0; i < labelPairs.length; ++i) {
                        String neighborsLabel = neighbor.getValueOf(labelPairs[i].getProperty1());
                        if (neighborsLabel != null) {
                            links1.put(neighborsLabel, profile1.getEntityUrl());
                        }
                    }
                }
            }
        }
        for (EntityProfile profile2 : profiles2) {
            for (Attribute attribute2 : profile2.getAttributes()) {
                if (attribute2.getName().equals(relationPair.getProperty2())) {
                    int neighborId = urlToEntityIds2.get((attribute2.getValue()));
                    EntityProfile neighbor = profiles2.get(neighborId);
                    for (int i = 0; i < labelPairs.length; ++i) {
                        String neighborsLabel = neighbor.getValueOf(labelPairs[i].getProperty2());
                        if (neighborsLabel != null) {
                            links2.put(neighborsLabel, profile2.getEntityUrl());
                        }
                    }
                }
            }
        }
        double candidatePairs = 0;
        Set<String> entities1Total = new HashSet<>();
        Set<String> entities2Total = new HashSet<>();
        for (String value : links1.keySet()) {
            Set<String> entities1 = (Set)links1.get(value);
            entities1Total.addAll(entities1);
            // NOTE(review): Multimap.get never returns null — this is always true.
            if (links2.get(value) != null) {
                Set<String> entities2 = (Set)links2.get(value);
                entities2Total.addAll(entities2);
                candidatePairs += entities1.size() * entities2.size();
            }
        }
        //System.out.println("e1Size:"+entities1Total.size()+", e2Size:"+entities2Total.size()+", candidate pairs:"+candidatePairs);
        if (candidatePairs == 0) {
            return 0;
        }
        return Math.min(entities1Total.size(), entities2Total.size()) / candidatePairs;
    }

    /**
     * Max Levenshtein similarity of the neighbors' labels over all relation/label pairs.
     * NOTE(review): unlike the overload below, there is no null check on neighbor URLs
     * before the urlToEntityIds lookups — confirm inputs are always resolvable.
     */
    public double getNeighborSimilarityForRelation(EntityProfile e1, EntityProfile e2, PropertyPair[] relationPairs, PropertyPair[] labelPairs) {
        double result = 0;
        for (PropertyPair relationPair : relationPairs) {
            String neighbor1 = e1.getValueOf(relationPair.getProperty1());
            String neighbor2 = e2.getValueOf(relationPair.getProperty2());
            for (PropertyPair labelPair : labelPairs) {
                String value1 = profiles1.get(urlToEntityIds1.get(neighbor1)).getValueOf(labelPair.getProperty1());
                String value2 = profiles2.get(urlToEntityIds2.get(neighbor2)).getValueOf(labelPair.getProperty2());
                double similarity = new LevenshteinSimilarity(value1, value2).getLevenshteinSimilarity();
                if (similarity > result) {
                    result = similarity;
                }
            }
        }
        return result;
    }

    /**
     * Average value similarity of neighbor entities over the relation pairs
     * for which both entities have a neighbor.
     */
    public double getNeighborSimilarityForRelation(EntityProfile e1, EntityProfile e2, PropertyPair[] relationPairs) {
        double similarity = 0;
        double neighborPairs = 0;
        for (PropertyPair relationPair : relationPairs) {
            String neighbor1URL = e1.getValueOf(relationPair.getProperty1());
            String neighbor2URL = e2.getValueOf(relationPair.getProperty2());
            if (neighbor1URL != null && neighbor2URL != null) {
                neighborPairs ++;
                EntityProfile neighbor1 = profiles1.get(urlToEntityIds1.get(neighbor1URL));
                EntityProfile neighbor2 =
profiles2.get(urlToEntityIds2.get(neighbor2URL));
                similarity += getValueSim(neighbor1, neighbor2);
            }
        }
        return (neighborPairs > 0) ? (similarity/neighborPairs) : 0;
    }

    /**
     * Average value similarity over the cross product of relations, counting only
     * pairs whose neighbor URLs resolve in both collections.
     */
    public double getNeighborSimilarityForRelations(EntityProfile e1, EntityProfile e2, String[] relations1, String[] relations2) {
        double similarity = 0;
        double neighborPairs = 0;
        for (String relation1 : relations1) {
            String neighbor1URL = e1.getValueOf(relation1);
            if (neighbor1URL == null) {
                continue;
            }
            for (String relation2 : relations2) {
                String neighbor2URL = e2.getValueOf(relation2);
                if (neighbor2URL == null) {
                    continue;
                }
                Integer neighbor1Id = urlToEntityIds1.get(neighbor1URL);
                if (neighbor1Id == null) {
                    continue;
                }
                EntityProfile neighbor1 = profiles1.get(neighbor1Id);
                Integer neighbor2Id = urlToEntityIds2.get(neighbor2URL);
                if (neighbor2Id == null) {
                    continue;
                }
                EntityProfile neighbor2 = profiles2.get(neighbor2Id);
                neighborPairs ++;
                similarity += getValueSim(neighbor1, neighbor2);
            }
        }
        return (neighborPairs > 0) ? (similarity/neighborPairs) : 0;
    }

    // Same traversal as above, kept separate: average of neighbor value similarities.
    private double getAvgNeighborSimilarityForRelations(EntityProfile e1, EntityProfile e2, String[] relations1, String[] relations2) {
        double sumSimilarity = 0;
        int numPairs = 0;
        for (String relation1 : relations1) {
            String neighbor1URL = e1.getValueOf(relation1);
            if (neighbor1URL == null) {
                continue;
            }
            for (String relation2 : relations2) {
                String neighbor2URL = e2.getValueOf(relation2);
                if (neighbor2URL == null) {
                    continue;
                }
                Integer neighbor1Id = urlToEntityIds1.get(neighbor1URL);
                if (neighbor1Id == null) {
                    continue;
                }
                EntityProfile neighbor1 = profiles1.get(neighbor1Id);
                Integer neighbor2Id = urlToEntityIds2.get(neighbor2URL);
                if (neighbor2Id == null) {
                    continue;
                }
                EntityProfile neighbor2 = profiles2.get(neighbor2Id);
                double similarity= getValueSim(neighbor1, neighbor2);
                sumSimilarity += similarity;
                numPairs++;
            }
        }
        return (numPairs == 0) ? 0 : sumSimilarity/numPairs;
    }

    // Maximum neighbor value similarity over the cross product of relations.
    private double getMaxNeighborSimilarityForRelations(EntityProfile e1, EntityProfile e2, String[] relations1, String[] relations2) {
        double maxSimilarity = 0;
        for (String relation1 : relations1) {
            String neighbor1URL = e1.getValueOf(relation1);
            if (neighbor1URL == null) {
                continue;
            }
            for (String relation2 : relations2) {
                String neighbor2URL = e2.getValueOf(relation2);
                if (neighbor2URL == null) {
                    continue;
                }
                Integer neighbor1Id = urlToEntityIds1.get(neighbor1URL);
                if (neighbor1Id == null) {
                    continue;
                }
                EntityProfile neighbor1 = profiles1.get(neighbor1Id);
                Integer neighbor2Id = urlToEntityIds2.get(neighbor2URL);
                if (neighbor2Id == null) {
                    continue;
                }
                EntityProfile neighbor2 = profiles2.get(neighbor2Id);
                double similarity= getValueSim(neighbor1, neighbor2);
                if (similarity > maxSimilarity) {
                    maxSimilarity = similarity;
                }
            }
        }
        return maxSimilarity;
    }

    /**
     * Keeps the K attributes of e that rank best in the given relations list.
     * NOTE(review): ordering depends on CustomRelation's compareTo (project type);
     * presumably the queue evicts the worst-ranked element — confirm.
     */
    private String[] getTopKRelationsPerEntity(EntityProfile e, List<String> relations, int K) {
        PriorityQueue<CustomRelation> topK1 = new PriorityQueue<>(K);
        Set<String> allAttributes = e.getAllAttributeNames();
        for (String relation : allAttributes) {
            int relationRank = relations.indexOf(relation);
            if (relationRank == -1) { //then this is not a relation
                continue;
            }
            /*if (allAttributes.contains(relation.replace("/ontology/", "/property/"))) { //dbpedia contains duplicate properties, keep one
                continue;
            }*/
            CustomRelation curr = new CustomRelation(relation, relationRank);
            topK1.add(curr);
            if (topK1.size() > K) {
                topK1.poll();
            }
        }
        String[] result = new String[topK1.size()];
        int i = 0;
        while (!topK1.isEmpty()) {
            result[i++] = topK1.poll().getString();
        }
        return result;
    }

    /**
     * Keep topK e1's relations and topK e2's relations and call getNeighborSimilarityForRelations without K
     * @param e1
     * @param e2
     * @param relations1
     * @param relations2
     * @param K
     * @return
     */
    private double getNeighborSimilarityForRelations(EntityProfile e1, EntityProfile e2, List<String> relations1, List<String> relations2, int K) {
        String[] topKRelations1 = getTopKRelationsPerEntity(e1, relations1, K);
        String[] topKRelations2 = getTopKRelationsPerEntity(e2, relations2, K);
        //System.out.println("The top relations of "+e1.getEntityUrl()+" are: "+Arrays.toString(topKRelations1));
        //System.out.println("The top relations of "+e2.getEntityUrl()+" are: "+Arrays.toString(topKRelations2));
        return getNeighborSimilarityForRelations(e1, e2, topKRelations1, topKRelations2);
    }

    /**
     * Keep topK e1's relations and topK e2's relations and call getNeighborSimilarityForRelations without K
     * @param e1
     * @param e2
     * @param relations1
     * @param relations2
     * @param K
     * @return
     */
    private double getMaxNeighborSimilarityForRelations(EntityProfile e1, EntityProfile e2, List<String> relations1, List<String> relations2, int K) {
        String[] topKRelations1 = getTopKRelationsPerEntity(e1, relations1, K);
        String[] topKRelations2 = getTopKRelationsPerEntity(e2, relations2, K);
        //System.out.println("The top relations of "+e1.getEntityUrl()+" are: "+Arrays.toString(topKRelations1));
        //System.out.println("The top relations of "+e2.getEntityUrl()+" are: "+Arrays.toString(topKRelations2));
        return getMaxNeighborSimilarityForRelations(e1, e2, topKRelations1, topKRelations2);
    }

    /**
     * Keep topK e1's relations and topK e2's relations and call getNeighborSimilarityForRelations without K
     * @param e1
     * @param e2
     * @param relations1
     * @param relations2
     * @param K
     * @return
     */
    private double getAvgNeighborSimilarityForRelations(EntityProfile e1, EntityProfile e2, List<String> relations1, List<String> relations2, int K) {
        String[] topKRelations1 = getTopKRelationsPerEntity(e1, relations1, K);
        String[] topKRelations2 = getTopKRelationsPerEntity(e2, relations2, K);
        //System.out.println("The top relations of "+e1.getEntityUrl()+" are: "+Arrays.toString(topKRelations1));
        //System.out.println("The top relations of "+e2.getEntityUrl()+" are: "+Arrays.toString(topKRelations2));
        return getAvgNeighborSimilarityForRelations(e1, e2, topKRelations1, topKRelations2);
    }

    //utility methods

    // Union of all attribute names appearing in the collection.
    protected Set<String> getAllPropertiesFromCollection(List<EntityProfile> profiles) {
        Set<String> attributeNames = new HashSet<>();
        profiles.stream().forEach((profile) -> attributeNames.addAll(profile.getAllAttributeNames()));
        return attributeNames;
    }

    // Datatype properties = all properties minus the object properties (relations).
    protected Set<String> getAllDatatypePropertiesFromCollection(List<EntityProfile> profiles) {
        Set<String> datatypeProperties = getAllPropertiesFromCollection(profiles);
        datatypeProperties.removeAll(getAllEntityRelationsFromCollection(profiles));
        return datatypeProperties;
    }

    /**
     * Returns the set of all object properties (not datatype properties) in an entity collection.
     * An object property can be found if at least one of its values is an entityURL in this collection
     * (i.e., it appears as a subject of some triples).
     * @param profiles
     * @return
     */
    protected Set<String> getAllEntityRelationsFromCollection(List<EntityProfile> profiles) {
        Set<String> relations = new HashSet<>();
        Set<String> entityURLs = getEntityURLs(profiles);
        Multiset<String> nonRelationCount = HashMultiset.create();
        Multiset<String> relationCount = HashMultiset.create();
        for (EntityProfile profile : profiles) {
            for (Attribute att: profile.getAttributes()) {
                if (entityURLs.contains(att.getValue())) {
                    /*
                    if (relations.add(att.getName())) {
                        System.out.println("Adding relation "+att.getName()+" because it links to "+att.getValue()+" for entity "+profile.getEntityUrl());
                    }
                    */
//                    relations.add(att.getName());
                    relationCount.add(att.getName());
                } else {
                    nonRelationCount.add(att.getName());
                }
            }
        }
        //majority voting
        // NOTE(review): iterating the Multiset directly visits each element once per
        // occurrence; relationCount.elementSet() would avoid the redundant re-checks
        // (the result is the same since relations is a Set).
        for (String relation : relationCount) {
            if (relationCount.count(relation) > nonRelationCount.count(relation)) {
                relations.add(relation);
            }
        }
        return relations;
    }

    /**
     * Get all the relations of the given collection, sorted by the f-measure of support and discriminability
     * @param profiles
     * @param MIN_SUPPORT
     * @return
     */
    private List<String> getAllRelationsSorted(List<EntityProfile> profiles, double MIN_SUPPORT) {
double max_support = 0;
        Map<String, Double> supportOfRelation = new HashMap<>();
        Set<String> relations = getAllEntityRelationsFromCollection(profiles);
        int numRelations = relations.size();
        int cnt = 0;
        // First pass: raw support per relation, tracking the max for normalization.
        for (String relation : relations) {
            if (++cnt % 10 == 0) {
                System.out.println("Checking relation "+cnt+"/"+numRelations+": "+relation);
            }
            double support = getRelationSupport(relation, profiles);
            if (support > max_support) {
                max_support = support;
            }
            supportOfRelation.put(relation, support);
        }
        // Second pass: score relations above MIN_SUPPORT by the harmonic mean (f-measure)
        // of normalized support and discriminability.
        Map<String, Double> scoredRelations = new HashMap<>();
        for (String relation : relations) {
            double support = supportOfRelation.get(relation) / max_support;
            if (support > MIN_SUPPORT) {
                double discrim = getPropertyDiscriminability(relation, profiles);
                double fMeasure = 2 * support * discrim / (support + discrim);
                //System.out.print(relation+": "+fMeasure);
                //System.out.println(". support: "+support+", discriminability: "+discrim);
                scoredRelations.put(relation, fMeasure);
            }
        }
        Map<String,Double> sortedRelations = sortByValue(scoredRelations);
        return new ArrayList<>(sortedRelations.keySet()); //sorted in descending score
    }

    // Builds the URL -> list-index lookup for a collection.
    private Map<String,Integer> getEntityURLtoEntityID(List<EntityProfile> profiles) {
        Map<String, Integer> urlToEntityIds = new HashMap<>();
        for (int i = 0; i < profiles.size(); ++i) {
            urlToEntityIds.put(profiles.get(i).getEntityUrl(), i);
        }
        return urlToEntityIds;
    }

    // All entity URLs appearing in the collection.
    private Set<String> getEntityURLs(List<EntityProfile> profiles) {
        Set<String> entityURLs = new HashSet<>();
        for (EntityProfile profile : profiles) {
            entityURLs.add(profile.getEntityUrl());
        }
        return entityURLs;
    }

    /**
     * Top-K relations by support/discriminability f-measure, using a size-K
     * PriorityQueue that evicts the head when it overflows.
     */
    private String[] getTopKRelations(List<EntityProfile> profiles, int K, double MIN_SUPPORT){
        double max_support = 0;
        Map<String, Double> supportOfRelation = new HashMap<>();
        Set<String> relations = getAllEntityRelationsFromCollection(profiles);
        int numRelations = relations.size();
        int cnt = 0;
        for (String relation : relations) {
            if (++cnt % 10 == 0) {
                System.out.println("Checking relation "+cnt+"/"+numRelations+": "+relation);
            }
            double support = getRelationSupport(relation, profiles);
            if (support > max_support) {
                max_support = support;
            }
            supportOfRelation.put(relation, support);
        }
        PriorityQueue<CustomRelation> topKRelations = new PriorityQueue<>(K);
        for (String relation : relations) {
            double support = supportOfRelation.get(relation) / max_support;
            if (support > MIN_SUPPORT) {
                double discrim = getPropertyDiscriminability(relation, profiles);
                double fMeasure = 2 * support * discrim / (support + discrim);
                System.out.print(relation+": "+fMeasure);
                System.out.println(". support: "+support+", discriminability: "+discrim);
                CustomRelation curr = new CustomRelation(relation, fMeasure);
                topKRelations.add(curr);
                if (topKRelations.size() > K) {
                    topKRelations.poll();
                }
            }
        }
        //System.out.println(Arrays.toString(top3Relations1.toArray(new CustomRelation[top3Relations1.size()])));
        String[] topRelations = new String[topKRelations.size()];
        for (int i = 0; i < topRelations.length; ++i) {
            topRelations[i] = topKRelations.poll().getString();
        }
        System.out.println(Arrays.toString(topRelations));
        return topRelations;
    }

    /**
     * Top-K datatype properties (label candidates) scored like getTopKRelations.
     * NOTE(review): support here is computed with getRelationSupport (normalized by
     * |profiles|^2) even though these are datatype properties — possibly
     * getPropertySupport was intended; confirm before changing.
     */
    private String[] getLabels(List<EntityProfile> profiles, int K, double MIN_SUPPORT){
        double max_support = 0;
        Map<String, Double> supportOfProperty = new HashMap<>();
        Set<String> properties = getAllDatatypePropertiesFromCollection(profiles);
        int numProperties = properties.size();
        int cnt = 0;
        for (String relation : properties) {
            if (++cnt % 10 == 0) {
                System.out.println("Checking relation "+cnt+"/"+numProperties+": "+relation);
            }
            double support = getRelationSupport(relation, profiles);
            if (support > max_support) {
                max_support = support;
            }
            supportOfProperty.put(relation, support);
        }
        PriorityQueue<CustomRelation> topKproperties = new PriorityQueue<>(K);
        for (String property : properties) {
            double support = supportOfProperty.get(property) / max_support;
            if (support > MIN_SUPPORT) {
                double discrim = getPropertyDiscriminability(property, profiles);
                double fMeasure = 2 * support * discrim / (support + discrim);
                System.out.print(property+": "+fMeasure);
                System.out.println(". support: "+support+", discriminability: "+discrim);
                CustomRelation curr = new CustomRelation(property, fMeasure);
                topKproperties.add(curr);
                if (topKproperties.size() > K) {
                    topKproperties.poll();
                }
            }
        }
        //System.out.println(Arrays.toString(top3Relations1.toArray(new CustomRelation[top3Relations1.size()])));
        String[] topProperties = new String[topKproperties.size()];
        for (int i = 0; i < topProperties.length; ++i) {
            topProperties[i] = topKproperties.poll().getString();
        }
        // Reverse so the best-scored property comes first.
        Collections.reverse(Arrays.asList(topProperties));
        System.out.println(Arrays.toString(topProperties));
        return topProperties;
    }

    /**
     * Copied from http://stackoverflow.com/a/2581754/2516301
     * @param <K>
     * @param <V>
     * @param map
     * @return
     */
    public static <K, V extends Comparable<? super V>> Map<K, V> sortByValue(Map<K, V> map) {
        return map.entrySet()
                .stream()
                .sorted(Map.Entry.comparingByValue(Collections.reverseOrder())) //Collections.reverseOrder() for descending, comment out for ascending
                .collect(Collectors.toMap(
                        Map.Entry::getKey,
                        Map.Entry::getValue,
                        (e1, e2) -> e1,
                        LinkedHashMap::new
                ));
    }

    //tests start here
    public static void main (String[] args) {
        //Restaurants dataset
        final String basePath = "C:\\Users\\VASILIS\\Documents\\OAEI_Datasets\\OAEI2010\\restaurant\\";
        String dataset1 = basePath+"restaurant1Profiles";
        String dataset2 = basePath+"restaurant2Profiles";
        String datasetGroundtruth = basePath+"restaurantIdDuplicates";

        //Rexa-DBLP
//        final String basePath = "C:\\Users\\VASILIS\\Documents\\OAEI_Datasets\\rexa-dblp\\";
//        String dataset1 = basePath+"rexaProfiles";
//        String dataset2 = basePath+"swetodblp_april_2008Profiles";
//        String datasetGroundtruth = basePath+"rexa_dblp_goldstandardIdDuplicates";

        //BBCmusic-DBpedia dataset
//        final String basePath = "C:\\Users\\VASILIS\\Documents\\OAEI_Datasets\\bbcMusic\\";
//        String dataset1 = basePath+"bbc-musicNewNoRdfProfiles";
//        String
//        dataset2 = basePath+"dbpedia37processedNewNoSameAsNoWikipediaSortedProfiles";
//        String datasetGroundtruth = basePath+"bbc-music_groundTruthUTF8IdDuplicates";

        //YAGO-IMDb dataset
//        final String basePath = "C:\\Users\\VASILIS\\Documents\\OAEI_Datasets\\yago-imdb\\";
//        String dataset1 = basePath+"yagoProfiles";
//        String dataset2 = basePath+"imdbProfiles";
//        String datasetGroundtruth = null;

        double MIN_SUPPORT = 0.01; //TODO: tune those parameters
        int topK = 4; //TODO: test those parameters

        //override input parameters if run in console
        if (args.length >= 1) {
            dataset1 = args[0];
            if (args.length == 1) {
                dataset2 = null;
                datasetGroundtruth = null;
            } else {
                dataset2 = args[1];
                datasetGroundtruth = args[2];
                if (args.length == 5) {
                    MIN_SUPPORT = Double.parseDouble(args[3]);
                    topK = Integer.parseInt(args[4]);
                }
            }
        }

//        testTopKLocalRelations(dataset1, dataset2, datasetGroundtruth, topK, MIN_SUPPORT);
//        testTopKGlobal(dataset1, dataset2, datasetGroundtruth, topK, MIN_SUPPORT);
        testLabelDetection(dataset1, dataset2, topK, MIN_SUPPORT);

//        PropertyWeights pw = new PropertyWeights(dataset1, dataset2, null);
//        System.out.println(pw.getAllEntityRelationsFromCollection(pw.getProfiles1()).size());
//        System.out.println(pw.getAllEntityRelationsFromCollection(pw.getProfiles2()).size());
        //System.out.println(pw.getAllPropertiesFromCollection(pw.getProfiles1()).size());
//        System.out.println(pw.getAllPropertiesFromCollection(pw.getProfiles2()).size());
    }

    // Prints the top-K label-candidate properties of each dataset.
    public static void testLabelDetection (String dataset1, String dataset2, int K, double MIN_SUPPORT) {
        PropertyWeights pw = new PropertyWeights(dataset1, dataset2, null);
        List<EntityProfile> profiles1 = pw.getProfiles1();
        List<EntityProfile> profiles2 = pw.getProfiles2();
        System.out.println("D1");
        String[] labels1 = pw.getLabels(profiles1, K, MIN_SUPPORT);
        System.out.println("D2");
        String[] labels2 = pw.getLabels(profiles2, K, MIN_SUPPORT);
        System.out.println("D1");
        System.out.println(Arrays.toString(labels1));
        System.out.println("D2");
        System.out.println(Arrays.toString(labels2));
    }

    // For every ground-truth duplicate pair, prints value sim, max neighbor sim
    // (over per-entity top-K relations) and whether the labels match.
    public static void testTopKLocalRelations (String dataset1, String dataset2, String datasetGroundtruth, int K, double MIN_SUPPORT) {
        PropertyWeights pw = new PropertyWeights(dataset1, dataset2, datasetGroundtruth);
        List<EntityProfile> profiles1 = pw.getProfiles1();
        List<EntityProfile> profiles2 = pw.getProfiles2();

        //System.out.println("\n\nDataset 1 relations:\n");
        List<String> relations1Sorted = pw.getAllRelationsSorted(profiles1, MIN_SUPPORT);
        //System.out.println(Arrays.toString(relations1Sorted.toArray()));
        //System.out.println("\n\nDataset 2 relations:\n");
        List <String> relations2Sorted = pw.getAllRelationsSorted(profiles2, MIN_SUPPORT);
        //System.out.println(Arrays.toString(relations2Sorted.toArray()));

        //now, get the value and neighbor sim of the matches
        System.out.println("Creating the models for weighted Jaccard sim...");
        pw.createModels(); //used for weighted jaccard value sim
        System.out.println("valueSim:neighborSim:matchingLabels");
        Set<IdDuplicates> duplicates = pw.groundTruth.getDuplicates();
        for (IdDuplicates duplicate : duplicates) {
            EntityProfile e1 = profiles1.get(duplicate.getEntityId1());
            EntityProfile e2 = profiles2.get(duplicate.getEntityId2());
            double valueSimilarity = pw.getValueSim(e1, e2);
            double neighborSimilarity = pw.getMaxNeighborSimilarityForRelations(e1, e2, relations1Sorted, relations2Sorted, K);
            int matchingLabels = pw.haveSameLabels(e1,e2) ? 1 : 0;
            System.out.println(valueSimilarity+":"+neighborSimilarity+":"+matchingLabels);
        }
    }

    // Same as above, but using one global top-K relation set per dataset.
    public static void testTopKGlobal (String dataset1, String dataset2, String datasetGroundtruth, int K, double MIN_SUPPORT) {
        PropertyWeights pw = new PropertyWeights(dataset1, dataset2, datasetGroundtruth);
        List<EntityProfile> profiles1 = pw.getProfiles1();
        List<EntityProfile> profiles2 = pw.getProfiles2();
        System.out.println("\n\nDataset 1 relations:\n");
        String[] topRelations1 = pw.getTopKRelations(profiles1, K, MIN_SUPPORT);
        System.out.println("\n\nDataset 2 relations:\n");
        String[] topRelations2 = pw.getTopKRelations(profiles2, K, MIN_SUPPORT);

        //now, get the value and neighbor sim of the matches
        System.out.println("Creating the models for weighted Jaccard sim...");
        pw.createModels(); //used for weighted jaccard value sim
        System.out.println("valueSim:neighborSim");
        Set<IdDuplicates> duplicates = pw.groundTruth.getDuplicates();
        for (IdDuplicates duplicate : duplicates) {
            EntityProfile e1 = profiles1.get(duplicate.getEntityId1());
            EntityProfile e2 = profiles2.get(duplicate.getEntityId2());
            double valueSimilarity = pw.getValueSim(e1, e2);
            double neighborSimilarity = pw.getNeighborSimilarityForRelations(e1, e2, topRelations1, topRelations2);
            System.out.println(valueSimilarity+":"+neighborSimilarity);
        }
    }

    // Scores attribute pairs across the two datasets as potential link keys.
    // (Continues beyond this chunk of the file.)
    public static void testLinkKeys (String dataset1, String dataset2, String datasetGroundtruth, int K) {
        PropertyWeights pw = new PropertyWeights(dataset1, dataset2, datasetGroundtruth);
        List<EntityProfile> profiles1 = pw.getProfiles1();
        List<EntityProfile> profiles2 = pw.getProfiles2();
        System.out.println("\n\nGetting the support and discriminability of attribute pairs (later used as labels):\n");
        PriorityQueue<PropertyPair> labelPairs = new PriorityQueue<>(K, new PropertyPairComparator());
        for (String att1 : pw.getAllPropertiesFromCollection(profiles1)) {
            System.out.println("Checking "+att1+" with all attributes of D2...");
            for (String att2 :
pw.getAllPropertiesFromCollection(profiles2)) { PropertyPair labelPair = new PropertyPair(att1, att2, 0); double pairSupport = pw.getPropertyPairSupport(labelPair); double pairDiscrim = pw.getPropertyPairDiscriminability(att1, att2); double fMeasure = 2 * pairSupport * pairDiscrim / (pairSupport + pairDiscrim); labelPair.setScore(fMeasure); if (fMeasure > 0) { labelPairs.add(labelPair); if (labelPairs.size() > K) { labelPairs.poll(); } System.out.println(labelPair); System.out.println("support: "+pairSupport); System.out.println("discriminability: "+pairDiscrim); System.out.println(); } } } System.out.println(Arrays.toString(labelPairs.toArray(new PropertyPair[labelPairs.size()]))); System.out.println("\n\nGetting the support and discriminability of relation pairs (later used as key relations):\n"); PriorityQueue<PropertyPair> relationPairs = new PriorityQueue<>(K, new PropertyPairComparator()); for (String relation1 : pw.getAllEntityRelationsFromCollection(profiles1)) { for (String relation2 : pw.getAllEntityRelationsFromCollection(profiles2)) { PropertyPair relationPair = new PropertyPair(relation1, relation2, 0); double pairSupport = pw.getPropertyPairSupport(relationPair); double pairDiscrim = pw.getRelationPairDiscriminability(relationPair, labelPairs.toArray(new PropertyPair[labelPairs.size()])); double fMeasure = 2 * pairSupport * pairDiscrim / (pairSupport + pairDiscrim); relationPair.setScore(fMeasure); if (fMeasure > 0) { relationPairs.add(relationPair); if (relationPairs.size() > K) { relationPairs.poll(); } System.out.println(relationPair); System.out.println("support: "+pairSupport); System.out.println("discriminability: "+pairDiscrim); System.out.println(); } } } System.out.println(Arrays.toString(relationPairs.toArray(new PropertyPair[relationPairs.size()]))); pw.createModels(); //used for weighted jaccard value sim Set<IdDuplicates> duplicates = pw.groundTruth.getDuplicates(); for (IdDuplicates duplicate : duplicates) { EntityProfile e1 = 
profiles1.get(duplicate.getEntityId1()); EntityProfile e2 = profiles2.get(duplicate.getEntityId2()); double neighborSimilarity = pw.getNeighborSimilarityForRelation( e1, e2, relationPairs.toArray(new PropertyPair[relationPairs.size()]) //, labelPairs.toArray(new PropertyPair[labelPairs.size()]) ); double valueSimilarity = pw.getValueSim(e1, e2); System.out.println(valueSimilarity+":"+neighborSimilarity); } } /** * Copied (and altered) from http://stackoverflow.com/a/16297127/2516301 */ private static class CustomRelation implements Comparable<CustomRelation> { // public final fields ok for this small example public final String string; public double value; public CustomRelation(String string, double value) { this.string = string; this.value = value; } @Override public int compareTo(CustomRelation other) { // define sorting according to double fields return Double.compare(value, other.value); } public String getString(){ return string; } public void setValue(double value) { this.value = value; } @Override public String toString() { return string+":"+value; } } }