gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.master.procedure;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.Set;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.InvalidFamilyOperationException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

/**
 * Verify that the HTableDescriptor is updated after
 * addColumn(), deleteColumn() and modifyTable() operations.
 */
@Category({MasterTests.class, MediumTests.class})
public class TestTableDescriptorModificationFromClient {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestTableDescriptorModificationFromClient.class);

  @Rule
  public TestName name = new TestName();

  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  // Re-assigned per test in setup() from the running test's method name.
  private static TableName TABLE_NAME = null;
  private static final byte[] FAMILY_0 = Bytes.toBytes("cf0");
  private static final byte[] FAMILY_1 = Bytes.toBytes("cf1");

  /**
   * Start up a mini cluster and put a small table of empty regions into it.
   */
  @BeforeClass
  public static void beforeAllTests() throws Exception {
    TEST_UTIL.startMiniCluster(1);
  }

  /** Derive a per-test table name so tests do not collide. */
  @Before
  public void setup() {
    TABLE_NAME = TableName.valueOf(name.getMethodName());
  }

  @AfterClass
  public static void afterAllTests() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  /**
   * modifyTable() with a descriptor containing an extra family must add that
   * family to both the master's and the on-disk descriptor.
   */
  @Test
  public void testModifyTable() throws IOException {
    Admin admin = TEST_UTIL.getAdmin();
    // Create a table with one family
    TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
        new TableDescriptorBuilder.ModifyableTableDescriptor(TABLE_NAME);
    tableDescriptor.setColumnFamily(
        new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_0));
    admin.createTable(tableDescriptor);
    admin.disableTable(TABLE_NAME);
    try {
      // Verify the table descriptor
      verifyTableDescriptor(TABLE_NAME, FAMILY_0);

      // Modify the table adding another family and verify the descriptor
      TableDescriptorBuilder.ModifyableTableDescriptor modifiedtableDescriptor =
          new TableDescriptorBuilder.ModifyableTableDescriptor(TABLE_NAME);
      modifiedtableDescriptor.setColumnFamily(
          new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_0));
      modifiedtableDescriptor.setColumnFamily(
          new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_1));
      admin.modifyTable(modifiedtableDescriptor);
      verifyTableDescriptor(TABLE_NAME, FAMILY_0, FAMILY_1);
    } finally {
      admin.deleteTable(TABLE_NAME);
    }
  }

  /** addColumnFamily() must be reflected in the master and HDFS descriptors. */
  @Test
  public void testAddColumn() throws IOException {
    Admin admin = TEST_UTIL.getAdmin();
    // Create a table with one family
    TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
        new TableDescriptorBuilder.ModifyableTableDescriptor(TABLE_NAME);
    tableDescriptor.setColumnFamily(
        new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_0));
    admin.createTable(tableDescriptor);
    admin.disableTable(TABLE_NAME);
    try {
      // Verify the table descriptor
      verifyTableDescriptor(TABLE_NAME, FAMILY_0);

      // Modify the table adding one family and verify the descriptor
      admin.addColumnFamily(TABLE_NAME,
          new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_1));
      verifyTableDescriptor(TABLE_NAME, FAMILY_0, FAMILY_1);
    } finally {
      admin.deleteTable(TABLE_NAME);
    }
  }

  /** Adding an already-existing family must raise InvalidFamilyOperationException. */
  @Test
  public void testAddSameColumnFamilyTwice() throws IOException {
    Admin admin = TEST_UTIL.getAdmin();
    // Create a table with one family
    TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
        new TableDescriptorBuilder.ModifyableTableDescriptor(TABLE_NAME);
    tableDescriptor.setColumnFamily(
        new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_0));
    admin.createTable(tableDescriptor);
    admin.disableTable(TABLE_NAME);
    try {
      // Verify the table descriptor
      verifyTableDescriptor(TABLE_NAME, FAMILY_0);

      // Modify the table adding one family and verify the descriptor
      admin.addColumnFamily(TABLE_NAME,
          new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_1));
      verifyTableDescriptor(TABLE_NAME, FAMILY_0, FAMILY_1);

      try {
        // Add same column family again - expect failure
        admin.addColumnFamily(TABLE_NAME,
            new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_1));
        Assert.fail("Adding an existing column family should fail");
      } catch (InvalidFamilyOperationException e) {
        // Expected.
      }
    } finally {
      admin.deleteTable(TABLE_NAME);
    }
  }

  /** modifyColumnFamily() must persist the changed family attributes. */
  @Test
  public void testModifyColumnFamily() throws IOException {
    Admin admin = TEST_UTIL.getAdmin();

    ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor cfDescriptor =
        new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_0);
    int blockSize = cfDescriptor.getBlocksize();
    // Create a table with one family
    TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
        new TableDescriptorBuilder.ModifyableTableDescriptor(TABLE_NAME);
    tableDescriptor.setColumnFamily(cfDescriptor);
    admin.createTable(tableDescriptor);
    admin.disableTable(TABLE_NAME);
    try {
      // Verify the table descriptor
      verifyTableDescriptor(TABLE_NAME, FAMILY_0);

      int newBlockSize = 2 * blockSize;
      cfDescriptor.setBlocksize(newBlockSize);

      // Modify column family
      admin.modifyColumnFamily(TABLE_NAME, cfDescriptor);

      HTableDescriptor htd = new HTableDescriptor(admin.getDescriptor(TABLE_NAME));
      HColumnDescriptor hcfd = htd.getFamily(FAMILY_0);
      // assertEquals gives a useful diff on failure, unlike assertTrue(a == b).
      assertEquals(newBlockSize, hcfd.getBlocksize());
    } finally {
      admin.deleteTable(TABLE_NAME);
    }
  }

  /** Modifying a family the table does not have must fail. */
  @Test
  public void testModifyNonExistingColumnFamily() throws IOException {
    Admin admin = TEST_UTIL.getAdmin();

    ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor cfDescriptor =
        new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_1);
    int blockSize = cfDescriptor.getBlocksize();
    // Create a table with one family
    TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
        new TableDescriptorBuilder.ModifyableTableDescriptor(TABLE_NAME);
    tableDescriptor.setColumnFamily(
        new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_0));
    admin.createTable(tableDescriptor);
    admin.disableTable(TABLE_NAME);
    try {
      // Verify the table descriptor
      verifyTableDescriptor(TABLE_NAME, FAMILY_0);

      int newBlockSize = 2 * blockSize;
      cfDescriptor.setBlocksize(newBlockSize);

      // Modify a column family that is not in the table.
      try {
        admin.modifyColumnFamily(TABLE_NAME, cfDescriptor);
        Assert.fail("Modify a non-exist column family should fail");
      } catch (InvalidFamilyOperationException e) {
        // Expected.
      }
    } finally {
      admin.deleteTable(TABLE_NAME);
    }
  }

  /** deleteColumnFamily() must be reflected in the master and HDFS descriptors. */
  @Test
  public void testDeleteColumn() throws IOException {
    Admin admin = TEST_UTIL.getAdmin();
    // Create a table with two families
    TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
        new TableDescriptorBuilder.ModifyableTableDescriptor(TABLE_NAME);
    tableDescriptor.setColumnFamily(
        new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_0));
    tableDescriptor.setColumnFamily(
        new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_1));
    admin.createTable(tableDescriptor);
    admin.disableTable(TABLE_NAME);
    try {
      // Verify the table descriptor
      verifyTableDescriptor(TABLE_NAME, FAMILY_0, FAMILY_1);

      // Modify the table removing one family and verify the descriptor
      admin.deleteColumnFamily(TABLE_NAME, FAMILY_1);
      verifyTableDescriptor(TABLE_NAME, FAMILY_0);
    } finally {
      admin.deleteTable(TABLE_NAME);
    }
  }

  /** Deleting a family that was already removed must fail. */
  @Test
  public void testDeleteSameColumnFamilyTwice() throws IOException {
    Admin admin = TEST_UTIL.getAdmin();
    // Create a table with two families
    TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
        new TableDescriptorBuilder.ModifyableTableDescriptor(TABLE_NAME);
    tableDescriptor.setColumnFamily(
        new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_0));
    tableDescriptor.setColumnFamily(
        new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_1));
    admin.createTable(tableDescriptor);
    admin.disableTable(TABLE_NAME);
    try {
      // Verify the table descriptor
      verifyTableDescriptor(TABLE_NAME, FAMILY_0, FAMILY_1);

      // Modify the table removing one family and verify the descriptor
      admin.deleteColumnFamily(TABLE_NAME, FAMILY_1);
      verifyTableDescriptor(TABLE_NAME, FAMILY_0);

      try {
        // Delete again - expect failure
        admin.deleteColumnFamily(TABLE_NAME, FAMILY_1);
        Assert.fail("Delete a non-exist column family should fail");
      } catch (Exception e) {
        // Expected.
      }
    } finally {
      admin.deleteTable(TABLE_NAME);
    }
  }

  /**
   * Check that both the master's cached descriptor and the descriptor persisted
   * on HDFS list exactly the given families for the given table.
   */
  private void verifyTableDescriptor(final TableName tableName, final byte[]... families)
      throws IOException {
    Admin admin = TEST_UTIL.getAdmin();

    // Verify descriptor from master
    HTableDescriptor htd = new HTableDescriptor(admin.getDescriptor(tableName));
    verifyTableDescriptor(htd, tableName, families);

    // Verify descriptor from HDFS
    MasterFileSystem mfs = TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterFileSystem();
    Path tableDir = FSUtils.getTableDir(mfs.getRootDir(), tableName);
    TableDescriptor td =
        FSTableDescriptors.getTableDescriptorFromFs(mfs.getFileSystem(), tableDir);
    verifyTableDescriptor(td, tableName, families);
  }

  /** Assert the descriptor names the table and exactly the expected families. */
  private void verifyTableDescriptor(final TableDescriptor htd, final TableName tableName,
      final byte[]... families) {
    Set<byte[]> htdFamilies = htd.getColumnFamilyNames();
    assertEquals(tableName, htd.getTableName());
    assertEquals(families.length, htdFamilies.size());
    for (byte[] familyName : families) {
      // getColumnFamilyNames() returns a byte[]-comparator-backed set, so
      // contains() works on content, not array identity.
      assertTrue("Expected family " + Bytes.toString(familyName),
          htdFamilies.contains(familyName));
    }
  }
}
/*
 * The MIT License
 * Copyright (c) 2012 Microsoft Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

package microsoft.exchange.webservices.data.autodiscover.configuration.outlook;

import microsoft.exchange.webservices.data.attribute.EditorBrowsable;
import microsoft.exchange.webservices.data.autodiscover.AlternateMailbox;
import microsoft.exchange.webservices.data.autodiscover.AlternateMailboxCollection;
import microsoft.exchange.webservices.data.autodiscover.enumeration.AutodiscoverResponseType;
import microsoft.exchange.webservices.data.autodiscover.response.GetUserSettingsResponse;
import microsoft.exchange.webservices.data.core.EwsXmlReader;
import microsoft.exchange.webservices.data.core.XmlElementNames;
import microsoft.exchange.webservices.data.enumeration.EditorBrowsableState;
import microsoft.exchange.webservices.data.enumeration.OutlookProtocolType;
import microsoft.exchange.webservices.data.enumeration.UserSettingName;
import microsoft.exchange.webservices.data.enumeration.XmlNamespace;
import microsoft.exchange.webservices.data.security.XmlNodeType;

import java.util.HashMap;
import java.util.List;

/**
 * Represents an Outlook configuration settings account.
 */
@EditorBrowsable(state = EditorBrowsableState.Never)
final class OutlookAccount {

  //region Private constants
  /**
   * The Constant Settings. Action value meaning the response carries settings.
   */
  private final static String Settings = "settings";

  /**
   * The Constant RedirectAddr. Action value meaning redirect to another address.
   */
  private final static String RedirectAddr = "redirectAddr";

  /**
   * The Constant RedirectUrl. Action value meaning redirect to another URL.
   */
  private final static String RedirectUrl = "redirectUrl";
  //endRegion

  // Raw account type string as read from the AccountType element.
  private String accountType;
  // Response type derived from the Action element (see loadFromXml).
  private AutodiscoverResponseType responseType;

  //region Private fields
  /**
   * The protocols, keyed by protocol type; populated from Protocol elements.
   */
  private HashMap<OutlookProtocolType, OutlookProtocol> protocols;

  // Mailboxes collected from the AlternateMailboxes element.
  private AlternateMailboxCollection alternateMailboxes;

  // Target address or URL when the response is a redirect.
  private String redirectTarget;
  //endRegion

  /**
   * Initializes a new instance of the OutlookAccount class.
   */
  protected OutlookAccount() {
    this.protocols = new HashMap<OutlookProtocolType, OutlookProtocol>();
    this.alternateMailboxes = new AlternateMailboxCollection();
  }

  /**
   * Parses the specified reader, consuming elements until the closing
   * Account element is reached and populating this instance's fields.
   *
   * @param reader The reader, positioned inside the Account element.
   * @throws Exception the exception
   */
  protected void loadFromXml(EwsXmlReader reader) throws Exception {
    do {
      reader.read();

      if (reader.getNodeType().getNodeType() == XmlNodeType.START_ELEMENT) {
        if (reader.getLocalName().equals(XmlElementNames.AccountType)) {
          this.setAccountType(reader.readElementValue());
        } else if (reader.getLocalName().equals(XmlElementNames.Action)) {
          // Map the textual action to a response type; anything
          // unrecognized is treated as an error response.
          String xmlResponseType = reader.readElementValue();
          if (xmlResponseType.equals(OutlookAccount.Settings)) {
            this.setResponseType(AutodiscoverResponseType.Success);
          } else if (xmlResponseType
              .equals(OutlookAccount.RedirectUrl)) {
            this.setResponseType(AutodiscoverResponseType.
                RedirectUrl);
          } else if (xmlResponseType
              .equals(OutlookAccount.RedirectAddr)) {
            this.setResponseType(
                AutodiscoverResponseType.RedirectAddress);
          } else {
            this.setResponseType(AutodiscoverResponseType.Error);
          }
        } else if (reader.getLocalName().equals(
            XmlElementNames.Protocol)) {
          // Each Protocol element parses itself; last one wins per type.
          OutlookProtocol protocol = new OutlookProtocol();
          protocol.loadFromXml(reader);
          this.protocols.put(
              protocol.getProtocolType(), protocol);
        } else if (reader.getLocalName().equals(
            XmlElementNames.RedirectAddr)) {
          this.setRedirectTarget(reader.readElementValue());
        } else if (reader.getLocalName().equals(
            XmlElementNames.RedirectUrl)) {
          this.setRedirectTarget(reader.readElementValue());
        } else if (reader.getLocalName().equals(
            XmlElementNames.AlternateMailboxes)) {
          AlternateMailbox alternateMailbox = AlternateMailbox.
              loadFromXml(reader);
          this.alternateMailboxes.getEntries().add(alternateMailbox);
        } else {
          // Unknown element: skip its whole subtree.
          reader.skipCurrentElement();
        }
      }
    } while (!reader.isEndElement(XmlNamespace.NotSpecified,
        XmlElementNames.Account));
  }

  /**
   * Converts this account's protocols (and, if requested, the alternate
   * mailboxes) into entries of the given user settings response.
   *
   * @param requestedSettings the settings the caller asked for
   * @param response          the response to populate
   */
  protected void convertToUserSettings(List<UserSettingName> requestedSettings,
      GetUserSettingsResponse response) {
    for (OutlookProtocol protocol : this.protocols.values()) {
      protocol.convertToUserSettings(requestedSettings, response);
    }

    if (requestedSettings.contains(UserSettingName.AlternateMailboxes)) {
      response.getSettings().put(UserSettingName.
          AlternateMailboxes, this.alternateMailboxes);
    }
  }

  /**
   * Gets the type of the account.
   *
   * @return the account type
   */
  protected String getAccountType() {
    return accountType;
  }

  /**
   * Sets the type of the account.
   *
   * @param value the new account type
   */
  protected void setAccountType(String value) {
    this.accountType = value;
  }

  /**
   * Gets the type of the response.
   *
   * @return the response type
   */
  protected AutodiscoverResponseType getResponseType() {
    return responseType;
  }

  /**
   * Sets the response type.
   *
   * @param value the new response type
   */
  protected void setResponseType(AutodiscoverResponseType value) {
    this.responseType = value;
  }

  /**
   * Gets the redirect target.
   *
   * @return the redirect target
   */
  protected String getRedirectTarget() {
    return redirectTarget;
  }

  /**
   * Sets the redirect target.
   *
   * @param value the new redirect target
   */
  protected void setRedirectTarget(String value) {
    this.redirectTarget = value;
  }
}
// Copyright (c) 2009 Shardul Deo // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // // Generated by the protocol buffer compiler. DO NOT EDIT! 
package com.googlecode.protobuf.socketrpc; public final class SocketRpcProtos { private SocketRpcProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public enum ErrorReason implements com.google.protobuf.ProtocolMessageEnum { BAD_REQUEST_DATA(0, 0), BAD_REQUEST_PROTO(1, 1), SERVICE_NOT_FOUND(2, 2), METHOD_NOT_FOUND(3, 3), RPC_ERROR(4, 4), RPC_FAILED(5, 5), INVALID_REQUEST_PROTO(6, 6), BAD_RESPONSE_PROTO(7, 7), UNKNOWN_HOST(8, 8), IO_ERROR(9, 9), ; public final int getNumber() { return value; } public static ErrorReason valueOf(int value) { switch (value) { case 0: return BAD_REQUEST_DATA; case 1: return BAD_REQUEST_PROTO; case 2: return SERVICE_NOT_FOUND; case 3: return METHOD_NOT_FOUND; case 4: return RPC_ERROR; case 5: return RPC_FAILED; case 6: return INVALID_REQUEST_PROTO; case 7: return BAD_RESPONSE_PROTO; case 8: return UNKNOWN_HOST; case 9: return IO_ERROR; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<ErrorReason> internalGetValueMap() { return internalValueMap; } private static com.google.protobuf.Internal.EnumLiteMap<ErrorReason> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<ErrorReason>() { public ErrorReason findValueByNumber(int number) { return ErrorReason.valueOf(number) ; } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.googlecode.protobuf.socketrpc.SocketRpcProtos.getDescriptor().getEnumTypes().get(0); } private static final ErrorReason[] VALUES = { BAD_REQUEST_DATA, BAD_REQUEST_PROTO, SERVICE_NOT_FOUND, METHOD_NOT_FOUND, RPC_ERROR, RPC_FAILED, INVALID_REQUEST_PROTO, BAD_RESPONSE_PROTO, UNKNOWN_HOST, IO_ERROR, }; public static ErrorReason valueOf( 
com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int index; private final int value; private ErrorReason(int index, int value) { this.index = index; this.value = value; } static { com.googlecode.protobuf.socketrpc.SocketRpcProtos.getDescriptor(); } } public static final class Request extends com.google.protobuf.GeneratedMessage { // Use Request.newBuilder() to construct. private Request() {} private static final Request defaultInstance = new Request(); public static Request getDefaultInstance() { return defaultInstance; } public Request getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.googlecode.protobuf.socketrpc.SocketRpcProtos.internal_static_protobuf_socketrpc_Request_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return com.googlecode.protobuf.socketrpc.SocketRpcProtos.internal_static_protobuf_socketrpc_Request_fieldAccessorTable; } // required string service_name = 1; public static final int SERVICE_NAME_FIELD_NUMBER = 1; private boolean hasServiceName; private java.lang.String serviceName_ = ""; public boolean hasServiceName() { return hasServiceName; } public java.lang.String getServiceName() { return serviceName_; } // required string method_name = 2; public static final int METHOD_NAME_FIELD_NUMBER = 2; private boolean hasMethodName; private java.lang.String methodName_ = ""; public boolean hasMethodName() { return hasMethodName; } public java.lang.String getMethodName() { return methodName_; } // required bytes request_proto = 3; public static final int REQUEST_PROTO_FIELD_NUMBER = 3; private boolean hasRequestProto; private com.google.protobuf.ByteString requestProto_ = 
com.google.protobuf.ByteString.EMPTY; public boolean hasRequestProto() { return hasRequestProto; } public com.google.protobuf.ByteString getRequestProto() { return requestProto_; } public final boolean isInitialized() { if (!hasServiceName) return false; if (!hasMethodName) return false; if (!hasRequestProto) return false; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (hasServiceName()) { output.writeString(1, getServiceName()); } if (hasMethodName()) { output.writeString(2, getMethodName()); } if (hasRequestProto()) { output.writeBytes(3, getRequestProto()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (hasServiceName()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(1, getServiceName()); } if (hasMethodName()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(2, getMethodName()); } if (hasRequestProto()) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, getRequestProto()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return 
newBuilder().mergeFrom(data).buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeDelimitedFrom(input).buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeDelimitedFrom(input, extensionRegistry) .buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder(com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> { private com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request result; // Construct using com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request.newBuilder() private Builder() {} private static Builder create() { Builder builder = new Builder(); builder.result = new com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request(); return builder; } protected com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request internalGetResult() { return result; } public Builder clear() { if (result == null) { throw new IllegalStateException( "Cannot call clear() after build()."); } result = new com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request(); return this; } public Builder clone() { return create().mergeFrom(result); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request.getDescriptor(); } public com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request getDefaultInstanceForType() { return com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request.getDefaultInstance(); } public boolean isInitialized() { return result.isInitialized(); } public com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request build() { if (result != null && !isInitialized()) { throw newUninitializedMessageException(result); } return buildPartial(); } private com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { if (!isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return buildPartial(); } public com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request buildPartial() { if (result == null) { throw 
new IllegalStateException( "build() has already been called on this Builder."); } com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request returnMe = result; result = null; return returnMe; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request) { return mergeFrom((com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request other) { if (other == com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request.getDefaultInstance()) return this; if (other.hasServiceName()) { setServiceName(other.getServiceName()); } if (other.hasMethodName()) { setMethodName(other.getMethodName()); } if (other.hasRequestProto()) { setRequestProto(other.getRequestProto()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); return this; } break; } case 10: { setServiceName(input.readString()); break; } case 18: { setMethodName(input.readString()); break; } case 26: { setRequestProto(input.readBytes()); break; } } } } // required string service_name = 1; public boolean hasServiceName() { return result.hasServiceName(); } public java.lang.String getServiceName() { return result.getServiceName(); } public Builder setServiceName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } 
result.hasServiceName = true; result.serviceName_ = value; return this; } public Builder clearServiceName() { result.hasServiceName = false; result.serviceName_ = getDefaultInstance().getServiceName(); return this; } // required string method_name = 2; public boolean hasMethodName() { return result.hasMethodName(); } public java.lang.String getMethodName() { return result.getMethodName(); } public Builder setMethodName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.hasMethodName = true; result.methodName_ = value; return this; } public Builder clearMethodName() { result.hasMethodName = false; result.methodName_ = getDefaultInstance().getMethodName(); return this; } // required bytes request_proto = 3; public boolean hasRequestProto() { return result.hasRequestProto(); } public com.google.protobuf.ByteString getRequestProto() { return result.getRequestProto(); } public Builder setRequestProto(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } result.hasRequestProto = true; result.requestProto_ = value; return this; } public Builder clearRequestProto() { result.hasRequestProto = false; result.requestProto_ = getDefaultInstance().getRequestProto(); return this; } } static { com.googlecode.protobuf.socketrpc.SocketRpcProtos.getDescriptor(); } static { com.googlecode.protobuf.socketrpc.SocketRpcProtos.internalForceInit(); } } public static final class Response extends com.google.protobuf.GeneratedMessage { // Use Response.newBuilder() to construct. 
private Response() {} private static final Response defaultInstance = new Response(); public static Response getDefaultInstance() { return defaultInstance; } public Response getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.googlecode.protobuf.socketrpc.SocketRpcProtos.internal_static_protobuf_socketrpc_Response_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return com.googlecode.protobuf.socketrpc.SocketRpcProtos.internal_static_protobuf_socketrpc_Response_fieldAccessorTable; } // optional bytes response_proto = 1; public static final int RESPONSE_PROTO_FIELD_NUMBER = 1; private boolean hasResponseProto; private com.google.protobuf.ByteString responseProto_ = com.google.protobuf.ByteString.EMPTY; public boolean hasResponseProto() { return hasResponseProto; } public com.google.protobuf.ByteString getResponseProto() { return responseProto_; } // optional string error = 2; public static final int ERROR_FIELD_NUMBER = 2; private boolean hasError; private java.lang.String error_ = ""; public boolean hasError() { return hasError; } public java.lang.String getError() { return error_; } // optional bool callback = 3 [default = false]; public static final int CALLBACK_FIELD_NUMBER = 3; private boolean hasCallback; private boolean callback_ = false; public boolean hasCallback() { return hasCallback; } public boolean getCallback() { return callback_; } // optional .protobuf.socketrpc.ErrorReason error_reason = 4; public static final int ERROR_REASON_FIELD_NUMBER = 4; private boolean hasErrorReason; private com.googlecode.protobuf.socketrpc.SocketRpcProtos.ErrorReason errorReason_ = com.googlecode.protobuf.socketrpc.SocketRpcProtos.ErrorReason.BAD_REQUEST_DATA; public boolean hasErrorReason() { return hasErrorReason; } public com.googlecode.protobuf.socketrpc.SocketRpcProtos.ErrorReason getErrorReason() { return 
errorReason_; } public final boolean isInitialized() { return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (hasResponseProto()) { output.writeBytes(1, getResponseProto()); } if (hasError()) { output.writeString(2, getError()); } if (hasCallback()) { output.writeBool(3, getCallback()); } if (hasErrorReason()) { output.writeEnum(4, getErrorReason().getNumber()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (hasResponseProto()) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getResponseProto()); } if (hasError()) { size += com.google.protobuf.CodedOutputStream .computeStringSize(2, getError()); } if (hasCallback()) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, getCallback()); } if (hasErrorReason()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(4, getErrorReason().getNumber()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response parseFrom( byte[] 
data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeDelimitedFrom(input).buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeDelimitedFrom(input, extensionRegistry) .buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return 
newBuilder(this); } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> { private com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response result; // Construct using com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response.newBuilder() private Builder() {} private static Builder create() { Builder builder = new Builder(); builder.result = new com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response(); return builder; } protected com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response internalGetResult() { return result; } public Builder clear() { if (result == null) { throw new IllegalStateException( "Cannot call clear() after build()."); } result = new com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response(); return this; } public Builder clone() { return create().mergeFrom(result); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response.getDescriptor(); } public com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response getDefaultInstanceForType() { return com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response.getDefaultInstance(); } public boolean isInitialized() { return result.isInitialized(); } public com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response build() { if (result != null && !isInitialized()) { throw newUninitializedMessageException(result); } return buildPartial(); } private com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { if (!isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return buildPartial(); } public com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response buildPartial() { if (result == null) { throw new IllegalStateException( "build() has already been called on this Builder."); } com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response returnMe = 
result; result = null; return returnMe; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response) { return mergeFrom((com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response other) { if (other == com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response.getDefaultInstance()) return this; if (other.hasResponseProto()) { setResponseProto(other.getResponseProto()); } if (other.hasError()) { setError(other.getError()); } if (other.hasCallback()) { setCallback(other.getCallback()); } if (other.hasErrorReason()) { setErrorReason(other.getErrorReason()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); return this; } break; } case 10: { setResponseProto(input.readBytes()); break; } case 18: { setError(input.readString()); break; } case 24: { setCallback(input.readBool()); break; } case 32: { int rawValue = input.readEnum(); com.googlecode.protobuf.socketrpc.SocketRpcProtos.ErrorReason value = com.googlecode.protobuf.socketrpc.SocketRpcProtos.ErrorReason.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(4, rawValue); } else { setErrorReason(value); } break; } } } } // optional bytes response_proto = 1; public boolean hasResponseProto() { return 
result.hasResponseProto(); } public com.google.protobuf.ByteString getResponseProto() { return result.getResponseProto(); } public Builder setResponseProto(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } result.hasResponseProto = true; result.responseProto_ = value; return this; } public Builder clearResponseProto() { result.hasResponseProto = false; result.responseProto_ = getDefaultInstance().getResponseProto(); return this; } // optional string error = 2; public boolean hasError() { return result.hasError(); } public java.lang.String getError() { return result.getError(); } public Builder setError(java.lang.String value) { if (value == null) { throw new NullPointerException(); } result.hasError = true; result.error_ = value; return this; } public Builder clearError() { result.hasError = false; result.error_ = getDefaultInstance().getError(); return this; } // optional bool callback = 3 [default = false]; public boolean hasCallback() { return result.hasCallback(); } public boolean getCallback() { return result.getCallback(); } public Builder setCallback(boolean value) { result.hasCallback = true; result.callback_ = value; return this; } public Builder clearCallback() { result.hasCallback = false; result.callback_ = false; return this; } // optional .protobuf.socketrpc.ErrorReason error_reason = 4; public boolean hasErrorReason() { return result.hasErrorReason(); } public com.googlecode.protobuf.socketrpc.SocketRpcProtos.ErrorReason getErrorReason() { return result.getErrorReason(); } public Builder setErrorReason(com.googlecode.protobuf.socketrpc.SocketRpcProtos.ErrorReason value) { if (value == null) { throw new NullPointerException(); } result.hasErrorReason = true; result.errorReason_ = value; return this; } public Builder clearErrorReason() { result.hasErrorReason = false; result.errorReason_ = com.googlecode.protobuf.socketrpc.SocketRpcProtos.ErrorReason.BAD_REQUEST_DATA; return this; } } static { 
com.googlecode.protobuf.socketrpc.SocketRpcProtos.getDescriptor(); } static { com.googlecode.protobuf.socketrpc.SocketRpcProtos.internalForceInit(); } } private static com.google.protobuf.Descriptors.Descriptor internal_static_protobuf_socketrpc_Request_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_protobuf_socketrpc_Request_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_protobuf_socketrpc_Response_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_protobuf_socketrpc_Response_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\trpc.proto\022\022protobuf.socketrpc\"K\n\007Reque" + "st\022\024\n\014service_name\030\001 \002(\t\022\023\n\013method_name\030" + "\002 \002(\t\022\025\n\rrequest_proto\030\003 \002(\014\"\201\001\n\010Respons" + "e\022\026\n\016response_proto\030\001 \001(\014\022\r\n\005error\030\002 \001(\t" + "\022\027\n\010callback\030\003 \001(\010:\005false\0225\n\014error_reaso" + "n\030\004 \001(\0162\037.protobuf.socketrpc.ErrorReason" + "*\331\001\n\013ErrorReason\022\024\n\020BAD_REQUEST_DATA\020\000\022\025" + "\n\021BAD_REQUEST_PROTO\020\001\022\025\n\021SERVICE_NOT_FOU" + "ND\020\002\022\024\n\020METHOD_NOT_FOUND\020\003\022\r\n\tRPC_ERROR\020" + "\004\022\016\n\nRPC_FAILED\020\005\022\031\n\025INVALID_REQUEST_PRO", "TO\020\006\022\026\n\022BAD_RESPONSE_PROTO\020\007\022\020\n\014UNKNOWN_" + "HOST\020\010\022\014\n\010IO_ERROR\020\tB4\n!com.googlecode.p" + "rotobuf.socketrpcB\017SocketRpcProtos" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry 
assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; internal_static_protobuf_socketrpc_Request_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_protobuf_socketrpc_Request_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_protobuf_socketrpc_Request_descriptor, new java.lang.String[] { "ServiceName", "MethodName", "RequestProto", }, com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request.class, com.googlecode.protobuf.socketrpc.SocketRpcProtos.Request.Builder.class); internal_static_protobuf_socketrpc_Response_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_protobuf_socketrpc_Response_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_protobuf_socketrpc_Response_descriptor, new java.lang.String[] { "ResponseProto", "Error", "Callback", "ErrorReason", }, com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response.class, com.googlecode.protobuf.socketrpc.SocketRpcProtos.Response.Builder.class); return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } public static void internalForceInit() {} }
/*
 * Copyright (c) 2017. Kaede <kidhaibara@gmail.com>
 */

package moe.studio.log;

import android.content.Context;
import android.support.annotation.WorkerThread;

import java.io.File;
import java.util.Date;
import java.util.List;

/**
 * BLog is an Android LogCat extended Utility. It can simplify the way you use
 * {@link android.util.Log}, as well as write our log message into file for after support.
 *
 * @author Kaede
 * @version date 16/9/22
 */
@SuppressWarnings({"WeakerAccess", "unused"})
public class BLog {

    // volatile is required for the double-checked locking in initialize(LogSetting);
    // without it, a second thread may observe a partially constructed LogEngine.
    private static volatile LogEngine sLogEngine;

    private BLog() {
    }

    /**
     * Ensures {@link #initialize} has been called.
     *
     * @return always {@code true} when the engine is present
     * @throws RuntimeException if BLog has not been initialized yet
     */
    private static boolean checkInit() {
        boolean init = sLogEngine != null;
        if (!init) {
            throw new RuntimeException("Pls call Blog.initialize first!");
        }
        return true;
    }

    /**
     * You should call this method before using BLog.
     */
    public static void initialize(Context context) {
        if (context == null) {
            throw new RuntimeException("Context is null.");
        }
        initialize(new LogSetting.Builder(context).build());
    }

    /**
     * You should call this method before using BLog.
     *
     * @param setting Custom config
     */
    public static void initialize(LogSetting setting) {
        if (setting == null) {
            throw new RuntimeException("Setting is null.");
        }
        // Double-checked locking; safe because sLogEngine is volatile.
        if (sLogEngine == null) {
            synchronized (BLog.class) {
                if (sLogEngine == null) {
                    sLogEngine = new LogEngine(setting);
                }
            }
        }
    }

    /**
     * You should call this method before you call {@link BLog#initialize(Context)} again.
     */
    public static void shutdown() {
        if (checkInit()) {
            sLogEngine.shutdown();
            sLogEngine = null;
        }
    }

    /**
     * Verbose log.
     */
    public static void v(String message) {
        if (checkInit()) {
            sLogEngine.verbose(null, message);
        }
    }

    public static void v(String tag, String message) {
        if (checkInit()) {
            sLogEngine.verbose(tag, message);
        }
    }

    public static void v(String message, Throwable throwable) {
        if (checkInit()) {
            sLogEngine.verbose(null, throwable, message);
        }
    }

    public static void v(String tag, String message, Throwable throwable) {
        if (checkInit()) {
            sLogEngine.verbose(tag, throwable, message);
        }
    }

    @SuppressWarnings("SpellCheckingInspection")
    public static void vfmt(String tag, String fmt, Object... args) {
        if (checkInit()) {
            sLogEngine.verbose(tag, fmt, args);
        }
    }

    /**
     * Debug log.
     */
    public static void d(String message) {
        if (checkInit()) {
            sLogEngine.debug(null, message);
        }
    }

    public static void d(String tag, String message) {
        if (checkInit()) {
            sLogEngine.debug(tag, message);
        }
    }

    public static void d(String message, Throwable throwable) {
        if (checkInit()) {
            sLogEngine.debug(null, throwable, message);
        }
    }

    public static void d(String tag, String message, Throwable throwable) {
        if (checkInit()) {
            sLogEngine.debug(tag, throwable, message);
        }
    }

    @SuppressWarnings("SpellCheckingInspection")
    public static void dfmt(String tag, String fmt, Object... args) {
        if (checkInit()) {
            sLogEngine.debug(tag, fmt, args);
        }
    }

    /**
     * Info log.
     */
    public static void i(String message) {
        if (checkInit()) {
            sLogEngine.info(null, message);
        }
    }

    public static void i(String tag, String message) {
        if (checkInit()) {
            sLogEngine.info(tag, message);
        }
    }

    public static void i(String message, Throwable throwable) {
        if (checkInit()) {
            // FIX: previously called sLogEngine.verbose(...), silently demoting
            // this overload's output to VERBOSE priority.
            sLogEngine.info(null, throwable, message);
        }
    }

    public static void i(String tag, String message, Throwable throwable) {
        if (checkInit()) {
            sLogEngine.info(tag, throwable, message);
        }
    }

    @SuppressWarnings("SpellCheckingInspection")
    public static void ifmt(String tag, String fmt, Object... args) {
        if (checkInit()) {
            sLogEngine.info(tag, fmt, args);
        }
    }

    /**
     * Warning log.
     */
    public static void w(String message) {
        if (checkInit()) {
            sLogEngine.warn(null, message);
        }
    }

    public static void w(String tag, String message) {
        if (checkInit()) {
            sLogEngine.warn(tag, message);
        }
    }

    public static void w(String message, Throwable throwable) {
        if (checkInit()) {
            sLogEngine.warn(null, throwable, message);
        }
    }

    public static void w(String tag, String message, Throwable throwable) {
        if (checkInit()) {
            sLogEngine.warn(tag, throwable, message);
        }
    }

    @SuppressWarnings("SpellCheckingInspection")
    public static void wfmt(String tag, String fmt, Object... args) {
        if (checkInit()) {
            sLogEngine.warn(tag, fmt, args);
        }
    }

    /**
     * Error log.
     */
    public static void e(String message) {
        if (checkInit()) {
            sLogEngine.error(null, message);
        }
    }

    public static void e(String tag, String message) {
        if (checkInit()) {
            sLogEngine.error(tag, message);
        }
    }

    public static void e(String message, Throwable throwable) {
        if (checkInit()) {
            sLogEngine.error(null, throwable, message);
        }
    }

    public static void e(String tag, String message, Throwable throwable) {
        if (checkInit()) {
            sLogEngine.error(tag, throwable, message);
        }
    }

    @SuppressWarnings("SpellCheckingInspection")
    public static void efmt(String tag, String fmt, Object... args) {
        if (checkInit()) {
            sLogEngine.error(tag, fmt, args);
        }
    }

    /**
     * What-a-terrible-failure log.
     */
    public static void wtf(String message) {
        if (checkInit()) {
            sLogEngine.wtf(null, message);
        }
    }

    public static void wtf(String tag, String message) {
        if (checkInit()) {
            sLogEngine.wtf(tag, message);
        }
    }

    @SuppressWarnings("SpellCheckingInspection")
    public static void wtffmt(String tag, String fmt, Object... args) {
        if (checkInit()) {
            sLogEngine.wtf(tag, fmt, args);
        }
    }

    /**
     * Log event, logging message in an unique file.
     * Note that this api will log message in logcat according to
     * {@link LogSetting#getEventPriority()}.
     **/
    public static void event(String message) {
        if (checkInit()) {
            sLogEngine.event(null, message);
        }
    }

    /**
     * See {@linkplain #event(String)}.
     **/
    public static void event(String tag, String message) {
        if (checkInit()) {
            sLogEngine.event(tag, message);
        }
    }

    /**
     * Sync log message to file.
     */
    @WorkerThread
    public static void syncLog(int priority, String message) {
        if (checkInit()) {
            sLogEngine.syncLog(priority, null, message);
        }
    }

    /**
     * See {@linkplain #syncLog(int, String)}.
     **/
    @WorkerThread
    public static void syncLog(int priority, String tag, String message) {
        if (checkInit()) {
            sLogEngine.syncLog(priority, tag, message);
        }
    }

    /**
     * Get all log files.
     *
     * @param mode mode for filtering log files, support '|' operation,
     *             see {@link LogSetting#LOG}, {@link LogSetting#EVENT}
     * @return matching files, or {@code null} (checkInit throws before that can happen)
     */
    public static File[] getLogFiles(int mode) {
        if (checkInit()) {
            return sLogEngine.queryFiles(mode);
        }
        return null;
    }

    /**
     * Get log files by day.
     *
     * @param date retain null if today
     */
    public static File[] getLogFilesByDate(int mode, Date date) {
        if (checkInit()) {
            if (date == null) {
                date = new Date(); // today
            }
            return sLogEngine.queryFilesByDate(mode, date.getTime());
        }
        return null;
    }

    /**
     * @deprecated use {@link #zippingLogFiles(int, List)} instead.
     */
    @Deprecated
    @WorkerThread
    public static File zippingLogFiles(int mode) {
        return zippingLogFiles(mode, null);
    }

    /**
     * @deprecated use {@link #zippingLogFilesByDate(int, Date, List)} instead.
     */
    @Deprecated
    @WorkerThread
    public static File zippingLogFilesByDate(int mode, Date date) {
        return zippingLogFilesByDate(mode, date, null);
    }

    /**
     * Zipping log files and return the zip file.
     */
    @WorkerThread
    public static File zippingLogFiles(int mode, List<File> attaches) {
        if (checkInit()) {
            return sLogEngine.zippingFiles(mode, attaches);
        }
        return null;
    }

    /**
     * See {@linkplain #zippingLogFiles(int)}.
     **/
    @WorkerThread
    public static File zippingLogFilesByDate(int mode, Date date, List<File> attaches) {
        if (checkInit()) {
            if (date == null) {
                date = new Date(); // today
            }
            return sLogEngine.zippingFiles(mode, date.getTime(), attaches);
        }
        return null;
    }

    /**
     * Get log file's directory.
     */
    public static File getLogDir() {
        if (checkInit()) {
            return sLogEngine.getSetting().getLogDirectory();
        }
        return null;
    }

    /**
     * Delete existing log files.
     */
    public static void deleteLogs() {
        if (checkInit()) {
            InternalUtils.delete(sLogEngine.getSetting().getLogDirectory());
        }
    }

    /**
     * Package accessible for testcase.
     */
    static LogEngine getLogger() {
        return sLogEngine;
    }

    /**
     * Package accessible for testcase.
     */
    static LogSetting getSetting() {
        if (checkInit()) {
            return sLogEngine.getSetting();
        }
        return null;
    }
}
package skaianet.die.middle;

import skaianet.die.ast.*;
import skaianet.die.front.Color;
import skaianet.die.front.ColoredIdentifier;
import skaianet.die.instructions.*;

import java.util.ArrayList;
import java.util.HashMap;

/**
 * Compiles an AST {@link Statement} tree into a flat {@link CompiledProcedure}.
 *
 * Variable allocation works like a stack of numbered slots: {@code nextFreeVar} is the
 * next unused slot index, {@code nextVar()} pushes (and tracks the high-water mark in
 * {@code maxVars}), and {@code freeVar(...)} pops. Expression compilation always leaves
 * its result in the slot that was {@code nextFreeVar} at the time of the call.
 * Instruction addresses are positions in {@code output} (see {@link #label()}), and
 * {@code debugging} maps instruction addresses to source trace info.
 */
public class Compiler {
    // Instructions emitted so far; cleared once compile() packages them up.
    private final ArrayList<Instruction> output = new ArrayList<>();
    // Instruction address -> source trace info, for diagnostics.
    private HashMap<Integer, String> debugging;
    // nextFreeVar: next unused variable slot; maxVars: high-water mark of slots used.
    private int nextFreeVar, maxVars;
    // Enclosing-procedure variable capture, if compiling a nested procedure (may be null).
    private ClosureScope closure;

    /**
     * Compiles one procedure body into a CompiledProcedure.
     *
     * @param procedure the body; RETURN/COMPOUND_RETURN bodies produce a return slot
     * @param arguments formal parameters, bound to slots 1..n in declaration order
     * @param closure   capture scope for variables taken from an enclosing procedure
     * @return the compiled procedure (returned slot is null for non-returning bodies)
     * @throws CompilingException on malformed input, with source trace attached
     */
    public CompiledProcedure compile(Statement procedure, ColoredIdentifier[] arguments, ClosureScope closure) throws CompilingException {
        // A Compiler instance is single-use per procedure; leftover output means misuse.
        if (!output.isEmpty()) {
            throw new CompilingException("Malformed state of Compiler!");
        }
        this.closure = closure;
        debugging = new HashMap<>();
        Scope outermost = new Scope(0);
        maxVars = nextFreeVar = 1; // First variable is always the root energy context.
        for (ColoredIdentifier argument : arguments) {
            outermost.defineVar(argument, nextFreeVar++);
        }
        Integer returned;
        if (procedure.type == StatementType.RETURN || procedure.type == StatementType.COMPOUND_RETURN) {
            returned = compileReturnStatement(procedure, outermost, null);
        } else {
            compileStatement(procedure, outermost, null);
            returned = null;
        }
        Instruction[] instructions = output.toArray(new Instruction[output.size()]);
        output.clear();
        return new CompiledProcedure(arguments.length, maxVars, instructions, debugging, returned);
    }

    /**
     * Emits instructions for one statement. {@code executionThread} is the inherited
     * thread color; statements may override it via {@code statement.getThread(...)}.
     * Any CompilingException gets this statement's trace info appended before rethrow.
     */
    private void compileStatement(Statement statement, Scope scope, Color executionThread) throws CompilingException {
        debugging.put(label(), statement.getTraceInfo());
        try {
            switch (statement.type) {
                case EMPTY: {
                    statement.checkSize(0);
                    // No code to generate.
                    break;
                }
                case COMPOUND: {
                    // Remember the slot stack depth so block-local variables can be
                    // discarded when the block ends.
                    int prevDepth = nextFreeVar;
                    Scope inner = new Scope(scope);
                    for (GenericNode<?, ?> node : statement) {
                        compileStatement((Statement) node, inner, executionThread);
                    }
                    if (nextFreeVar < prevDepth) {
                        throw new CompilingException("Internal error: lost parent scope variables!");
                    }
                    nextFreeVar = prevDepth;
                    break;
                }
                case EXPRESSION: {
                    // Evaluate for side effects only; discard the result slot.
                    compileExpression((Expression) statement.get(), scope, statement.getThread(executionThread));
                    freeVar();
                    break;
                }
                case ATHLOOP: {
                    // Children: (0) condition expression, (1) loop body, (2) post-loop body.
                    // The condition slot stays live for the whole loop; EnterLoop's exit
                    // target is back-patched via bind() once the post-loop code is emitted.
                    executionThread = statement.getThread(executionThread);
                    statement.checkSize(3);
                    int condition = nextFreeVar;
                    compileExpression((Expression) statement.get(0), scope, executionThread);
                    EnterLoopInstruction enterInstruction = new EnterLoopInstruction(executionThread, condition);
                    output.add(enterInstruction);
                    int loopTop = label();
                    compileStatement((Statement) statement.get(1), scope, executionThread);
                    output.add(new ExitLoopInstruction(executionThread, condition, scope.getEnergyRef(), loopTop));
                    compileStatement((Statement) statement.get(2), new Scope(scope, condition), executionThread);
                    enterInstruction.bind(label());
                    freeVar(condition);
                    break;
                }
                case IMPORT: {
                    // Children carry (0) the imported name, (1) the local binding name.
                    statement.checkSize(2);
                    int out = nextVar();
                    output.add(new ImportInstruction(statement.getThread(executionThread), out, (ColoredIdentifier) statement.get(0).getAssoc(), (ColoredIdentifier) statement.get(1).getAssoc()));
                    scope.defineVar((ColoredIdentifier) statement.get(1).getAssoc(), out);
                    break;
                }
                case ASSIGN: {
                    // Evaluate RHS into the next free slot, then store to the LHS;
                    // assignToExpression is responsible for freeing the temp slot.
                    statement.checkSize(2);
                    int temp = nextFreeVar;
                    compileExpression((Expression) statement.get(1), scope, statement.getThread(executionThread));
                    assignToExpression((Expression) statement.get(0), scope, statement.getThread(executionThread), temp);
                    break;
                }
                case UTILDEF: {
                    // Nested procedure definition: (0) name, (1) parameter list, (2) body.
                    // Compiled with a fresh Compiler; if the body captures outer variables
                    // a ClosureInstruction carries the capture mapping, otherwise the
                    // procedure is just a constant.
                    statement.checkSize(3);
                    ColoredIdentifier name = (ColoredIdentifier) statement.get(0).getAssoc();
                    Compiler compiler = new Compiler();
                    Expression children = (Expression) statement.get(1);
                    ColoredIdentifier[] arguments = new ColoredIdentifier[children.size()];
                    for (int i = 0; i < arguments.length; i++) {
                        arguments[i] = (ColoredIdentifier) children.get(i).getAssoc();
                    }
                    ClosureScope closure = new ClosureScope(scope, this.closure);
                    CompiledProcedure procedure = compiler.compile((Statement) statement.get(2), arguments, closure);
                    int out = nextVar();
                    if (closure.hasAny()) {
                        output.add(new ClosureInstruction(statement.getThread(executionThread), out, procedure, closure.getMapping()));
                    } else {
                        output.add(new ConstantInstruction(statement.getThread(executionThread), out, procedure));
                    }
                    scope.defineVar(name, out);
                    break;
                }
                case BIFURCATE_THREAD: {
                    // Both children must be THIS references carrying the two thread colors.
                    statement.checkSize(2);
                    Expression a = (Expression) statement.get(0), b = (Expression) statement.get(1);
                    if (a.type != ExpressionType.THIS || b.type != ExpressionType.THIS) {
                        throw new CompilingException("Unexpected non-THIS in BIFURCATE_THREAD children.");
                    }
                    Color colorA = (Color) a.getAssoc(), colorB = (Color) b.getAssoc();
                    output.add(new ThreadBifurcationInstruction(statement.getThread(executionThread), colorA, colorB));
                    break;
                }
                default:
                    throw new CompilingException("Unhandled statement: " + statement.type);
            }
        } catch (CompilingException ex) {
            ex.addTrace(statement.getTraceInfo());
            throw ex;
        }
    }

    /**
     * Stores slot {@code source} into an lvalue expression (variable, field, or array
     * element). Frees {@code source} — except for a first definition of a VARIABLE,
     * where the slot itself becomes the variable's storage.
     */
    private void assignToExpression(Expression target, Scope scope, Color thread, int source) throws CompilingException {
        switch (target.type) {
            case VARIABLE:
                if (scope.isDefined((ColoredIdentifier) target.getAssoc())) {
                    output.add(new MoveInstruction(thread, source, scope.get((ColoredIdentifier) target.getAssoc())));
                    freeVar(source);
                } else {
                    // First assignment defines the variable: keep the source slot as
                    // its permanent home instead of copying and freeing.
                    scope.defineVar((ColoredIdentifier) target.getAssoc(), source);
                }
                break;
            case FIELDREF:
                // (0) object expression, (1) field name.
                target.checkSize(2);
                int objectId = nextFreeVar;
                compileExpression((Expression) target.get(0), scope, thread);
                output.add(new FieldStoreInstruction(thread, objectId, (ColoredIdentifier) target.get(1).getAssoc(), source));
                freeVar(objectId);
                freeVar(source);
                break;
            case ARRAYREF:
                // (0) array expression, (1) index expression. Slots are freed in
                // strict reverse order of allocation (index, array, then source).
                target.checkSize(2);
                int arrayId = nextFreeVar;
                compileExpression((Expression) target.get(0), scope, thread);
                int index = nextFreeVar;
                compileExpression((Expression) target.get(1), scope, thread);
                output.add(new ArrayStoreInstruction(thread, arrayId, index, source));
                freeVar(index);
                freeVar(arrayId);
                freeVar(source);
                break;
            default:
                throw new CompilingException("Unassignable expression type: " + target.type);
        }
    }

    // Callers must throw away any extra variables!
    /**
     * Compiles a returning statement and reports which slot holds the return value.
     *
     * @return the slot index containing the procedure's return value
     */
    private int compileReturnStatement(Statement statement, Scope scope, Color executionThread) throws CompilingException {
        debugging.put(label(), statement.getTraceInfo());
        try {
            switch (statement.type) {
                case RETURN: {
                    statement.checkSize(1);
                    int out = nextFreeVar;
                    compileExpression((Expression) statement.get(0), scope, statement.getThread(executionThread));
                    return out;
                }
                case COMPOUND_RETURN: {
                    // All children but the last are plain statements; the last child
                    // must itself be a returning statement.
                    executionThread = statement.getThread(executionThread);
                    Scope inner = new Scope(scope);
                    for (int i = 0; i < statement.size() - 1; i++) {
                        compileStatement((Statement) statement.get(i), inner, executionThread);
                    }
                    return compileReturnStatement((Statement) statement.get(statement.size() - 1), inner, executionThread);
                }
                default:
                    throw new CompilingException("Unhandled return statement: " + statement.type);
            }
        } catch (CompilingException ex) {
            ex.addTrace(statement.getTraceInfo());
            throw ex;
        }
    }

    /** Address of the next instruction to be emitted. */
    private int label() {
        return output.size();
    }

    /**
     * Pops the top variable slot, verifying it is the one the caller expects —
     * catches out-of-order frees in the slot-stack discipline.
     */
    private void freeVar(int var) throws CompilingException {
        if (var != --nextFreeVar) {
            throw new CompilingException("Malformed variable tracking: " + var + " instead of " + nextFreeVar);
        }
    }

    /** Pops the top variable slot without verification. */
    private void freeVar() {
        --nextFreeVar;
    }

    /** Pushes a fresh variable slot and records the high-water mark. */
    private int nextVar() {
        int out = nextFreeVar++;
        maxVars = Math.max(maxVars, nextFreeVar);
        return out;
    }

    /**
     * Emits instructions that leave the expression's value in a freshly allocated
     * slot ({@code varOut}, which was {@code nextFreeVar} on entry). Sub-expression
     * cases first rewind {@code nextFreeVar} so the first operand lands in
     * {@code varOut} itself, then restore the invariant before returning.
     */
    private void compileExpression(Expression expression, Scope scope, Color executionThread) throws CompilingException {
        debugging.put(label(), expression.getTraceInfo());
        int varOut = nextVar();
        try {
            switch (expression.type) {
                case VARIABLE:
                    ColoredIdentifier identifier = (ColoredIdentifier) expression.getAssoc();
                    // Locals win over closure captures; closure fetch only when the
                    // name is not locally defined but the capture scope provides it.
                    if (!scope.isDefined(identifier) && closure != null && closure.provides(identifier)) {
                        output.add(new ClosureFetchInstruction(executionThread, closure.get(identifier), varOut));
                    } else {
                        output.add(new MoveInstruction(executionThread, scope.get(identifier), varOut));
                    }
                    return;
                case CONST_INTEGER:
                    output.add(new ConstantInstruction(executionThread, varOut, expression.getAssoc()));
                    return;
                case CONST_DOUBLE:
                    output.add(new ConstantInstruction(executionThread, varOut, expression.getAssoc()));
                    return;
                case CONST_STRING:
                    output.add(new ConstantInstruction(executionThread, varOut, expression.getAssoc()));
                    return;
                case INVOKE:
                    // Callee and arguments are compiled into consecutive slots starting
                    // at varOut; the argument count is recovered from the slot distance.
                    --nextFreeVar;
                    for (GenericNode<?, ?> exp : expression) {
                        compileExpression((Expression) exp, scope, executionThread);
                    }
                    output.add(new InvokeInstruction(executionThread, varOut, nextFreeVar - varOut - 1, scope.getEnergyRef()));
                    nextFreeVar = varOut + 1; // collapse argument slots; result stays in varOut
                    return;
                case FIELDREF:
                    expression.checkSize(2);
                    --nextFreeVar;
                    compileExpression((Expression) expression.get(0), scope, executionThread);
                    output.add(new FieldFetchInstruction(executionThread, varOut, (ColoredIdentifier) expression.get(1).getAssoc()));
                    return;
                case ARRAYREF:
                    expression.checkSize(2);
                    --nextFreeVar;
                    compileExpression((Expression) expression.get(0), scope, executionThread);
                    int index = nextFreeVar;
                    compileExpression((Expression) expression.get(1), scope, executionThread);
                    --nextFreeVar; // drop the index slot; fetched element replaces varOut
                    output.add(new ArrayFetchInstruction(executionThread, varOut, index));
                    return;
                case ARRAYCONST:
                    --nextFreeVar;
                    for (GenericNode<?, ?> node : expression) {
                        compileExpression((Expression) node, scope, executionThread);
                    }
                    output.add(new ArrayConstantInstruction(executionThread, varOut, expression.size()));
                    nextFreeVar = varOut + 1; // collapse element slots; array stays in varOut
                    return;
                case ADD:
                case SUBTRACT:
                case MULTIPLY:
                case DIVIDE:
                case REMAINDER:
                case BIAND:
                case BIXOR:
                case BIOR:
                case RASHIFT:
                case RLSHIFT:
                case LSHIFT:
                case CONCAT:
                case LOR:
                case LAND:
                case CMPLT:
                case CMPLE:
                case CMPNE:
                case CMPEQ:
                case CMPGE:
                case CMPGT:
                    // Binary operators
                    if (expression.size() != 1 || (expression.type != ExpressionType.ADD && expression.type != ExpressionType.SUBTRACT)) {
                        // Add and subtract can be unary
                        expression.checkSize(2);
                        --nextFreeVar;
                        compileExpression((Expression) expression.get(0), scope, executionThread);
                        int param = nextFreeVar;
                        compileExpression((Expression) expression.get(1), scope, executionThread);
                        --nextFreeVar;
                        output.add(new MathInstruction(executionThread, varOut, param, expression.type.getMathOp()));
                        return;
                    }
                    // DELIBERATE fall-through: a one-child ADD/SUBTRACT is a unary
                    // operator and is handled by the NOT case below.
                case NOT:
                    // Unary operators
                    expression.checkSize(1);
                    --nextFreeVar;
                    compileExpression((Expression) expression.get(0), scope, executionThread);
                    output.add(new MathInstruction(executionThread, varOut, -1, expression.type.getMathOp()));
                    return;
                case NULL:
                    output.add(new ConstantInstruction(executionThread, varOut, null));
                    return;
                case TRUE:
                    output.add(new ConstantInstruction(executionThread, varOut, true));
                    return;
                case FALSE:
                    output.add(new ConstantInstruction(executionThread, varOut, false));
                    return;
                case THIS:
                    output.add(new ThisRefInstruction(executionThread, varOut, (Color) expression.getAssoc()));
                    return;
                default:
                    throw new CompilingException("Unsupported expression: " + expression.type);
            }
        } catch (CompilingException ex) {
            ex.addTrace(expression.getTraceInfo());
            throw ex;
        }
    }
}
/* Copyright 2012-2015 Stefano Cappa, Jacopo Bulla, Davide Caio

Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. */
package it.swim.servlet.profilo.azioni;

import it.swim.util.UtenteCollegatoUtil;

import java.io.IOException;
import java.util.List;

import javax.ejb.EJB;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import sessionBeans.localInterfaces.GestioneCollaborazioniLocal;
import entityBeans.Collaborazione;
import exceptions.LoginException;
import lombok.extern.slf4j.Slf4j;

/**
 * Servlet backing the "collaborations" page of a user's profile.
 *
 * <p>GET loads three lists for the logged-in user (collaborations still in
 * progress, collaborations awaiting feedback, terminated collaborations with
 * feedback) into request attributes and forwards to the JSP. POST first
 * terminates the collaboration identified by the {@code tipo} parameter, then
 * reloads the same three lists and forwards to the same JSP.
 */
@Slf4j
public class CollaborazioniServlet extends HttpServlet {

	private static final long serialVersionUID = 1L;

	/** Target view for both GET and POST. */
	private static final String COLLABORAZIONI_JSP =
			"/jsp/utenti/profilo/collaborazioni.jsp";

	@EJB
	private GestioneCollaborazioniLocal gestioneCollab;

	/**
	 * @see HttpServlet#HttpServlet()
	 */
	public CollaborazioniServlet() {
		super();
	}

	/**
	 * Loads the three collaboration lists for the logged-in user and forwards
	 * to the collaborations JSP. Redirects to the home page when nobody is
	 * logged in.
	 *
	 * @see HttpServlet#doGet(HttpServletRequest, HttpServletResponse)
	 */
	protected void doGet(HttpServletRequest request, HttpServletResponse response)
			throws ServletException, IOException {
		// Email of the logged-in user, read from the session via a utility class.
		String emailUtenteCollegato =
				(String) UtenteCollegatoUtil.getEmailUtenteCollegato(request);
		// A null email means the user is not logged in: redirect to the home page.
		if (emailUtenteCollegato == null) {
			response.sendRedirect("../../home");
			return;
		}
		populateCollaborazioni(request, emailUtenteCollegato);
		forwardToJsp(request, response);
	}

	/**
	 * Terminates the collaboration identified by the {@code tipo} request
	 * parameter, then re-renders the collaborations page exactly like
	 * {@link #doGet}.
	 *
	 * @see HttpServlet#doPost(HttpServletRequest, HttpServletResponse)
	 */
	protected void doPost(HttpServletRequest request, HttpServletResponse response)
			throws ServletException, IOException {
		try {
			gestioneCollab.terminaCollaborazione(
					Long.valueOf(request.getParameter("tipo")));
		} catch (NumberFormatException e) {
			// "tipo" missing or not a number; the page is still rendered below.
			// FIX: was a swallowed printStackTrace().
			log.error("Invalid 'tipo' parameter: " + request.getParameter("tipo"), e);
		} catch (LoginException e) {
			// FIX: was a swallowed printStackTrace().
			log.error(e.getMessage(), e);
		}
		// Email of the logged-in user, read from the session via a utility class.
		String emailUtenteCollegato =
				(String) UtenteCollegatoUtil.getEmailUtenteCollegato(request);
		// A null email means the user is not logged in: redirect to the home page.
		if (emailUtenteCollegato == null) {
			response.sendRedirect("../../home");
			return;
		}
		populateCollaborazioni(request, emailUtenteCollegato);
		forwardToJsp(request, response);
	}

	/**
	 * Loads the three collaboration lists into request attributes. Each list
	 * is handled independently: a failure in one section is reported through
	 * its own error attribute and does not prevent the others from loading.
	 *
	 * <p>This helper replaces two verbatim copies of the same ~60 lines that
	 * previously lived in doGet and doPost. It also fixes two bugs of the
	 * original: a guaranteed NPE (the null-check set an error attribute but
	 * then fell through to {@code list.size()}), and a copy/paste mistake that
	 * reported "terminated with feedback" errors under the
	 * "erroreGetCollaborazioniSenzaFeedback" attribute.
	 *
	 * @param request the current request, receives the attributes
	 * @param emailUtenteCollegato email of the logged-in user, never null
	 */
	private void populateCollaborazioni(HttpServletRequest request,
			String emailUtenteCollegato) {
		// 1) Collaborations still in progress (to be terminated).
		try {
			List<Collaborazione> collaborazioniDaTerminare =
					gestioneCollab.getCollaborazioniDaTerminare(emailUtenteCollegato);
			if (collaborazioniDaTerminare == null) {
				request.setAttribute("erroreGetCollaborazioniDaTerminare",
						"Impossibile ottenere le collaborazioni");
			} else if (collaborazioniDaTerminare.size() >= 1) {
				request.setAttribute("collaborazioniDaTerminare",
						collaborazioniDaTerminare);
			} else {
				request.setAttribute("nonCiSonoCollaborazioniDaTerminare",
						"Non ci sono collaborazioni in corso");
			}
		} catch (LoginException e) {
			log.error(e.getMessage(), e);
			request.setAttribute("erroreGetCollaborazioniDaTerminare",
					"Impossibile ottenere le collaborazioni in corso");
		}
		// 2) Collaborations created by the user whose feedback is still missing.
		try {
			List<Collaborazione> collaborazioniDaRilasciareFeedBack =
					gestioneCollab.getCollaborazioniCreateFeedbackNonRilasciato(
							emailUtenteCollegato);
			if (collaborazioniDaRilasciareFeedBack == null) {
				request.setAttribute("erroreGetCollaborazioniSenzaFeedback",
						"Impossibile ottenere le collaborazioni senza feedback");
			} else if (collaborazioniDaRilasciareFeedBack.size() >= 1) {
				request.setAttribute("collaborazioniDaRilasciareFeedBack",
						collaborazioniDaRilasciareFeedBack);
			} else {
				// The (empty) list is still exposed, mirroring the original page contract.
				request.setAttribute("collaborazioniDaRilasciareFeedBack",
						collaborazioniDaRilasciareFeedBack);
				request.setAttribute("nonCiSonoCollaborazioniSenzaFeedback",
						"Non ci sono collaborazioni senza feedback");
			}
		} catch (LoginException e) {
			log.error(e.getMessage(), e);
			request.setAttribute("erroreGetCollaborazioniSenzaFeedback",
					"Impossibile ottenere le collaborazioni senza feedback");
		}
		// 3) Terminated collaborations that already have feedback.
		try {
			List<Collaborazione> collaborazioniTerminateConFeedBack =
					gestioneCollab.getCollaborazioniTerminateConFeedBack(
							emailUtenteCollegato);
			if (collaborazioniTerminateConFeedBack == null) {
				request.setAttribute("erroreGetCollaborazioniTerminateConFeedBack",
						"Impossibile ottenere le collaborazioni terminate con feedback");
			} else if (collaborazioniTerminateConFeedBack.size() >= 1) {
				request.setAttribute("collaborazioniTerminateConFeedBack",
						collaborazioniTerminateConFeedBack);
			} else {
				// The (empty) list is still exposed, mirroring the original page contract.
				request.setAttribute("collaborazioniTerminateConFeedBack",
						collaborazioniTerminateConFeedBack);
				request.setAttribute("nonCiSonoCollaborazioniTerminateConFeedBack",
						"Non ci sono collaborazioni terminate con feedback");
			}
		} catch (LoginException e) {
			log.error(e.getMessage(), e);
			// FIX: the original set "erroreGetCollaborazioniSenzaFeedback" here.
			request.setAttribute("erroreGetCollaborazioniTerminateConFeedBack",
					"Impossibile ottenere le collaborazioni terminate con feedback");
		}
	}

	/** Forwards the (now populated) request to the collaborations JSP. */
	private void forwardToJsp(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {
		getServletConfig().getServletContext()
				.getRequestDispatcher(COLLABORAZIONI_JSP)
				.forward(request, response);
	}
}
package io.github.TidyTracks;

import javafx.scene.control.Alert;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.XMLReader;
import org.xml.sax.ext.LexicalHandler;
import org.xml.sax.helpers.DefaultHandler;

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

/**
 * SAX-based GPX "tidier": streams an input GPX file to a pretty-printed
 * output file while dropping every element whose tag name appears in
 * {@code parserOptions}, counting what was removed, and finally showing the
 * user a summary dialog.
 *
 * <p>Acts as content handler, lexical handler (for CDATA pass-through) and
 * error handler of the same SAX parse.
 */
class GpxParser extends DefaultHandler implements LexicalHandler, TrackParser {

    /** Value written into the gpx element's creator attribute. */
    private final String CREATOR = "Tidy Tracks " + Main.VERSION;
    private final String INDENT = "    ";

    private Controller controller;
    /** Tag names to strip from the output. */
    private Set<String> parserOptions;
    /** Accumulates character data between element events; null when empty. */
    private StringBuffer stringBuffer;
    private Writer outputFileWriter;
    private int indentCount;
    private int lastIndentCount;
    /** True while inside a suppressed (stripped) element subtree. */
    private boolean suppressOutput;
    /** Tag that started the current suppression; its close tag ends it. */
    private String suppressOutputTag;
    private long startExecutionTime;
    private long endExecutionTime;
    private boolean inCDATA;
    private boolean printedStartCDATA;
    /** Removed-element tallies, keyed by tag name, for the summary dialog. */
    private Map<String, Integer> counts = new HashMap<>();

    GpxParser(Controller controller, Set<String> parserOptions) {
        this.controller = controller;
        this.parserOptions = parserOptions;
    }

    /*
     * Content Handler
     */

    @Override
    public void startDocument() throws SAXException {
        startExecutionTime = System.currentTimeMillis();
        print("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\" ?>");
        counts.clear();
    }

    @Override
    public void startElement(String uri, String localName, String qName,
                             Attributes attributes) throws SAXException {
        // Flush any buffered character data belonging to the parent element.
        if (stringBuffer != null) {
            if (!suppressOutput) {
                print(stringBuffer.toString().trim());
            }
            stringBuffer = null;
        }
        // Close a CDATA section that was opened but never terminated in output.
        if (!suppressOutput && !inCDATA && printedStartCDATA) {
            print("]]>");
            printedStartCDATA = false;
        }
        String name = qName;
        if (name.isEmpty()) {
            // should never happen, parser namespace-prefixes = true
            name = localName;
        }
        if (parserOptions.contains(name)) {
            // Start (or continue) suppressing this subtree and count the removal.
            if (!suppressOutput) {
                suppressOutput = true;
                suppressOutputTag = name;
            }
            counts.merge(name, 1, Integer::sum);
            return;
        }
        if (suppressOutput) {
            return;
        }
        newLine();
        print("<" + name);
        for (int i = 0; i < attributes.getLength(); i++) {
            String attributeName = attributes.getQName(i);
            if (attributeName.isEmpty()) {
                // should never happen, parser namespace-prefixes = true
                attributeName = attributes.getLocalName(i);
            }
            // Rewrite the root element's creator attribute to this tool.
            if (name.equals("gpx") && attributeName.equals("creator")) {
                print(" " + "creator=\"" + CREATOR + "\"");
            } else {
                print(" " + attributeName + "=\"" + attributes.getValue(i) + "\"");
            }
        }
        print(">");
        indentCount++;
        lastIndentCount = indentCount;
    }

    @Override
    public void characters(char[] ch, int start, int length) throws SAXException {
        if (stringBuffer == null) {
            stringBuffer = new StringBuffer();
        }
        stringBuffer.append(ch, start, length);
    }

    @Override
    public void endElement(String uri, String localName, String qName)
            throws SAXException {
        // Flush buffered text content of this element.
        if (stringBuffer != null) {
            if (!suppressOutput) {
                print(stringBuffer.toString().trim());
            }
            stringBuffer = null;
        }
        if (!suppressOutput && !inCDATA && printedStartCDATA) {
            print("]]>");
            printedStartCDATA = false;
        }
        String name = qName;
        if (name.isEmpty()) {
            // should never happen, parser namespace-prefixes = true
            name = localName;
        }
        if (suppressOutput) {
            // NOTE(review): a nested element with the same tag as the
            // suppressed one would end suppression early — presumably such
            // nesting does not occur in GPX; confirm against the schema.
            if (name.equals(suppressOutputTag)) {
                suppressOutput = false;
            }
            return;
        }
        // Print the close tag on its own (dedented) line only when this
        // element contained child elements; leaf elements close inline.
        if (indentCount < lastIndentCount) {
            indentCount--;
            newLine();
            print("</" + name + ">");
        } else {
            print("</" + name + ">");
            indentCount--;
        }
    }

    @Override
    public void endDocument() throws SAXException {
        try {
            outputFileWriter.flush();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                outputFileWriter.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        endExecutionTime = System.currentTimeMillis();
        showCountsDialog();
    }

    /** Writes {@code s} to the output file, wrapping I/O failures for SAX. */
    private void print(String s) throws SAXException {
        try {
            outputFileWriter.write(s);
        } catch (IOException e) {
            throw new SAXException(e);
        }
    }

    /** Starts a new output line at the current indent level. */
    private void newLine() throws SAXException {
        String nl = System.lineSeparator();
        print(nl);
        for (int i = 0; i < indentCount; i++) {
            print(INDENT);
        }
    }

    /*
     * Lexical Handler
     */

    @Override
    public void startDTD(String name, String publicId, String systemId)
            throws SAXException {
        // Do Nothing
    }

    @Override
    public void endDTD() throws SAXException {
        // Do Nothing
    }

    @Override
    public void startEntity(String name) throws SAXException {
        // Do Nothing
    }

    @Override
    public void endEntity(String name) throws SAXException {
        // Do Nothing
    }

    @Override
    public void startCDATA() throws SAXException {
        inCDATA = true;
        if (!suppressOutput) {
            print("<![CDATA[");
            printedStartCDATA = true;
        }
    }

    @Override
    public void endCDATA() throws SAXException {
        // The closing "]]>" is emitted lazily by start/endElement once the
        // buffered CDATA text has been flushed.
        inCDATA = false;
    }

    @Override
    public void comment(char[] ch, int start, int length) throws SAXException {
        // Do Nothing
    }

    /*
     * Error Handler
     */

    @Override
    public void warning(SAXParseException e) throws SAXException {
        System.err.println("SAX Parser Warning: " + e.getMessage());
    }

    @Override
    public void error(SAXParseException e) throws SAXException {
        String message = "Error reading GPX file on line " + e.getLineNumber()
                + ", column " + e.getColumnNumber() + ". " + e.getMessage();
        // Recoverable parse errors are only logged; a dialog per error would
        // interrupt the user repeatedly during a single parse.
        System.err.println("SAX Parser Error: " + message);
    }

    @Override
    public void fatalError(SAXParseException e) throws SAXException {
        throw new SAXException(e);
    }

    /** Shows the post-run summary: what was removed, and how long it took. */
    private void showCountsDialog() {
        StringBuilder message = new StringBuilder();
        if (counts.isEmpty()) {
            message.append("Nothing was removed from the track");
        } else {
            message.append("Removed the following from the track:");
            message.append(System.lineSeparator());
            for (String key : counts.keySet()) {
                boolean oneEntry = false;
                int entry = counts.get(key);
                message.append(String.format("%,d", entry));
                if (entry == 1) {
                    oneEntry = true;
                }
                // Map GPX tag names to human-readable labels. Every label
                // starts with a space to separate it from the count.
                switch (key) {
                    case "ele": message.append(" Elevation"); break;
                    case "time": message.append(" Time"); break;
                    case "metadata": message.append(" Metadata"); break;
                    case "keywords": message.append(" Keyword"); break;
                    case "cmt": message.append(" Comment"); break;
                    case "desc": message.append(" Description"); break;
                    case "name": message.append(" Name"); break;
                    case "link": message.append(" Link"); break;
                    case "author": message.append(" Author"); break;
                    case "copyright": message.append(" Copyright"); break;
                    case "bounds": message.append(" Bounds"); break;
                    case "extensions": message.append(" Extension"); break;
                    case "trk": message.append(" Track"); break;
                    case "wpt": message.append(" Waypoint"); break;
                    case "rte": message.append(" Route"); break;
                    case "magvar": message.append(" Magnetic Variation"); break;
                    // FIX: the next two labels were missing the leading space,
                    // producing e.g. "3Geoid Height" in the dialog.
                    case "geoidheight": message.append(" Geoid Height"); break;
                    case "src": message.append(" Source"); break;
                    case "sym": message.append(" Symbol"); break;
                    case "fix": message.append(" Fix"); break;
                    case "sat": message.append(" Satellite"); break;
                    case "hdop": message.append(" Horizontal Dilution"); break;
                    case "vdop": message.append(" Vertical Dilution"); break;
                    case "pdop": message.append(" Position Dilution"); break;
                    case "ageofdgpsdata": message.append(" Age of DGPS"); break;
                    case "dgpsid": message.append(" DGPS ID"); break;
                    default: message.append(" Unknown");
                }
                if (oneEntry) {
                    message.append(" entry");
                } else {
                    message.append(" entries");
                }
                message.append(System.lineSeparator());
            }
        }
        long executionTime = endExecutionTime - startExecutionTime;
        controller.showDialog(Alert.AlertType.INFORMATION, "Tidy Up Successful",
                "Tidy Up Completed Successfully in " + executionTime + " ms",
                message.toString());
    }

    /** Reports failure and removes the partially-written output file. */
    private void tidyUpFailed(File outputFile) {
        controller.showDialog(Alert.AlertType.ERROR, "Tidy Up Failed", null,
                "Tidy Tracks failed to tidy track :(");
        try {
            outputFileWriter.close();
            Files.delete(outputFile.toPath());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Validates the input/output files and options, then runs a validating,
     * namespace-aware SAX parse of {@code inputFile} with this object wired
     * in as content, lexical and error handler, writing the tidied track to
     * {@code outputFile}.
     */
    @Override
    public void parser(File inputFile, File outputFile) {
        if (!inputFile.exists()) {
            controller.showDialog(Alert.AlertType.ERROR, "Input File Error",
                    null, "Input file cannot be found");
            return;
        }
        if (!inputFile.canRead()) {
            controller.showDialog(Alert.AlertType.ERROR, "Input File Error",
                    null, "Cannot read the input file");
            return;
        }
        if (!FileUtil.getFileExtension(outputFile.getAbsolutePath())
                .equals(FileUtil.getFileExtension(inputFile.getAbsolutePath()))) {
            // FIX: message previously read "cannot convert the between file formats".
            controller.showDialog(Alert.AlertType.WARNING,
                    "Cannot Convert File Format",
                    "Cannot convert the current file format",
                    "Tidy Tracks cannot convert between file formats. "
                            + "Please save the output track with the \".gpx\" "
                            + "file extension");
            return;
        }
        if (parserOptions.isEmpty()) {
            controller.showDialog(Alert.AlertType.WARNING,
                    "No Options Selected", "No options have been selected",
                    "Please check at least one option to continue.");
            return;
        }
        try {
            // Explicit UTF-8 (was the magic string "UTF8"); same encoding,
            // but no checked UnsupportedEncodingException path.
            outputFileWriter = new BufferedWriter(new OutputStreamWriter(
                    new FileOutputStream(outputFile), StandardCharsets.UTF_8));
        } catch (IOException e) {
            controller.showDialog(Alert.AlertType.ERROR, "Output File Error",
                    "Error writing to output file", e.getMessage());
            return;
        }
        try {
            SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
            saxParserFactory.setNamespaceAware(true);
            saxParserFactory.setFeature(
                    "http://xml.org/sax/features/namespace-prefixes", true);
            saxParserFactory.setValidating(true);
            SAXParser saxParser = saxParserFactory.newSAXParser();
            // Validate against the schema declared in the document itself.
            final String JAXP_SCHEMA_LANGUAGE =
                    "http://java.sun.com/xml/jaxp/properties/schemaLanguage";
            final String W3C_XML_SCHEMA = "http://www.w3.org/2001/XMLSchema";
            saxParser.setProperty(JAXP_SCHEMA_LANGUAGE, W3C_XML_SCHEMA);
            XMLReader xmlReader = saxParser.getXMLReader();
            xmlReader.setContentHandler(this);
            xmlReader.setErrorHandler(this);
            xmlReader.setProperty(
                    "http://xml.org/sax/properties/lexical-handler", this);
            xmlReader.parse(inputFile.toURI().toString());
        } catch (SAXException e) {
            // Unwrap SAXException
            Exception ex = e.getException();
            if (ex == null) {
                ex = e;
            }
            if (ex instanceof IOException) {
                controller.showDialog(Alert.AlertType.ERROR, "I/O Error",
                        "I/O Error", e.getMessage());
                tidyUpFailed(outputFile);
                return;
            }
            if (ex instanceof SAXParseException) {
                String message = "Error reading GPX file on line "
                        + ((SAXParseException) ex).getLineNumber() + ", column "
                        + ((SAXParseException) ex).getColumnNumber() + ". "
                        + ex.getMessage();
                controller.showDialog(Alert.AlertType.ERROR,
                        "GPX Parsing Error", "Error Reading GPX File", message);
                tidyUpFailed(outputFile);
                return;
            }
            // Unknown error
            controller.showExceptionDialog("Parsing Exception",
                    "Parsing Exception", "Unknown SAXException", ex);
            tidyUpFailed(outputFile);
        } catch (IOException e) {
            controller.showExceptionDialog("I/O Error", "I/O Error",
                    "Unknown IOException", e);
            tidyUpFailed(outputFile);
        } catch (ParserConfigurationException e) {
            controller.showExceptionDialog("Parser Configuration Error", null,
                    "XML parser does not support the required configuration", e);
            tidyUpFailed(outputFile);
        } finally {
            try {
                outputFileWriter.close();
            } catch (IOException e) {
                // Nothing left to do
                e.printStackTrace();
            }
        }
    }
}
package org.marketcetera.photon.internal.strategy.ui;

import static org.eclipse.swtbot.swt.finder.waits.Conditions.shellCloses;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;

import java.io.ByteArrayInputStream;
import java.text.MessageFormat;

import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IWorkspace;
import org.eclipse.core.resources.IWorkspaceRunnable;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.swtbot.eclipse.finder.SWTWorkbenchBot;
import org.eclipse.swtbot.swt.finder.SWTBot;
import org.eclipse.swtbot.swt.finder.widgets.SWTBotButton;
import org.eclipse.swtbot.swt.finder.widgets.SWTBotShell;
import org.eclipse.swtbot.swt.finder.widgets.SWTBotStyledText;
import org.eclipse.swtbot.swt.finder.widgets.SWTBotText;
import org.eclipse.swtbot.swt.finder.widgets.SWTBotTreeItem;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.ide.IDE;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.marketcetera.photon.commons.ui.databinding.RequiredFieldSupportTest;
import org.marketcetera.photon.commons.ui.databinding.SWTBotControlDecoration;
import org.marketcetera.photon.internal.strategy.ui.AbstractNewStrategyWizard;
import org.marketcetera.photon.test.AbstractUIRunner;
import org.marketcetera.photon.test.PhotonTestBase;
import org.marketcetera.photon.test.WorkbenchRunner;
import org.marketcetera.photon.test.AbstractUIRunner.ThrowableRunnable;
import org.marketcetera.photon.test.AbstractUIRunner.UI;

/* $License$ */

/**
 * Tests {@link AbstractNewStrategyWizard}.
 *
 * @author <a href="mailto:will@marketcetera.com">Will Horn</a>
 * @version $Id: AbstractNewStrategyWizardTestBase.java 16154 2012-07-14 16:34:05Z colin $
 * @since 2.0.0
 */
@RunWith(WorkbenchRunner.class)
public abstract class AbstractNewStrategyWizardTestBase<T extends AbstractNewStrategyWizard>
        extends PhotonTestBase {

    private WizardDialog mDialog;
    private IWorkspace mWorkspace;

    /** Creates the concrete wizard under test. */
    protected abstract T createWizard();

    /** Creates the UI fixture bound to the open wizard dialog. */
    protected abstract Fixture createFixture();

    /** Returns the file name the wizard generates for a "MyStrategy" class. */
    protected abstract String getFileNameForMyStrategy();

    /** Returns class names the wizard must reject. */
    protected abstract String[] getInvalidClassNames();

    /** Returns the decoration error shown for an invalid class name. */
    protected abstract String getInvalidClassNameError();

    /** Clears the workspace before each test. */
    @Before
    @UI
    public void before() throws Exception {
        IDE.registerAdapters();
        mWorkspace = ResourcesPlugin.getWorkspace();
        mWorkspace.run(new IWorkspaceRunnable() {
            @Override
            public void run(IProgressMonitor monitor) throws CoreException {
                mWorkspace.getRoot().delete(true, null);
            }
        }, null);
    }

    /** Clears the workspace and closes any still-open wizard dialog. */
    @After
    @UI
    public void after() throws Exception {
        mWorkspace.run(new IWorkspaceRunnable() {
            @Override
            public void run(IProgressMonitor monitor) throws CoreException {
                mWorkspace.getRoot().delete(true, null);
            }
        }, null);
        if (mDialog != null) {
            mDialog.close();
        }
    }

    /**
     * Exercises the full happy path: required-field decorations toggle with
     * the folder/class-name fields, Finish creates the file and opens it in
     * an editor.
     */
    @Test
    public void testAbstractNewStrategyWizard() throws Exception {
        new TestTemplate() {
            private IProject mTestProject;

            @Override
            protected void initWorkspace(IWorkspace workspace)
                    throws CoreException {
                mTestProject = workspace.getRoot().getProject("test");
                mTestProject.create(null);
                mTestProject.open(null);
            }

            @Override
            protected IStructuredSelection getInitialSelection() {
                return new StructuredSelection(mTestProject);
            }

            @Override
            protected void testWizard(Fixture fixture) throws Exception {
                String containerMessage = RequiredFieldSupportTest
                        .getRequiredValueMessage("Folder");
                String classNameMessage = RequiredFieldSupportTest
                        .getRequiredValueMessage("Class Name");
                // Folder is pre-filled from the selection; class name is not.
                assertThat(fixture.getFolderText().getText(), is("/test"));
                fixture.getFolderDecoration().assertHidden();
                fixture.getClassNameDecoration().assertRequired(
                        classNameMessage);
                assertThat(fixture.getFinishButton().isEnabled(), is(false));
                fixture.getFolderText().setText("");
                fixture.getFolderDecoration().assertRequired(containerMessage);
                fixture.getClassNameDecoration().assertRequired(
                        classNameMessage);
                assertThat(fixture.getFinishButton().isEnabled(), is(false));
                fixture.getFolderText().setText("/test");
                fixture.getFolderDecoration().assertHidden();
                fixture.getClassNameDecoration().assertRequired(
                        classNameMessage);
                assertThat(fixture.getFinishButton().isEnabled(), is(false));
                fixture.getClassNameText().setText("MyStrategy");
                fixture.getFolderDecoration().assertHidden();
                fixture.getClassNameDecoration().assertHidden();
                assertThat(fixture.getFinishButton().isEnabled(), is(true));
                fixture.getFolderText().setText("");
                fixture.getFolderDecoration().assertRequired(containerMessage);
                fixture.getClassNameDecoration().assertHidden();
                assertThat(fixture.getFinishButton().isEnabled(), is(false));
                fixture.getFolderText().setText("/test");
                fixture.getFinishButton().click();
                SWTWorkbenchBot bot = new SWTWorkbenchBot();
                bot.waitUntil(shellCloses(fixture.getShell()), 10000, 500);
                // Finishing opens the generated script in an editor.
                SWTBotStyledText editorText = bot.editorByTitle(
                        getFileNameForMyStrategy()).bot().styledText();
                assertThat(editorText.getText(), containsString("MyStrategy"));
            }

            // FIX: @Override was missing (and a stray ';' followed the body).
            @Override
            protected void validateWorkspace(IWorkspace workspace)
                    throws CoreException {
                assertThat(workspace.getRoot().getFile(
                        new Path("/test/" + getFileNameForMyStrategy()))
                        .exists(), is(true));
            }
        };
    }

    /** Finishing with a nonexistent folder shows an error dialog. */
    @Test
    public void testContainerDoesNotExist() throws Exception {
        new TestTemplate() {
            @Override
            protected void testWizard(Fixture fixture) throws Exception {
                fixture.getFolderText().setText("bogus");
                fixture.getClassNameText().setText("MyStrategy");
                fixture.getFinishButton().click();
                fixture.dismissMissingContainerError();
                fixture.getCancelButton().click();
            }
        };
    }

    /** Each invalid class name triggers the validation decoration. */
    @Test
    public void testInvalidClassName() throws Exception {
        new TestTemplate() {
            @Override
            protected void testWizard(Fixture fixture) throws Exception {
                fixture.getFolderText().setText("bogus");
                for (String string : getInvalidClassNames()) {
                    fixture.getClassNameText().setText(string);
                    fixture.getClassNameDecoration().assertError(
                            getInvalidClassNameError());
                }
                fixture.getCancelButton().click();
            }
        };
    }

    /** Finishing over an existing file shows an error dialog. */
    @Test
    public void testFileExists() throws Exception {
        new TestTemplate() {
            @Override
            protected void initWorkspace(IWorkspace workspace)
                    throws CoreException {
                IProject testProject = workspace.getRoot().getProject("test");
                testProject.create(null);
                testProject.open(null);
                testProject.getFile(getFileNameForMyStrategy()).create(
                        new ByteArrayInputStream("abc".getBytes()), true, null);
            }

            @Override
            protected void testWizard(Fixture fixture) throws Exception {
                fixture.getFolderText().setText("/test");
                fixture.getClassNameText().setText("MyStrategy");
                fixture.getFinishButton().click();
                fixture.dismissFileExistsError(getFileNameForMyStrategy());
                fixture.getCancelButton().click();
            }
        };
    }

    /** Selecting a file pre-fills the folder field with its container. */
    @Test
    public void testFileSelectionIntializesToContainer() throws Exception {
        new TestTemplate() {
            private IFile mFile;

            @Override
            protected void initWorkspace(IWorkspace workspace)
                    throws CoreException {
                IProject project = workspace.getRoot().getProject("test");
                project.create(null);
                project.open(null);
                mFile = project.getFile("xyz.txt");
                mFile.create(new ByteArrayInputStream("abc".getBytes()), true,
                        null);
            }

            @Override
            protected IStructuredSelection getInitialSelection() {
                return new StructuredSelection(mFile);
            }

            @Override
            protected void testWizard(Fixture fixture) throws Exception {
                assertThat(fixture.getFolderText().getText(), is("/test"));
                fixture.getCancelButton().click();
            }
        };
    }

    /** The Browse... dialog selects a folder into the folder field. */
    @Test
    public void testBrowseButton() throws Exception {
        new TestTemplate() {
            @Override
            protected void initWorkspace(IWorkspace workspace)
                    throws CoreException {
                IProject project = workspace.getRoot().getProject("test");
                project.create(null);
                project.open(null);
                project = workspace.getRoot().getProject("test2");
                project.create(null);
                project.open(null);
                project.getFolder("folder").create(true, true, null);
            }

            @Override
            protected void testWizard(Fixture fixture) throws Exception {
                fixture.getBrowseButton().click();
                SWTBot bot = new SWTBot();
                bot.shell("Folder Selection");
                bot.label("Select new folder:");
                SWTBotTreeItem item = bot.tree().getTreeItem("test2");
                item.expand();
                item.getNode("folder").select();
                bot.button("OK").click();
                assertThat(fixture.getFolderText().getText(),
                        is("/test2/folder"));
                fixture.getCancelButton().click();
            }
        };
    }

    /** Cancelling creates no file. */
    @Test
    public void testCancel() throws Exception {
        new TestTemplate() {
            private IProject mTestProject;

            @Override
            protected void initWorkspace(IWorkspace workspace)
                    throws CoreException {
                mTestProject = workspace.getRoot().getProject("test");
                mTestProject.create(null);
                mTestProject.open(null);
            }

            @Override
            protected IStructuredSelection getInitialSelection() {
                return new StructuredSelection(mTestProject);
            }

            @Override
            protected void testWizard(Fixture fixture) throws Exception {
                fixture.getClassNameText().setText("MyStrategy");
                fixture.getCancelButton().click();
            }

            @Override
            protected void validateWorkspace(IWorkspace workspace)
                    throws CoreException {
                assertThat(workspace.getRoot().getFile(
                        new Path("/test/" + getFileNameForMyStrategy()))
                        .exists(), is(false));
            }
        };
    }

    /** A multi-selection initializes the folder from its first element. */
    @Test
    public void testMultipleSelection() throws Exception {
        new TestTemplate() {
            private IProject mProject1;
            private IProject mProject2;

            @Override
            protected void initWorkspace(IWorkspace workspace)
                    throws CoreException {
                mProject1 = workspace.getRoot().getProject("test");
                mProject1.create(null);
                mProject1.open(null);
                mProject2 = workspace.getRoot().getProject("test2");
                mProject2.create(null);
                mProject2.open(null);
            }

            @Override
            protected IStructuredSelection getInitialSelection() {
                return new StructuredSelection(new Object[] { mProject1,
                        mProject2 });
            }

            @Override
            protected void testWizard(Fixture fixture) throws Exception {
                assertThat(fixture.getFolderText().getText(), is("/test"));
                fixture.getCancelButton().click();
            }
        };
    }

    /**
     * Template method driving one wizard scenario: reset + seed the
     * workspace, open the wizard dialog on the UI thread, run the supplied
     * UI interactions, then validate the resulting workspace state.
     *
     * <p>Note: the constructor intentionally runs the whole scenario, so
     * anonymous subclasses execute at instantiation time.
     */
    protected abstract class TestTemplate {

        public TestTemplate() throws Exception {
            run();
        }

        private void run() throws Exception {
            mWorkspace.run(new IWorkspaceRunnable() {
                @Override
                public void run(IProgressMonitor monitor) throws CoreException {
                    mWorkspace.getRoot().delete(true, null);
                    initWorkspace(mWorkspace);
                }
            }, null);
            AbstractUIRunner.syncRun(new ThrowableRunnable() {
                @Override
                public void run() throws Throwable {
                    T wizard = createWizard();
                    IStructuredSelection selection = getInitialSelection();
                    if (selection != null) {
                        wizard.init(PlatformUI.getWorkbench(), selection);
                    }
                    mDialog = new WizardDialog(PlatformUI.getWorkbench()
                            .getActiveWorkbenchWindow().getShell(), wizard);
                    mDialog.setBlockOnOpen(false);
                    mDialog.open();
                }
            });
            testWizard(createFixture());
            mWorkspace.run(new IWorkspaceRunnable() {
                @Override
                public void run(IProgressMonitor monitor) throws CoreException {
                    validateWorkspace(mWorkspace);
                }
            }, null);
        }

        /** Optional hook: seed the (already empty) workspace. */
        protected void initWorkspace(IWorkspace workspace) throws CoreException {
        }

        /** Optional hook: selection passed to the wizard, or null for none. */
        protected IStructuredSelection getInitialSelection() {
            return null;
        }

        /** Required hook: drive the open wizard via the fixture. */
        protected abstract void testWizard(Fixture fixture) throws Exception;

        /** Optional hook: assert on the workspace after the wizard closed. */
        protected void validateWorkspace(IWorkspace workspace)
                throws CoreException {
        }
    }

    /**
     * SWTBot handle onto the open wizard dialog's widgets. Constructing it
     * also asserts the dialog's static labels and tooltips.
     */
    protected static class Fixture {

        private final SWTBot mBot = new SWTBot();
        private final SWTBotShell mShell;
        private final SWTBotText mFolderText;
        private final SWTBotControlDecoration mFolderDecoration;
        private final SWTBotText mClassNameText;
        private final SWTBotControlDecoration mClassNameDecoration;
        private final SWTBotButton mBrowseButton;
        private final SWTBotButton mFinishButton;
        private final SWTBotButton mCancelButton;

        public Fixture(String title) {
            mShell = mBot.shell(title);
            mBot.label(title);
            mBot.text("Create a new strategy script from a template.");
            assertThat(mBot.label("Folder:").getToolTipText(),
                    is("The project or folder in which to create the script"));
            assertThat(mBot.label("Class Name:").getToolTipText(),
                    is("The strategy class name to use"));
            mFolderText = mBot.textWithLabel("Folder:");
            mFolderDecoration = new SWTBotControlDecoration(mFolderText);
            mClassNameText = mBot.textWithLabel("Class Name:");
            mClassNameDecoration = new SWTBotControlDecoration(mClassNameText);
            mBrowseButton = mBot.button("Browse...");
            mFinishButton = mBot.button("Finish");
            mCancelButton = mBot.button("Cancel");
        }

        /** Dismisses the "folder does not exist" error dialog. */
        public void dismissMissingContainerError() {
            mBot.shell("Operation Failed");
            mBot.label(MessageFormat.format("Folder ''{0}'' does not exist.",
                    getFolderText().getText()));
            mBot.button("OK").click();
        }

        /** Dismisses the "file already exists" error dialog. */
        public void dismissFileExistsError(String file) {
            mBot.shell("Operation Failed");
            mBot.label(MessageFormat.format(
                    "There is already a file named ''{0}''.", file));
            mBot.button("OK").click();
        }

        public SWTBotShell getShell() {
            return mShell;
        }

        public SWTBotText getFolderText() {
            return mFolderText;
        }

        public SWTBotControlDecoration getFolderDecoration() {
            return mFolderDecoration;
        }

        public SWTBotText getClassNameText() {
            return mClassNameText;
        }

        public SWTBotControlDecoration getClassNameDecoration() {
            return mClassNameDecoration;
        }

        public SWTBotButton getBrowseButton() {
            return mBrowseButton;
        }

        public SWTBotButton getFinishButton() {
            return mFinishButton;
        }

        public SWTBotButton getCancelButton() {
            return mCancelButton;
        }
    }
}
package com.lybe; import android.os.Bundle; import android.view.View; import android.view.View.OnClickListener; import android.widget.AdapterView; import android.widget.AdapterView.OnItemSelectedListener; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.EditText; import android.widget.Spinner; import android.widget.TextView; import android.app.Activity; import android.app.AlertDialog; import android.content.ComponentName; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageManager; public class SettingActivity extends Activity { private Button rightBtn = null; private Button leftBtn = null; private CheckBox cbBoot = null; private CheckBox cbChargeDialog = null; private CheckBox cbCharge = null; private EditText etDelay = null; private Spinner spinner = null; private TextView tvReset = null; private TextView tvHelp = null; private TextView tvAbout = null; private AlertDialog.Builder resetDialog; private AlertDialog.Builder aboutDialog; private ArrayAdapter<CharSequence> adapter; private boolean isOpen; private boolean cdMark; private boolean chargeMark; private int delay; private int delayInText; private int timeUnit; private int spinNum; private int count; private SharedPreferences.Editor editor = null; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_setting); rightBtn = (Button)findViewById(R.id.rightBtn); rightBtn.setOnClickListener(new BackBtnListener()); leftBtn = (Button)findViewById(R.id.leftBtn); leftBtn.setOnClickListener(new SaveBtnListener()); cbBoot = (CheckBox)findViewById(R.id.cbBoot); cbChargeDialog = (CheckBox)findViewById(R.id.cbChargeDialog); cbCharge = (CheckBox)findViewById(R.id.cbCharge); etDelay = (EditText)findViewById(R.id.etDelay); spinner = 
(Spinner)findViewById(R.id.spinner); tvReset = (TextView)findViewById(R.id.tvReset); tvReset.setOnClickListener(new ResetBtnListener()); tvHelp = (TextView)findViewById(R.id.tvHelp); tvHelp.setOnClickListener(new HelpBtnListener()); tvAbout = (TextView)findViewById(R.id.tvAbout); tvAbout.setOnClickListener(new AboutBtnListener()); resetDialog = new AlertDialog.Builder(this); resetDialog.setTitle(R.string.dialog_reset_title); resetDialog.setPositiveButton(R.string.dialog_yes, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { delay = 4000; timeUnit = 1000; spinNum = 0; delayInText = 4; isOpen = true; cdMark = true; chargeMark = true; spinner.setSelection(spinNum); etDelay.setText(""+delayInText); cbBoot.setChecked(isOpen); cbChargeDialog.setChecked(!cdMark); } }); resetDialog.setNegativeButton(R.string.dialog_cancel, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); aboutDialog = new AlertDialog.Builder(this); aboutDialog.setTitle(R.string.about_detail); aboutDialog.setPositiveButton(R.string.dialog_gotit, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); adapter = ArrayAdapter.createFromResource(this, R.array.time, android.R.layout.simple_spinner_item); adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); spinner.setAdapter(adapter); spinner.setOnItemSelectedListener(new SpinnerSelectedListener()); SharedPreferences sp = getSharedPreferences("delay",Activity.MODE_PRIVATE); editor = getSharedPreferences("delay",Activity.MODE_PRIVATE).edit(); isOpen = sp.getBoolean("ISOPEN", true); cdMark = sp.getBoolean("cdMark", true); chargeMark = sp.getBoolean("chargeMark", true); delay = sp.getInt("delay", 4000); timeUnit = sp.getInt("timeUnit", 1000); spinNum = sp.getInt("spinNum", 0); delayInText = delay/timeUnit; cbBoot.setChecked(isOpen); 
cbChargeDialog.setChecked(!cdMark); cbCharge.setChecked(chargeMark); spinner.setSelection(spinNum); etDelay.setText(""+delayInText); count = getIntent().getExtras().getInt("count"); cbBoot.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener(){ @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { PackageManager pm = SettingActivity.this.getPackageManager(); ComponentName name = new ComponentName(SettingActivity.this, BootReceiver.class); if (isChecked) { isOpen = true; pm.setComponentEnabledSetting(name, PackageManager.COMPONENT_ENABLED_STATE_ENABLED, PackageManager.DONT_KILL_APP); } else { isOpen = false; pm.setComponentEnabledSetting(name, PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP); } } }); cbChargeDialog.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener(){ @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { if (isChecked) { cdMark = false; } else { cdMark = true; } } }); cbCharge.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener(){ @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { if (isChecked) { chargeMark = true; } else { chargeMark = false; } } }); } class SpinnerSelectedListener implements OnItemSelectedListener{ public void onItemSelected (AdapterView<?> parent, View view, int position, long id) { if(0 == position) { timeUnit = 1000; spinNum = 0; } else if(1 == position) { timeUnit = 60000; spinNum = 1; } else if(2 == position) { timeUnit = 3600000; spinNum = 2; } } public void onNothingSelected(AdapterView<?> arg0) { } } class BackBtnListener implements OnClickListener{ @Override public void onClick(View v) { Intent intent = new Intent(SettingActivity.this, PreActivity.class); intent.putExtra("count", count); startActivity(intent); SettingActivity.this.finish(); } } class SaveBtnListener implements OnClickListener{ @Override public void onClick(View v) { 
delayInText = Integer.valueOf(etDelay.getText().toString()); delay = delayInText * timeUnit; editor.putBoolean("ISOPEN", isOpen); editor.putBoolean("cdMark", cdMark); editor.putBoolean("chargeMark", chargeMark); editor.putInt("delay", delay); editor.putInt("timeUnit", timeUnit); editor.putInt("spinNum", spinNum); editor.commit(); etDelay.setText(""+delayInText); cbBoot.setChecked(isOpen); cbChargeDialog.setChecked(!cdMark); cbCharge.setChecked(chargeMark); Intent intent = new Intent(SettingActivity.this, PreActivity.class); intent.putExtra("count", count); startActivity(intent); SettingActivity.this.finish(); } } class ResetBtnListener implements OnClickListener{ @Override public void onClick(View v) { resetDialog.show(); } } class HelpBtnListener implements OnClickListener{ @Override public void onClick(View v) { startActivity(new Intent(SettingActivity.this, HelpActivity.class)); } } class AboutBtnListener implements OnClickListener{ @Override public void onClick(View v) { aboutDialog.show(); } } @Override public void onBackPressed(){ Intent intent = new Intent(SettingActivity.this, PreActivity.class); intent.putExtra("count", count); startActivity(intent); SettingActivity.this.finish(); } }
/* * Copyright 2012 Christian Vielma <cvielma@librethinking.com>. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.librethinking.simmodsys.models.pesm; import com.librethinking.simmodsys.models.pesm.parameters.AssetAmount; import com.librethinking.simmodsys.models.pesm.parameters.LiabilitiesAmount; import com.librethinking.simmodsys.models.pesm.parameters.ExpenseDesiredExpenses; import com.librethinking.simmodsys.models.pesm.parameters.Period; import com.librethinking.simmodsys.models.pesm.parameters.IncomeFixedEarnings; import com.librethinking.simmodsys.models.pesm.parameters.ExpenseFixed; import com.librethinking.simmodsys.models.pesm.parameters.AssetMinSavings; import com.librethinking.simmodsys.models.pesm.parameters.IncomeYearly; import com.librethinking.simmodsys.models.pesm.parameters.ExpenseYearly; import com.librethinking.simmodsys.SIMParameter; import com.librethinking.simmodsys.exceptions.NullOrInvalidStateException; import com.librethinking.simmodsys.exceptions.StateValidationException; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; /** * This is an utility class to validate the stop rules of a model. * This class is used by <code>PESMModel</code> to determine if a model have * reached its final <code>State</code>. This class implements Singleton pattern. 
*
* @author Christian Vielma <cvielma@librethinking.com>
*/
class PESMModelStopRulesValidator {

    /** Prefix used to resolve the per-parameter "reachedXxx" validation methods via reflection. */
    private static final String REACHPREFIX = "reached";

    private static volatile PESMModelStopRulesValidator instance = null;

    private PESMModelStopRulesValidator() {}

    /** This determines which parameters can be validated. */
    protected final static Class[] FINALPARAMETERS = {AssetAmount.class, ExpenseFixed.class,
                          IncomeFixedEarnings.class, AssetMinSavings.class,
                          ExpenseDesiredExpenses.class, LiabilitiesAmount.class,
                          Period.class};

    /** This specifies some parameters that need others (the whole states) to get validated. */
    private final static Class[] SPECIALPARAMETERS = {AssetMinSavings.class};

    /**
     * Returns the singleton instance, creating it lazily.
     * The unsynchronized fast path is safe because {@code instance} is volatile.
     */
    protected static PESMModelStopRulesValidator getInstance() {
        if (instance == null) {
            synchronized (PESMModelStopRulesValidator.class) {
                if (instance == null) {
                    instance = new PESMModelStopRulesValidator();
                }
            }
        }
        return instance;
    }

    /** This method is used to determine if a model have reached its final state.
     *
     * Depending on how is the difference between <code>initialS</code> and
     * <code>finalS</code>, this method will determine if <code>currentS</code>
     * has reached the final State (i.e.: if initial amount is less than final amount
     * the model should stop when current amount is greater or equal to final amount).
     *
     * A state is reached when any of its parameters is reached.
     *
     * @param initialS Initial state of the model.
     * @param currentS Current state to be compared.
     * @param finalS Final state of the model.
     *
     * @return <code>true</code> if <code>finalS</code> was reached, false otherwise.
     *
     * @throws StateValidationException if an unexpected runtime error occurs during the validation or
     * this class have problems resolving the validation method for a specified parameter.
     * @throws RuntimeException for some errors regarding the reflection of methods. This should never happen.
     */
    public boolean finalReached(PESMState initialS, PESMState currentS, PESMState finalS)
            throws StateValidationException {
        boolean reached = false;
        Set<Set<SIMParameter>> iParameters = initialS.getParameters();
        List<Class> myParameters = Arrays.asList(FINALPARAMETERS);
        List<Class> specialParams = Arrays.asList(SPECIALPARAMETERS);

        for (Set<SIMParameter> currParamSet : iParameters) {
            SIMParameter firstInSet = (SIMParameter) currParamSet.toArray()[0];
            if (myParameters.contains(firstInSet.getClass())) {
                // The validator method name is "reached" + parameter class name.
                StringBuilder sb = new StringBuilder(REACHPREFIX);
                sb.append(firstInSet.getClass().getSimpleName());
                try {
                    // If parameter requires other parameters (whole states) to get validated
                    if (specialParams.contains(firstInSet.getClass())) {
                        reached = (Boolean) this.getClass()
                                .getDeclaredMethod(sb.toString(), PESMState.class,
                                        PESMState.class, PESMState.class)
                                .invoke(this, initialS, currentS, finalS);
                    }
                    // If parameter is unique, the single parameter values are passed
                    else if (firstInSet.isUnique() && !specialParams.contains(firstInSet.getClass())) {
                        SIMParameter currentP = PESMUtil.getUniqueParam(currentS, firstInSet);
                        SIMParameter finalP = PESMUtil.getUniqueParam(finalS, firstInSet);
                        reached = (Boolean) this.getClass()
                                .getDeclaredMethod(sb.toString(), SIMParameter.class,
                                        SIMParameter.class, SIMParameter.class)
                                .invoke(this, firstInSet, currentP, finalP);
                    }
                    // If the parameter is not unique, it needs the whole set of values
                    else if (!firstInSet.isUnique() && !specialParams.contains(firstInSet.getClass())) {
                        reached = (Boolean) this.getClass()
                                .getDeclaredMethod(sb.toString(), Set.class, Set.class, Set.class)
                                .invoke(this, currParamSet,
                                        currentS.getParameter(firstInSet.getName()),
                                        finalS.getParameter(firstInSet.getName()));
                    }
                    else {
                        throw new RuntimeException("THIS SHOULD NEVER HAPPEN");
                    }
                } catch (NoSuchMethodException | SecurityException | IllegalAccessException
                        | IllegalArgumentException ex) {
                    throw new StateValidationException("Error finding or "
                            + "calling method for validation of: '"
                            + firstInSet.getClass().getSimpleName() + "'.", ex);
                } catch (InvocationTargetException ex) {
                    // Unwrap the real cause thrown inside the reflected validator.
                    throw new StateValidationException("Unexpected error inside method: '"
                            + sb.toString() + "'.", ex.getCause());
                } catch (RuntimeException ex) {
                    throw new StateValidationException("Unexpected error validating "
                            + "parameter: '" + firstInSet.getClass().getSimpleName() + "'.", ex);
                }

                if (reached) { return true; }
            }
        }
        return false;
    }

    /** This method defines specific rules to determine parameter's stop conditions.
     * @see Project Documentation.
     */
    private static boolean reachedAssetAmount(SIMParameter initialP, SIMParameter currentP,
            SIMParameter finalP) {
        if (finalP == null) { return false; } // not established in final state => not a stop condition
        double ip = ((AssetAmount) initialP).getAmount();
        double cp = ((AssetAmount) currentP).getAmount();
        double fp = ((AssetAmount) finalP).getAmount();

        // Reached when the current value crosses the target from either direction.
        if (ip < fp && cp >= fp) { return true; }
        else if (ip >= fp && cp <= fp) { return true; }
        else { return false; }
    }

    /** This method defines specific rules to determine parameter's stop conditions.
     * Returns true when ANY initial/current/final combination crosses the target.
     * @see Project Documentation.
     */
    private static boolean reachedExpenseFixed(Set<SIMParameter> initialS,
            Set<SIMParameter> currentS, Set<SIMParameter> finalS) {
        if (finalS == null) { return false; } // not established in final state => not a stop condition
        for (SIMParameter initialP : initialS) {
            for (SIMParameter currentP : currentS) {
                for (SIMParameter finalP : finalS) {
                    double ip = ((ExpenseFixed) initialP).getAmount();
                    double cp = ((ExpenseFixed) currentP).getAmount();
                    double fp = ((ExpenseFixed) finalP).getAmount();
                    if (ip < fp && cp >= fp) { return true; }
                    else if (ip >= fp && cp <= fp) { return true; }
                }
            }
        }
        return false;
    }

    /** This method defines specific rules to determine parameter's stop conditions.
     * @see Project Documentation.
     */
    private static boolean reachedIncomeFixedEarnings(Set<SIMParameter> initialS,
            Set<SIMParameter> currentS, Set<SIMParameter> finalS) {
        if (finalS == null) { return false; } // not established in final state => not a stop condition
        for (SIMParameter initialP : initialS) {
            for (SIMParameter currentP : currentS) {
                for (SIMParameter finalP : finalS) {
                    // FIX: these sets hold IncomeFixedEarnings, not ExpenseFixed;
                    // the previous casts were a copy-paste bug that would throw
                    // ClassCastException at runtime. Assumes IncomeFixedEarnings
                    // exposes getAmount() like the other amount parameters --
                    // TODO confirm against the parameter class.
                    double ip = ((IncomeFixedEarnings) initialP).getAmount();
                    double cp = ((IncomeFixedEarnings) currentP).getAmount();
                    double fp = ((IncomeFixedEarnings) finalP).getAmount();
                    if (ip < fp && cp >= fp) { return true; }
                    else if (ip >= fp && cp <= fp) { return true; }
                }
            }
        }
        return false;
    }

    /** This method defines specific rules to determine parameter's stop conditions.
     * @see Project Documentation.
     */
    private static boolean reachedLiabilitiesAmount(SIMParameter initialP, SIMParameter currentP,
            SIMParameter finalP) {
        if (finalP == null) { return false; } // not established in final state => not a stop condition
        double ip = ((LiabilitiesAmount) initialP).getAmount();
        double cp = ((LiabilitiesAmount) currentP).getAmount();
        double fp = ((LiabilitiesAmount) finalP).getAmount();

        if (ip < fp && cp >= fp) { return true; }
        else if (ip >= fp && cp <= fp) { return true; }
        else { return false; }
    }

    /** This method defines specific rules to determine parameter's stop conditions.
     * @see Project Documentation.
     */
    private static boolean reachedPeriod(SIMParameter initialP, SIMParameter currentP,
            SIMParameter finalP) {
        // Reached when the current month passes the final month, or wraps past
        // the maximum month value (MAXVALUE is read off the current Period).
        if (((Period) currentP).getMonth() >= ((Period) finalP).getMonth() + 1
                || ((Period) currentP).getMonth() == ((Period) currentP).MAXVALUE + 1) {
            return true;
        }
        else { return false; }
    }

    /** This method defines specific rules to determine parameter's stop conditions.
     * @see Project Documentation.
     */
    private static boolean reachedExpenseDesiredExpenses(Set<SIMParameter> initialP,
            Set<SIMParameter> currentP, Set<SIMParameter> finalP) {
        /* finalP has all the desired expenses that should be made. In other words,
         * to reach the final, currentP must NOT have ANY of the finalP expenses in it. */
        if (finalP == null) { return false; } // not established in final state => not a stop condition

        // Final desired expenses are more than initial: this should never happen.
        if (initialP.size() < finalP.size() || initialP.size() < currentP.size()) {
            throw new NullOrInvalidStateException("Final or current state shouldn't have more "
                    + " desired expenses than initial state [" + initialP.size() + "].");
        }
        // Initial and final expenses are the same
        else if (initialP.size() == finalP.size()) { return true; }
        // At least the number of expenses in finalP has been selected
        else if (currentP.size() <= initialP.size() - finalP.size()) {
            // FIX: the accumulator must start at true; starting at false made the
            // conjunction permanently false, so this branch could never return true.
            boolean allExpended = true;
            for (SIMParameter curr : finalP) {
                allExpended = allExpended && !currentP.contains(curr);
            }
            if (allExpended) { return true; }
        }

        return false;
    }

    /** This method defines specific rules to determine parameter's stop conditions.
     * @see Project Documentation.
     */
    private static boolean reachedAssetMinSavings(PESMState initialS, PESMState currentS,
            PESMState finalS) {
        // Only evaluated at year boundaries.
        if (currentS.getPeriod() % 12 != 0) { return false; }

        AssetMinSavings finalSavings = PESMUtil.getUniqueParam(finalS, new AssetMinSavings());
        if (finalSavings == null) { return false; } // not a stop condition if absent

        double expectedSavings = finalSavings.getPercentage();
        double currYearIncome = PESMUtil.getUniqueParam(currentS, new IncomeYearly()).getAmount();
        double currYearExpense = PESMUtil.getUniqueParam(currentS, new ExpenseYearly()).getAmount();

        // Reached when actual yearly savings meet the expected fraction of income.
        if ((currYearIncome * expectedSavings) <= (currYearIncome - currYearExpense)) {
            return true;
        }
        else { return false; }
    }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.intellij.plugins.intelliLang.inject.xml;

import com.intellij.lang.Language;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogBuilder;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.Factory;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.patterns.*;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiLanguageInjectionHost;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.*;
import com.intellij.util.Consumer;
import com.intellij.util.PlatformIcons;
import com.intellij.util.containers.ContainerUtil;
import org.intellij.plugins.intelliLang.Configuration;
import org.intellij.plugins.intelliLang.inject.AbstractLanguageInjectionSupport;
import org.intellij.plugins.intelliLang.inject.EditInjectionSettingsAction;
import org.intellij.plugins.intelliLang.inject.InjectorUtils;
import org.intellij.plugins.intelliLang.inject.config.*;
import org.intellij.plugins.intelliLang.inject.config.ui.AbstractInjectionPanel;
import org.intellij.plugins.intelliLang.inject.config.ui.XmlAttributePanel;
import org.intellij.plugins.intelliLang.inject.config.ui.XmlTagPanel;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;

/**
 * Language-injection support for XML hosts: attribute values and tag text.
 * Adds, edits and removes {@link XmlTagInjection}/{@link XmlAttributeInjection}
 * entries in the IntelliLang {@link Configuration}.
 *
 * @author Gregory.Shrago
 */
public class XmlLanguageInjectionSupport extends AbstractLanguageInjectionSupport {
  @NonNls public static final String XML_SUPPORT_ID = "xml";

  // True for attribute values (except xmlns/xmlns: declarations) and for
  // text nodes that have a parent tag.
  private static boolean isMine(final PsiLanguageInjectionHost host) {
    if (host instanceof XmlAttributeValue) {
      final PsiElement p = host.getParent();
      if (p instanceof XmlAttribute) {
        final String s = ((XmlAttribute)p).getName();
        return !("xmlns".equals(s) || s.startsWith("xmlns:"));
      }
    }
    else if (host instanceof XmlText) {
      final XmlTag tag = ((XmlText)host).getParentTag();
      return tag != null/* && tag.getValue().getTextElements().length == 1 && tag.getSubTags().length == 0*/;
    }
    return false;
  }

  @Override
  @NotNull
  public String getId() {
    return XML_SUPPORT_ID;
  }

  @Override
  @NotNull
  public Class[] getPatternClasses() {
    return new Class[] {XmlPatterns.class};
  }

  @Override
  public boolean isApplicableTo(PsiLanguageInjectionHost host) {
    return host instanceof XmlElement;
  }

  // Comment-based injections are looked up on the enclosing tag for text nodes;
  // attribute values never get comment injections.
  @Nullable
  @Override
  public BaseInjection findCommentInjection(@NotNull PsiElement host, @Nullable Ref<PsiElement> commentRef) {
    if (host instanceof XmlAttributeValue) return null;
    return InjectorUtils.findCommentInjection(host instanceof XmlText ? host.getParent() : host, getId(), commentRef);
  }

  // Dispatches to the tag-text or attribute-value flavor of "add injection".
  @Override
  public boolean addInjectionInPlace(Language language, final PsiLanguageInjectionHost psiElement) {
    if (!isMine(psiElement)) return false;
    String id = language.getID();
    if (psiElement instanceof XmlAttributeValue) {
      return doInjectInAttributeValue((XmlAttributeValue)psiElement, id);
    }
    else if (psiElement instanceof XmlText) {
      return doInjectInXmlText((XmlText)psiElement, id);
    }
    return false;
  }

  @Override
  public boolean removeInjectionInPlace(final PsiLanguageInjectionHost host) {
    return removeInjection(host);
  }

  // Disables the matching place in each injection; injections with no places
  // left are dropped entirely. The change is registered as an undoable action.
  @Override
  public boolean removeInjection(PsiElement host) {
    final Project project = host.getProject();
    final Configuration configuration = Configuration.getProjectInstance(project);
    final ArrayList<BaseInjection> injections = collectInjections(host, configuration);
    if (injections.isEmpty()) return false;
    final ArrayList<BaseInjection> newInjections = new ArrayList<>();
    for (BaseInjection injection : injections) {
      final BaseInjection newInjection = injection.copy();
      newInjection.setPlaceEnabled(null, false);
      if (InjectorUtils.canBeRemoved(newInjection)) continue;
      newInjections.add(newInjection);
    }
    configuration.replaceInjectionsWithUndo(
      project, host.getContainingFile(), newInjections, injections, Collections.emptyList());
    return true;
  }

  // Opens the XML-specific settings dialog if the stored injection can be
  // reconstructed as a tag/attribute injection; falls back to the generic UI.
  @Override
  public boolean editInjectionInPlace(final PsiLanguageInjectionHost host) {
    if (!isMine(host)) return false;
    final Project project = host.getProject();
    final Configuration configuration = Configuration.getProjectInstance(project);
    final ArrayList<BaseInjection> injections = collectInjections(host, configuration);
    if (injections.isEmpty()) return false;
    final BaseInjection originalInjection = injections.get(0);
    final BaseInjection xmlInjection = createFrom(originalInjection);
    final BaseInjection newInjection =
      xmlInjection == null? showDefaultInjectionUI(project, originalInjection.copy()) : showInjectionUI(project, xmlInjection);
    if (newInjection != null) {
      configuration.replaceInjectionsWithUndo(
        project, host.getContainingFile(), Collections.singletonList(newInjection),
        Collections.singletonList(originalInjection), Collections.emptyList());
    }
    return true;
  }

  // Shows the modal tag/attribute settings panel; returns a copy of the edited
  // injection on OK, or null when cancelled.
  @Nullable
  private static BaseInjection showInjectionUI(final Project project, final BaseInjection xmlInjection) {
    final DialogBuilder builder = new DialogBuilder(project);
    final AbstractInjectionPanel panel;
    if (xmlInjection instanceof XmlTagInjection) {
      panel = new XmlTagPanel((XmlTagInjection)xmlInjection, project);
      builder.setHelpId("reference.settings.injection.language.injection.settings.xml.tag");
    }
    else if (xmlInjection instanceof XmlAttributeInjection) {
      panel = new XmlAttributePanel((XmlAttributeInjection)xmlInjection, project);
      builder.setHelpId("reference.settings.injection.language.injection.settings.xml.attribute");
    }
    else throw new AssertionError();
    panel.reset();
    builder.addOkAction();
    builder.addCancelAction();
    builder.setCenterPanel(panel.getComponent());
    builder.setTitle(EditInjectionSettingsAction.EDIT_INJECTION_TITLE);
    builder.setOkOperation(() -> {
      panel.apply();
      builder.getDialogWrapper().close(DialogWrapper.OK_EXIT_CODE);
    });
    if (builder.show() == DialogWrapper.OK_EXIT_CODE) {
      return xmlInjection.copy();
    }
    return null;
  }

  // Attempts to reconstruct a structured XmlTagInjection/XmlAttributeInjection
  // from a generic injection's single pattern place by walking the pattern's
  // condition tree; any unrecognized condition aborts with null so the caller
  // falls back to the generic editor.
  @Nullable
  private static BaseInjection createFrom(final BaseInjection injection) {
    // Only single-place injections can be mapped back onto the dedicated UI.
    if (injection.getInjectionPlaces().length == 0 || injection.getInjectionPlaces().length > 1) return null;

    AbstractTagInjection result;

    final InjectionPlace place = injection.getInjectionPlaces()[0];
    final ElementPattern<? extends PsiElement> rootPattern = place.getElementPattern();
    final ElementPatternCondition<? extends PsiElement> rootCondition = rootPattern.getCondition();
    final Class<? extends PsiElement> elementClass = rootCondition.getInitialCondition().getAcceptedClass();
    if (XmlAttribute.class.equals(elementClass)) {
      result = new XmlAttributeInjection().copyFrom(injection);
    }
    else if (XmlTag.class.equals(elementClass)) {
      result = new XmlTagInjection().copyFrom(injection);
    }
    else return null;
    result.setInjectionPlaces(InjectionPlace.EMPTY_ARRAY);
    for (PatternCondition<?> condition : rootCondition.getConditions()) {
      final String value = extractValue(condition);
      if ("withLocalName".equals(condition.getDebugMethodName())) {
        if (value == null) return null;
        if (result instanceof XmlAttributeInjection) {
          ((XmlAttributeInjection)result).setAttributeName(value);
        }
        else {
          result.setTagName(value);
        }
      }
      else if ("withNamespace".equals(condition.getDebugMethodName())) {
        if (value == null) return null;
        if (result instanceof XmlAttributeInjection) {
          ((XmlAttributeInjection)result).setAttributeNamespace(value);
        }
        else {
          result.setTagNamespace(value);
        }
      }
      else if (result instanceof XmlAttributeInjection && condition instanceof PatternConditionPlus) {
        // "withParent" = direct parent tag only; "inside" = any enclosing tag.
        boolean strict = "withParent".equals(condition.getDebugMethodName());
        if (!strict && !"inside".equals(condition.getDebugMethodName())) return null;
        result.setApplyToSubTags(!strict);
        ElementPattern<?> insidePattern = ((PatternConditionPlus)condition).getValuePattern();
        if (!XmlTag.class.equals(insidePattern.getCondition().getInitialCondition().getAcceptedClass())) return null;
        for (PatternCondition<?> insideCondition : insidePattern.getCondition().getConditions()) {
          String tagValue = extractValue(insideCondition);
          if (tagValue == null) return null;
          if ("withLocalName".equals(insideCondition.getDebugMethodName())) {
            result.setTagName(tagValue);
          }
          else if ("withNamespace".equals(insideCondition.getDebugMethodName())) {
            result.setTagNamespace(tagValue);
          }
        }
      }
      else {
        return null;
      }
    }
    result.generatePlaces();
    return result;
  }

  // Extracts the string value from a value-pattern condition: a single String
  // literal, or several literals joined with "|"; null when the condition does
  // not reduce to plain strings.
  @Nullable
  private static String extractValue(PatternCondition<?> condition) {
    if (!(condition instanceof PatternConditionPlus)) return null;
    final ElementPattern valuePattern = ((PatternConditionPlus)condition).getValuePattern();
    final ElementPatternCondition<?> rootCondition = valuePattern.getCondition();

    if (!String.class.equals(rootCondition.getInitialCondition().getAcceptedClass())) return null;
    if (rootCondition.getConditions().size() != 1) return null;

    final PatternCondition<?> valueCondition = rootCondition.getConditions().get(0);
    if (!(valueCondition instanceof ValuePatternCondition<?>)) return null;

    final Collection values = ((ValuePatternCondition)valueCondition).getValues();
    if (values.size() == 1) {
      final Object value = values.iterator().next();
      return value instanceof String? (String)value : null;
    }
    else if (!values.isEmpty()) {
      for (Object value : values) {
        if (!(value instanceof String)) return null;
      }
      //noinspection unchecked
      return StringUtil.join(values, "|");
    }
    return null;
  }

  // Deserialization hook: picks the injection subclass from the persisted
  // "place" text's prefix.
  @Override
  public BaseInjection createInjection(Element element) {
    String place = StringUtil.notNullize(element.getChildText("place"), "");
    if (place.startsWith("xmlAttribute")) {
      return new XmlAttributeInjection();
    }
    else if (place.startsWith("xmlTag")) {
      return new XmlTagInjection();
    }
    else {
      return new BaseInjection(XML_SUPPORT_ID);
    }
  }

  // Builds a tag injection template from the text node's parent tag and opens
  // the edit flow for it.
  private static boolean doInjectInXmlText(final XmlText host, final String languageId) {
    final XmlTag tag = host.getParentTag();
    if (tag != null) {
      final XmlTagInjection injection = new XmlTagInjection();
      injection.setInjectedLanguageId(languageId);
      injection.setTagName(tag.getLocalName());
      injection.setTagNamespace(tag.getNamespace());
      injection.generatePlaces();
      doEditInjection(host.getProject(), host.getContainingFile(), injection);
      return true;
    }
    return false;
  }

  // Reuses an existing matching tag injection if one is already configured;
  // otherwise adds the template. Registered as an undoable change.
  private static void doEditInjection(final Project project, PsiFile psiFile, final XmlTagInjection template) {
    final Configuration configuration = InjectorUtils.getEditableInstance(project);
    final AbstractTagInjection originalInjection = (AbstractTagInjection)configuration.findExistingInjection(template);

    final XmlTagInjection newInjection = originalInjection == null? template : new XmlTagInjection().copyFrom(originalInjection);
    configuration.replaceInjectionsWithUndo(
      project, psiFile, Collections.singletonList(newInjection),
      ContainerUtil.createMaybeSingletonList(originalInjection),
      Collections.emptyList());
  }

  // Builds an attribute injection template (attribute + enclosing tag identity)
  // and opens the edit flow for it.
  private static boolean doInjectInAttributeValue(final XmlAttributeValue host, final String languageId) {
    final XmlAttribute attribute = PsiTreeUtil.getParentOfType(host, XmlAttribute.class, true);
    final XmlTag tag = attribute == null? null : attribute.getParent();
    if (tag != null) {
      final XmlAttributeInjection injection = new XmlAttributeInjection();
      injection.setInjectedLanguageId(languageId);
      injection.setAttributeName(attribute.getLocalName());
      injection.setAttributeNamespace(attribute.getNamespace());
      injection.setTagName(tag.getLocalName());
      injection.setTagNamespace(tag.getNamespace());
      injection.generatePlaces();
      doEditInjection(host.getProject(), host.getContainingFile(), injection);
      return true;
    }
    return false;
  }

  // Attribute-injection counterpart of the method above.
  private static void doEditInjection(final Project project, PsiFile file, final XmlAttributeInjection template) {
    final Configuration configuration = InjectorUtils.getEditableInstance(project);
    final BaseInjection originalInjection = configuration.findExistingInjection(template);
    final BaseInjection newInjection = originalInjection == null ? template : originalInjection.copy();
    configuration.replaceInjectionsWithUndo(
      project, file, Collections.singletonList(newInjection),
      ContainerUtil.createMaybeSingletonList(originalInjection),
      Collections.emptyList());
  }

  // Collects configured injections that accept the host's effective element:
  // the parent tag for text nodes, the attribute for attribute values.
  private static ArrayList<BaseInjection> collectInjections(final PsiElement host,
                                                            final Configuration configuration) {
    final ArrayList<BaseInjection> result = new ArrayList<>();
    final PsiElement element = host instanceof XmlText? ((XmlText)host).getParentTag() :
                               host instanceof XmlAttributeValue? host.getParent(): host;
    for (BaseInjection injection : configuration.getInjections(XML_SUPPORT_ID)) {
      if (injection.acceptsPsiElement(element)) {
        result.add(injection);
      }
    }
    return result;
  }

  // Settings-page actions for creating fresh tag/attribute injections.
  @Override
  public AnAction[] createAddActions(final Project project, final Consumer<? super BaseInjection> consumer) {
    return new AnAction[] {
      new AnAction("XML Tag Injection", null, PlatformIcons.XML_TAG_ICON) {
        @Override
        public void actionPerformed(@NotNull final AnActionEvent e) {
          final BaseInjection newInjection = showInjectionUI(project, new XmlTagInjection());
          if (newInjection != null) consumer.consume(newInjection);
        }
      },
      new AnAction("XML Attribute Injection", null, PlatformIcons.ANNOTATION_TYPE_ICON) {
        @Override
        public void actionPerformed(@NotNull final AnActionEvent e) {
          final BaseInjection injection = showInjectionUI(project, new XmlAttributeInjection());
          if (injection != null) consumer.consume(injection);
        }
      }
    };
  }

  // Settings-page edit action: uses the XML-specific dialog when the injection
  // round-trips through createFrom(), otherwise defers to the generic editor.
  @Override
  public AnAction createEditAction(final Project project, final Factory<? extends BaseInjection> producer) {
    return new AnAction() {
      @Override
      public void actionPerformed(@NotNull final AnActionEvent e) {
        final BaseInjection originalInjection = producer.create();
        final BaseInjection injection = createFrom(originalInjection);
        if (injection != null) {
          final BaseInjection newInjection = showInjectionUI(project, injection);
          if (newInjection != null) {
            originalInjection.copyFrom(newInjection);
          }
        }
        else {
          perform(project, producer);
        }
      }
    };
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.streams.instagram.serializer.util; import com.google.common.base.Joiner; import com.google.common.base.Optional; import com.google.common.collect.Lists; import org.apache.streams.exceptions.ActivitySerializerException; import org.apache.streams.pojo.extensions.ExtensionUtil; import org.apache.streams.pojo.json.Activity; import org.apache.streams.pojo.json.ActivityObject; import org.apache.streams.pojo.json.Actor; import org.apache.streams.pojo.json.Image; import org.apache.streams.pojo.json.Provider; import org.jinstagram.entity.comments.CommentData; import org.jinstagram.entity.common.Comments; import org.jinstagram.entity.common.ImageData; import org.jinstagram.entity.common.Images; import org.jinstagram.entity.common.VideoData; import org.jinstagram.entity.common.Videos; import org.jinstagram.entity.users.basicinfo.Counts; import org.jinstagram.entity.users.basicinfo.UserInfoData; import org.jinstagram.entity.users.feed.MediaFeedData; import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import static 
org.apache.streams.data.util.ActivityUtil.ensureExtensions; /** * Provides utilities for working with Activity objects within the context of Instagram */ public class InstagramActivityUtil { private static final Logger LOGGER = LoggerFactory.getLogger(InstagramActivityUtil.class); /** * Updates the given Activity object with the values from the item * @param item the object to use as the source * @param activity the target of the updates. Will receive all values from the tweet. * @throws ActivitySerializerException */ public static void updateActivity(MediaFeedData item, Activity activity) throws ActivitySerializerException { activity.setActor(buildActor(item)); activity.setVerb("post"); if(item.getCreatedTime() != null) activity.setPublished(new DateTime(Long.parseLong(item.getCreatedTime()) * 1000)); activity.setId(formatId(activity.getVerb(), Optional.fromNullable( item.getId()) .orNull())); activity.setProvider(getProvider()); activity.setUrl(item.getLink()); activity.setObject(buildActivityObject(item)); if(item.getCaption() != null) activity.setContent(item.getCaption().getText()); addInstagramExtensions(activity, item); } /** * Updates the given Activity object with the values from the item * @param item the object to use as the source * @param activity the target of the updates. Will receive all values from the tweet. 
* @throws ActivitySerializerException */ public static void updateActivity(UserInfoData item, Activity activity) throws ActivitySerializerException { activity.setActor(buildActor(item)); activity.setId(null); activity.setProvider(getProvider()); } /** * Builds an Actor object given a UserInfoData object * @param item * @return Actor object */ public static Actor buildActor(UserInfoData item) { Actor actor = new Actor(); try { Image image = new Image(); image.setUrl(item.getProfile_picture()); Counts counts = item.getCounts(); Map<String, Object> extensions = new HashMap<String, Object>(); extensions.put("followers", counts.getFollwed_by()); extensions.put("follows", counts.getFollows()); extensions.put("screenName", item.getUsername()); extensions.put("posts", counts.getMedia()); actor.setId(formatId(String.valueOf(item.getId()))); actor.setImage(image); actor.setDisplayName(item.getFullName()); actor.setSummary(item.getBio()); actor.setUrl(item.getWebsite()); actor.setAdditionalProperty("handle", item.getUsername()); actor.setAdditionalProperty("extensions", extensions); } catch (Exception e) { LOGGER.error("Exception trying to build actor object: {}", e.getMessage()); } return actor; } /** * Builds the actor * @param item the item * @return a valid Actor */ public static Actor buildActor(MediaFeedData item) { Actor actor = new Actor(); try { Image image = new Image(); image.setUrl(item.getUser().getProfilePictureUrl()); Map<String, Object> extensions = new HashMap<String, Object>(); extensions.put("screenName", item.getUser().getUserName()); actor.setDisplayName(item.getUser().getFullName()); actor.setSummary(item.getUser().getBio()); actor.setUrl(item.getUser().getWebsiteUrl()); actor.setId(formatId(String.valueOf(item.getUser().getId()))); actor.setImage(image); actor.setAdditionalProperty("extensions", extensions); actor.setAdditionalProperty("handle", item.getUser().getUserName()); } catch (Exception e) { LOGGER.error("Exception trying to build actor object: 
{}", e.getMessage()); } return actor; } /** * Builds the ActivityObject * @param item the item * @return a valid Activity Object */ public static ActivityObject buildActivityObject(MediaFeedData item) { ActivityObject actObj = new ActivityObject(); actObj.setObjectType(item.getType()); actObj.setAttachments(buildActivityObjectAttachments(item)); Image standardResolution = new Image(); if(item.getType().equals("image") && item.getImages() != null) { ImageData standardResolutionData = item.getImages().getStandardResolution(); standardResolution.setHeight(new Double((double)standardResolutionData.getImageHeight())); standardResolution.setWidth(new Double((double)standardResolutionData.getImageWidth())); standardResolution.setUrl(standardResolutionData.getImageUrl()); } else if(item.getType().equals("video") && item.getVideos() != null) { VideoData standardResolutionData = item.getVideos().getStandardResolution(); standardResolution.setHeight(new Double((double)standardResolutionData.getHeight())); standardResolution.setWidth(new Double((double)standardResolutionData.getWidth())); standardResolution.setUrl(standardResolutionData.getUrl()); } actObj.setImage(standardResolution); return actObj; } /** * Builds all of the attachments associated with a MediaFeedData object * * @param item * @return */ public static List<ActivityObject> buildActivityObjectAttachments(MediaFeedData item) { List<ActivityObject> attachments = new ArrayList<ActivityObject>(); addImageObjects(attachments, item); addVideoObjects(attachments, item); return attachments; } /** * Adds any image objects to the attachment field * @param attachments * @param item */ public static void addImageObjects(List<ActivityObject> attachments, MediaFeedData item) { Images images = item.getImages(); if(images != null) { try { ImageData thumbnail = images.getThumbnail(); ImageData lowResolution = images.getLowResolution(); ActivityObject thumbnailObject = new ActivityObject(); Image thumbnailImage = new Image(); 
thumbnailImage.setUrl(thumbnail.getImageUrl()); thumbnailImage.setHeight(new Double((double) thumbnail.getImageHeight())); thumbnailImage.setWidth(new Double((double) thumbnail.getImageWidth())); thumbnailObject.setImage(thumbnailImage); thumbnailObject.setObjectType("image"); ActivityObject lowResolutionObject = new ActivityObject(); Image lowResolutionImage = new Image(); lowResolutionImage.setUrl(lowResolution.getImageUrl()); lowResolutionImage.setHeight(new Double((double) lowResolution.getImageHeight())); lowResolutionImage.setWidth(new Double((double) lowResolution.getImageWidth())); lowResolutionObject.setImage(lowResolutionImage); lowResolutionObject.setObjectType("image"); attachments.add(thumbnailObject); attachments.add(lowResolutionObject); } catch (Exception e) { LOGGER.error("Failed to add image objects: {}", e.getMessage()); } } } /** * Adds any video objects to the attachment field * @param attachments * @param item */ public static void addVideoObjects(List<ActivityObject> attachments, MediaFeedData item) { Videos videos = item.getVideos(); if(videos != null) { try { VideoData lowResolutionVideo = videos.getLowResolution(); ActivityObject lowResolutionVideoObject = new ActivityObject(); Image lowResolutionVideoImage = new Image(); lowResolutionVideoImage.setUrl(lowResolutionVideo.getUrl()); lowResolutionVideoImage.setHeight(new Double((double) lowResolutionVideo.getHeight())); lowResolutionVideoImage.setWidth(new Double((double) lowResolutionVideo.getWidth())); lowResolutionVideoObject.setImage(lowResolutionVideoImage); lowResolutionVideoObject.setObjectType("video"); attachments.add(lowResolutionVideoObject); } catch (Exception e) { LOGGER.error("Failed to add video objects: {}", e.getMessage()); } } } /** * Gets the links from the Instagram event * @param item the object to use as the source * @return a list of links corresponding to the expanded URL */ public static List<String> getLinks(MediaFeedData item) { List<String> links = 
Lists.newArrayList(); return links; } /** * Adds the location extension and populates with teh instagram data * @param activity the Activity object to update * @param item the object to use as the source */ public static void addLocationExtension(Activity activity, MediaFeedData item) { Map<String, Object> extensions = ExtensionUtil.ensureExtensions(activity); if(item.getLocation() != null) { Map<String, Object> coordinates = new HashMap<String, Object>(); coordinates.put("type", "Point"); coordinates.put("coordinates", "[" + item.getLocation().getLongitude() + "," + item.getLocation().getLatitude() + "]"); extensions.put("coordinates", coordinates); } } /** * Gets the common instagram {@link org.apache.streams.pojo.json.Provider} object * @return a provider object representing Instagram */ public static Provider getProvider() { Provider provider = new Provider(); provider.setId("id:providers:instagram"); provider.setDisplayName("Instagram"); return provider; } /** * Formats the ID to conform with the Apache Streams activity ID convention * @param idparts the parts of the ID to join * @return a valid Activity ID in format "id:instagram:part1:part2:...partN" */ public static String formatId(String... 
idparts) { return Joiner.on(":").join(Lists.asList("id:instagram", idparts)); } /** * Takes various parameters from the instagram object that are currently not part of teh * activity schema and stores them in a generic extensions attribute * @param activity * @param item */ public static void addInstagramExtensions(Activity activity, MediaFeedData item) { Map<String, Object> extensions = ExtensionUtil.ensureExtensions(activity); addLocationExtension(activity, item); if(item.getLikes() != null) { Map<String, Object> likes = new HashMap<String, Object>(); likes.put("count", item.getLikes().getCount()); extensions.put("likes", likes); } extensions.put("hashtags", item.getTags()); Comments comments = item.getComments(); String commentsConcat = ""; if(comments != null) { for (CommentData commentData : comments.getComments()) { commentsConcat += " " + commentData.getText(); } } if(item.getCaption() != null) { commentsConcat += " " + item.getCaption().getText(); } extensions.put("keywords", commentsConcat); } }
/*
 * Kodkod -- Copyright (c) 2005-2011, Emina Torlak
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package kodkod.engine.satlab;

import java.util.NoSuchElementException;

import org.sat4j.specs.ContradictionException;
import org.sat4j.specs.ISolver;
import org.sat4j.specs.IVecInt;
import org.sat4j.specs.IteratorInt;

/**
 * A wrapper class that provides
 * access to the basic functionality of the MiniSAT solvers
 * (org.sat4j.specs.ISolver) from CRIL.
 *
 * @author Emina Torlak
 */
final class SAT4J implements SATSolver {
	// The underlying SAT4J solver; set to null by free() to release it.
	private ISolver solver;
	// Reusable read-only adapter so addClause does not allocate per call.
	private final ReadOnlyIVecInt wrapper;
	// Tri-state satisfiability flag: null = unknown, TRUE/FALSE = last solve()
	// result (FALSE is also set eagerly when a clause addition contradicts).
	private Boolean sat;
	private int vars, clauses;

	/**
	 * Constructs a wrapper for the given instance
	 * of ISolver.
	 * @throws NullPointerException solver = null
	 */
	SAT4J(ISolver solver) {
		if (solver==null)
			throw new NullPointerException("solver");
		this.solver = solver;
		this.wrapper = new ReadOnlyIVecInt();
		this.sat = null;
		this.vars = this.clauses = 0;
	}

	/**
	 * {@inheritDoc}
	 * @see kodkod.engine.satlab.SATSolver#numberOfVariables()
	 */
	public int numberOfVariables() {
		return vars;
	}

	/**
	 * {@inheritDoc}
	 * @see kodkod.engine.satlab.SATSolver#numberOfClauses()
	 */
	public int numberOfClauses() {
		return clauses;
	}

	/**
	 * {@inheritDoc}
	 * @see kodkod.engine.satlab.SATSolver#addVariables(int)
	 */
	public void addVariables(int numVars) {
		if (numVars < 0)
			throw new IllegalArgumentException("numVars < 0: " + numVars);
		else if (numVars > 0) {
			vars += numVars;
			// newVar takes the new TOTAL variable count, not the increment.
			solver.newVar(vars);
		}
	}

	/**
	 * {@inheritDoc}
	 * @see kodkod.engine.satlab.SATSolver#addClause(int[])
	 */
	public boolean addClause(int[] lits) {
		try {
			// Once the formula is known UNSAT, further clauses are pointless.
			if (!Boolean.FALSE.equals(sat)) {
				clauses++;
				solver.addClause(wrapper.wrap(lits));
//				for(int lit : lits) {
//					System.out.print(lit + " ");
//				}
//				System.out.println(0);
				return true;
			}
		} catch (ContradictionException e) {
			// The new clause made the formula trivially unsatisfiable.
			sat = Boolean.FALSE;
		}
		return false;
	}

	/**
	 * {@inheritDoc}
	 * @see kodkod.engine.satlab.SATSolver#solve()
	 */
	public boolean solve() {
		try {
			// Skip the call if a contradiction already forced sat = FALSE.
			if (!Boolean.FALSE.equals(sat))
				sat = Boolean.valueOf(solver.isSatisfiable());
			return sat;
		} catch (org.sat4j.specs.TimeoutException e) {
			throw new RuntimeException("timed out");
		}
	}

	/**
	 * {@inheritDoc}
	 * @see kodkod.engine.satlab.SATSolver#valueOf(int)
	 */
	public final boolean valueOf(int variable) {
		// A model is only available after a successful (SAT) solve().
		if (!Boolean.TRUE.equals(sat))
			throw new IllegalStateException();
		if (variable < 1 || variable > vars)
			throw new IllegalArgumentException(variable + " !in [1.." + vars+"]");
		return solver.model(variable);
	}

	/**
	 * {@inheritDoc}
	 * @see kodkod.engine.satlab.SATSolver#free()
	 */
	public synchronized final void free() {
		// Drop the reference so the native/heap solver state can be reclaimed;
		// subsequent calls on this wrapper will NPE by design.
		solver = null;
	}

	/**
	 * A wrapper for an int array that provides
	 * read-only access to the array via the IVecInt interface.
	 *
	 * @author Emina Torlak
	 */
	private static final class ReadOnlyIVecInt implements IVecInt {
		private static final long serialVersionUID = -7689441271777278043L;
		// The wrapped array; NOT copied — callers must not mutate it while wrapped.
		private int[] vec;

		/**
		 * Sets this.vec to the given vector
		 * and returns this.
		 */
		IVecInt wrap(int[] vec) {
			this.vec = vec;
			return this;
		}

		public int size()             { return vec.length; }
		public boolean isEmpty()      { return size() == 0; }
		// unsafeGet skips bounds checking, per the IVecInt contract.
		public int unsafeGet(int arg0){ return vec[arg0]; }
		public int last()             { return vec[vec.length - 1]; }
		public int[] toArray()        { return vec; }

		public int get(int arg0) {
			if (arg0 < 0 || arg0 >= vec.length)
				throw new IndexOutOfBoundsException("arg0: " + arg0);
			return vec[arg0];
		}

		public boolean contains(int arg0) {
			final int[] workArray = vec; // faster access
			for(int i : workArray) {
				if (i==arg0) return true;
			}
			return false;
		}

		public void copyTo(IVecInt arg0) {
			int argLength = arg0.size();
			final int[] workArray = vec; // faster access
			arg0.ensure(argLength + workArray.length);
			for(int i : workArray) {
				arg0.set(argLength++, i);
			}
		}

		public void copyTo(int[] arg0) {
			assert arg0.length >= vec.length;
			System.arraycopy(vec,0, arg0, 0, vec.length);
		}

		public IteratorInt iterator() {
			return new IteratorInt() {
				int cursor = 0;
				public boolean hasNext() { return cursor < vec.length; }
				public int next() {
					if (!hasNext()) throw new NoSuchElementException();
					return vec[cursor++];
				}
			};
		}

		// Returns the index of e, or -1 if absent.
		public int containsAt(int e) {
			final int[] workArray = vec; // faster access
			for(int n=workArray.length, i=0; i<n; i++)
				if (workArray[i]==e) return i;
			return -1;
		}

		// Returns the index of e strictly after position 'from', or -1 if absent.
		public int containsAt(int e, int from) {
			final int[] workArray = vec; // faster access
			if (from<workArray.length)
				for(int n=workArray.length, i=from+1; i<n; i++)
					if (workArray[i]==e) return i;
			return -1;
		}

		public int indexOf(int e) {
			final int[] workArray = vec; // faster access
			for (int i = 0, n = workArray.length; i < n; i++) {
				if (workArray[i] == e) return i;
			}
			return -1;
		}

		// Mutators are unsupported: this adapter is strictly read-only.
		public void shrink(int arg0)          { throw new UnsupportedOperationException(); }
		public void shrinkTo(int arg0)        { throw new UnsupportedOperationException(); }
		public IVecInt pop()                  { throw new UnsupportedOperationException(); }
		public void growTo(int arg0, int arg1){ throw new UnsupportedOperationException(); }
		public void ensure(int arg0)          { throw new UnsupportedOperationException(); }
		public IVecInt push(int arg0)         { throw new UnsupportedOperationException(); }
		public void unsafePush(int arg0)      { throw new UnsupportedOperationException(); }
		public void clear()                   { throw new UnsupportedOperationException(); }
		public void moveTo(IVecInt arg0)      { throw new UnsupportedOperationException(); }
		public void moveTo2(IVecInt arg0)     { throw new UnsupportedOperationException(); }
		public void moveTo(int[] arg0)        { throw new UnsupportedOperationException(); }
		public void moveTo(int arg0, int arg1){ throw new UnsupportedOperationException(); }
		public void moveTo(int i, int[] arg1) { throw new UnsupportedOperationException(); }
		public void insertFirst(int arg0)     { throw new UnsupportedOperationException(); }
		public void remove(int arg0)          { throw new UnsupportedOperationException(); }
		public int delete(int arg0)           { throw new UnsupportedOperationException(); }
		public void set(int arg0, int arg1)   { throw new UnsupportedOperationException(); }
		public void sort()                    { throw new UnsupportedOperationException(); }
		public void sortUnique()              { throw new UnsupportedOperationException(); }
	}

	// Ad-hoc smoke test: builds the trivially UNSAT formula {x1} AND {!x1}
	// and prints the solver's verdict (expected: false).
	public static void main(String[] args) {
		final SAT4J z = (SAT4J)SATFactory.DefaultSAT4J.instance();
//		z.addVariables(3);
//		int[] clause = {1,2,3};
//		z.addClause(clause);
//		int[] clause1 = {-3};
//		z.addClause(clause1);
//		System.out.println(z.solver.nVars());
//		z.addVariables(4);
//		System.out.println(z.solver.nVars());
//		clause1[0] = 7;
//		z.addClause(clause1);
		z.addVariables(1);
		int[] clause1 = {1};
		z.addClause(clause1);
		clause1[0] = -1;
		z.addClause(clause1);
		System.out.println(z.solve());
		//System.out.println(z.variablesThatAre(true, 1, 1));
	}
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.devopsguru.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Details about the source of the anomalous operational data that triggered the anomaly.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/devops-guru-2020-12-01/AnomalySourceDetails" target="_top">AWS
 *      API Documentation</a>
 */
// NOTE: code-generated by the AWS Java SDK code generator — manual edits will be
// overwritten on the next generation pass.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AnomalySourceDetails implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * An array of <code>CloudWatchMetricsDetail</code> objects that contain information about analyzed CloudWatch
     * metrics that show anomalous behavior.
     * </p>
     */
    private java.util.List<CloudWatchMetricsDetail> cloudWatchMetrics;
    /**
     * <p>
     * An array of <code>PerformanceInsightsMetricsDetail</code> objects that contain information about analyzed
     * Performance Insights metrics that show anomalous behavior.
     * </p>
     */
    private java.util.List<PerformanceInsightsMetricsDetail> performanceInsightsMetrics;

    /**
     * <p>
     * An array of <code>CloudWatchMetricsDetail</code> objects that contain information about analyzed CloudWatch
     * metrics that show anomalous behavior.
     * </p>
     *
     * @return An array of <code>CloudWatchMetricsDetail</code> objects that contain information about analyzed
     *         CloudWatch metrics that show anomalous behavior.
     */
    public java.util.List<CloudWatchMetricsDetail> getCloudWatchMetrics() {
        return cloudWatchMetrics;
    }

    /**
     * <p>
     * An array of <code>CloudWatchMetricsDetail</code> objects that contain information about analyzed CloudWatch
     * metrics that show anomalous behavior.
     * </p>
     *
     * @param cloudWatchMetrics
     *        An array of <code>CloudWatchMetricsDetail</code> objects that contain information about analyzed
     *        CloudWatch metrics that show anomalous behavior.
     */
    public void setCloudWatchMetrics(java.util.Collection<CloudWatchMetricsDetail> cloudWatchMetrics) {
        if (cloudWatchMetrics == null) {
            this.cloudWatchMetrics = null;
            return;
        }

        // Defensive copy: detaches internal state from the caller's collection.
        this.cloudWatchMetrics = new java.util.ArrayList<CloudWatchMetricsDetail>(cloudWatchMetrics);
    }

    /**
     * <p>
     * An array of <code>CloudWatchMetricsDetail</code> objects that contain information about analyzed CloudWatch
     * metrics that show anomalous behavior.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setCloudWatchMetrics(java.util.Collection)} or {@link #withCloudWatchMetrics(java.util.Collection)} if
     * you want to override the existing values.
     * </p>
     *
     * @param cloudWatchMetrics
     *        An array of <code>CloudWatchMetricsDetail</code> objects that contain information about analyzed
     *        CloudWatch metrics that show anomalous behavior.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AnomalySourceDetails withCloudWatchMetrics(CloudWatchMetricsDetail... cloudWatchMetrics) {
        if (this.cloudWatchMetrics == null) {
            // Presize to the number of varargs elements being appended.
            setCloudWatchMetrics(new java.util.ArrayList<CloudWatchMetricsDetail>(cloudWatchMetrics.length));
        }
        for (CloudWatchMetricsDetail ele : cloudWatchMetrics) {
            this.cloudWatchMetrics.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * An array of <code>CloudWatchMetricsDetail</code> objects that contain information about analyzed CloudWatch
     * metrics that show anomalous behavior.
     * </p>
     *
     * @param cloudWatchMetrics
     *        An array of <code>CloudWatchMetricsDetail</code> objects that contain information about analyzed
     *        CloudWatch metrics that show anomalous behavior.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AnomalySourceDetails withCloudWatchMetrics(java.util.Collection<CloudWatchMetricsDetail> cloudWatchMetrics) {
        setCloudWatchMetrics(cloudWatchMetrics);
        return this;
    }

    /**
     * <p>
     * An array of <code>PerformanceInsightsMetricsDetail</code> objects that contain information about analyzed
     * Performance Insights metrics that show anomalous behavior.
     * </p>
     *
     * @return An array of <code>PerformanceInsightsMetricsDetail</code> objects that contain information about analyzed
     *         Performance Insights metrics that show anomalous behavior.
     */
    public java.util.List<PerformanceInsightsMetricsDetail> getPerformanceInsightsMetrics() {
        return performanceInsightsMetrics;
    }

    /**
     * <p>
     * An array of <code>PerformanceInsightsMetricsDetail</code> objects that contain information about analyzed
     * Performance Insights metrics that show anomalous behavior.
     * </p>
     *
     * @param performanceInsightsMetrics
     *        An array of <code>PerformanceInsightsMetricsDetail</code> objects that contain information about analyzed
     *        Performance Insights metrics that show anomalous behavior.
     */
    public void setPerformanceInsightsMetrics(java.util.Collection<PerformanceInsightsMetricsDetail> performanceInsightsMetrics) {
        if (performanceInsightsMetrics == null) {
            this.performanceInsightsMetrics = null;
            return;
        }

        // Defensive copy: detaches internal state from the caller's collection.
        this.performanceInsightsMetrics = new java.util.ArrayList<PerformanceInsightsMetricsDetail>(performanceInsightsMetrics);
    }

    /**
     * <p>
     * An array of <code>PerformanceInsightsMetricsDetail</code> objects that contain information about analyzed
     * Performance Insights metrics that show anomalous behavior.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setPerformanceInsightsMetrics(java.util.Collection)} or
     * {@link #withPerformanceInsightsMetrics(java.util.Collection)} if you want to override the existing values.
     * </p>
     *
     * @param performanceInsightsMetrics
     *        An array of <code>PerformanceInsightsMetricsDetail</code> objects that contain information about analyzed
     *        Performance Insights metrics that show anomalous behavior.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AnomalySourceDetails withPerformanceInsightsMetrics(PerformanceInsightsMetricsDetail... performanceInsightsMetrics) {
        if (this.performanceInsightsMetrics == null) {
            setPerformanceInsightsMetrics(new java.util.ArrayList<PerformanceInsightsMetricsDetail>(performanceInsightsMetrics.length));
        }
        for (PerformanceInsightsMetricsDetail ele : performanceInsightsMetrics) {
            this.performanceInsightsMetrics.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * An array of <code>PerformanceInsightsMetricsDetail</code> objects that contain information about analyzed
     * Performance Insights metrics that show anomalous behavior.
     * </p>
     *
     * @param performanceInsightsMetrics
     *        An array of <code>PerformanceInsightsMetricsDetail</code> objects that contain information about analyzed
     *        Performance Insights metrics that show anomalous behavior.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AnomalySourceDetails withPerformanceInsightsMetrics(java.util.Collection<PerformanceInsightsMetricsDetail> performanceInsightsMetrics) {
        setPerformanceInsightsMetrics(performanceInsightsMetrics);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getCloudWatchMetrics() != null)
            sb.append("CloudWatchMetrics: ").append(getCloudWatchMetrics()).append(",");
        if (getPerformanceInsightsMetrics() != null)
            sb.append("PerformanceInsightsMetrics: ").append(getPerformanceInsightsMetrics());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof AnomalySourceDetails == false)
            return false;
        AnomalySourceDetails other = (AnomalySourceDetails) obj;
        // XOR check: exactly one side null means not equal.
        if (other.getCloudWatchMetrics() == null ^ this.getCloudWatchMetrics() == null)
            return false;
        if (other.getCloudWatchMetrics() != null && other.getCloudWatchMetrics().equals(this.getCloudWatchMetrics()) == false)
            return false;
        if (other.getPerformanceInsightsMetrics() == null ^ this.getPerformanceInsightsMetrics() == null)
            return false;
        if (other.getPerformanceInsightsMetrics() != null
                && other.getPerformanceInsightsMetrics().equals(this.getPerformanceInsightsMetrics()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getCloudWatchMetrics() == null) ? 0 : getCloudWatchMetrics().hashCode());
        hashCode = prime * hashCode + ((getPerformanceInsightsMetrics() == null) ? 0 : getPerformanceInsightsMetrics().hashCode());
        return hashCode;
    }

    @Override
    public AnomalySourceDetails clone() {
        try {
            return (AnomalySourceDetails) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.devopsguru.model.transform.AnomalySourceDetailsMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/** * Copyright 2011-2016 GatlingCorp (http://gatling.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gatling.liferay.model.impl; import com.liferay.portal.kernel.bean.AutoEscapeBeanHandler; import com.liferay.portal.kernel.util.GetterUtil; import com.liferay.portal.kernel.util.ProxyUtil; import com.liferay.portal.kernel.util.StringBundler; import com.liferay.portal.kernel.util.StringPool; import com.liferay.portal.model.CacheModel; import com.liferay.portal.model.impl.BaseModelImpl; import com.liferay.portal.service.ServiceContext; import com.liferay.portlet.expando.model.ExpandoBridge; import com.liferay.portlet.expando.util.ExpandoBridgeFactoryUtil; import io.gatling.liferay.model.Scenario; import io.gatling.liferay.model.ScenarioModel; import java.io.Serializable; import java.sql.Types; import java.util.HashMap; import java.util.Map; /** * The base model implementation for the Scenario service. Represents a row in the &quot;StressTool_Scenario&quot; database table, with each column mapped to a property of this class. * * <p> * This implementation and its corresponding interface {@link io.gatling.liferay.model.ScenarioModel} exist only as a container for the default property accessors generated by ServiceBuilder. Helper methods and all application logic should be put in {@link ScenarioImpl}. 
* </p>
*
* @author Brian Wing Shun Chan
* @see ScenarioImpl
* @see io.gatling.liferay.model.Scenario
* @see io.gatling.liferay.model.ScenarioModel
* @generated
*/
public class ScenarioModelImpl extends BaseModelImpl<Scenario>
    implements ScenarioModel {
    /*
     * NOTE FOR DEVELOPERS:
     *
     * Never modify or reference this class directly. All methods that expect a
     * scenario model instance should use the
     * {@link io.gatling.liferay.model.Scenario} interface instead.
     */

    /** Name of the database table backing this model. */
    public static final String TABLE_NAME = "StressTool_Scenario";

    /** Column-name / JDBC-type pairs for every column of the table. */
    public static final Object[][] TABLE_COLUMNS = {
            { "scenario_id", Types.BIGINT },
            { "name", Types.VARCHAR },
            { "url_site", Types.VARCHAR },
            { "group_id", Types.BIGINT },
            { "simulation_id", Types.BIGINT },
            { "numberOfUsers", Types.BIGINT },
            { "duration", Types.BIGINT },
            { "injection", Types.VARCHAR }
        };

    public static final String TABLE_SQL_CREATE = "create table StressTool_Scenario (scenario_id LONG not null primary key,name VARCHAR(75) null,url_site VARCHAR(75) null,group_id LONG,simulation_id LONG,numberOfUsers LONG,duration LONG,injection VARCHAR(75) null)";
    public static final String TABLE_SQL_DROP = "drop table StressTool_Scenario";
    public static final String ORDER_BY_JPQL = " ORDER BY scenario.scenario_id ASC";
    public static final String ORDER_BY_SQL = " ORDER BY StressTool_Scenario.scenario_id ASC";
    public static final String DATA_SOURCE = "liferayDataSource";
    public static final String SESSION_FACTORY = "liferaySessionFactory";
    public static final String TX_MANAGER = "liferayTransactionManager";

    /** Whether the entity cache is enabled for this model (portal property, defaults to true). */
    public static final boolean ENTITY_CACHE_ENABLED = GetterUtil.getBoolean(com.liferay.util.service.ServiceProps.get(
                "value.object.entity.cache.enabled.io.gatling.liferay.model.Scenario"),
            true);

    /** Whether the finder cache is enabled for this model (portal property, defaults to true). */
    public static final boolean FINDER_CACHE_ENABLED = GetterUtil.getBoolean(com.liferay.util.service.ServiceProps.get(
                "value.object.finder.cache.enabled.io.gatling.liferay.model.Scenario"),
            true);

    /** Whether column-bitmask change tracking is enabled (portal property, defaults to true). */
    public static final boolean COLUMN_BITMASK_ENABLED = GetterUtil.getBoolean(com.liferay.util.service.ServiceProps.get(
                "value.object.column.bitmask.enabled.io.gatling.liferay.model.Scenario"),
            true);

    // FIX: these bitmask constants were declared as mutable "public static long",
    // letting any caller reassign them at runtime. They are write-once constants
    // and are now final; reads remain source- and binary-compatible.
    public static final long NAME_COLUMN_BITMASK = 1L;
    public static final long SIMULATION_ID_COLUMN_BITMASK = 2L;
    public static final long SCENARIO_ID_COLUMN_BITMASK = 4L;

    public static final long LOCK_EXPIRATION_TIME = GetterUtil.getLong(com.liferay.util.service.ServiceProps.get(
                "lock.expiration.time.io.gatling.liferay.model.Scenario"));

    // FIX: also write-once; made final for the same reason as the bitmasks above.
    private static final ClassLoader _classLoader = Scenario.class.getClassLoader();
    private static final Class<?>[] _escapedModelInterfaces = new Class[] {
            Scenario.class
        };

    private long _scenario_id;
    private String _name;
    // Snapshot of _name taken on first setName() call; cleared by resetOriginalValues().
    private String _originalName;
    private String _url_site;
    private long _group_id;
    private long _simulation_id;
    // Snapshot of _simulation_id taken on first setSimulation_id() call.
    private long _originalSimulation_id;
    private boolean _setOriginalSimulation_id;
    private long _numberOfUsers;
    private long _duration;
    private String _injection;
    // OR-combination of *_COLUMN_BITMASK values for the columns modified so far.
    private long _columnBitmask;
    // Lazily-created auto-escaping proxy; see toEscapedModel().
    private Scenario _escapedModel;

    public ScenarioModelImpl() {
    }

    @Override
    public long getPrimaryKey() {
        return _scenario_id;
    }

    @Override
    public void setPrimaryKey(long primaryKey) {
        setScenario_id(primaryKey);
    }

    @Override
    public Serializable getPrimaryKeyObj() {
        return _scenario_id;
    }

    @Override
    public void setPrimaryKeyObj(Serializable primaryKeyObj) {
        setPrimaryKey(((Long) primaryKeyObj).longValue());
    }

    @Override
    public Class<?> getModelClass() {
        return Scenario.class;
    }

    @Override
    public String getModelClassName() {
        return Scenario.class.getName();
    }

    /**
     * Returns a snapshot of every column value keyed by column name.
     */
    @Override
    public Map<String, Object> getModelAttributes() {
        Map<String, Object> attributes = new HashMap<String, Object>();

        attributes.put("scenario_id", getScenario_id());
        attributes.put("name", getName());
        attributes.put("url_site", getUrl_site());
        attributes.put("group_id", getGroup_id());
        attributes.put("simulation_id", getSimulation_id());
        attributes.put("numberOfUsers", getNumberOfUsers());
        attributes.put("duration", getDuration());
        attributes.put("injection", getInjection());

        return attributes;
    }

    /**
     * Applies the given column values; keys that are absent (null) are left
     * untouched so partial updates are possible.
     */
    @Override
    public void setModelAttributes(Map<String, Object> attributes) {
        Long scenario_id = (Long) attributes.get("scenario_id");

        if (scenario_id != null) {
            setScenario_id(scenario_id);
        }

        String name = (String) attributes.get("name");

        if (name != null) {
            setName(name);
        }

        String url_site = (String) attributes.get("url_site");

        if (url_site != null) {
            setUrl_site(url_site);
        }

        Long group_id = (Long) attributes.get("group_id");

        if (group_id != null) {
            setGroup_id(group_id);
        }

        Long simulation_id = (Long) attributes.get("simulation_id");

        if (simulation_id != null) {
            setSimulation_id(simulation_id);
        }

        Long numberOfUsers = (Long) attributes.get("numberOfUsers");

        if (numberOfUsers != null) {
            setNumberOfUsers(numberOfUsers);
        }

        Long duration = (Long) attributes.get("duration");

        if (duration != null) {
            setDuration(duration);
        }

        String injection = (String) attributes.get("injection");

        if (injection != null) {
            setInjection(injection);
        }
    }

    @Override
    public long getScenario_id() {
        return _scenario_id;
    }

    @Override
    public void setScenario_id(long scenario_id) {
        _scenario_id = scenario_id;
    }

    @Override
    public String getName() {
        if (_name == null) {
            return StringPool.BLANK;
        }
        else {
            return _name;
        }
    }

    /**
     * Sets the name, records it in the column bitmask, and keeps the first
     * previous value so finder caches keyed on the old name can be invalidated.
     */
    @Override
    public void setName(String name) {
        _columnBitmask |= NAME_COLUMN_BITMASK;

        if (_originalName == null) {
            _originalName = _name;
        }

        _name = name;
    }

    public String getOriginalName() {
        return GetterUtil.getString(_originalName);
    }

    @Override
    public String getUrl_site() {
        if (_url_site == null) {
            return StringPool.BLANK;
        }
        else {
            return _url_site;
        }
    }

    @Override
    public void setUrl_site(String url_site) {
        _url_site = url_site;
    }

    @Override
    public long getGroup_id() {
        return _group_id;
    }

    @Override
    public void setGroup_id(long group_id) {
        _group_id = group_id;
    }

    @Override
    public long getSimulation_id() {
        return _simulation_id;
    }

    /**
     * Sets the simulation id, records it in the column bitmask, and keeps the
     * first previous value (guarded by a flag because 0 is a valid id).
     */
    @Override
    public void setSimulation_id(long simulation_id) {
        _columnBitmask |= SIMULATION_ID_COLUMN_BITMASK;

        if (!_setOriginalSimulation_id) {
            _setOriginalSimulation_id = true;

            _originalSimulation_id = _simulation_id;
        }

        _simulation_id = simulation_id;
    }

    public long getOriginalSimulation_id() {
        return _originalSimulation_id;
    }

    @Override
    public long getNumberOfUsers() {
        return _numberOfUsers;
    }

    @Override
    public void setNumberOfUsers(long numberOfUsers) {
        _numberOfUsers = numberOfUsers;
    }

    @Override
    public long getDuration() {
        return _duration;
    }

    @Override
    public void setDuration(long duration) {
        _duration = duration;
    }

    @Override
    public String getInjection() {
        if (_injection == null) {
            return StringPool.BLANK;
        }
        else {
            return _injection;
        }
    }

    @Override
    public void setInjection(String injection) {
        _injection = injection;
    }

    public long getColumnBitmask() {
        return _columnBitmask;
    }

    @Override
    public ExpandoBridge getExpandoBridge() {
        // companyId 0 is the generated default for this model.
        return ExpandoBridgeFactoryUtil.getExpandoBridge(0,
            Scenario.class.getName(), getPrimaryKey());
    }

    @Override
    public void setExpandoBridgeAttributes(ServiceContext serviceContext) {
        ExpandoBridge expandoBridge = getExpandoBridge();

        expandoBridge.setAttributes(serviceContext);
    }

    /**
     * Returns a proxy of this model whose string getters are HTML-escaped.
     * The proxy is created lazily and cached.
     */
    @Override
    public Scenario toEscapedModel() {
        if (_escapedModel == null) {
            _escapedModel = (Scenario) ProxyUtil.newProxyInstance(_classLoader,
                    _escapedModelInterfaces, new AutoEscapeBeanHandler(this));
        }

        return _escapedModel;
    }

    @Override
    public Object clone() {
        ScenarioImpl scenarioImpl = new ScenarioImpl();

        scenarioImpl.setScenario_id(getScenario_id());
        scenarioImpl.setName(getName());
        scenarioImpl.setUrl_site(getUrl_site());
        scenarioImpl.setGroup_id(getGroup_id());
        scenarioImpl.setSimulation_id(getSimulation_id());
        scenarioImpl.setNumberOfUsers(getNumberOfUsers());
        scenarioImpl.setDuration(getDuration());
        scenarioImpl.setInjection(getInjection());

        scenarioImpl.resetOriginalValues();

        return scenarioImpl;
    }

    /**
     * Orders scenarios by primary key, consistent with {@link #ORDER_BY_JPQL}.
     */
    @Override
    public int compareTo(Scenario scenario) {
        long primaryKey = scenario.getPrimaryKey();

        if (getPrimaryKey() < primaryKey) {
            return -1;
        }
        else if (getPrimaryKey() > primaryKey) {
            return 1;
        }
        else {
            return 0;
        }
    }

    /**
     * Equality is by primary key only, as for all ServiceBuilder models.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (!(obj instanceof Scenario)) {
            return false;
        }

        Scenario scenario = (Scenario) obj;

        long primaryKey = scenario.getPrimaryKey();

        if (getPrimaryKey() == primaryKey) {
            return true;
        }
        else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        return (int) getPrimaryKey();
    }

    /**
     * Clears modification tracking after the model has been persisted.
     */
    @Override
    public void resetOriginalValues() {
        ScenarioModelImpl scenarioModelImpl = this;

        scenarioModelImpl._originalName = scenarioModelImpl._name;

        scenarioModelImpl._originalSimulation_id = scenarioModelImpl._simulation_id;

        scenarioModelImpl._setOriginalSimulation_id = false;

        scenarioModelImpl._columnBitmask = 0;
    }

    /**
     * Builds a compact, serializable cache representation; empty strings are
     * normalized to null to save cache space.
     */
    @Override
    public CacheModel<Scenario> toCacheModel() {
        ScenarioCacheModel scenarioCacheModel = new ScenarioCacheModel();

        scenarioCacheModel.scenario_id = getScenario_id();

        scenarioCacheModel.name = getName();

        String name = scenarioCacheModel.name;

        if ((name != null) && (name.length() == 0)) {
            scenarioCacheModel.name = null;
        }

        scenarioCacheModel.url_site = getUrl_site();

        String url_site = scenarioCacheModel.url_site;

        if ((url_site != null) && (url_site.length() == 0)) {
            scenarioCacheModel.url_site = null;
        }

        scenarioCacheModel.group_id = getGroup_id();

        scenarioCacheModel.simulation_id = getSimulation_id();

        scenarioCacheModel.numberOfUsers = getNumberOfUsers();

        scenarioCacheModel.duration = getDuration();

        scenarioCacheModel.injection = getInjection();

        String injection = scenarioCacheModel.injection;

        if ((injection != null) && (injection.length() == 0)) {
            scenarioCacheModel.injection = null;
        }

        return scenarioCacheModel;
    }

    @Override
    public String toString() {
        // 17 = 8 "key=" fragments + 8 values + closing brace.
        StringBundler sb = new StringBundler(17);

        sb.append("{scenario_id=");
        sb.append(getScenario_id());
        sb.append(", name=");
        sb.append(getName());
        sb.append(", url_site=");
        sb.append(getUrl_site());
        sb.append(", group_id=");
        sb.append(getGroup_id());
        sb.append(", simulation_id=");
        sb.append(getSimulation_id());
        sb.append(", numberOfUsers=");
        sb.append(getNumberOfUsers());
        sb.append(", duration=");
        sb.append(getDuration());
        sb.append(", injection=");
        sb.append(getInjection());
        sb.append("}");

        return sb.toString();
    }

    @Override
    public String toXmlString() {
        // 28 = model envelope (4) + 8 columns x 3 fragments each.
        StringBundler sb = new StringBundler(28);

        sb.append("<model><model-name>");
        sb.append("io.gatling.liferay.model.Scenario");
        sb.append("</model-name>");

        sb.append(
            "<column><column-name>scenario_id</column-name><column-value><![CDATA[");
        sb.append(getScenario_id());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>name</column-name><column-value><![CDATA[");
        sb.append(getName());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>url_site</column-name><column-value><![CDATA[");
        sb.append(getUrl_site());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>group_id</column-name><column-value><![CDATA[");
        sb.append(getGroup_id());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>simulation_id</column-name><column-value><![CDATA[");
        sb.append(getSimulation_id());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>numberOfUsers</column-name><column-value><![CDATA[");
        sb.append(getNumberOfUsers());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>duration</column-name><column-value><![CDATA[");
        sb.append(getDuration());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>injection</column-name><column-value><![CDATA[");
        sb.append(getInjection());
        sb.append("]]></column-value></column>");

        sb.append("</model>");

        return sb.toString();
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.cache.CacheAtomicityMode; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.processors.cache.GridCacheAbstractByteArrayValuesSelfTest; import org.apache.ignite.testframework.MvccFeatureChecker; import org.apache.ignite.transactions.Transaction; import org.apache.ignite.transactions.TransactionConcurrency; import org.jetbrains.annotations.Nullable; import org.junit.Test; import static org.apache.ignite.transactions.TransactionConcurrency.OPTIMISTIC; import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC; import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ; import static org.junit.Assert.assertArrayEquals; /** * Tests for byte array values in distributed caches. 
*/
public abstract class GridCacheAbstractDistributedByteArrayValuesSelfTest extends
    GridCacheAbstractByteArrayValuesSelfTest {
    /** Name of the regular (non-MVCC) cache. */
    private static final String CACHE = "cache";

    /** Name of the MVCC (TRANSACTIONAL_SNAPSHOT) cache. */
    private static final String MVCC_CACHE = "mvccCache";

    /** Regular caches, one per started grid. */
    private static IgniteCache<Integer, Object>[] caches;

    /** MVCC caches, one per started grid. */
    private static IgniteCache<Integer, Object>[] mvccCaches;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration c = super.getConfiguration(igniteInstanceName);

        CacheConfiguration mvccCfg = cacheConfiguration(MVCC_CACHE)
            .setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL_SNAPSHOT)
            .setNearConfiguration(null); // TODO IGNITE-7187: remove near cache disabling.

        CacheConfiguration ccfg = cacheConfiguration(CACHE);

        c.setCacheConfiguration(ccfg, mvccCfg);

        c.setPeerClassLoadingEnabled(peerClassLoading());

        return c;
    }

    /**
     * @return Whether peer class loading is enabled.
     */
    protected abstract boolean peerClassLoading();

    /**
     * @return How many grids to start.
     */
    protected int gridCount() {
        return 3;
    }

    /**
     * Builds a named cache configuration on top of the subclass-provided base.
     *
     * @param name Cache name.
     * @return Cache configuration.
     */
    protected CacheConfiguration cacheConfiguration(String name) {
        CacheConfiguration cfg = cacheConfiguration0();

        cfg.setName(name);

        return cfg;
    }

    /**
     * @return Internal cache configuration.
     */
    protected abstract CacheConfiguration cacheConfiguration0();

    /** {@inheritDoc} Starts the grids and captures per-grid cache handles. */
    @SuppressWarnings("unchecked")
    @Override protected void beforeTestsStarted() throws Exception {
        super.beforeTestsStarted();

        int gridCnt = gridCount();

        assert gridCnt > 0;

        caches = new IgniteCache[gridCnt];
        mvccCaches = new IgniteCache[gridCnt];

        startGridsMultiThreaded(gridCnt);

        for (int i = 0; i < gridCnt; i++) {
            caches[i] = grid(i).cache(CACHE);
            mvccCaches[i] = grid(i).cache(MVCC_CACHE);
        }
    }

    /** {@inheritDoc} Drops static cache references so the next class run starts clean. */
    @Override protected void afterTestsStopped() throws Exception {
        caches = null;
        mvccCaches = null;
    }

    /**
     * Check whether cache with byte array entry works correctly in PESSIMISTIC transaction.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testPessimistic() throws Exception {
        testTransaction0(caches, PESSIMISTIC, KEY_1, wrap(1));
    }

    /**
     * Check whether cache with byte array entry works correctly in PESSIMISTIC transaction
     * when byte-array and non-array values are mixed in the same tx.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testPessimisticMixed() throws Exception {
        testTransactionMixed0(caches, PESSIMISTIC, KEY_1, wrap(1), KEY_2, 1);
    }

    /**
     * Check whether cache with byte array entry works correctly in OPTIMISTIC transaction.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testOptimistic() throws Exception {
        testTransaction0(caches, OPTIMISTIC, KEY_1, wrap(1));
    }

    /**
     * Check whether cache with byte array entry works correctly in OPTIMISTIC transaction
     * when byte-array and non-array values are mixed in the same tx.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testOptimisticMixed() throws Exception {
        testTransactionMixed0(caches, OPTIMISTIC, KEY_1, wrap(1), KEY_2, 1);
    }

    /**
     * Check whether MVCC cache with byte array entry works correctly in PESSIMISTIC transaction.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testPessimisticMvcc() throws Exception {
        testTransaction0(mvccCaches, PESSIMISTIC, KEY_1, wrap(1));
    }

    /**
     * Check whether MVCC cache with byte array entry works correctly in PESSIMISTIC transaction
     * with mixed value types.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testPessimisticMvccMixed() throws Exception {
        testTransactionMixed0(mvccCaches, PESSIMISTIC, KEY_1, wrap(1), KEY_2, 1);
    }

    /**
     * Test transaction behavior with a single byte-array entry.
     *
     * @param caches Caches.
     * @param concurrency Concurrency.
     * @param key Key.
     * @param val Value.
     * @throws Exception If failed.
     */
    private void testTransaction0(IgniteCache<Integer, Object>[] caches, TransactionConcurrency concurrency,
        Integer key, byte[] val) throws Exception {
        testTransactionMixed0(caches, concurrency, key, val, null, null);
    }

    /**
     * Test transaction behavior: put via each cache, read back through every
     * other cache, then remove — each step in its own REPEATABLE_READ tx.
     *
     * @param caches Caches.
     * @param concurrency Concurrency.
     * @param key1 Key 1.
     * @param val1 Value 1 (byte array).
     * @param key2 Key 2 (optional; null skips the mixed-value part).
     * @param val2 Value 2 (optional non-array value).
     * @throws Exception If failed.
     */
    private void testTransactionMixed0(IgniteCache<Integer, Object>[] caches, TransactionConcurrency concurrency,
        Integer key1, byte[] val1, @Nullable Integer key2, @Nullable Object val2) throws Exception {
        // Skip silently when forced MVCC does not support this concurrency mode.
        if (MvccFeatureChecker.forcedMvcc() && !MvccFeatureChecker.isSupported(concurrency, REPEATABLE_READ))
            return;

        for (IgniteCache<Integer, Object> cache : caches) {
            info("Checking cache: " + cache.getName());

            Transaction tx = cache.unwrap(Ignite.class).transactions().txStart(concurrency, REPEATABLE_READ);

            try {
                cache.put(key1, val1);

                if (key2 != null)
                    cache.put(key2, val2);

                tx.commit();
            }
            finally {
                tx.close();
            }

            // Values put through one node must be visible from every node.
            for (IgniteCache<Integer, Object> cacheInner : caches) {
                info("Getting value from cache: " + cacheInner.getName());

                tx = cacheInner.unwrap(Ignite.class).transactions().txStart(concurrency, REPEATABLE_READ);

                try {
                    assertArrayEquals(val1, (byte[])cacheInner.get(key1));

                    if (key2 != null) {
                        Object actual = cacheInner.get(key2);

                        assertEquals(val2, actual);
                    }

                    tx.commit();
                }
                finally {
                    tx.close();
                }
            }

            tx = cache.unwrap(Ignite.class).transactions().txStart(concurrency, REPEATABLE_READ);

            try {
                cache.remove(key1);

                if (key2 != null)
                    cache.remove(key2);

                tx.commit();
            }
            finally {
                tx.close();
            }

            assertNull(cache.get(key1));
        }
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v9/enums/offline_user_data_job_type.proto

package com.google.ads.googleads.v9.enums;

/**
 * <pre>
 * Container for enum describing types of an offline user data job.
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum}
 */
public final class OfflineUserDataJobTypeEnum extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum)
    OfflineUserDataJobTypeEnumOrBuilder {
private static final long serialVersionUID = 0L;
  // Use OfflineUserDataJobTypeEnum.newBuilder() to construct.
  private OfflineUserDataJobTypeEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private OfflineUserDataJobTypeEnum() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new OfflineUserDataJobTypeEnum();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: this message has no fields, so every tag
  // ends up in unknownFields (preserved for round-tripping).
  private OfflineUserDataJobTypeEnum(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeProto.internal_static_google_ads_googleads_v9_enums_OfflineUserDataJobTypeEnum_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeProto.internal_static_google_ads_googleads_v9_enums_OfflineUserDataJobTypeEnum_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum.class, com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum.Builder.class);
  }

  /**
   * <pre>
   * The type of an offline user data job.
   * </pre>
   *
   * Protobuf enum {@code google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum.OfflineUserDataJobType}
   */
  public enum OfflineUserDataJobType
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * <pre>
     * Not specified.
     * </pre>
     *
     * <code>UNSPECIFIED = 0;</code>
     */
    UNSPECIFIED(0),
    /**
     * <pre>
     * Used for return value only. Represents value unknown in this version.
     * </pre>
     *
     * <code>UNKNOWN = 1;</code>
     */
    UNKNOWN(1),
    /**
     * <pre>
     * Store Sales Direct data for self service.
     * </pre>
     *
     * <code>STORE_SALES_UPLOAD_FIRST_PARTY = 2;</code>
     */
    STORE_SALES_UPLOAD_FIRST_PARTY(2),
    /**
     * <pre>
     * Store Sales Direct data for third party.
     * </pre>
     *
     * <code>STORE_SALES_UPLOAD_THIRD_PARTY = 3;</code>
     */
    STORE_SALES_UPLOAD_THIRD_PARTY(3),
    /**
     * <pre>
     * Customer Match user list data.
     * </pre>
     *
     * <code>CUSTOMER_MATCH_USER_LIST = 4;</code>
     */
    CUSTOMER_MATCH_USER_LIST(4),
    /**
     * <pre>
     * Customer Match with attribute data.
     * </pre>
     *
     * <code>CUSTOMER_MATCH_WITH_ATTRIBUTES = 5;</code>
     */
    CUSTOMER_MATCH_WITH_ATTRIBUTES(5),
    UNRECOGNIZED(-1),
    ;

    /**
     * <pre>
     * Not specified.
     * </pre>
     *
     * <code>UNSPECIFIED = 0;</code>
     */
    public static final int UNSPECIFIED_VALUE = 0;
    /**
     * <pre>
     * Used for return value only. Represents value unknown in this version.
     * </pre>
     *
     * <code>UNKNOWN = 1;</code>
     */
    public static final int UNKNOWN_VALUE = 1;
    /**
     * <pre>
     * Store Sales Direct data for self service.
     * </pre>
     *
     * <code>STORE_SALES_UPLOAD_FIRST_PARTY = 2;</code>
     */
    public static final int STORE_SALES_UPLOAD_FIRST_PARTY_VALUE = 2;
    /**
     * <pre>
     * Store Sales Direct data for third party.
     * </pre>
     *
     * <code>STORE_SALES_UPLOAD_THIRD_PARTY = 3;</code>
     */
    public static final int STORE_SALES_UPLOAD_THIRD_PARTY_VALUE = 3;
    /**
     * <pre>
     * Customer Match user list data.
     * </pre>
     *
     * <code>CUSTOMER_MATCH_USER_LIST = 4;</code>
     */
    public static final int CUSTOMER_MATCH_USER_LIST_VALUE = 4;
    /**
     * <pre>
     * Customer Match with attribute data.
     * </pre>
     *
     * <code>CUSTOMER_MATCH_WITH_ATTRIBUTES = 5;</code>
     */
    public static final int CUSTOMER_MATCH_WITH_ATTRIBUTES_VALUE = 5;


    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        // UNRECOGNIZED has no wire number by design.
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static OfflineUserDataJobType valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or null
     *     for numbers not known to this version.
     */
    public static OfflineUserDataJobType forNumber(int value) {
      switch (value) {
        case 0: return UNSPECIFIED;
        case 1: return UNKNOWN;
        case 2: return STORE_SALES_UPLOAD_FIRST_PARTY;
        case 3: return STORE_SALES_UPLOAD_THIRD_PARTY;
        case 4: return CUSTOMER_MATCH_USER_LIST;
        case 5: return CUSTOMER_MATCH_WITH_ATTRIBUTES;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<OfflineUserDataJobType>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<
        OfflineUserDataJobType> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<OfflineUserDataJobType>() {
            public OfflineUserDataJobType findValueByNumber(int number) {
              return OfflineUserDataJobType.forNumber(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum.getDescriptor().getEnumTypes().get(0);
    }

    private static final OfflineUserDataJobType[] VALUES = values();

    public static OfflineUserDataJobType valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private OfflineUserDataJobType(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum.OfflineUserDataJobType)
  }

  // Memoized isInitialized state: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum other = (com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum) obj;

    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static
  com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Container for enum describing types of an offline user data job.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum)
      com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnumOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeProto.internal_static_google_ads_googleads_v9_enums_OfflineUserDataJobTypeEnum_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeProto.internal_static_google_ads_googleads_v9_enums_OfflineUserDataJobTypeEnum_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum.class, com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum.Builder.class);
    }

    // Construct using com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeProto.internal_static_google_ads_googleads_v9_enums_OfflineUserDataJobTypeEnum_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum getDefaultInstanceForType() {
      return com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum build() {
      com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum buildPartial() {
      com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum result = new com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum(this);
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum) {
        return mergeFrom((com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum other) {
      if (other == com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum.getDefaultInstance()) return this;
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow as IOException.
        parsedMessage = (com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum)
  private static final com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum();
  }

  public static com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<OfflineUserDataJobTypeEnum>
      PARSER = new com.google.protobuf.AbstractParser<OfflineUserDataJobTypeEnum>() {
    @java.lang.Override
    public OfflineUserDataJobTypeEnum parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new OfflineUserDataJobTypeEnum(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<OfflineUserDataJobTypeEnum> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<OfflineUserDataJobTypeEnum> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v9.enums.OfflineUserDataJobTypeEnum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
package com.raizlabs.android.dbflow.structure.provider; import android.content.ContentResolver; import android.content.ContentValues; import android.database.Cursor; import android.net.Uri; import com.raizlabs.android.dbflow.config.FlowLog; import com.raizlabs.android.dbflow.config.FlowManager; import com.raizlabs.android.dbflow.sql.SqlUtils; import com.raizlabs.android.dbflow.sql.builder.ConditionQueryBuilder; import com.raizlabs.android.dbflow.structure.Model; import com.raizlabs.android.dbflow.structure.ModelAdapter; import java.util.List; /** * Description: Provides handy wrapper mechanisms for {@link android.content.ContentProvider} */ public class ContentUtils { /** * The default content URI that Android recommends. Not necessary, however. */ public static final String BASE_CONTENT_URI = "content://"; /** * Constructs an Uri with the {@link #BASE_CONTENT_URI} and authority. Add paths to append to the Uri. * * @param authority The authority for a {@link com.raizlabs.android.dbflow.annotation.provider.ContentProvider} * @param paths The list of paths to append. * @return A complete Uri for a {@link com.raizlabs.android.dbflow.annotation.provider.ContentProvider} */ public static Uri buildUri(String authority, String... paths) { return buildUri(BASE_CONTENT_URI, authority, paths); } /** * Constructs an Uri with the specified basecontent uri and authority. Add paths to append to the Uri. * * @param baseContentUri The base content URI for a {@link com.raizlabs.android.dbflow.annotation.provider.ContentProvider} * @param authority The authority for a {@link com.raizlabs.android.dbflow.annotation.provider.ContentProvider} * @param paths The list of paths to append. * @return A complete Uri for a {@link com.raizlabs.android.dbflow.annotation.provider.ContentProvider} */ public static Uri buildUri(String baseContentUri, String authority, String... 
paths) { Uri.Builder builder = Uri.parse(baseContentUri + authority).buildUpon(); for (String path : paths) { builder.appendPath(path); } return builder.build(); } /** * Inserts the model into the {@link android.content.ContentResolver}. Uses the insertUri to resolve * the reference and the model to convert its data into {@link android.content.ContentValues} * * @param insertUri A {@link android.net.Uri} from the {@link com.raizlabs.android.dbflow.annotation.provider.ContentProvider} class definition. * @param model The model to insert. * @param <TableClass> The class that implemets {@link com.raizlabs.android.dbflow.structure.Model} * @return A Uri of the inserted data. */ public static <TableClass extends Model> Uri insert(Uri insertUri, TableClass model) { return insert(FlowManager.getContext().getContentResolver(), insertUri, model); } /** * Inserts the model into the {@link android.content.ContentResolver}. Uses the insertUri to resolve * the reference and the model to convert its data into {@link android.content.ContentValues} * * @param contentResolver The content resolver to use (if different from {@link com.raizlabs.android.dbflow.config.FlowManager#getContext()}) * @param insertUri A {@link android.net.Uri} from the {@link com.raizlabs.android.dbflow.annotation.provider.ContentProvider} class definition. * @param model The model to insert. * @param <TableClass> The class that implements {@link com.raizlabs.android.dbflow.structure.Model} * @return The Uri of the inserted data. 
*/ @SuppressWarnings("unchecked") public static <TableClass extends Model> Uri insert(ContentResolver contentResolver, Uri insertUri, TableClass model) { ModelAdapter<TableClass> adapter = (ModelAdapter<TableClass>) FlowManager.getModelAdapter(model.getClass()); ContentValues contentValues = new ContentValues(); adapter.bindToInsertValues(contentValues, model); Uri uri = contentResolver.insert(insertUri, contentValues); adapter.updateAutoIncrement(model, Long.valueOf(uri.getPathSegments().get(uri.getPathSegments().size() - 1))); return uri; } /** * Inserts the list of model into the {@link ContentResolver}. Binds all of the models to {@link ContentValues} * and runs the {@link ContentResolver#bulkInsert(Uri, ContentValues[])} method. Note: if any of these use * autoincrementing primary keys the ROWID will not be properly updated from this method. If you care * use {@link #insert(ContentResolver, Uri, Model)} instead. * * @param contentResolver The content resolver to use (if different from {@link com.raizlabs.android.dbflow.config.FlowManager#getContext()}) * @param bulkInsertUri The URI to bulk insert with * @param table The table to insert into * @param models The models to insert. * @param <TableClass> The class that implements {@link Model} * @return The count of the rows affected by the insert. */ public static <TableClass extends Model> int bulkInsert(ContentResolver contentResolver, Uri bulkInsertUri, Class<TableClass> table, List<TableClass> models) { ContentValues[] contentValues = new ContentValues[models == null ? 0 : models.size()]; ModelAdapter<TableClass> adapter = FlowManager.getModelAdapter(table); if (models != null) { for (int i = 0; i < contentValues.length; i++) { contentValues[i] = new ContentValues(); adapter.bindToInsertValues(contentValues[i], models.get(i)); } } return contentResolver.bulkInsert(bulkInsertUri, contentValues); } /** * Inserts the list of model into the {@link ContentResolver}. 
Binds all of the models to {@link ContentValues} * and runs the {@link ContentResolver#bulkInsert(Uri, ContentValues[])} method. Note: if any of these use * autoincrementing primary keys the ROWID will not be properly updated from this method. If you care * use {@link #insert(Uri, Model)} instead. * * @param bulkInsertUri The URI to bulk insert with * @param table The table to insert into * @param models The models to insert. * @param <TableClass> The class that implements {@link Model} * @return The count of the rows affected by the insert. */ public static <TableClass extends Model> int bulkInsert(Uri bulkInsertUri, Class<TableClass> table, List<TableClass> models) { return bulkInsert(FlowManager.getContext().getContentResolver(), bulkInsertUri, table, models); } /** * Updates the model through the {@link android.content.ContentResolver}. Uses the updateUri to * resolve the reference and the model to convert its data in {@link android.content.ContentValues} * * @param updateUri A {@link android.net.Uri} from the {@link com.raizlabs.android.dbflow.annotation.provider.ContentProvider} * @param model A model to update * @param <TableClass> The class that implements {@link com.raizlabs.android.dbflow.structure.Model} * @return The number of rows updated. */ public static <TableClass extends Model> int update(Uri updateUri, TableClass model) { return update(FlowManager.getContext().getContentResolver(), updateUri, model); } /** * Updates the model through the {@link android.content.ContentResolver}. 
Uses the updateUri to * resolve the reference and the model to convert its data in {@link android.content.ContentValues} * * @param contentResolver The content resolver to use (if different from {@link com.raizlabs.android.dbflow.config.FlowManager#getContext()}) * @param updateUri A {@link android.net.Uri} from the {@link com.raizlabs.android.dbflow.annotation.provider.ContentProvider} * @param model The model to update * @param <TableClass> The class that implements {@link com.raizlabs.android.dbflow.structure.Model} * @return The number of rows updated. */ @SuppressWarnings("unchecked") public static <TableClass extends Model> int update(ContentResolver contentResolver, Uri updateUri, TableClass model) { ModelAdapter<TableClass> adapter = (ModelAdapter<TableClass>) FlowManager.getModelAdapter(model.getClass()); ContentValues contentValues = new ContentValues(); adapter.bindToContentValues(contentValues, model); int count = contentResolver.update(updateUri, contentValues, adapter.getPrimaryModelWhere(model).getQuery(), null); if (count == 0) { FlowLog.log(FlowLog.Level.W, "Updated failed of: " + model.getClass()); } return count; } /** * Deletes the specified model through the {@link android.content.ContentResolver}. Uses the deleteUri * to resolve the reference and the model to {@link com.raizlabs.android.dbflow.structure.ModelAdapter#getPrimaryModelWhere(com.raizlabs.android.dbflow.structure.Model)} * * @param deleteUri A {@link android.net.Uri} from the {@link com.raizlabs.android.dbflow.annotation.provider.ContentProvider} * @param model The model to delete * @param <TableClass> The class that implements {@link com.raizlabs.android.dbflow.structure.Model} * @return The number of rows deleted. 
*/ @SuppressWarnings("unchecked") public static <TableClass extends Model> int delete(Uri deleteUri, TableClass model) { return delete(FlowManager.getContext().getContentResolver(), deleteUri, model); } /** * Deletes the specified model through the {@link android.content.ContentResolver}. Uses the deleteUri * to resolve the reference and the model to {@link com.raizlabs.android.dbflow.structure.ModelAdapter#getPrimaryModelWhere(com.raizlabs.android.dbflow.structure.Model)} * * @param contentResolver The content resolver to use (if different from {@link com.raizlabs.android.dbflow.config.FlowManager#getContext()}) * @param deleteUri A {@link android.net.Uri} from the {@link com.raizlabs.android.dbflow.annotation.provider.ContentProvider} * @param model The model to delete * @param <TableClass> The class that implements {@link com.raizlabs.android.dbflow.structure.Model} * @return The number of rows deleted. */ @SuppressWarnings("unchecked") public static <TableClass extends Model> int delete(ContentResolver contentResolver, Uri deleteUri, TableClass model) { ModelAdapter<TableClass> adapter = (ModelAdapter<TableClass>) FlowManager.getModelAdapter(model.getClass()); int count = contentResolver.delete(deleteUri, adapter.getPrimaryModelWhere(model).getQuery(), null); // reset autoincrement to 0 if (count > 0) { adapter.updateAutoIncrement(model, 0); } else { FlowLog.log(FlowLog.Level.W, "A delete on " + model.getClass() + " within the ContentResolver appeared to fail."); } return count; } /** * Queries the {@link android.content.ContentResolver} with the specified query uri. It generates * the correct query and returns a {@link android.database.Cursor} * * @param contentResolver The content resolver to use (if different from {@link com.raizlabs.android.dbflow.config.FlowManager#getContext()}) * @param queryUri The URI of the query * @param whereConditions The set of {@link com.raizlabs.android.dbflow.sql.builder.Condition} to query the content provider. 
* @param orderBy The order by clause without the ORDER BY * @param columns The list of columns to query. * @return A {@link android.database.Cursor} */ public static <TableClass extends Model> Cursor query(ContentResolver contentResolver, Uri queryUri, ConditionQueryBuilder<TableClass> whereConditions, String orderBy, String... columns) { return contentResolver.query(queryUri, columns, whereConditions.getQuery(), null, orderBy); } /** * Queries the {@link android.content.ContentResolver} with the specified queryUri. It will generate * the correct query and return a list of {@link TableClass} * * @param queryUri The URI of the query * @param table The table to get from. * @param whereConditions The set of {@link com.raizlabs.android.dbflow.sql.builder.Condition} to query the content provider. * @param orderBy The order by clause without the ORDER BY * @param columns The list of columns to query. * @param <TableClass> The class that implements {@link com.raizlabs.android.dbflow.structure.Model} * @return A list of {@link TableClass} */ public static <TableClass extends Model> List<TableClass> queryList(Uri queryUri, Class<TableClass> table, ConditionQueryBuilder<TableClass> whereConditions, String orderBy, String... columns) { return queryList(FlowManager.getContext().getContentResolver(), queryUri, table, whereConditions, orderBy, columns); } /** * Queries the {@link android.content.ContentResolver} with the specified queryUri. It will generate * the correct query and return a list of {@link TableClass} * * @param contentResolver The content resolver to use (if different from {@link com.raizlabs.android.dbflow.config.FlowManager#getContext()}) * @param queryUri The URI of the query * @param table The table to get from. * @param whereConditions The set of {@link com.raizlabs.android.dbflow.sql.builder.Condition} to query the content provider. * @param orderBy The order by clause without the ORDER BY * @param columns The list of columns to query. 
* @param <TableClass> The class that implements {@link com.raizlabs.android.dbflow.structure.Model} * @return A list of {@link TableClass} */ public static <TableClass extends Model> List<TableClass> queryList(ContentResolver contentResolver, Uri queryUri, Class<TableClass> table, ConditionQueryBuilder<TableClass> whereConditions, String orderBy, String... columns) { Cursor cursor = contentResolver.query(queryUri, columns, whereConditions.getQuery(), null, orderBy); List<TableClass> list = SqlUtils.convertToList(table, cursor); cursor.close(); return list; } /** * Queries the {@link android.content.ContentResolver} with the specified queryUri. It will generate * the correct query and return a the first item from the list of {@link TableClass} * * @param queryUri The URI of the query * @param table The table to get from * @param whereConditions The set of {@link com.raizlabs.android.dbflow.sql.builder.Condition} to query the content provider. * @param orderBy The order by clause without the ORDER BY * @param columns The list of columns to query. * @param <TableClass> The class that implements {@link com.raizlabs.android.dbflow.structure.Model} * @return The first {@link TableClass} of the list query from the content provider. */ public static <TableClass extends Model> TableClass querySingle(Uri queryUri, Class<TableClass> table, ConditionQueryBuilder<TableClass> whereConditions, String orderBy, String... columns) { return querySingle(FlowManager.getContext().getContentResolver(), queryUri, table, whereConditions, orderBy, columns); } /** * Queries the {@link android.content.ContentResolver} with the specified queryUri. 
It will generate * the correct query and return a the first item from the list of {@link TableClass} * * @param contentResolver The content resolver to use (if different from {@link com.raizlabs.android.dbflow.config.FlowManager#getContext()}) * @param queryUri The URI of the query * @param table The table to get from * @param whereConditions The set of {@link com.raizlabs.android.dbflow.sql.builder.Condition} to query the content provider. * @param orderBy The order by clause without the ORDER BY * @param columns The list of columns to query. * @param <TableClass> The class that implements {@link com.raizlabs.android.dbflow.structure.Model} * @return The first {@link TableClass} of the list query from the content provider. */ public static <TableClass extends Model> TableClass querySingle(ContentResolver contentResolver, Uri queryUri, Class<TableClass> table, ConditionQueryBuilder<TableClass> whereConditions, String orderBy, String... columns) { List<TableClass> list = queryList(contentResolver, queryUri, table, whereConditions, orderBy, columns); return list.size() > 0 ? list.get(0) : null; } }
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.analysis; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import static com.google.devtools.build.lib.testutil.MoreAsserts.assertThrows; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Action; import com.google.devtools.build.lib.analysis.config.BuildConfiguration; import com.google.devtools.build.lib.analysis.config.BuildOptions; import com.google.devtools.build.lib.analysis.config.ConfigurationFragmentFactory; import com.google.devtools.build.lib.analysis.config.FragmentOptions; import com.google.devtools.build.lib.analysis.config.transitions.NoTransition; import com.google.devtools.build.lib.analysis.config.transitions.PatchTransition; import com.google.devtools.build.lib.analysis.util.AnalysisCachingTestBase; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.rules.java.JavaInfo; import com.google.devtools.build.lib.rules.java.JavaSourceJarsProvider; import com.google.devtools.build.lib.skyframe.AspectValue; import com.google.devtools.build.lib.skylarkinterface.SkylarkModule; import com.google.devtools.build.lib.skylarkinterface.SkylarkValue; import 
com.google.devtools.build.lib.testutil.Suite; import com.google.devtools.build.lib.testutil.TestConstants.InternalTestExecutionMode; import com.google.devtools.build.lib.testutil.TestRuleClassProvider; import com.google.devtools.build.lib.testutil.TestSpec; import com.google.devtools.common.options.Option; import com.google.devtools.common.options.OptionDefinition; import com.google.devtools.common.options.OptionDocumentationCategory; import com.google.devtools.common.options.OptionEffectTag; import com.google.devtools.common.options.OptionsParser; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Analysis caching tests. */ @TestSpec(size = Suite.SMALL_TESTS) @RunWith(JUnit4.class) public class AnalysisCachingTest extends AnalysisCachingTestBase { @Test public void testSimpleCleanAnalysis() throws Exception { scratch.file("java/a/BUILD", "java_test(name = 'A',", " srcs = ['A.java'])"); update("//java/a:A"); ConfiguredTarget javaTest = getConfiguredTarget("//java/a:A"); assertThat(javaTest).isNotNull(); assertThat(JavaInfo.getProvider(JavaSourceJarsProvider.class, javaTest)).isNotNull(); } @Test public void testTickTock() throws Exception { scratch.file( "java/a/BUILD", "java_test(name = 'A',", " srcs = ['A.java'])", "java_test(name = 'B',", " srcs = ['B.java'])"); update("//java/a:A"); update("//java/a:B"); update("//java/a:A"); } @Test public void testFullyCached() throws Exception { scratch.file("java/a/BUILD", "java_test(name = 'A',", " srcs = ['A.java'])"); update("//java/a:A"); ConfiguredTarget old = getConfiguredTarget("//java/a:A"); update("//java/a:A"); ConfiguredTarget current = getConfiguredTarget("//java/a:A"); assertThat(current).isSameInstanceAs(old); } @Test public void testSubsetCached() throws Exception { scratch.file( "java/a/BUILD", "java_test(name = 'A',", " srcs = ['A.java'])", "java_test(name = 'B',", " srcs = 
['B.java'])"); update("//java/a:A", "//java/a:B"); ConfiguredTarget old = getConfiguredTarget("//java/a:A"); update("//java/a:A"); ConfiguredTarget current = getConfiguredTarget("//java/a:A"); assertThat(current).isSameInstanceAs(old); } @Test public void testDependencyChanged() throws Exception { scratch.file( "java/a/BUILD", "java_test(name = 'A',", " srcs = ['A.java'],", " deps = ['//java/b'])"); scratch.file("java/b/BUILD", "java_library(name = 'b',", " srcs = ['B.java'])"); update("//java/a:A"); ConfiguredTarget old = getConfiguredTarget("//java/a:A"); scratch.overwriteFile( "java/b/BUILD", "java_library(name = 'b',", " srcs = ['C.java'])"); update("//java/a:A"); ConfiguredTarget current = getConfiguredTarget("//java/a:A"); assertThat(current).isNotSameInstanceAs(old); } @Test public void testTopLevelChanged() throws Exception { scratch.file( "java/a/BUILD", "java_test(name = 'A',", " srcs = ['A.java'],", " deps = ['//java/b'])"); scratch.file("java/b/BUILD", "java_library(name = 'b',", " srcs = ['B.java'])"); update("//java/a:A"); ConfiguredTarget old = getConfiguredTarget("//java/a:A"); scratch.overwriteFile("java/a/BUILD", "java_test(name = 'A',", " srcs = ['A.java'])"); update("//java/a:A"); ConfiguredTarget current = getConfiguredTarget("//java/a:A"); assertThat(current).isNotSameInstanceAs(old); } // Regression test for: // "action conflict detection is incorrect if conflict is in non-top-level configured targets". @Test public void testActionConflictInDependencyImpliesTopLevelTargetFailure() throws Exception { if (getInternalTestExecutionMode() != InternalTestExecutionMode.NORMAL) { // TODO(b/67529176): conflicts not detected. 
return; } useConfiguration("--cpu=k8"); scratch.file( "conflict/BUILD", "cc_library(name='x', srcs=['foo.cc'])", "cc_binary(name='_objs/x/foo.o', srcs=['bar.cc'])", "cc_binary(name='foo', deps=['x'], data=['_objs/x/foo.o'])"); reporter.removeHandler(failFastHandler); // expect errors update(defaultFlags().with(Flag.KEEP_GOING), "//conflict:foo"); assertContainsEvent("file 'conflict/_objs/x/foo.o' " + CONFLICT_MSG); assertThat(getAnalysisResult().getTargetsToBuild()).isEmpty(); } /** * Generating the same output from two targets is ok if we build them on successive builds and * invalidate the first target before we build the second target. This is a strictly weaker test * than if we didn't invalidate the first target, but since Skyframe can't pass then, this test * could be useful for it. Actually, since Skyframe makes multiple update calls, it manages to * unregister actions even when it shouldn't, and so this test can incorrectly pass. However, * {@code SkyframeExecutorTest#testNoActionConflictWithInvalidatedTarget} tests it more * rigorously. */ @Test public void testNoActionConflictWithInvalidatedTarget() throws Exception { useConfiguration("--cpu=k8"); scratch.file( "conflict/BUILD", "cc_library(name='x', srcs=['foo.cc'])", "cc_binary(name='_objs/x/foo.o', srcs=['bar.cc'])"); update("//conflict:x"); ConfiguredTarget conflict = getConfiguredTarget("//conflict:x"); Action oldAction = getGeneratingAction(getBinArtifact("_objs/x/foo.o", conflict)); assertThat(oldAction.getOwner().getLabel().toString()).isEqualTo("//conflict:x"); scratch.overwriteFile( "conflict/BUILD", "cc_library(name='newx', srcs=['foo.cc'])", // Rename target. 
"cc_binary(name='_objs/x/foo.o', srcs=['bar.cc'])"); update(defaultFlags(), "//conflict:_objs/x/foo.o"); ConfiguredTarget objsConflict = getConfiguredTarget("//conflict:_objs/x/foo.o"); Action newAction = getGeneratingAction(getBinArtifact("_objs/x/foo.o", objsConflict)); assertThat(newAction.getOwner().getLabel().toString()).isEqualTo("//conflict:_objs/x/foo.o"); } /** Generating the same output from multiple actions is causing an error. */ @Test public void testActionConflictCausesError() throws Exception { if (getInternalTestExecutionMode() != InternalTestExecutionMode.NORMAL) { // TODO(b/67529176): conflicts not detected. return; } useConfiguration("--cpu=k8"); scratch.file( "conflict/BUILD", "cc_library(name='x', srcs=['foo.cc'])", "cc_binary(name='_objs/x/foo.o', srcs=['bar.cc'])"); reporter.removeHandler(failFastHandler); // expect errors update(defaultFlags().with(Flag.KEEP_GOING), "//conflict:x", "//conflict:_objs/x/foo.o"); assertContainsEvent("file 'conflict/_objs/x/foo.o' " + CONFLICT_MSG); } @Test public void testNoActionConflictErrorAfterClearedAnalysis() throws Exception { if (getInternalTestExecutionMode() != InternalTestExecutionMode.NORMAL) { // TODO(b/67529176): conflicts not detected. return; } useConfiguration("--cpu=k8"); scratch.file( "conflict/BUILD", "cc_library(name='x', srcs=['foo.cc'])", "cc_binary(name='_objs/x/foo.o', srcs=['bar.cc'])"); reporter.removeHandler(failFastHandler); // expect errors update(defaultFlags().with(Flag.KEEP_GOING), "//conflict:x", "//conflict:_objs/x/foo.o"); // We want to force a "dropConfiguredTargetsNow" operation, which won't inform the // invalidation receiver about the dropped configured targets. 
skyframeExecutor.clearAnalysisCache( ImmutableList.<ConfiguredTarget>of(), ImmutableSet.<AspectValue>of()); assertContainsEvent("file 'conflict/_objs/x/foo.o' " + CONFLICT_MSG); eventCollector.clear(); scratch.overwriteFile( "conflict/BUILD", "cc_library(name='x', srcs=['baz.cc'])", "cc_binary(name='_objs/x/foo.o', srcs=['bar.cc'])"); update(defaultFlags().with(Flag.KEEP_GOING), "//conflict:x", "//conflict:_objs/x/foo.o"); assertNoEvents(); } /** * For two conflicting actions whose primary inputs are different, no list diff detail should be * part of the output. */ @Test public void testConflictingArtifactsErrorWithNoListDetail() throws Exception { if (getInternalTestExecutionMode() != InternalTestExecutionMode.NORMAL) { // TODO(b/67529176): conflicts not detected. return; } useConfiguration("--cpu=k8"); scratch.file( "conflict/BUILD", "cc_library(name='x', srcs=['foo.cc'])", "cc_binary(name='_objs/x/foo.o', srcs=['bar.cc'])"); reporter.removeHandler(failFastHandler); // expect errors update(defaultFlags().with(Flag.KEEP_GOING), "//conflict:x", "//conflict:_objs/x/foo.o"); assertContainsEvent("file 'conflict/_objs/x/foo.o' " + CONFLICT_MSG); assertDoesNotContainEvent("MandatoryInputs"); assertDoesNotContainEvent("Outputs"); } /** * For two conflicted actions whose primary inputs are the same, list diff (max 5) should be part * of the output. */ @Test public void testConflictingArtifactsWithListDetail() throws Exception { if (getInternalTestExecutionMode() != InternalTestExecutionMode.NORMAL) { // TODO(b/67529176): conflicts not detected. 
return; } useConfiguration("--cpu=k8"); scratch.file( "conflict/BUILD", "cc_library(name='x', srcs=['foo1.cc'])", "genrule(name = 'foo', outs=['_objs/x/foo1.o'], srcs=['foo1.cc', 'foo2.cc', " + "'foo3.cc', 'foo4.cc', 'foo5.cc', 'foo6.cc'], cmd='', output_to_bindir=1)"); reporter.removeHandler(failFastHandler); // expect errors update(defaultFlags().with(Flag.KEEP_GOING), "//conflict:x", "//conflict:foo"); Event event = assertContainsEvent("file 'conflict/_objs/x/foo1.o' " + CONFLICT_MSG); assertContainsEvent("MandatoryInputs"); assertContainsEvent("Outputs"); // Validate that maximum of 5 artifacts in MandatoryInputs are part of output. Pattern pattern = Pattern.compile("\tconflict\\/foo[2-6].cc"); Matcher matcher = pattern.matcher(event.getMessage()); int matchCount = 0; while (matcher.find()) { matchCount++; } assertWithMessage( "Event does not contain expected number of file conflicts:\n" + event.getMessage()) .that(matchCount) .isEqualTo(5); } /** * The current action conflict detection code will only mark one of the targets as having an * error, and with multi-threaded analysis it is not deterministic which one that will be. */ @Test public void testActionConflictMarksTargetInvalid() throws Exception { if (getInternalTestExecutionMode() != InternalTestExecutionMode.NORMAL) { // TODO(b/67529176): conflicts not detected. 
return; } useConfiguration("--cpu=k8"); scratch.file( "conflict/BUILD", "cc_library(name='x', srcs=['foo.cc'])", "cc_binary(name='_objs/x/foo.o', srcs=['bar.cc'])"); reporter.removeHandler(failFastHandler); // expect errors int successfulAnalyses = update(defaultFlags().with(Flag.KEEP_GOING), "//conflict:x", "//conflict:_objs/x/foo.pic.o") .getTargetsToBuild() .size(); assertThat(successfulAnalyses).isEqualTo(1); } /** BUILD file involved in BUILD-file cycle is changed */ @Test public void testBuildFileInCycleChanged() throws Exception { if (getInternalTestExecutionMode() != InternalTestExecutionMode.NORMAL) { // TODO(b/67412276): cycles not properly handled. return; } scratch.file( "java/a/BUILD", "java_test(name = 'A',", " srcs = ['A.java'],", " deps = ['//java/b'])"); scratch.file( "java/b/BUILD", "java_library(name = 'b',", " srcs = ['B.java'],", " deps = ['//java/c'])"); scratch.file( "java/c/BUILD", "java_library(name = 'c',", " srcs = ['C.java'],", " deps = ['//java/b'])"); // expect error reporter.removeHandler(failFastHandler); update(defaultFlags().with(Flag.KEEP_GOING), "//java/a:A"); ConfiguredTarget old = getConfiguredTarget("//java/a:A"); // drop dependency on from b to c scratch.overwriteFile( "java/b/BUILD", "java_library(name = 'b',", " srcs = ['B.java'])"); eventCollector.clear(); reporter.addHandler(failFastHandler); update("//java/a:A"); ConfiguredTarget current = getConfiguredTarget("//java/a:A"); assertThat(current).isNotSameInstanceAs(old); } private void assertNoTargetsVisited() { Set<?> analyzedTargets = getSkyframeEvaluatedTargetKeys(); assertWithMessage(analyzedTargets.toString()).that(analyzedTargets.size()).isEqualTo(0); } @Test public void testSecondRunAllCacheHits() throws Exception { scratch.file("java/a/BUILD", "java_test(name = 'A',", " srcs = ['A.java'])"); update("//java/a:A"); update("//java/a:A"); assertNoTargetsVisited(); } @Test public void testDependencyAllCacheHits() throws Exception { scratch.file( "java/a/BUILD", 
"java_library(name = 'x', srcs = ['A.java'], deps = ['y'])", "java_library(name = 'y', srcs = ['B.java'])"); update("//java/a:x"); Set<?> oldAnalyzedTargets = getSkyframeEvaluatedTargetKeys(); assertThat(oldAnalyzedTargets.size()).isAtLeast(2); // could be greater due to implicit deps assertThat(countObjectsPartiallyMatchingRegex(oldAnalyzedTargets, "//java/a:x")).isEqualTo(1); assertThat(countObjectsPartiallyMatchingRegex(oldAnalyzedTargets, "//java/a:y")).isEqualTo(1); update("//java/a:y"); assertNoTargetsVisited(); } @Test public void testSupersetNotAllCacheHits() throws Exception { scratch.file( "java/a/BUILD", // It's important that all targets are of the same rule class, otherwise the second update // call might analyze more than one extra target because of potential implicit dependencies. "java_library(name = 'x', srcs = ['A.java'], deps = ['y'])", "java_library(name = 'y', srcs = ['B.java'], deps = ['z'])", "java_library(name = 'z', srcs = ['C.java'])"); update("//java/a:y"); Set<?> oldAnalyzedTargets = getSkyframeEvaluatedTargetKeys(); assertThat(oldAnalyzedTargets.size()).isAtLeast(3); // could be greater due to implicit deps assertThat(countObjectsPartiallyMatchingRegex(oldAnalyzedTargets, "//java/a:x")).isEqualTo(0); assertThat(countObjectsPartiallyMatchingRegex(oldAnalyzedTargets, "//java/a:y")).isEqualTo(1); update("//java/a:x"); Set<?> newAnalyzedTargets = getSkyframeEvaluatedTargetKeys(); assertThat(newAnalyzedTargets).isNotEmpty(); // could be greater due to implicit deps assertThat(countObjectsPartiallyMatchingRegex(newAnalyzedTargets, "//java/a:x")).isEqualTo(1); assertThat(countObjectsPartiallyMatchingRegex(newAnalyzedTargets, "//java/a:y")).isEqualTo(0); } @Test public void testExtraActions() throws Exception { scratch.file("java/com/google/a/BUILD", "java_library(name='a', srcs=['A.java'])"); scratch.file("java/com/google/b/BUILD", "java_library(name='b', srcs=['B.java'])"); scratch.file( "extra/BUILD", "extra_action(name = 'extra',", " 
out_templates = ['$(OWNER_LABEL_DIGEST)_$(ACTION_ID).tst'],", " cmd = '')", "action_listener(name = 'listener',", " mnemonics = ['Javac'],", " extra_actions = [':extra'])"); useConfiguration("--experimental_action_listener=//extra:listener"); update("//java/com/google/a:a"); update("//java/com/google/b:b"); } @Test public void testExtraActionsCaching() throws Exception { scratch.file("java/a/BUILD", "java_library(name='a', srcs=['A.java'])"); scratch.file( "extra/BUILD", "extra_action(name = 'extra',", " out_templates = ['$(OWNER_LABEL_DIGEST)_$(ACTION_ID).tst'],", " cmd = 'echo $(EXTRA_ACTION_FILE)')", "action_listener(name = 'listener',", " mnemonics = ['Javac'],", " extra_actions = [':extra'])"); useConfiguration("--experimental_action_listener=//extra:listener"); update("//java/a:a"); getConfiguredTarget("//java/a:a"); scratch.overwriteFile( "extra/BUILD", "extra_action(name = 'extra',", " out_templates = ['$(OWNER_LABEL_DIGEST)_$(ACTION_ID).tst'],", " cmd = 'echo $(BUG)')", // <-- change here "action_listener(name = 'listener',", " mnemonics = ['Javac'],", " extra_actions = [':extra'])"); reporter.removeHandler(failFastHandler); ViewCreationFailedException e = assertThrows(ViewCreationFailedException.class, () -> update("//java/a:a")); assertThat(e).hasMessageThat().contains("Analysis of target '//java/a:a' failed"); assertContainsEvent("$(BUG) not defined"); } @Test public void testConfigurationCachingWithWarningReplay() throws Exception { useConfiguration("--strip=always", "--copt=-g"); update(); assertContainsEvent("Debug information will be generated and then stripped away"); eventCollector.clear(); update(); assertContainsEvent("Debug information will be generated and then stripped away"); } @Test public void testSkyframeCacheInvalidationBuildFileChange() throws Exception { scratch.file("java/a/BUILD", "java_test(name = 'A',", " srcs = ['A.java'])"); String aTarget = "//java/a:A"; update(aTarget); ConfiguredTarget firstCT = getConfiguredTarget(aTarget); 
scratch.overwriteFile("java/a/BUILD", "java_test(name = 'A',", " srcs = ['B.java'])"); update(aTarget); ConfiguredTarget updatedCT = getConfiguredTarget(aTarget); assertThat(updatedCT).isNotSameInstanceAs(firstCT); update(aTarget); ConfiguredTarget updated2CT = getConfiguredTarget(aTarget); assertThat(updated2CT).isSameInstanceAs(updatedCT); } @Test public void testSkyframeDifferentPackagesInvalidation() throws Exception { scratch.file("java/a/BUILD", "java_test(name = 'A',", " srcs = ['A.java'])"); scratch.file("java/b/BUILD", "java_test(name = 'B',", " srcs = ['B.java'])"); String aTarget = "//java/a:A"; update(aTarget); ConfiguredTarget oldAConfTarget = getConfiguredTarget(aTarget); String bTarget = "//java/b:B"; update(bTarget); ConfiguredTarget oldBConfTarget = getConfiguredTarget(bTarget); scratch.overwriteFile("java/b/BUILD", "java_test(name = 'B',", " srcs = ['C.java'])"); update(aTarget); // Check that 'A' was not invalidated because 'B' was modified and invalidated. ConfiguredTarget newAConfTarget = getConfiguredTarget(aTarget); ConfiguredTarget newBConfTarget = getConfiguredTarget(bTarget); assertThat(newAConfTarget).isSameInstanceAs(oldAConfTarget); assertThat(newBConfTarget).isNotSameInstanceAs(oldBConfTarget); } private int countObjectsPartiallyMatchingRegex( Iterable<? 
extends Object> elements, String toStringMatching) { toStringMatching = ".*" + toStringMatching + ".*"; int result = 0; for (Object o : elements) { if (o.toString().matches(toStringMatching)) { ++result; } } return result; } @Test public void testGetSkyframeEvaluatedTargetKeysOmitsCachedTargets() throws Exception { scratch.file( "java/a/BUILD", "java_library(name = 'x', srcs = ['A.java'], deps = ['z', 'w'])", "java_library(name = 'y', srcs = ['B.java'], deps = ['z', 'w'])", "java_library(name = 'z', srcs = ['C.java'])", "java_library(name = 'w', srcs = ['D.java'])"); update("//java/a:x"); Set<?> oldAnalyzedTargets = getSkyframeEvaluatedTargetKeys(); assertThat(oldAnalyzedTargets.size()).isAtLeast(2); // could be greater due to implicit deps assertThat(countObjectsPartiallyMatchingRegex(oldAnalyzedTargets, "//java/a:x")).isEqualTo(1); assertThat(countObjectsPartiallyMatchingRegex(oldAnalyzedTargets, "//java/a:y")).isEqualTo(0); assertThat(countObjectsPartiallyMatchingRegex(oldAnalyzedTargets, "//java/a:z")).isEqualTo(1); assertThat(countObjectsPartiallyMatchingRegex(oldAnalyzedTargets, "//java/a:w")).isEqualTo(1); // Unless the build is not fully cached, we get notified about newly evaluated targets, as well // as cached top-level targets. For the two tests above to work correctly, we need to ensure // that getSkyframeEvaluatedTargetKeys() doesn't return these. update("//java/a:x", "//java/a:y", "//java/a:z"); Set<?> newAnalyzedTargets = getSkyframeEvaluatedTargetKeys(); assertThat(newAnalyzedTargets).hasSize(2); assertThat(countObjectsPartiallyMatchingRegex(newAnalyzedTargets, "//java/a:B.java")) .isEqualTo(1); assertThat(countObjectsPartiallyMatchingRegex(newAnalyzedTargets, "//java/a:y")).isEqualTo(1); } /** Test options class for testing diff-based analysis cache resetting. 
*/
  public static final class DiffResetOptions extends FragmentOptions {
    /** Handle to the trimmable --probably_irrelevant option, used by setupDiffResetTesting(). */
    public static final OptionDefinition PROBABLY_IRRELEVANT_OPTION =
        OptionsParser.getOptionDefinitionByName(DiffResetOptions.class, "probably_irrelevant");

    /** Handle to the trimmable --also_irrelevant option, used by setupDiffResetTesting(). */
    public static final OptionDefinition ALSO_IRRELEVANT_OPTION =
        OptionsParser.getOptionDefinitionByName(DiffResetOptions.class, "also_irrelevant");

    /** Transition that trims both irrelevant options to the fixed value "(cleared)". */
    public static final PatchTransition CLEAR_IRRELEVANT =
        (options) -> {
          BuildOptions cloned = options.clone();
          cloned.get(DiffResetOptions.class).probablyIrrelevantOption = "(cleared)";
          cloned.get(DiffResetOptions.class).alsoIrrelevantOption = "(cleared)";
          return cloned;
        };

    @Option(
        name = "probably_irrelevant",
        defaultValue = "(unset)",
        documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
        effectTags = {OptionEffectTag.UNKNOWN},
        help = "This option is irrelevant to non-uses_irrelevant targets and is trimmed from them.")
    public String probablyIrrelevantOption;

    @Option(
        name = "also_irrelevant",
        defaultValue = "(unset)",
        documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
        effectTags = {OptionEffectTag.UNKNOWN},
        help = "This option is irrelevant to non-uses_irrelevant targets and is trimmed from them.")
    public String alsoIrrelevantOption;

    @Option(
        name = "definitely_relevant",
        defaultValue = "(unset)",
        documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
        effectTags = {OptionEffectTag.UNKNOWN},
        help = "This option is not trimmed and is used by all targets.")
    public String definitelyRelevantOption;

    @Option(
        name = "also_relevant",
        defaultValue = "(unset)",
        documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
        effectTags = {OptionEffectTag.UNKNOWN},
        help = "This option is not trimmed and is used by all targets.")
    public String alsoRelevantOption;

    @Option(
        name = "host_relevant",
        defaultValue = "(unset)",
        documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
        effectTags = {OptionEffectTag.UNKNOWN},
        help = "This option is not trimmed and is used by all host targets.")
    public String hostRelevantOption;

    @Override
    public DiffResetOptions getHost() {
      DiffResetOptions host = ((DiffResetOptions) super.getHost());
      // The host configuration derives its relevant value from the target's host_relevant flag.
      host.definitelyRelevantOption = hostRelevantOption;
      return host;
    }
  }

  /** Empty configuration fragment backed by DiffResetOptions. */
  @SkylarkModule(name = "test_diff_fragment", doc = "fragment for testing differy fragments")
  private static final class DiffResetFragment extends BuildConfiguration.Fragment
      implements SkylarkValue {}

  /** Factory wiring DiffResetOptions to DiffResetFragment. */
  private static final class DiffResetFactory implements ConfigurationFragmentFactory {
    @Override
    public BuildConfiguration.Fragment create(BuildOptions options) {
      return new DiffResetFragment();
    }

    @Override
    public Class<? extends BuildConfiguration.Fragment> creates() {
      return DiffResetFragment.class;
    }

    @Override
    public ImmutableSet<Class<? extends FragmentOptions>> requiredOptions() {
      return ImmutableSet.of(DiffResetOptions.class);
    }
  }

  // Installs a rule class provider defining two Starlark rules (normal_lib sees the irrelevant
  // options; uses_irrelevant has no transition applied) plus a cache-invalidation policy that
  // tolerates changes to the two irrelevant options.
  private void setupDiffResetTesting() throws Exception {
    ImmutableSet<OptionDefinition> optionsThatCanChange =
        ImmutableSet.of(
            DiffResetOptions.PROBABLY_IRRELEVANT_OPTION, DiffResetOptions.ALSO_IRRELEVANT_OPTION);
    ConfiguredRuleClassProvider.Builder builder = new ConfiguredRuleClassProvider.Builder();
    TestRuleClassProvider.addStandardRules(builder);
    builder.addConfigurationFragment(new DiffResetFactory());
    builder.overrideShouldInvalidateCacheForOptionDiffForTesting(
        (newOptions, changedOption, oldValue, newValue) -> {
          return !optionsThatCanChange.contains(changedOption);
        });
    builder.overrideTrimmingTransitionFactoryForTesting(
        (rule) -> {
          if (rule.getRuleClassObject().getName().equals("uses_irrelevant")) {
            return NoTransition.INSTANCE;
          }
          return DiffResetOptions.CLEAR_IRRELEVANT;
        });
    useRuleClassProvider(builder.build());
    scratch.file(
        "test/lib.bzl",
        "def _empty_impl(ctx):",
        " pass",
        "normal_lib = rule(",
        " implementation = _empty_impl,",
        " fragments = ['test_diff_fragment'],",
        " attrs = {",
        " 'deps': attr.label_list(),",
        " 'host_deps': attr.label_list(cfg='host'),",
        " },",
        ")",
        "uses_irrelevant = rule(",
        " implementation = _empty_impl,",
        " fragments = ['test_diff_fragment'],",
        " attrs = {",
        " 'deps': attr.label_list(),",
        " 'host_deps': attr.label_list(cfg='host'),",
        " },",
        ")");
    update();
  }

  // With identical options across updates nothing needs reanalysis.
  @Test
  public void cacheNotClearedWhenOptionsStaySame() throws Exception {
    setupDiffResetTesting();
    scratch.file(
        "test/BUILD",
        "load(':lib.bzl', 'normal_lib', 'uses_irrelevant')",
        "uses_irrelevant(name='top', deps=[':shared'])",
        "normal_lib(name='shared')");
    useConfiguration("--definitely_relevant=Testing");
    update("//test:top");
    update("//test:top");
    // these targets were cached and did not need to be reanalyzed
    assertNumberOfAnalyzedConfigurationsOfTargets(
        ImmutableMap.<String, Integer>builder()
            .put("//test:top", 0)
            .put("//test:shared", 0)
            .build());
  }

  // Same as above, but under --experimental_multi_cpu.
  @Test
  public void cacheNotClearedWhenOptionsStaySameWithMultiCpu() throws Exception {
    setupDiffResetTesting();
    scratch.file(
        "test/BUILD",
        "load(':lib.bzl', 'normal_lib', 'uses_irrelevant')",
        "uses_irrelevant(name='top', deps=[':shared'])",
        "normal_lib(name='shared')");
    useConfiguration("--experimental_multi_cpu=k8,ppc", "--definitely_relevant=Testing");
    update("//test:top");
    update("//test:top");
    // these targets were cached and did not need to be reanalyzed
    assertNumberOfAnalyzedConfigurationsOfTargets(
        ImmutableMap.<String, Integer>builder()
            .put("//test:top", 0)
            .put("//test:shared", 0)
            .build());
  }

  // Changing a non-allowed (relevant) option clears the cache, even when flipping back to a
  // previously used value.
  @Test
  public void cacheClearedWhenNonAllowedOptionsChange() throws Exception {
    setupDiffResetTesting();
    scratch.file(
        "test/BUILD",
        "load(':lib.bzl', 'normal_lib', 'uses_irrelevant')",
        "uses_irrelevant(name='top', deps=[':shared'])",
        "normal_lib(name='shared')");
    useConfiguration("--definitely_relevant=Test 1");
    update("//test:top");
    useConfiguration("--definitely_relevant=Test 2");
    update("//test:top");
    useConfiguration("--definitely_relevant=Test 1");
    update("//test:top");
    // these targets needed to be reanalyzed even though we built them in this configuration
    // just a moment ago
assertNumberOfAnalyzedConfigurationsOfTargets( ImmutableMap.<String, Integer>builder() .put("//test:top", 1) .put("//test:shared", 1) .build()); } @Test public void cacheClearedWhenNonAllowedHostOptionsChange() throws Exception { setupDiffResetTesting(); scratch.file( "test/BUILD", "load(':lib.bzl', 'normal_lib', 'uses_irrelevant')", "uses_irrelevant(name='top', host_deps=[':shared'])", "normal_lib(name='shared')"); useConfiguration("--host_relevant=Test 1"); update("//test:top"); useConfiguration("--host_relevant=Test 2"); update("//test:top"); useConfiguration("--host_relevant=Test 1"); update("//test:top"); // these targets needed to be reanalyzed even though we built them in this configuration // just a moment ago assertNumberOfAnalyzedConfigurationsOfTargets( ImmutableMap.<String, Integer>builder() .put("//test:top", 1) .put("//test:shared", 1) .build()); } @Test public void cacheClearedWhenMultiCpuChanges() throws Exception { setupDiffResetTesting(); scratch.file( "test/BUILD", "load(':lib.bzl', 'normal_lib', 'uses_irrelevant')", "uses_irrelevant(name='top', deps=[':shared'])", "normal_lib(name='shared')"); useConfiguration("--experimental_multi_cpu=k8,ppc"); update("//test:top"); useConfiguration("--experimental_multi_cpu=k8,armeabi-v7a"); update("//test:top"); // we needed to reanalyze these in both k8 and armeabi-v7a even though we did the k8 analysis // just a moment ago as part of the previous build assertNumberOfAnalyzedConfigurationsOfTargets( ImmutableMap.<String, Integer>builder() .put("//test:top", 2) .put("//test:shared", 2) .build()); } @Test public void cacheClearedWhenMultiCpuGetsBigger() throws Exception { setupDiffResetTesting(); scratch.file( "test/BUILD", "load(':lib.bzl', 'normal_lib', 'uses_irrelevant')", "uses_irrelevant(name='top', deps=[':shared'])", "normal_lib(name='shared')"); useConfiguration("--experimental_multi_cpu=k8,ppc"); update("//test:top"); useConfiguration("--experimental_multi_cpu=k8,ppc,armeabi-v7a"); 
update("//test:top"); // we needed to reanalyze these in all of {k8,ppc,armeabi-v7a} even though we did the k8 and ppc // analysis just a moment ago as part of the previous build assertNumberOfAnalyzedConfigurationsOfTargets( ImmutableMap.<String, Integer>builder() .put("//test:top", 3) .put("//test:shared", 3) .build()); } @Test public void cacheClearedWhenMultiCpuGetsSmaller() throws Exception { setupDiffResetTesting(); scratch.file( "test/BUILD", "load(':lib.bzl', 'normal_lib', 'uses_irrelevant')", "uses_irrelevant(name='top', deps=[':shared'])", "normal_lib(name='shared')"); useConfiguration("--experimental_multi_cpu=k8,ppc,armeabi-v7a"); update("//test:top"); useConfiguration("--experimental_multi_cpu=k8,ppc"); update("//test:top"); // we needed to reanalyze these in both k8 and ppc even though we did the k8 and ppc // analysis just a moment ago as part of the previous build assertNumberOfAnalyzedConfigurationsOfTargets( ImmutableMap.<String, Integer>builder() .put("//test:top", 2) .put("//test:shared", 2) .build()); } @Test public void cacheNotClearedWhenAllowedOptionsChange() throws Exception { setupDiffResetTesting(); scratch.file( "test/BUILD", "load(':lib.bzl', 'normal_lib', 'uses_irrelevant')", "uses_irrelevant(name='top', deps=[':shared'])", "normal_lib(name='shared')"); useConfiguration("--definitely_relevant=Testing", "--probably_irrelevant=Test 1"); update("//test:top"); useConfiguration("--definitely_relevant=Testing", "--probably_irrelevant=Test 2"); update("//test:top"); // the shared library got to reuse the cached value, while the entry point had to be rebuilt in // the new configuration assertNumberOfAnalyzedConfigurationsOfTargets( ImmutableMap.<String, Integer>builder() .put("//test:top", 1) .put("//test:shared", 0) .build()); useConfiguration("--definitely_relevant=Testing", "--probably_irrelevant=Test 1"); update("//test:top"); // now we're back to the old configuration with no cache clears, so no work needed to be done 
assertNumberOfAnalyzedConfigurationsOfTargets( ImmutableMap.<String, Integer>builder() .put("//test:top", 0) .put("//test:shared", 0) .build()); } @Test public void cacheNotClearedWhenAllowedOptionsChangeWithMultiCpu() throws Exception { setupDiffResetTesting(); scratch.file( "test/BUILD", "load(':lib.bzl', 'normal_lib', 'uses_irrelevant')", "uses_irrelevant(name='top', deps=[':shared'])", "normal_lib(name='shared')"); useConfiguration( "--experimental_multi_cpu=k8,ppc", "--definitely_relevant=Testing", "--probably_irrelevant=Test 1"); update("//test:top"); useConfiguration( "--experimental_multi_cpu=k8,ppc", "--definitely_relevant=Testing", "--probably_irrelevant=Test 2"); update("//test:top"); // the shared library got to reuse the cached value, while the entry point had to be rebuilt in // the new configurations assertNumberOfAnalyzedConfigurationsOfTargets( ImmutableMap.<String, Integer>builder() .put("//test:top", 2) .put("//test:shared", 0) .build()); useConfiguration( "--experimental_multi_cpu=k8,ppc", "--definitely_relevant=Testing", "--probably_irrelevant=Test 1"); update("//test:top"); // now we're back to the old configurations with no cache clears, so no work needed to be done assertNumberOfAnalyzedConfigurationsOfTargets( ImmutableMap.<String, Integer>builder() .put("//test:top", 0) .put("//test:shared", 0) .build()); } @Test public void cacheClearedWhenRedundantDefinesChange_collapseDuplicateDefinesDisabled() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration("--nocollapse_duplicate_defines", "--define=a=1", "--define=a=2"); update("//test:top"); useConfiguration("--nocollapse_duplicate_defines", "--define=a=2"); update("//test:top"); assertNumberOfAnalyzedConfigurationsOfTargets(ImmutableMap.of("//test:top", 1)); } @Test public void cacheNotClearedWhenRedundantDefinesChange() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", 
"load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration("--collapse_duplicate_defines", "--define=a=1", "--define=a=2"); update("//test:top"); useConfiguration("--collapse_duplicate_defines", "--define=a=2"); update("//test:top"); assertNumberOfAnalyzedConfigurationsOfTargets(ImmutableMap.of("//test:top", 0)); } @Test public void noCacheClearMessageAfterCleanWithSameOptions() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration(); update("//test:top"); cleanSkyframe(); eventCollector.clear(); update("//test:top"); assertNoEvents(); } @Test public void noCacheClearMessageAfterCleanWithDifferentOptions() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration("--definitely_relevant=before"); update("//test:top"); cleanSkyframe(); useConfiguration("--definitely_relevant=after"); eventCollector.clear(); update("//test:top"); assertNoEvents(); } @Test public void noCacheClearMessageAfterDiscardAnalysisCacheThenCleanWithSameOptions() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration("--discard_analysis_cache"); update("//test:top"); cleanSkyframe(); eventCollector.clear(); update("//test:top"); assertNoEvents(); } @Test public void noCacheClearMessageAfterDiscardAnalysisCacheThenCleanWithChangedOptions() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration("--definitely_relevant=before", "--discard_analysis_cache"); update("//test:top"); cleanSkyframe(); useConfiguration("--definitely_relevant=after", "--discard_analysis_cache"); eventCollector.clear(); update("//test:top"); assertNoEvents(); } @Test public void cacheClearMessageAfterDiscardAnalysisCacheBuild() throws 
Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration( "--max_config_changes_to_show=-1", "--probably_irrelevant=yeah", "--discard_analysis_cache"); update("//test:top"); eventCollector.clear(); update("//test:top"); assertContainsEvent("--discard_analysis_cache"); assertDoesNotContainEvent("Build option"); assertContainsEvent("discarding analysis cache"); } @Test public void noCacheClearMessageAfterNonDiscardAnalysisCacheBuild() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration("--max_config_changes_to_show=-1", "--discard_analysis_cache"); update("//test:top"); useConfiguration("--max_config_changes_to_show=-1"); update("//test:top"); eventCollector.clear(); update("//test:top"); assertNoEvents(); } @Test public void noCacheClearMessageAfterIrrelevantOptionChanges() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration("--max_config_changes_to_show=-1", "--probably_irrelevant=old"); update("//test:top"); useConfiguration("--max_config_changes_to_show=-1", "--probably_irrelevant=new"); eventCollector.clear(); update("//test:top"); assertNoEvents(); } @Test public void noCacheClearMessageAfterIrrelevantOptionChangesWithDiffDisabled() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration("--max_config_changes_to_show=0", "--probably_irrelevant=old"); update("//test:top"); useConfiguration("--max_config_changes_to_show=0", "--probably_irrelevant=new"); eventCollector.clear(); update("//test:top"); assertNoEvents(); } @Test public void cacheClearMessageAfterNumberOfConfigurationsIncreases() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", 
"normal_lib(name='top')"); useConfiguration("--max_config_changes_to_show=-1", "--experimental_multi_cpu=armeabi-v7a,ppc"); update("//test:top"); useConfiguration( "--max_config_changes_to_show=-1", "--experimental_multi_cpu=armeabi-v7a,k8,ppc"); eventCollector.clear(); update("//test:top"); assertDoesNotContainEvent("--discard_analysis_cache"); assertContainsEvent( "Build option --experimental_multi_cpu has changed, discarding analysis cache"); } @Test public void cacheClearMessageAfterNumberOfConfigurationsDecreases() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration( "--max_config_changes_to_show=-1", "--experimental_multi_cpu=armeabi-v7a,k8,ppc"); update("//test:top"); useConfiguration("--max_config_changes_to_show=-1", "--experimental_multi_cpu=armeabi-v7a,ppc"); eventCollector.clear(); update("//test:top"); assertDoesNotContainEvent("--discard_analysis_cache"); assertContainsEvent( "Build option --experimental_multi_cpu has changed, discarding analysis cache"); } @Test public void cacheClearMessageAfterChangingExperimentalMultiCpu() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration("--max_config_changes_to_show=-1", "--experimental_multi_cpu=armeabi-v7a,k8"); update("//test:top"); useConfiguration("--max_config_changes_to_show=-1", "--experimental_multi_cpu=armeabi-v7a,ppc"); eventCollector.clear(); update("//test:top"); assertDoesNotContainEvent("--discard_analysis_cache"); assertContainsEvent( "Build option --experimental_multi_cpu has changed, discarding analysis cache"); } @Test public void noCacheClearMessageAfterOnlyChangingExperimentalMultiCpuOrder() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration("--max_config_changes_to_show=-1", 
"--experimental_multi_cpu=k8,armeabi-v7a"); update("//test:top"); useConfiguration("--max_config_changes_to_show=-1", "--experimental_multi_cpu=armeabi-v7a,k8"); eventCollector.clear(); update("//test:top"); assertNoEvents(); } @Test public void cacheClearMessageAfterChangingFirstCpuOnMultiCpu() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration("--max_config_changes_to_show=-1", "--experimental_multi_cpu=k8,piii"); update("//test:top"); useConfiguration("--max_config_changes_to_show=-1", "--experimental_multi_cpu=armeabi-v7a,ppc"); eventCollector.clear(); update("//test:top"); assertDoesNotContainEvent("--discard_analysis_cache"); assertContainsEvent( "Build option --experimental_multi_cpu has changed, discarding analysis cache"); } @Test public void cacheClearMessageAfterChangingCpu() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration("--max_config_changes_to_show=-1", "--cpu=k8"); update("//test:top"); useConfiguration("--max_config_changes_to_show=-1", "--cpu=armeabi-v7a"); eventCollector.clear(); update("//test:top"); assertDoesNotContainEvent("--discard_analysis_cache"); assertContainsEvent("Build option --cpu has changed, discarding analysis cache"); } @Test public void cacheClearMessageAfterTurningOnExperimentalMultiCpu() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration("--max_config_changes_to_show=-1", "--cpu=armeabi-v7a"); update("//test:top"); useConfiguration( "--max_config_changes_to_show=-1", "--experimental_multi_cpu=armeabi-v7a,k8,ppc"); eventCollector.clear(); update("//test:top"); assertDoesNotContainEvent("--discard_analysis_cache"); assertContainsEvent( "Build option --experimental_multi_cpu has changed, discarding analysis cache"); } @Test public void 
cacheClearMessageAfterTurningOffExperimentalMultiCpu() throws Exception {
    // Turning multi-cpu off is still attributed to --experimental_multi_cpu.
    setupDiffResetTesting();
    scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')");
    useConfiguration(
        "--max_config_changes_to_show=-1", "--experimental_multi_cpu=armeabi-v7a,k8,ppc");
    update("//test:top");
    useConfiguration("--max_config_changes_to_show=-1", "--cpu=armeabi-v7a");
    eventCollector.clear();
    update("//test:top");
    assertDoesNotContainEvent("--discard_analysis_cache");
    assertContainsEvent(
        "Build option --experimental_multi_cpu has changed, discarding analysis cache");
  }

  // Both genuinely changed options are named, alphabetically, in a single message.
  @Test
  public void cacheClearMessageAfterChangingExperimentalMultiCpuAndOtherRelevantOption()
      throws Exception {
    setupDiffResetTesting();
    scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')");
    useConfiguration(
        "--max_config_changes_to_show=-1",
        "--experimental_multi_cpu=armeabi-v7a,k8,ppc",
        "--definitely_relevant=old");
    update("//test:top");
    useConfiguration(
        "--max_config_changes_to_show=-1",
        "--experimental_multi_cpu=armeabi-v7a,k8",
        "--definitely_relevant=new");
    eventCollector.clear();
    update("//test:top");
    assertDoesNotContainEvent("--discard_analysis_cache");
    assertContainsEvent(
        "Build options --definitely_relevant and --experimental_multi_cpu have changed, "
            + "discarding analysis cache");
  }

  // A pure multi-cpu reorder is normalized away; only the genuinely changed option is named.
  @Test
  public void cacheClearMessageAfterChangingExperimentalMultiCpuOrderAndOtherRelevantOption()
      throws Exception {
    setupDiffResetTesting();
    scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')");
    useConfiguration(
        "--max_config_changes_to_show=-1",
        "--experimental_multi_cpu=k8,armeabi-v7a",
        "--definitely_relevant=old");
    update("//test:top");
    useConfiguration(
        "--max_config_changes_to_show=-1",
        "--experimental_multi_cpu=armeabi-v7a,k8",
        "--definitely_relevant=new");
    eventCollector.clear();
    update("//test:top");
    assertDoesNotContainEvent("--discard_analysis_cache");
    assertContainsEvent(
        "Build option --definitely_relevant has changed, discarding analysis cache");
  }

  // A single changed option is reported in singular form.
  @Test
  public void cacheClearMessageAfterSingleRelevantOptionChanges() throws Exception {
    setupDiffResetTesting();
    scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')");
    useConfiguration("--max_config_changes_to_show=-1", "--definitely_relevant=old");
    update("//test:top");
    useConfiguration("--max_config_changes_to_show=-1", "--definitely_relevant=new");
    eventCollector.clear();
    update("//test:top");
    assertDoesNotContainEvent("--discard_analysis_cache");
    assertContainsEvent(
        "Build option --definitely_relevant has changed, discarding analysis cache");
  }

  // Irrelevant (trimmed) option changes are omitted from the message.
  @Test
  public void cacheClearMessageDoesNotIncludeIrrelevantOptions() throws Exception {
    setupDiffResetTesting();
    scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')");
    useConfiguration(
        "--max_config_changes_to_show=-1",
        "--definitely_relevant=old",
        "--probably_irrelevant=old",
        "--also_irrelevant=old");
    update("//test:top");
    useConfiguration(
        "--max_config_changes_to_show=-1",
        "--definitely_relevant=new",
        "--probably_irrelevant=new",
        "--also_irrelevant=new");
    eventCollector.clear();
    update("//test:top");
    assertDoesNotContainEvent("--discard_analysis_cache");
    assertContainsEvent(
        "Build option --definitely_relevant has changed, discarding analysis cache");
  }

  // Options that kept their value are omitted from the message.
  @Test
  public void cacheClearMessageDoesNotIncludeUnchangedOptions() throws Exception {
    setupDiffResetTesting();
    scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')");
    useConfiguration(
        "--max_config_changes_to_show=-1", "--definitely_relevant=old", "--also_relevant=fixed");
    update("//test:top");
    useConfiguration(
        "--max_config_changes_to_show=-1", "--definitely_relevant=new", "--also_relevant=fixed");
    eventCollector.clear();
    update("//test:top");
    assertDoesNotContainEvent("--discard_analysis_cache");
    assertContainsEvent(
        "Build option --definitely_relevant has changed, discarding analysis cache");
  }

  @Test
  public void
cacheClearMessageAfterRelevantOptionChangeWithDiffDisabled() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration("--max_config_changes_to_show=0", "--definitely_relevant=old"); update("//test:top"); useConfiguration("--max_config_changes_to_show=0", "--definitely_relevant=new"); eventCollector.clear(); update("//test:top"); assertDoesNotContainEvent("--discard_analysis_cache"); assertContainsEvent("Build options have changed, discarding analysis cache"); } @Test public void cacheClearMessageAfterTwoRelevantOptionsChange() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration( "--max_config_changes_to_show=-1", "--definitely_relevant=old", "--also_relevant=old"); update("//test:top"); useConfiguration( "--max_config_changes_to_show=-1", "--definitely_relevant=new", "--also_relevant=new"); eventCollector.clear(); update("//test:top"); assertDoesNotContainEvent("--discard_analysis_cache"); assertContainsEvent( "Build options --also_relevant and --definitely_relevant have changed, " + "discarding analysis cache"); } @Test public void cacheClearMessageAfterMultipleRelevantOptionsChange() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration( "--max_config_changes_to_show=-1", "--definitely_relevant=old", "--also_relevant=old", "--host_relevant=old"); update("//test:top"); useConfiguration( "--max_config_changes_to_show=-1", "--definitely_relevant=new", "--also_relevant=new", "--host_relevant=new"); eventCollector.clear(); update("//test:top"); assertDoesNotContainEvent("--discard_analysis_cache"); assertContainsEvent( "Build options --also_relevant, --definitely_relevant, and --host_relevant have changed, " + "discarding analysis cache"); } @Test public void 
cacheClearMessageAfterMultipleRelevantOptionsChangeWithDiffLimit() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration( "--max_config_changes_to_show=2", "--definitely_relevant=old", "--also_relevant=old", "--host_relevant=old"); update("//test:top"); useConfiguration( "--max_config_changes_to_show=2", "--definitely_relevant=new", "--also_relevant=new", "--host_relevant=new"); eventCollector.clear(); update("//test:top"); assertDoesNotContainEvent("--discard_analysis_cache"); assertContainsEvent( "Build options --also_relevant, --definitely_relevant, and 1 more have changed, " + "discarding analysis cache"); } @Test public void cacheClearMessageAfterMultipleRelevantOptionsChangeWithSingleDiffLimit() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration( "--max_config_changes_to_show=1", "--definitely_relevant=old", "--also_relevant=old", "--host_relevant=old"); update("//test:top"); useConfiguration( "--max_config_changes_to_show=1", "--definitely_relevant=new", "--also_relevant=new", "--host_relevant=new"); eventCollector.clear(); update("//test:top"); assertDoesNotContainEvent("--discard_analysis_cache"); assertContainsEvent( "Build options --also_relevant and 2 more have changed, discarding analysis cache"); } @Test public void cacheClearMessageAfterDiscardAnalysisCacheBuildWithRelevantOptionChanges() throws Exception { setupDiffResetTesting(); scratch.file("test/BUILD", "load(':lib.bzl', 'normal_lib')", "normal_lib(name='top')"); useConfiguration( "--max_config_changes_to_show=-1", "--discard_analysis_cache", "--definitely_relevant=old"); update("//test:top"); useConfiguration( "--max_config_changes_to_show=-1", "--discard_analysis_cache", "--definitely_relevant=new"); eventCollector.clear(); update("//test:top"); assertContainsEvent("--discard_analysis_cache"); 
assertDoesNotContainEvent("Build option"); assertContainsEvent("discarding analysis cache"); } }
package de.freiburg.iif.extraction.metadata; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.pdfbox.pdmodel.PDDocument; import de.freiburg.iif.extraction.MetadataMatcher; import de.freiburg.iif.extraction.metadataknowledge.InvertedIndexMetadataKnowledge; import de.freiburg.iif.extraction.metadataknowledge.MetadataKnowledge; import de.freiburg.iif.extraction.metadataknowledge.MetadataKnowledge.MetadataKnowledgeQueryType; import de.freiburg.iif.extraction.stripper.PdfBoxStripper; import de.freiburg.iif.extraction.stripper.PdfStripper; import de.freiburg.iif.model.HasMetadata; import de.freiburg.iif.model.Region; import de.freiburg.iif.utils.StringSimilarity; /** * The class DocumentMetadataMatcher, that implements MetadataMatcher and can be * used to find the record in the metadata knowledge base, referred by a * document. The matching process uses an inverted index, which is implemented * in C++. The inverted index provides a socket, which can be queried by sending * ordinary HTTP requests to it. The result is a xml file, containing the * result. * * @author Claudius Korzen * */ public class DocumentMetadataMatcher implements MetadataMatcher { // TODO: Move constants to a properties file. 
/** The maximal length (in characters) a candidate title is assumed to have. */
private static final int TITLEEXTRACTION_MAX_LENGTH_TITLE = 300;
/** The commons-logging logger for this class. */
protected Log LOG;
/** The interface to the metadata knowledge base. */
protected MetadataKnowledge mk;
/** The interface to the pdf extraction tool. */
protected PdfStripper ex;
/** The header lines of the PDF document, concatenated into a single string. */
protected String header;
/** The stop titles (titles of candidates, that we won't consider). */
protected Set<String> stopTitles;
/** The text lines extracted from the most recently matched document. */
protected List<Region> lines;

/**
 * The constructor. Wires up the inverted-index knowledge base, the
 * PDFBox-based stripper, the stop-title set and the logger.
 */
public DocumentMetadataMatcher() {
  this.mk = new InvertedIndexMetadataKnowledge();
  this.ex = new PdfBoxStripper();
  this.stopTitles = readStopTitlesFile();
  this.LOG = LogFactory.getLog(DocumentMetadataMatcher.class);
}

// Convenience overload: load the PDF from a file path.
@Override
public List<HasMetadata> match(String filepath, boolean strict,
    boolean disableMK, int minWaitInterval) throws IOException {
  return match(PDDocument.load(filepath), strict, disableMK, minWaitInterval);
}

// Convenience overload: load the PDF from a File.
@Override
public List<HasMetadata> match(File file, boolean strict, boolean disableMK,
    int minWaitInterval) throws IOException {
  return match(PDDocument.load(file), strict, disableMK, minWaitInterval);
}

// Convenience overload: load the PDF from an InputStream.
@Override
public List<HasMetadata> match(InputStream is, boolean strict,
    boolean disableMK, int minWaitInterval) throws IOException {
  return match(PDDocument.load(is), strict, disableMK, minWaitInterval);
}

// Extracts the text lines of the first page only (page 1 to page 1) and
// delegates to the line-based matcher below.
@Override
public List<HasMetadata> match(PDDocument doc, boolean strict,
    boolean disableMK, int minWaitInterval) throws IOException {
  lines = ex.extractLines(doc, 1, 1, false);
  return match(lines, strict, disableMK, minWaitInterval);
}

/**
 * Tries to find the referred metadata record on the basis of the given
 * textlines.
 *
 * @param lines
 *          the textlines to analyze.
 * @return the matched metadata record.
 * @throws IOException
 *           if the matching process fails.
 */
public List<HasMetadata> match(List<Region> lines, boolean strict,
    boolean disableMK, int minWaitInterval) throws IOException {
  // Compute the most common fontsize in the first page.
  Stats stats = getMostCommonFontSize(lines);
  // Filter out all lines, which are smaller than the most common fontsize.
  List<Region> relevantLines = getRelevantLines(lines, stats);
  // Group the relevant lines into logical blocks.
  List<Region> groups = group(relevantLines);
  for (int i = 0; i < groups.size(); i++) {
    Region g = groups.get(i);
    for (Region line : g.getIncludedLines()) {
      LOG.debug("GROUP" + i + ": " + line);
    }
  }
  // Group a second time (regroup the groups themselves).
  groups = group(groups);
  for (int i = 0; i < groups.size(); i++) {
    Region g = groups.get(i);
    for (Region line : g.getIncludedLines()) {
      LOG.debug("2GROUP" + i + ": " + line);
    }
  }
  // Build the header string from ALL extracted lines (not only relevant ones).
  StringBuffer sb = new StringBuffer();
  for (int i = 0; i < lines.size(); i++) {
    sb.append(lines.get(i).getText());
  }
  this.header = sb.toString();

  double maxScore = 0;
  HasMetadata mostLikelyCandidate = null;
  // Iterate over the groups: Try to find the best matching metadata record by
  // querying the metadata knowledge with all line combinations.
  for (int i = 0; i < groups.size(); i++) { // Iterate over the groups
    Region group = groups.get(i);
    // Consider at most the first 5 lines of each group.
    int y = Math.min(5, group.getIncludedLines().size());
    for (int j = 0; j < y; j++) { // Iterate over any line combinations.
      sb = new StringBuffer();
      double size = group.getIncludedLines().get(j).getFontsize();
      // Grow the query line by line, starting at line j.
      for (int k = j; k < y; k++) {
        Region line = group.getIncludedLines().get(k);
        sb.append(line.getText());
        String query = clearSpecialChars(sb.toString()).trim();
        LOG.debug("*******************************************");
        LOG.debug("QUERY : " + query);
        LOG.debug("LINE : " + line);
        // Query the metadata knowledge for candidates.
        List<HasMetadata> candidates =
            mk.query(MetadataKnowledgeQueryType.TITLE, query, 0);
        // Evaluate the candidates.
        // Evaluate at most the top 10 candidates of each query.
        for (int l = 0; l < Math.min(10, candidates.size()); l++) {
          HasMetadata candidate = candidates.get(l);
          if (!isStopTitle(candidate.getTitle())) {
            double score = scoreCandidate(candidate, query, size);
            // Keep the best-scoring candidate seen so far.
            if (score > maxScore) {
              LOG.debug("CAND : " + candidate + " " + score);
              mostLikelyCandidate = candidate;
              maxScore = score;
            }
          }
        }
        // We assume a maximal length for title. Abort, if the query is larger
        // than the maximal length.
        if (query.length() > TITLEEXTRACTION_MAX_LENGTH_TITLE) {
          break;
        }
      }
    }
  }
  // NOTE(review): when nothing scored above 0 the returned list contains a
  // single null element — callers must be prepared for that.
  List<HasMetadata> wrapperList = new ArrayList<HasMetadata>();
  wrapperList.add(mostLikelyCandidate);
  LOG.debug("maxScore: " + maxScore);
  return wrapperList;
}

/**
 * Computes the most common (rounded) fontsize and the most common vertical
 * line pitch of the given text lines. (Despite the original wording, this is
 * the mode, not the average.)
 *
 * @param lines
 *          the textlines to analyze.
 * @return the most common fontsize and pitch, bundled in a Stats object.
 */
protected Stats getMostCommonFontSize(List<Region> lines) {
  int mostCommontFontsize = -1;
  int mostCommontPitchsize = -1;
  if (lines != null && lines.size() > 0) {
    // Count the various fontsizes.
    Map<Integer, Integer> stats = new HashMap<Integer, Integer>();
    Map<Integer, Integer> stats2 = new HashMap<Integer, Integer>();
    for (int i = 0; i < lines.size(); i++) {
      Region line = lines.get(i);
      int fontsize = Math.round(line.getFontsize());
      // Increment the counter for the fontsize
      int count = stats.containsKey(fontsize) ? stats.get(fontsize) : 0;
      stats.put(fontsize, count + 1);
      if (i > 0) {
        // Create stats for the vertical distances of text lines.
        Region prevLine = lines.get(i - 1);
        float prevYBottom = prevLine.getY() + prevLine.getHeight();
        float curTop = line.getY();
        int yDiff = Math.round(Math.abs(prevYBottom - curTop));
        int count2 = stats2.containsKey(yDiff) ? stats2.get(yDiff) : 0;
        stats2.put(yDiff, count2 + 1);
      }
    }
    int mostCommontFontsizeNum = 0;
    // Iterate over the stats to determine the most common fontsize.
    for (Entry<Integer, Integer> stat : stats.entrySet()) {
      // Don't consider the fontsize 0.
      if (stat.getKey() > 0 && stat.getValue() > mostCommontFontsizeNum) {
        mostCommontFontsizeNum = stat.getValue();
        mostCommontFontsize = stat.getKey();
      }
    }
    int mostCommontPitchSizeNum = 0;
    // Iterate over the stats to determine the most common line pitch.
    for (Entry<Integer, Integer> stat : stats2.entrySet()) {
      // Don't consider the pitch 0.
      if (stat.getKey() > 0 && stat.getValue() > mostCommontPitchSizeNum) {
        mostCommontPitchSizeNum = stat.getValue();
        mostCommontPitchsize = stat.getKey();
      }
    }
  }
  LOG.debug("Most common fontsize: " + mostCommontFontsize);
  LOG.debug("Most common pitchsize: " + mostCommontPitchsize);
  return new Stats(mostCommontFontsize, mostCommontPitchsize);
}

/**
 * Returns the lines, which are relevant. That means all lines, which are
 * larger than the most common fontsize and whose length of text is large
 * enough.
 *
 * @param lines
 *          the text lines to analyze.
 * @param stats
 *          the style stats of lines (containing the most common fontsize and
 *          the most common line pitch).
 * @return list of lines, which are larger than the given fontsize. The lines
 *         are sorted by their reading order.
 */
protected List<Region> getRelevantLines(List<Region> lines, Stats stats) {
  List<Region> relevantLines = new ArrayList<Region>();
  double mostCommonFontsize = stats.mostCommonFontSize;
  double mostCommonLinePitch = stats.mostCommontPitchSize;
  int sectionLength = 0;
  boolean isSectionRelevant = true;
  for (int i = 0; i < lines.size(); i++) {
    Region line = lines.get(i);
    // Line is relevant, if its fontsize is larger than the most common one.
    double fontsize = Math.round(line.getFontsize());
    // Add a bonus to the fontsize, if the text is bold, italic, or uppercase.
    // natural order of importance factors: fontsize, bold, italic, uppercase.
    double bonus = ((double) line.getFontFlag()) / 10;
    if (line.isInUpperCase()) {
      bonus += .1;
    }
    fontsize += bonus;
    boolean isFontsizeRelevant = fontsize > mostCommonFontsize;
    // Line is relevant, if the line is the first one or if the pitch to the
    // previous line is larger than the most common one.
    if (i > 0) {
      // Measure the pitch to the previous line.
      Region prevLine = lines.get(i - 1);
      float prevYBottom = prevLine.getY() + prevLine.getHeight();
      float curTop = line.getY();
      int yDiff = Math.round(Math.abs(prevYBottom - curTop));
      // System.out.println(yDiff + ">" + 1.5 * mostCommonLinePitch + "; " +
      // fontsize +" > "+ mostCommonFontsize + "; " + isSectionRelevant +"&&"+
      // sectionLength);
      if (yDiff > 1.5 * mostCommonLinePitch) {
        // A large vertical gap starts a new (relevant) section.
        isSectionRelevant = true;
        sectionLength = line.getText().length();
      } else if (isSectionRelevant
          && sectionLength < TITLEEXTRACTION_MAX_LENGTH_TITLE) {
        sectionLength += line.getText().length();
      } else {
        isSectionRelevant = false;
        sectionLength = 0;
      }
      // System.out.println("isSectionRelevant: " + isSectionRelevant +
      // ", isFontsizeRelevant: " + isFontsizeRelevant);
    }
    if (consider(line) && (isFontsizeRelevant || isSectionRelevant)) {
      relevantLines.add(line);
      // Debug markers: R = relevant, F = by fontsize, S = by section.
      LOG.debug("R");
      if (isFontsizeRelevant) {
        LOG.debug("F");
      } else {
        LOG.debug(" ");
      }
      if (isSectionRelevant) {
        LOG.debug("S");
      } else {
        LOG.debug(" ");
      }
      LOG.debug(" ");
    } else {
      LOG.debug("XXX ");
    }
    LOG.debug("LINE: " + line);
  }
  // Collections.sort(relevantLines, new ReadingOrderComparator());
  return relevantLines;
}

/**
 * Groups the given text lines into logical blocks.
 *
 * @param lines
 *          the lines to group.
 * @return the list of groups.
 */
public List<Region> group(List<Region> lines) {
  List<Region> groups = new ArrayList<Region>();
  if (lines != null && lines.size() > 0) {
    Region group = lines.get(0);
    Region prevLine = lines.get(0);
    int groupLength = prevLine.getText().length();
    for (int i = 1; i < lines.size(); i++) {
      Region line = lines.get(i);
      // Take the vertical distance between the current and the previous line.
      float amount = Math.max(prevLine.getHeight(), prevLine.getFontsize());
      float prevBottom = prevLine.getY() + amount;
      float top = line.getY();
      float yDiff = Math.abs(prevBottom - top);
      // Expand the current group with the current line, if the distance is
      // small enough.
      if (yDiff <= 2 * amount) {
        // Expand the group only, if the length of the group doesn't exceed
        // the maximal length for a title.
        // NOTE(review): groupLength is never incremented after initialization,
        // and a line that fails this check joins NO group — confirm both are
        // intended before changing.
        if (groupLength < TITLEEXTRACTION_MAX_LENGTH_TITLE) {
          group.expand(line);
        }
      } else {
        // Create a new group.
        groups.add(group);
        group = line;
      }
      prevLine = line;
    }
    // Don't forget to add the last group.
    groups.add(group);
  }
  Collections.sort(groups, new ReadingOrderComparator());
  return groups;
}

/**
 * Returns true, if the given line should be considered on the matching
 * process.
 *
 * @param line
 *          the line to analyze.
 * @return true, if the line should be considered on the matching process,
 *         false otherwise.
 */
protected boolean consider(Region line) {
  if (line != null && line.getText() != null) {
    // Require at least 3 alphanumeric characters and skip URL / e-mail lines.
    return line.getText()
        .replaceAll("[^a-zA-Z0-9]+", "").length() > 2
        && !line.getText().contains("http://")
        && !line.getText().contains("@");
  }
  return false;
}

/**
 * Scores a matching candidate.
 *
 * @param candidate
 *          the candidate to score.
 * @param query
 *          the query, from which the candidate resulted.
 * @param fontSize
 *          the fontsize of the query.
 * @return the score of the candidate.
 */
protected double scoreCandidate(HasMetadata candidate, String query,
    double fontSize) {
  double score = -1;
  if (candidate != null && query != null) {
    LOG.debug("CAND : " + candidate);
    // Only proceed, if the score of the metadata knowledge is large enough.
    // System.out.println("indexscore: " + candidate.getScore());
    // System.out.println("|C| " + candidate.getTitle().length());
    // System.out.println("|Q| " + query.length());
    // System.out.println("min/max " + ((double)
    // Math.min(candidate.getTitle().length(), query.length()) /
    // Math.max(candidate.getTitle().length(), query.length())));
    // Normalized Levenshtein similarity; 1 means an exact title match.
    double titleScore =
        StringSimilarity.levenshtein(candidate.getTitle(), query);
    titleScore = -1 * (titleScore
        / Math.max(candidate.getTitle().length(), query.length())) + 1;
    if (candidate.getScore() > .5) {
      // MAYBE: double titleScore = candidate.getScore /
      double authorScore = scoreAuthors(candidate);
      // Bonus points if the candidate's year / journal occur in the header.
      double yearScore = header.contains("" + candidate.getYear()) ? 1 : 0;
      double journalScore = header.contains(candidate.getJournal()) ? 2 : 0;
      score = (authorScore * titleScore * titleScore * fontSize) + yearScore
          + journalScore;
      LOG.debug("HEADER: " + header);
      LOG.debug("TSCORE: " + titleScore);
      LOG.debug("ASCORE: " + authorScore);
      LOG.debug("YSCORE: " + yearScore);
      LOG.debug("JSCORE: " + journalScore);
      LOG.debug("SIZE : " + fontSize);
      LOG.debug("TOTAL : " + score);
    }
  }
  return score;
}

/**
 * Computes a score for the authors of the given candidate.
 *
 * @param candidate
 *          the candidate to process.
 * @return the score for the authors.
 */
protected double scoreAuthors(HasMetadata candidate) {
  double authorScore = 0;
  for (String author : candidate.getAuthors()) {
    String[] authorWords = author.split(" ");
    String[] headerWords = header.split(" ");
    double maxAuthorScore = 0;
    for (String headerWord : headerWords) {
      // Compute the coverage of the author's lastname by the header.
float[] simResult = StringSimilarity.smithWaterman( authorWords[authorWords.length - 1], headerWord); float s = simResult[0]; if (s > maxAuthorScore) { maxAuthorScore = s; } } authorScore += maxAuthorScore; } // Compute a relative score. authorScore /= candidate.getAuthors().size(); return authorScore; } /** * Removes all special characters and digits from a given string. * * @param text * the text to process. * @return the simplified string. */ protected String clearSpecialChars(String text) { // return text.replaceAll("[^a-zA-Z-,.]+", " "); return text.replaceAll("[^a-zA-Z0-9]+", " "); } /** * Returns true, if the given title should be ignored. * * @param title * the title to check. * @return true, if the given title should be ignored. */ protected boolean isStopTitle(String title) { if (stopTitles != null) { return stopTitles.contains(clearSpecialChars( title).trim().toLowerCase()); } return false; } /** * Reads the stoptitles file and fills the stoptitles into a set. * * @return as set containing the stoptitles in stoptitles file. */ protected Set<String> readStopTitlesFile() { try (BufferedReader br = new BufferedReader(new InputStreamReader( this.getClass().getResourceAsStream("stoptitles")))) { Set<String> stopTitles = new HashSet<String>(); String line = null; try { while ((line = br.readLine()) != null) { stopTitles.add(clearSpecialChars(line).trim().toLowerCase()); } } catch (Exception e) { e.printStackTrace(); } } catch (Exception e) { e.printStackTrace(); } return stopTitles; } /** * Class stats, that holds the most common fontsize and the most common pitch * size of lines. * * @author Claudius Korzen. * */ public class Stats { /** The most common font size */ public double mostCommonFontSize; /** The most common pitch size */ public double mostCommontPitchSize; /** The number of reference anchors (for references extraction only). */ public int numOfReferenceAnchors; /** * The number of advanced reference headers (for references extraction * only). 
   */
  public int numOfAdvancedReferenceHeader;

  /**
   * The constructor.
   *
   * @param mostCommonFontsize
   *          the most common fontsize
   * @param mostCommonPitchSize
   *          the most common pitch size.
   */
  public Stats(double mostCommonFontsize, double mostCommonPitchSize) {
    this.mostCommonFontSize = mostCommonFontsize;
    this.mostCommontPitchSize = mostCommonPitchSize;
  }
}

@Override
public String getFulltext() {
  // TODO Auto-generated method stub
  return null;
}

@Override
public long[] getRuntimes() {
  // TODO Auto-generated method stub
  return null;
}

// Returns the lines extracted by the most recent match(PDDocument, ...) call.
@Override
public List<Region> getLines() {
  return lines;
}
}

/**
 * Comparator to sort text lines by their reading order: by y first (10pt
 * tolerance), then by x (3pt tolerance).
 *
 * @author Claudius Korzen
 */
class ReadingOrderComparator implements Comparator<Region> {
  @Override
  public int compare(Region r1, Region r2) {
    int compare = Region.compare(r1.getY(), r2.getY(), 10f);
    if (compare != 0) {
      return -1 * compare;
    }
    compare = Region.compare(r1.getX(), r2.getX(), 3f);
    if (compare != 0) {
      return -1 * compare;
    }
    return 0;
  }
}

/**
 * Comparator to sort text lines by their fontsize, in descending order.
 *
 * @author Claudius Korzen
 */
class FontsizeComparator implements Comparator<Region> {
  @Override
  public int compare(Region r1, Region r2) {
    return -1 * Float.compare(r1.getFontsize(), r2.getFontsize());
  }
}
/* * The MIT License * * Copyright (c) 2015, CloudBees, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
 */
package hudson;

import com.gargoylesoftware.htmlunit.html.DomElement;
import com.gargoylesoftware.htmlunit.html.HtmlElement;
import com.gargoylesoftware.htmlunit.html.HtmlElementUtil;
import com.gargoylesoftware.htmlunit.html.HtmlInput;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
import com.gargoylesoftware.htmlunit.html.HtmlTableRow;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.jvnet.hudson.test.Issue;
import org.jvnet.hudson.test.JenkinsRule;
import org.jvnet.hudson.test.TestPluginManager;
import org.xml.sax.SAXException;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * @author <a href="mailto:tom.fennelly@gmail.com">tom.fennelly@gmail.com</a>
 */
public class PluginManagerInstalledGUITest {

    @Rule
    public JenkinsRule jenkinsRule = new JenkinsRule() {
        @Override
        public PluginManager getPluginManager() {
            try {
                return new TestPluginManager() {
                    @Override
                    protected Collection<String> loadBundledPlugins() throws Exception {
                        try {
                            return super.loadBundledPlugins();
                        } finally {
                            // Stage the plugins this test exercises alongside the bundled ones.
                            copyBundledPlugin(PluginManagerInstalledGUITest.class.getResource("/WEB-INF/detached-plugins/matrix-auth.hpi"), "matrix-auth.jpi"); // cannot use installDetachedPlugin at this point
                            copyBundledPlugin(PluginManagerInstalledGUITest.class.getResource("/plugins/dependee-0.0.2.hpi"), "dependee.jpi");
                            copyBundledPlugin(PluginManagerInstalledGUITest.class.getResource("/plugins/depender-0.0.2.hpi"), "depender.jpi");
                            copyBundledPlugin(PluginManagerInstalledGUITest.class.getResource("/plugins/mandatory-depender-0.0.2.hpi"), "mandatory-depender.jpi");
                        }
                    }
                };
            } catch (IOException e) {
                Assert.fail(e.getMessage());
                return null;
            }
        }
    };

    @Issue("JENKINS-33843")
    @Test
    public void test_enable_disable_uninstall() throws IOException, SAXException {
        InstalledPlugins installedPlugins = new InstalledPlugins();
        InstalledPlugin
        matrixAuthPlugin = installedPlugins.get("matrix-auth");
        InstalledPlugin dependeePlugin = installedPlugins.get("dependee");
        InstalledPlugin dependerPlugin = installedPlugins.get("depender");
        InstalledPlugin mandatoryDependerPlugin = installedPlugins.get("mandatory-depender");

        // As a detached plugin, it is an optional dependency of others built against a newer baseline.
        matrixAuthPlugin.assertHasNoDependents();
        // Has a mandatory dependency:
        dependeePlugin.assertHasDependents();
        // Leaf plugins:
        dependerPlugin.assertHasNoDependents();
        mandatoryDependerPlugin.assertHasNoDependents();

        // This plugin should be enabled and it should be possible to disable it
        // because no other plugins depend on it.
        mandatoryDependerPlugin.assertEnabled();
        mandatoryDependerPlugin.assertEnabledStateChangeable();
        mandatoryDependerPlugin.assertUninstallable();

        // This plugin should be enabled, but it should not be possible to disable or uninstall it
        // because another plugin depends on it.
        dependeePlugin.assertEnabled();
        dependeePlugin.assertEnabledStateNotChangeable();
        dependeePlugin.assertNotUninstallable();

        // Disable one plugin
        mandatoryDependerPlugin.clickEnabledWidget();

        // Now that plugin should be disabled, but it should be possible to re-enable it
        // and it should still be uninstallable.
        mandatoryDependerPlugin.assertNotEnabled(); // this is different to earlier
        mandatoryDependerPlugin.assertEnabledStateChangeable();
        mandatoryDependerPlugin.assertUninstallable();

        // The dependee plugin should still be enabled, but it should now be possible to disable it because
        // the mandatory depender plugin is no longer enabled. Should still not be possible to uninstall it.
        // Note that the depender plugin does not block its disablement.
        dependeePlugin.assertEnabled();
        dependeePlugin.assertEnabledStateChangeable(); // this is different to earlier
        dependeePlugin.assertNotUninstallable();
        dependerPlugin.assertEnabled();

        // Disable the dependee plugin
        dependeePlugin.clickEnabledWidget();

        // Now it should NOT be possible to change the enable state of the depender plugin because one
        // of the plugins it depends on is not enabled.
        mandatoryDependerPlugin.assertNotEnabled();
        mandatoryDependerPlugin.assertEnabledStateNotChangeable(); // this is different to earlier
        mandatoryDependerPlugin.assertUninstallable();
        dependerPlugin.assertEnabled();

        // You can disable a detached plugin if there is no explicit dependency on it.
        matrixAuthPlugin.assertEnabled();
        matrixAuthPlugin.assertEnabledStateChangeable();
        matrixAuthPlugin.assertUninstallable();
        matrixAuthPlugin.clickEnabledWidget();
        matrixAuthPlugin.assertNotEnabled();
        matrixAuthPlugin.assertEnabledStateChangeable();
        matrixAuthPlugin.assertUninstallable();
    }

    /** Scrapes the pluginManager/installed page into a list of row wrappers. */
    private class InstalledPlugins {

        private final List<InstalledPlugin> installedPlugins;

        private InstalledPlugins () throws IOException, SAXException {
            JenkinsRule.WebClient webClient = jenkinsRule.createWebClient();
            HtmlPage installedPage = webClient.goTo("pluginManager/installed");

            // Note for debugging... simply print installedPage to get the JenkinsRule
            // Jenkins URL and then add a long Thread.sleep here. It's useful re being
            // able to see what the code is testing.
DomElement pluginsTable = installedPage.getElementById("plugins"); HtmlElement tbody = pluginsTable.getElementsByTagName("TBODY").get(0); installedPlugins = new ArrayList<>(); for (DomElement htmlTableRow : tbody.getChildElements()) { installedPlugins.add(new InstalledPlugin((HtmlTableRow) htmlTableRow)); } } public InstalledPlugin get(String pluginId) { for (InstalledPlugin plugin : installedPlugins) { if (plugin.isPlugin(pluginId)) { return plugin; } } Assert.fail("No pluginManager/installed row for plugin " + pluginId); return null; } } private class InstalledPlugin { private final HtmlTableRow pluginRow; InstalledPlugin(HtmlTableRow pluginRow) { this.pluginRow = pluginRow; } public String getId() { return pluginRow.getAttribute("data-plugin-id"); } public boolean isPlugin(String pluginId) { return pluginId.equals(getId()); } private HtmlInput getEnableWidget() { HtmlElement input = pluginRow.getCells().get(0).getElementsByTagName("input").get(0); return (HtmlInput) input; } public void assertEnabled() { HtmlInput enableWidget = getEnableWidget(); Assert.assertTrue("Plugin '" + getId() + "' is expected to be enabled.", enableWidget.isChecked()); } public void assertNotEnabled() { HtmlInput enableWidget = getEnableWidget(); Assert.assertFalse("Plugin '" + getId() + "' is not expected to be enabled.", enableWidget.isChecked()); } public void clickEnabledWidget() throws IOException { HtmlInput enableWidget = getEnableWidget(); HtmlElementUtil.click(enableWidget); } public void assertEnabledStateChangeable() { if (!hasDependents() && !hasDisabledDependency() && !allDependentsDisabled()) { return; } if (allDependentsDisabled() && !hasDisabledDependency()) { return; } Assert.fail("The enable/disable state of plugin '" + getId() + "' cannot be changed."); } public void assertEnabledStateNotChangeable() { if (hasDependents() && !hasDisabledDependency() && !allDependentsDisabled()) { return; } if (!hasDependents() && hasDisabledDependency()) { return; } Assert.fail("The 
enable/disable state of plugin '" + getId() + "' cannot be changed."); } public void assertUninstallable() { Assert.assertFalse("Plugin '" + getId() + "' cannot be uninstalled.", hasDependents()); } public void assertNotUninstallable() { Assert.assertTrue("Plugin '" + getId() + "' can be uninstalled.", hasDependents()); } public void assertHasDependents() { Assert.assertTrue("Plugin '" + getId() + "' is expected to have dependents.", hasDependents()); } public void assertHasNoDependents() { Assert.assertFalse("Plugin '" + getId() + "' is expected to have no dependents.", hasDependents()); } private boolean hasClassName(String className) { String classAttribute = pluginRow.getAttribute("class"); Set<String> classes = new HashSet<>(Arrays.asList(classAttribute.split(" "))); return classes.contains(className); } private boolean hasDisabledDependency() { return hasClassName("has-disabled-dependency"); } private boolean allDependentsDisabled() { return hasClassName("all-dependents-disabled"); } private boolean hasDependents() { return hasClassName("has-dependents"); } } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * 
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 * 
 * http://aws.amazon.com/apache2.0
 * 
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.networkmanager.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the GetCoreNetworkChangeSet operation. NOTE: this file is
 * produced by the AWS SDK code generator; manual edits are overwritten on
 * regeneration.
 * 
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/networkmanager-2019-07-05/GetCoreNetworkChangeSet"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetCoreNetworkChangeSetRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The ID of a core network.
     * </p>
     */
    private String coreNetworkId;
    /**
     * <p>
     * The ID of the policy version.
     * </p>
     */
    private Integer policyVersionId;
    /**
     * <p>
     * The maximum number of results to return.
     * </p>
     */
    private Integer maxResults;
    /**
     * <p>
     * The token for the next page of results.
     * </p>
     */
    private String nextToken;

    /**
     * <p>
     * The ID of a core network.
     * </p>
     * 
     * @param coreNetworkId
     *        The ID of a core network.
     */
    public void setCoreNetworkId(String coreNetworkId) {
        this.coreNetworkId = coreNetworkId;
    }

    /**
     * <p>
     * The ID of a core network.
     * </p>
     * 
     * @return The ID of a core network.
     */
    public String getCoreNetworkId() {
        return this.coreNetworkId;
    }

    /**
     * <p>
     * The ID of a core network.
     * </p>
     * 
     * @param coreNetworkId
     *        The ID of a core network.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetCoreNetworkChangeSetRequest withCoreNetworkId(String coreNetworkId) {
        setCoreNetworkId(coreNetworkId);
        return this;
    }

    /**
     * <p>
     * The ID of the policy version.
     * </p>
     * 
     * @param policyVersionId
     *        The ID of the policy version.
     */
    public void setPolicyVersionId(Integer policyVersionId) {
        this.policyVersionId = policyVersionId;
    }

    /**
     * <p>
     * The ID of the policy version.
     * </p>
     * 
     * @return The ID of the policy version.
     */
    public Integer getPolicyVersionId() {
        return this.policyVersionId;
    }

    /**
     * <p>
     * The ID of the policy version.
     * </p>
     * 
     * @param policyVersionId
     *        The ID of the policy version.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetCoreNetworkChangeSetRequest withPolicyVersionId(Integer policyVersionId) {
        setPolicyVersionId(policyVersionId);
        return this;
    }

    /**
     * <p>
     * The maximum number of results to return.
     * </p>
     * 
     * @param maxResults
     *        The maximum number of results to return.
     */
    public void setMaxResults(Integer maxResults) {
        this.maxResults = maxResults;
    }

    /**
     * <p>
     * The maximum number of results to return.
     * </p>
     * 
     * @return The maximum number of results to return.
     */
    public Integer getMaxResults() {
        return this.maxResults;
    }

    /**
     * <p>
     * The maximum number of results to return.
     * </p>
     * 
     * @param maxResults
     *        The maximum number of results to return.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetCoreNetworkChangeSetRequest withMaxResults(Integer maxResults) {
        setMaxResults(maxResults);
        return this;
    }

    /**
     * <p>
     * The token for the next page of results.
     * </p>
     * 
     * @param nextToken
     *        The token for the next page of results.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * The token for the next page of results.
     * </p>
     * 
     * @return The token for the next page of results.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * The token for the next page of results.
     * </p>
     * 
     * @param nextToken
     *        The token for the next page of results.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetCoreNetworkChangeSetRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     * 
     * @return A string representation of this object.
     * 
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getCoreNetworkId() != null)
            sb.append("CoreNetworkId: ").append(getCoreNetworkId()).append(",");
        if (getPolicyVersionId() != null)
            sb.append("PolicyVersionId: ").append(getPolicyVersionId()).append(",");
        if (getMaxResults() != null)
            sb.append("MaxResults: ").append(getMaxResults()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof GetCoreNetworkChangeSetRequest == false)
            return false;
        GetCoreNetworkChangeSetRequest other = (GetCoreNetworkChangeSetRequest) obj;
        // The XOR (^) null checks detect the case where exactly one side is null.
        if (other.getCoreNetworkId() == null ^ this.getCoreNetworkId() == null)
            return false;
        if (other.getCoreNetworkId() != null && other.getCoreNetworkId().equals(this.getCoreNetworkId()) == false)
            return false;
        if (other.getPolicyVersionId() == null ^ this.getPolicyVersionId() == null)
            return false;
        if (other.getPolicyVersionId() != null && other.getPolicyVersionId().equals(this.getPolicyVersionId()) == false)
            return false;
        if (other.getMaxResults() == null ^ this.getMaxResults() == null)
            return false;
        if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false)
            return false;
        if (other.getNextToken() == null ^ this.getNextToken() == null)
            return false;
        if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getCoreNetworkId() == null) ? 0 : getCoreNetworkId().hashCode());
        hashCode = prime * hashCode + ((getPolicyVersionId() == null) ? 0 : getPolicyVersionId().hashCode());
        hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode());
        hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        return hashCode;
    }

    @Override
    public GetCoreNetworkChangeSetRequest clone() {
        return (GetCoreNetworkChangeSetRequest) super.clone();
    }

}
/*
 *
 *  Copyright 2016 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.genie.web.security.oauth2.pingfederate;

import com.netflix.spectator.api.Id;
import com.netflix.spectator.api.Registry;
import com.netflix.spectator.api.Timer;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.boot.autoconfigure.security.oauth2.resource.ResourceServerProperties;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpRequest;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.client.ClientHttpRequestExecution;
import org.springframework.http.client.ClientHttpRequestInterceptor;
import org.springframework.http.client.ClientHttpResponse;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.oauth2.common.exceptions.InvalidTokenException;
import org.springframework.security.oauth2.provider.OAuth2Authentication;
import org.springframework.security.oauth2.provider.token.AccessTokenConverter;
import org.springframework.security.oauth2.provider.token.RemoteTokenServices;
import org.springframework.util.Assert;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.DefaultResponseErrorHandler;
import org.springframework.web.client.RestTemplate;

import javax.validation.constraints.NotNull;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

/**
 * A remote token services extension for Ping Federate based IDPs.
 *
 * <p>Validates bearer tokens by POSTing them to the Ping Federate check-token endpoint using the
 * vendor-specific {@code urn:pingidentity.com:oauth2:grant_type:validate_bearer} grant type, then
 * converts the response into an {@link OAuth2Authentication} via the configured
 * {@link AccessTokenConverter}. Timing and error metrics are published through the supplied
 * Spectator {@link Registry}.
 *
 * @author tgianos
 * @since 3.0.0
 */
@Slf4j
public class PingFederateRemoteTokenServices extends RemoteTokenServices {

    protected static final String TOKEN_NAME_KEY = "token";
    protected static final String CLIENT_ID_KEY = "client_id";
    protected static final String CLIENT_SECRET_KEY = "client_secret";
    protected static final String GRANT_TYPE_KEY = "grant_type";
    protected static final String ERROR_KEY = "error";
    protected static final String SCOPE_KEY = "scope";
    // Ping Federate specific grant type used for bearer-token validation requests.
    protected static final String GRANT_TYPE = "urn:pingidentity.com:oauth2:grant_type:validate_bearer";
    protected static final String AUTHENTICATION_TIMER_NAME
        = "genie.security.oauth2.pingFederate.authentication.timer";
    protected static final String API_TIMER_NAME = "genie.security.oauth2.pingFederate.api.timer";

    private final AccessTokenConverter converter;
    // Kept alongside the parent's template because RemoteTokenServices does not expose its own.
    private RestTemplate localRestTemplate;

    private final String checkTokenEndpointUrl;
    private final String clientId;
    private final String clientSecret;

    // Metrics
    private final Id tokenValidationError;
    private final Timer authenticationTimer;
    private final Timer pingFederateAPITimer;

    /**
     * Constructor.
     *
     * @param serverProperties The properties of the resource server (Genie)
     * @param converter        The access token converter to use
     * @param registry         The metrics registry to use
     * @throws IllegalStateException if the token-info URI, client id or client secret is blank
     */
    public PingFederateRemoteTokenServices(
        @NotNull final ResourceServerProperties serverProperties,
        @NotNull final AccessTokenConverter converter,
        @NotNull final Registry registry
    ) {
        super();
        this.tokenValidationError = registry.createId("genie.security.oauth2.pingFederate.tokenValidation.error.rate");
        this.authenticationTimer = registry.timer(AUTHENTICATION_TIMER_NAME);
        this.pingFederateAPITimer = registry.timer(API_TIMER_NAME);

        final HttpComponentsClientHttpRequestFactory factory = new HttpComponentsClientHttpRequestFactory();
        factory.setConnectTimeout(2000);
        factory.setReadTimeout(10000);

        final RestTemplate restTemplate = new RestTemplate(factory);
        final List<ClientHttpRequestInterceptor> interceptors = new ArrayList<>();
        // Time every outbound call to the Ping Federate API.
        interceptors.add(
            (final HttpRequest request, final byte[] body, final ClientHttpRequestExecution execution) -> {
                final long start = System.nanoTime();
                try {
                    return execution.execute(request, body);
                } finally {
                    pingFederateAPITimer.record(System.nanoTime() - start, TimeUnit.NANOSECONDS);
                }
            }
        );
        restTemplate.setInterceptors(interceptors);
        restTemplate.setErrorHandler(
            new DefaultResponseErrorHandler() {
                // Ignore 400: Ping Federate returns HTTP 400 with an "error" body for invalid
                // tokens, which loadAuthentication handles explicitly. Other errors propagate.
                @Override
                public void handleError(final ClientHttpResponse response) throws IOException {
                    final int errorCode = response.getRawStatusCode();
                    registry.counter(tokenValidationError.withTag("status", Integer.toString(errorCode))).increment();
                    if (response.getRawStatusCode() != HttpStatus.BAD_REQUEST.value()) {
                        super.handleError(response);
                    }
                }
            }
        );
        this.setRestTemplate(restTemplate);

        this.checkTokenEndpointUrl = serverProperties.getTokenInfoUri();
        this.clientId = serverProperties.getClientId();
        this.clientSecret = serverProperties.getClientSecret();

        Assert.state(StringUtils.isNotBlank(this.checkTokenEndpointUrl), "Check Endpoint URL is required");
        Assert.state(StringUtils.isNotBlank(this.clientId), "Client ID is required");
        Assert.state(StringUtils.isNotBlank(this.clientSecret), "Client secret is required");
        log.debug("checkTokenEndpointUrl = {}", this.checkTokenEndpointUrl);
        log.debug("clientId = {}", this.clientId);
        // SECURITY FIX: the client secret is deliberately NOT logged. The previous implementation
        // wrote it to the debug log, which leaks a credential to anyone with log access.
        this.converter = converter;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public OAuth2Authentication loadAuthentication(
        final String accessToken
    ) throws AuthenticationException, InvalidTokenException {
        final long start = System.nanoTime();
        try {
            final MultiValueMap<String, String> formData = new LinkedMultiValueMap<>();
            formData.add(TOKEN_NAME_KEY, accessToken);
            formData.add(CLIENT_ID_KEY, this.clientId);
            formData.add(CLIENT_SECRET_KEY, this.clientSecret);
            formData.add(GRANT_TYPE_KEY, GRANT_TYPE);

            final Map<String, Object> map = this.postForMap(this.checkTokenEndpointUrl, formData);

            // An "error" field in the response body means the token failed validation.
            if (map.containsKey(ERROR_KEY)) {
                final String error = map.get(ERROR_KEY).toString();
                log.debug("Validating the token produced an error: {}", error);
                throw new InvalidTokenException(error);
            }

            Assert.state(map.containsKey(CLIENT_ID_KEY), "Client id must be present in response from auth server");
            Assert.state(map.containsKey(SCOPE_KEY), "No scopes included in response from authentication server");

            // Ping Federate returns scopes as a single space-delimited string; the converter
            // expects a collection, so reshape before converting.
            this.convertScopes(map);
            final OAuth2Authentication authentication = this.converter.extractAuthentication(map);
            log.info(
                "User {} authenticated with authorities {}",
                authentication.getPrincipal(),
                authentication.getAuthorities()
            );
            return authentication;
        } finally {
            final long finished = System.nanoTime();
            this.authenticationTimer.record(finished - start, TimeUnit.NANOSECONDS);
        }
    }

    /**
     * Set the rest operations to use.
     *
     * <p>Overload of the parent's setter that also keeps a local reference, since
     * {@link RemoteTokenServices} does not expose the template it stores.
     *
     * @param restTemplate The rest operations to use. Not null.
     */
    protected void setRestTemplate(@NotNull final RestTemplate restTemplate) {
        super.setRestTemplate(restTemplate);
        this.localRestTemplate = restTemplate;
    }

    /**
     * POST the given form data to the given path and return the JSON response as a map.
     */
    private Map<String, Object> postForMap(final String path, final MultiValueMap<String, String> formData) {
        final HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);

        @SuppressWarnings("rawtypes") final Map map = this.localRestTemplate.exchange(
            path,
            HttpMethod.POST,
            new HttpEntity<>(formData, headers),
            Map.class
        ).getBody();

        @SuppressWarnings("unchecked") final Map<String, Object> result = map;
        return result;
    }

    /**
     * Replace the space-delimited scope string in the map with a {@code List<String>} of scopes.
     *
     * @throws InvalidTokenException if the scope entry is missing, blank, or not a String
     */
    private void convertScopes(final Map<String, Object> oauth2Map) {
        final Object scopesObject = oauth2Map.get(SCOPE_KEY);
        if (scopesObject == null) {
            throw new InvalidTokenException("Scopes were null");
        }
        if (scopesObject instanceof String) {
            final String scopes = (String) scopesObject;
            if (StringUtils.isBlank(scopes)) {
                throw new InvalidTokenException("No scopes found unable to authenticate");
            }
            oauth2Map.put(SCOPE_KEY, Arrays.asList(StringUtils.split(scopes, ' ')));
        } else {
            throw new InvalidTokenException("Scopes was not a String");
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.cli.commands.messages.perf;

import javax.jms.BytesMessage;
import javax.jms.CompletionListener;
import javax.jms.JMSException;
import javax.jms.Message;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.BooleanSupplier;

import io.netty.util.concurrent.OrderedEventExecutor;
import org.HdrHistogram.SingleWriterRecorder;
import org.apache.activemq.artemis.cli.commands.messages.perf.AsyncJms2ProducerFacade.SendAttemptResult;

/**
 * Base class for producer load generators that drive an {@link AsyncJms2ProducerFacade}
 * from a single-threaded Netty {@link OrderedEventExecutor} event loop.
 * <p>
 * Threading model: all mutable state except the volatile fields and the atomics is
 * confined to the event loop (the {@code assert executor.inEventLoop()} checks enforce
 * this). JMS completion callbacks ({@link #onCompletion}/{@link #onException}) may arrive
 * on a different (JMS provider) thread, so they only touch the atomics and the
 * {@code stopHandlingCompletions} flag before handing work back to the executor.
 * Latency recording into the {@link SingleWriterRecorder}s assumes a single writer per
 * recorder, matching that threading split.
 */
public abstract class SkeletalProducerLoadGenerator implements CompletionListener, ProducerLoadGenerator {

   protected final AsyncJms2ProducerFacade producer;
   // Single-threaded event loop that owns all non-volatile mutable state below.
   private final OrderedEventExecutor executor;
   protected final BooleanSupplier keepOnSending;
   protected final MicrosTimeProvider timeProvider;
   // Optional JMSXGroupID value; null means no message grouping.
   private final String group;
   private final byte[] messageContent;
   // Reused across send attempts; reset to null once a send is accepted.
   private BytesMessage messageToSend;
   protected boolean closed;
   protected volatile boolean stopLoad;
   // Histogram of (actual send time - expected send time); may be null when not measured.
   private final SingleWriterRecorder waitLatencies;
   // Histogram of send-to-completion latency.
   private final SingleWriterRecorder sendCompletedLatencies;
   // Count of completions received on the JMS thread but not yet processed on the event loop.
   private final AtomicLong unprocessedCompletions;
   // Guards against scheduling processCompletions() more than once concurrently.
   private final AtomicBoolean scheduledProcessingCompletions;
   private volatile Exception fatalException;
   // Once true, further JMS completion callbacks are ignored (set after a fatal error).
   private boolean stopHandlingCompletions;

   /**
    * @param producer               facade wrapping the JMS producer/session
    * @param executor               event loop that runs all generator state changes
    * @param timeProvider           time source (microsecond resolution, per its name)
    * @param keepOnSending          supplier polled by subclasses to decide whether to continue
    * @param group                  JMSXGroupID to stamp on messages, or null for none
    * @param msgContent             payload written into each BytesMessage
    * @param sendCompletedLatencies recorder for send-completion latencies
    * @param waitLatencies          recorder for send-wait latencies, or null to skip recording
    */
   public SkeletalProducerLoadGenerator(final AsyncJms2ProducerFacade producer,
                                        final OrderedEventExecutor executor,
                                        final MicrosTimeProvider timeProvider,
                                        final BooleanSupplier keepOnSending,
                                        final String group,
                                        final byte[] msgContent,
                                        final SingleWriterRecorder sendCompletedLatencies,
                                        final SingleWriterRecorder waitLatencies) {
      this.sendCompletedLatencies = sendCompletedLatencies;
      this.waitLatencies = waitLatencies;
      this.producer = producer;
      this.executor = executor;
      this.timeProvider = timeProvider;
      this.keepOnSending = keepOnSending;
      this.group = group;
      this.messageContent = msgContent;
      this.messageToSend = null;
      this.closed = false;
      this.stopLoad = false;
      this.unprocessedCompletions = new AtomicLong();
      this.scheduledProcessingCompletions = new AtomicBoolean();
      this.fatalException = null;
      this.stopHandlingCompletions = false;
   }

   @Override
   public Exception getFatalException() {
      return fatalException;
   }

   @Override
   public SingleWriterRecorder getSendCompletedLatencies() {
      return sendCompletedLatencies;
   }

   @Override
   public SingleWriterRecorder getWaitLatencies() {
      return waitLatencies;
   }

   @Override
   public AsyncJms2ProducerFacade getProducer() {
      return producer;
   }

   /**
    * A generator is completed once the load is stopped and either a fatal error occurred
    * or every sent message has been completed by the broker.
    */
   @Override
   public boolean isCompleted() {
      if (stopLoad && fatalException != null) {
         return true;
      }
      return stopLoad && producer.getMessageCompleted() == producer.getMessageSent();
   }

   @Override
   public OrderedEventExecutor getExecutor() {
      return executor;
   }

   // Re-queue this generator on the event loop immediately.
   protected final void asyncContinue() {
      asyncContinue(0);
   }

   // Re-queue this generator on the event loop, optionally after a microsecond delay.
   protected final void asyncContinue(final long usDelay) {
      if (usDelay == 0) {
         executor.execute(this);
      } else {
         executor.schedule(this, usDelay, TimeUnit.MICROSECONDS);
      }
   }

   protected final boolean trySend(final long sendTime) {
      return trySend(sendTime, sendTime);
   }

   /**
    * Attempt a single asynchronous send on the event loop.
    *
    * @param expectedSendTime the time the send was scheduled to happen
    * @param sendTime         the actual send time, stamped on the message as the "time" property
    * @return true only when the facade accepted the send ({@code Success});
    *         false for {@code NotAvailable} (message retained for retry) or on JMS error
    */
   protected final boolean trySend(final long expectedSendTime, final long sendTime) {
      assert executor.inEventLoop();
      assert !closed;
      try {
         // Lazily (re)create the message: it survives a NotAvailable attempt and is retried.
         if (messageToSend == null) {
            messageToSend = producer.createBytesMessage();
            messageToSend.writeBytes(this.messageContent);
         }
         messageToSend.setLongProperty("time", sendTime);
         if (group != null) {
            messageToSend.setStringProperty("JMSXGroupID", group);
         }
         final SendAttemptResult result = producer.trySend(messageToSend, this, this);
         if (result != SendAttemptResult.NotAvailable) {
            // Message handed off (or rejected terminally): don't reuse it.
            messageToSend = null;
            if (result == SendAttemptResult.Success) {
               if (waitLatencies != null) {
                  // How late the send happened vs. its schedule.
                  waitLatencies.recordValue(sendTime - expectedSendTime);
               }
            }
         }
         return result == SendAttemptResult.Success;
      } catch (final JMSException e) {
         onSendErrored(e);
         return false;
      }
   }

   // JMS CompletionListener callback: runs on a provider thread, not the event loop.
   @Override
   public void onCompletion(final Message message) {
      asyncOnSendCompleted(message, null);
   }

   // JMS CompletionListener callback: runs on a provider thread, not the event loop.
   @Override
   public void onException(final Message message, final Exception exception) {
      asyncOnSendCompleted(message, exception);
   }

   /**
    * Handle a completion (or completion failure) from the JMS provider thread:
    * record latency, bump the unprocessed counter and hand processing to the event loop.
    * Any error is forwarded to {@link #onSendErrored} on the event loop.
    */
   private void asyncOnSendCompleted(final Message message, Exception completionError) {
      if (stopHandlingCompletions) {
         return;
      }
      if (completionError == null) {
         try {
            recordSendCompletionLatency(message);
            unprocessedCompletions.incrementAndGet();
            scheduleProcessingCompletions();
         } catch (final JMSException jmsException) {
            completionError = jmsException;
         }
      }
      if (completionError != null) {
         // Stop accepting further completions; report the failure on the event loop.
         stopHandlingCompletions = true;
         final Exception fatal = completionError;
         executor.execute(() -> onSendErrored(fatal));
      }
   }

   // Record the first fatal error and shut the generator down. Event-loop only.
   private void onSendErrored(final Exception fatal) {
      assert executor.inEventLoop();
      if (fatalException != null) {
         // Only the first fatal error is kept.
         return;
      }
      producer.onSendErrored();
      fatalException = fatal;
      stopLoad = true;
      closed = true;
   }

   // Schedule processCompletions() on the event loop at most once at a time.
   private void scheduleProcessingCompletions() {
      if (unprocessedCompletions.get() > 0 && scheduledProcessingCompletions.compareAndSet(false, true)) {
         executor.execute(this::processCompletions);
      }
   }

   /**
    * Drain the completions accumulated by the JMS thread. Runs on the event loop.
    * After draining, the scheduled flag is cleared and a re-check is performed to close
    * the race with completions that arrived while this batch was being processed.
    */
   private void processCompletions() {
      assert executor.inEventLoop();
      assert scheduledProcessingCompletions.get();
      if (fatalException != null) {
         return;
      }
      final long completions = unprocessedCompletions.getAndSet(0);
      for (long i = 0; i < completions; i++) {
         final JMSException completionException = producer.onSendCompleted();
         if (completionException != null) {
            fatalException = completionException;
            return;
         }
      }
      // Clear-then-recheck ordering matters: a completion racing in between getAndSet(0)
      // and here must still get a processing run scheduled.
      scheduledProcessingCompletions.set(false);
      scheduleProcessingCompletions();
   }

   // Record elapsed time from the message's "time" stamp (set in trySend) to now.
   private void recordSendCompletionLatency(final Message message) throws JMSException {
      final long time = message.getLongProperty("time");
      final long elapsedMicros = timeProvider.now() - time;
      sendCompletedLatencies.recordValue(elapsedMicros);
   }

   /**
    * Request an asynchronous close; {@code onClosed} runs once the producer close is requested.
    */
   @Override
   public Future<?> asyncClose(final Runnable onClosed) {
      return executor.submit(() -> onClose(onClosed));
   }

   private void onClose(final Runnable onClosed) {
      assert executor.inEventLoop();
      if (closed) {
         onClosed.run();
         return;
      }
      closed = true;
      // no need for this anymore
      messageToSend = null;
      producer.requestClose(onClosed);
   }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.nfs.nfs3;

import java.io.IOException;
import java.util.EnumSet;

import io.netty.channel.Channel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
import org.apache.hadoop.hdfs.nfs.conf.NfsConfigKeys;
import org.apache.hadoop.hdfs.nfs.conf.NfsConfiguration;
import org.apache.hadoop.hdfs.nfs.nfs3.OpenFileCtx.COMMIT_STATUS;
import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.nfs.NfsFileType;
import org.apache.hadoop.nfs.nfs3.FileHandle;
import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
import org.apache.hadoop.nfs.nfs3.Nfs3Status;
import org.apache.hadoop.nfs.nfs3.request.WRITE3Request;
import org.apache.hadoop.nfs.nfs3.response.COMMIT3Response;
import org.apache.hadoop.nfs.nfs3.response.WRITE3Response;
import org.apache.hadoop.nfs.nfs3.response.WccData;
import org.apache.hadoop.oncrpc.XDR;
import org.apache.hadoop.oncrpc.security.VerifierNone;
import org.apache.hadoop.security.IdMappingServiceProvider;

import org.apache.hadoop.classification.VisibleForTesting;

/**
 * Manage the writes and responds asynchronously.
 *
 * <p>Each NFS file handle with in-flight writes gets an {@link OpenFileCtx} kept in
 * {@link OpenFileCtxCache}; WRITE requests are queued onto the context and serviced by
 * {@link AsyncDataService}, while COMMIT requests are mapped from the context's
 * {@code COMMIT_STATUS} onto NFS3 status codes (JUKEBOX is used to tell clients to retry).
 */
public class WriteManager {

  public static final Logger LOG = LoggerFactory.getLogger(WriteManager.class);

  private final NfsConfiguration config;
  // Maps between NFS user/group ids and names.
  private final IdMappingServiceProvider iug;

  private AsyncDataService asyncDataService;
  private boolean asyncDataServiceStarted = false;

  // Configured maximum number of concurrently cached open streams.
  private final int maxStreams;
  // When true, relaxes some checks for AIX NFS clients.
  private final boolean aixCompatMode;

  /**
   * The time limit to wait for accumulate reordered sequential writes to the
   * same file before the write is considered done.
   */
  private long streamTimeout;

  // Cache of per-file-handle write contexts; evicts idle streams after streamTimeout.
  private final OpenFileCtxCache fileContextCache;

  static public class MultipleCachedStreamException extends IOException {
    private static final long serialVersionUID = 1L;

    public MultipleCachedStreamException(String msg) {
      super(msg);
    }
  }

  // Returns false when the cache refuses the new stream (e.g. it is full).
  boolean addOpenFileStream(FileHandle h, OpenFileCtx ctx) {
    return fileContextCache.put(h, ctx);
  }

  /**
   * @param iug           id mapping service for user/group resolution
   * @param config        NFS gateway configuration
   * @param aixCompatMode whether to enable AIX client compatibility behavior
   */
  WriteManager(IdMappingServiceProvider iug, final NfsConfiguration config,
      boolean aixCompatMode) {
    this.iug = iug;
    this.config = config;
    this.aixCompatMode = aixCompatMode;
    streamTimeout = config.getLong(NfsConfigKeys.DFS_NFS_STREAM_TIMEOUT_KEY,
        NfsConfigKeys.DFS_NFS_STREAM_TIMEOUT_DEFAULT);
    LOG.info("Stream timeout is " + streamTimeout + "ms.");
    // Clamp to the supported minimum so streams are not expired too aggressively.
    if (streamTimeout < NfsConfigKeys.DFS_NFS_STREAM_TIMEOUT_MIN_DEFAULT) {
      LOG.info("Reset stream timeout to minimum value "
          + NfsConfigKeys.DFS_NFS_STREAM_TIMEOUT_MIN_DEFAULT + "ms.");
      streamTimeout = NfsConfigKeys.DFS_NFS_STREAM_TIMEOUT_MIN_DEFAULT;
    }
    maxStreams = config.getInt(NfsConfigKeys.DFS_NFS_MAX_OPEN_FILES_KEY,
        NfsConfigKeys.DFS_NFS_MAX_OPEN_FILES_DEFAULT);
    LOG.info("Maximum open streams is " + maxStreams);
    this.fileContextCache = new OpenFileCtxCache(config, streamTimeout);
  }

  // Idempotent start of the cache monitor and the async write service.
  void startAsyncDataService() {
    if (asyncDataServiceStarted) {
      return;
    }
    fileContextCache.start();
    this.asyncDataService = new AsyncDataService();
    asyncDataServiceStarted = true;
  }

  // Idempotent shutdown; reverses startAsyncDataService().
  void shutdownAsyncDataService() {
    if (!asyncDataServiceStarted) {
      return;
    }
    asyncDataServiceStarted = false;
    asyncDataService.shutdown();
    fileContextCache.shutdown();
  }

  /**
   * Handle an NFS3 WRITE request: locate (or open) the per-file write context and
   * queue the write for asynchronous processing.
   *
   * <p>If no stream is cached for the handle, the file is opened for append and a new
   * {@link OpenFileCtx} is registered. On failure an error response (or, for a file
   * being closed / a full cache, a retry-inducing response or silence) is sent on
   * {@code channel} directly.
   *
   * @param dfsClient client used to append to HDFS
   * @param request   the WRITE3 request (data, offset, count, stable-how)
   * @param channel   RPC channel to respond on
   * @param xid       RPC transaction id for the response
   * @param preOpAttr file attributes before the operation, for WccData
   * @throws IOException on non-recoverable HDFS errors (rethrown RemoteException)
   */
  void handleWrite(DFSClient dfsClient, WRITE3Request request, Channel channel,
      int xid, Nfs3FileAttributes preOpAttr) throws IOException {
    int count = request.getCount();
    byte[] data = request.getData().array();
    // The buffer must contain at least `count` bytes; otherwise the request is malformed.
    if (data.length < count) {
      WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_INVAL);
      Nfs3Utils.writeChannel(channel, response.serialize(
          new XDR(), xid, new VerifierNone()), xid);
      return;
    }

    FileHandle handle = request.getHandle();
    if (LOG.isDebugEnabled()) {
      LOG.debug("handleWrite " + request);
    }

    // Check if there is a stream to write
    FileHandle fileHandle = request.getHandle();
    OpenFileCtx openFileCtx = fileContextCache.get(fileHandle);
    if (openFileCtx == null) {
      LOG.info("No opened stream for fileHandle: "
          + fileHandle.dumpFileHandle());

      String fileIdPath = Nfs3Utils.getFileIdPath(fileHandle.getFileId());
      HdfsDataOutputStream fos = null;
      Nfs3FileAttributes latestAttr = null;
      try {
        int bufferSize = config.getInt(
            CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY,
            CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT);

        fos = dfsClient.append(fileIdPath, bufferSize,
            EnumSet.of(CreateFlag.APPEND), null, null);

        latestAttr = Nfs3Utils.getFileAttr(dfsClient, fileIdPath, iug);
      } catch (RemoteException e) {
        IOException io = e.unwrapRemoteException();
        if (io instanceof AlreadyBeingCreatedException) {
          // File still held by another writer (likely being closed): stay silent so the
          // client retries, by which time the lease should be released.
          LOG.warn("Can't append file: " + fileIdPath
              + ". Possibly the file is being closed. Drop the request: "
              + request + ", wait for the client to retry...");
          return;
        }
        throw e;
      } catch (IOException e) {
        LOG.error("Can't append to file: " + fileIdPath, e);
        if (fos != null) {
          fos.close();
        }
        WccData fileWcc = new WccData(Nfs3Utils.getWccAttr(preOpAttr),
            preOpAttr);
        WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO,
            fileWcc, count, request.getStableHow(),
            Nfs3Constant.WRITE_COMMIT_VERF);
        Nfs3Utils.writeChannel(channel, response.serialize(
            new XDR(), xid, new VerifierNone()), xid);
        return;
      }

      // Add open stream
      String writeDumpDir = config.get(NfsConfigKeys.DFS_NFS_FILE_DUMP_DIR_KEY,
          NfsConfigKeys.DFS_NFS_FILE_DUMP_DIR_DEFAULT);
      openFileCtx = new OpenFileCtx(fos, latestAttr, writeDumpDir + "/"
          + fileHandle.getFileId(), dfsClient, iug, aixCompatMode, config);
      if (!addOpenFileStream(fileHandle, openFileCtx)) {
        // Cache is full: close the freshly opened stream and answer JUKEBOX so the
        // client retries later.
        LOG.info("Can't add new stream. Close it. Tell client to retry.");
        try {
          fos.close();
        } catch (IOException e) {
          LOG.error("Can't close stream for fileHandle: "
              + handle.dumpFileHandle(), e);
        }
        // Notify client to retry
        WccData fileWcc = new WccData(latestAttr.getWccAttr(), latestAttr);
        WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_JUKEBOX,
            fileWcc, 0, request.getStableHow(), Nfs3Constant.WRITE_COMMIT_VERF);
        Nfs3Utils.writeChannel(channel,
            response.serialize(new XDR(), xid, new VerifierNone()), xid);
        return;
      }

      if (LOG.isDebugEnabled()) {
        LOG.debug("Opened stream for appending file: "
            + fileHandle.dumpFileHandle());
      }
    }

    // Add write into the async job queue
    openFileCtx.receivedNewWrite(dfsClient, request, channel, xid,
        asyncDataService, iug);
    return;
  }

  // Do a possible commit before read request in case there is buffered data
  // inside DFSClient which has been flushed but not synced.
  /**
   * @return an NFS3 status: OK when there is nothing to commit or the commit finished;
   *         JUKEBOX when the commit would have to wait (reads are not blocked);
   *         NFS3ERR_IO on commit errors.
   */
  int commitBeforeRead(DFSClient dfsClient, FileHandle fileHandle,
      long commitOffset) {
    int status;
    OpenFileCtx openFileCtx = fileContextCache.get(fileHandle);

    if (openFileCtx == null) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("No opened stream for fileId: "
            + fileHandle.dumpFileHandle() + " commitOffset=" + commitOffset
            + ". Return success in this case.");
      }
      status = Nfs3Status.NFS3_OK;

    } else {
      // commit request triggered by read won't create pending commit obj
      COMMIT_STATUS ret = openFileCtx.checkCommit(dfsClient, commitOffset,
          null, 0, null, true);
      switch (ret) {
      case COMMIT_FINISHED:
      case COMMIT_INACTIVE_CTX:
        status = Nfs3Status.NFS3_OK;
        break;
      case COMMIT_INACTIVE_WITH_PENDING_WRITE:
      case COMMIT_ERROR:
        status = Nfs3Status.NFS3ERR_IO;
        break;
      case COMMIT_WAIT:
      case COMMIT_SPECIAL_WAIT:
        /**
         * This should happen rarely in some possible cases, such as read
         * request arrives before DFSClient is able to quickly flush data to DN,
         * or Prerequisite writes is not available. Won't wait since we don't
         * want to block read.
         */
        status = Nfs3Status.NFS3ERR_JUKEBOX;
        break;
      case COMMIT_SPECIAL_SUCCESS:
        // Read beyond eof could result in partial read
        status = Nfs3Status.NFS3_OK;
        break;
      default:
        LOG.error("Should not get commit return code: " + ret.name());
        throw new RuntimeException("Should not get commit return code: "
            + ret.name());
      }
    }
    return status;
  }

  /**
   * Handle an NFS3 COMMIT request and send the COMMIT3 response on {@code channel}.
   * For {@code COMMIT_WAIT} nothing is sent here: the commit completes asynchronously
   * and the context responds later.
   *
   * @param preOpAttr  attributes before the operation, used for WccData and fileId lookup
   * @param namenodeId namenode identifier used to rebuild the FileHandle for post-op attrs
   */
  void handleCommit(DFSClient dfsClient, FileHandle fileHandle,
      long commitOffset, Channel channel, int xid,
      Nfs3FileAttributes preOpAttr, int namenodeId) {
    long startTime = System.nanoTime();
    int status;
    OpenFileCtx openFileCtx = fileContextCache.get(fileHandle);

    if (openFileCtx == null) {
      LOG.info("No opened stream for fileId: " + fileHandle.dumpFileHandle()
          + " commitOffset=" + commitOffset
          + ". Return success in this case.");
      status = Nfs3Status.NFS3_OK;

    } else {
      COMMIT_STATUS ret = openFileCtx.checkCommit(dfsClient, commitOffset,
          channel, xid, preOpAttr, false);
      switch (ret) {
      case COMMIT_FINISHED:
      case COMMIT_INACTIVE_CTX:
        status = Nfs3Status.NFS3_OK;
        break;
      case COMMIT_INACTIVE_WITH_PENDING_WRITE:
      case COMMIT_ERROR:
        status = Nfs3Status.NFS3ERR_IO;
        break;
      case COMMIT_WAIT:
        // Do nothing. Commit is async now.
        return;
      case COMMIT_SPECIAL_WAIT:
        status = Nfs3Status.NFS3ERR_JUKEBOX;
        break;
      case COMMIT_SPECIAL_SUCCESS:
        status = Nfs3Status.NFS3_OK;
        break;
      default:
        LOG.error("Should not get commit return code: " + ret.name());
        throw new RuntimeException("Should not get commit return code: "
            + ret.name());
      }
    }

    // Send out the response
    Nfs3FileAttributes postOpAttr = null;
    try {
      postOpAttr = getFileAttr(dfsClient,
          new FileHandle(preOpAttr.getFileId(), namenodeId), iug);
    } catch (IOException e1) {
      // Best effort: the response is still sent with a null post-op attribute.
      LOG.info("Can't get postOpAttr for fileId: " + preOpAttr.getFileId(),
          e1);
    }
    WccData fileWcc = new WccData(Nfs3Utils.getWccAttr(preOpAttr), postOpAttr);
    COMMIT3Response response = new COMMIT3Response(status, fileWcc,
        Nfs3Constant.WRITE_COMMIT_VERF);
    RpcProgramNfs3.metrics.addCommit(Nfs3Utils.getElapsedTime(startTime));
    Nfs3Utils.writeChannelCommit(channel,
        response.serialize(new XDR(), xid, new VerifierNone()), xid);
  }

  /**
   * If the file is in cache, update the size based on the cached data size
   */
  Nfs3FileAttributes getFileAttr(DFSClient client, FileHandle fileHandle,
      IdMappingServiceProvider iug) throws IOException {
    String fileIdPath = Nfs3Utils.getFileIdPath(fileHandle);
    Nfs3FileAttributes attr = Nfs3Utils.getFileAttr(client, fileIdPath, iug);
    if (attr != null) {
      OpenFileCtx openFileCtx = fileContextCache.get(fileHandle);
      if (openFileCtx != null) {
        // Unstable-written data is not yet visible to the NameNode; report the
        // stream's next offset as the size instead.
        attr.setSize(openFileCtx.getNextOffset());
        attr.setUsed(openFileCtx.getNextOffset());
      }
    }
    return attr;
  }

  /**
   * Look up attributes for {@code fileName} under {@code dirHandle}, overriding the
   * size with the cached stream's next offset for regular files with pending writes.
   */
  Nfs3FileAttributes getFileAttr(DFSClient client, FileHandle dirHandle,
      String fileName, int namenodeId) throws IOException {
    String fileIdPath = Nfs3Utils.getFileIdPath(dirHandle) + "/" + fileName;
    Nfs3FileAttributes attr = Nfs3Utils.getFileAttr(client, fileIdPath, iug);
    if ((attr != null) && (attr.getType() == NfsFileType.NFSREG.toValue())) {
      OpenFileCtx openFileCtx = fileContextCache.get(new FileHandle(attr
          .getFileId(), namenodeId));

      if (openFileCtx != null) {
        attr.setSize(openFileCtx.getNextOffset());
        attr.setUsed(openFileCtx.getNextOffset());
      }
    }
    return attr;
  }

  @VisibleForTesting
  OpenFileCtxCache getOpenFileCtxCache() {
    return this.fileContextCache;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gobblin.compaction.mapreduce; import java.io.IOException; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import org.apache.commons.math3.primes.Primes; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.TaskCompletionEvent; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import com.google.common.collect.Sets; import com.google.common.primitives.Ints; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.Setter; import lombok.extern.slf4j.Slf4j; import org.apache.gobblin.compaction.dataset.DatasetHelper; import org.apache.gobblin.compaction.mapreduce.avro.MRCompactorAvroKeyDedupJobRunner; import org.apache.gobblin.compaction.parser.CompactionPathParser; import 
org.apache.gobblin.compaction.verify.InputRecordCountHelper;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.dataset.FileSystemDataset;
import org.apache.gobblin.hive.policy.HiveRegistrationPolicy;
import org.apache.gobblin.util.FileListUtils;
import org.apache.gobblin.util.HadoopUtils;


/**
 * Configurator for compaction job.
 * Different data formats should have their own impl. for this interface.
 */
@Slf4j
public abstract class CompactionJobConfigurator {
  public static final String COMPACTION_JOB_CONFIGURATOR_FACTORY_CLASS_KEY = "compaction.jobConfiguratorFactory.class";
  public static final String DEFAULT_COMPACTION_JOB_CONFIGURATOR_FACTORY_CLASS =
      "org.apache.gobblin.compaction.mapreduce.CompactionAvroJobConfigurator$Factory";

  /** Supported output file extensions per data format. */
  @Getter
  @AllArgsConstructor
  protected enum EXTENSION {
    AVRO("avro"), ORC("orc");

    private String extensionString;
  }

  protected final State state;

  @Getter
  protected final FileSystem fs;

  // Below attributes are MR related
  @Getter
  protected Job configuredJob;
  @Getter
  protected final boolean shouldDeduplicate;
  @Getter
  protected Path mrOutputPath = null;
  @Getter
  protected boolean isJobCreated = false;
  @Getter
  protected Collection<Path> mapReduceInputPaths = null;
  // All the old files, which is needed when emit GMCE to register iceberg data
  @Getter
  protected Collection<String> oldFiles = null;
  // All the new files in the final publish dir, which is needed when emit GMCE to register iceberg data
  @Getter
  @Setter
  protected Collection<Path> dstNewFiles = null;
  // Record count derived from input file names; only populated when the
  // rename-source-dir mode is enabled (see getGranularInputPaths).
  @Getter
  protected long fileNameRecordCount = 0;

  /** Factory contract used by {@link #instantiateConfigurator(State)} for reflective creation. */
  public interface ConfiguratorFactory {
    CompactionJobConfigurator createConfigurator(State state) throws IOException;
  }

  public CompactionJobConfigurator(State state) throws IOException {
    this.state = state;
    this.fs = getFileSystem(state);
    this.shouldDeduplicate = state.getPropAsBoolean(MRCompactor.COMPACTION_SHOULD_DEDUPLICATE, true);
  }

  /**
   * Reflectively create a configurator through the factory class named by
   * {@value #COMPACTION_JOB_CONFIGURATOR_FACTORY_CLASS_KEY} (defaulting to the Avro factory).
   *
   * @param state job state holding the factory class name property
   * @return a new {@link CompactionJobConfigurator}
   * @throws RuntimeException wrapping any reflection or IO failure
   */
  public static CompactionJobConfigurator instantiateConfigurator(State state) {
    String compactionConfiguratorFactoryClass = state.getProp(COMPACTION_JOB_CONFIGURATOR_FACTORY_CLASS_KEY,
        DEFAULT_COMPACTION_JOB_CONFIGURATOR_FACTORY_CLASS);
    try {
      // getDeclaredConstructor().newInstance() replaces the deprecated Class.newInstance(),
      // which silently propagates checked exceptions thrown by the constructor. The thrown
      // InvocationTargetException/NoSuchMethodException are ReflectiveOperationExceptions,
      // so the existing catch clause is unchanged.
      return Class.forName(compactionConfiguratorFactoryClass)
          .asSubclass(ConfiguratorFactory.class)
          .getDeclaredConstructor()
          .newInstance()
          .createConfigurator(state);
    } catch (ReflectiveOperationException | IOException e) {
      throw new RuntimeException("Failed to instantiate an instance of job configurator:", e);
    }
  }

  /** @return the output file extension handled by this configurator (format specific). */
  public abstract String getFileExtension();

  /**
   * Create and configure a compaction MR job for the given dataset.
   *
   * @param dataset A path or directory which needs compaction
   * @return A configured map-reduce job for compaction
   */
  public Job createJob(FileSystemDataset dataset) throws IOException {
    Configuration conf = HadoopUtils.getConfFromState(state);

    // Turn on mapreduce output compression by default
    if (conf.get("mapreduce.output.fileoutputformat.compress") == null && conf.get("mapred.output.compress") == null) {
      conf.setBoolean("mapreduce.output.fileoutputformat.compress", true);
    }

    // Disable delegation token cancellation by default
    if (conf.get("mapreduce.job.complete.cancel.delegation.tokens") == null) {
      conf.setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
    }

    addJars(conf, this.state, fs);
    Job job = Job.getInstance(conf);
    job.setJobName(MRCompactorJobRunner.HADOOP_JOB_NAME);
    boolean emptyDirectoryFlag = this.configureInputAndOutputPaths(job, dataset);
    if (emptyDirectoryFlag) {
      this.state.setProp(HiveRegistrationPolicy.MAPREDUCE_JOB_INPUT_PATH_EMPTY_KEY, true);
    }
    this.configureMapper(job);
    this.configureReducer(job);

    // An empty input directory or a no-dedup job needs no reduce phase at all.
    if (emptyDirectoryFlag || !this.shouldDeduplicate) {
      job.setNumReduceTasks(0);
    }

    // Configure schema at the last step because FilesInputFormat will be used internally
    this.configureSchema(job);
    this.isJobCreated = true;
    this.configuredJob = job;
    return job;
  }

  /**
   * Configuring Mapper/Reducer's input/output schema for compaction MR job.
   * The input schema for Mapper should be obtained from to-be-compacted file.
   * The output schema for Mapper is for dedup.
   * The output schema for Reducer should be identical to input schema of Mapper.
   * @param job The compaction jobConf.
   * @throws IOException
   */
  protected abstract void configureSchema(Job job) throws IOException;

  /**
   * Configuring Mapper class, specific to data format.
   */
  protected abstract void configureMapper(Job job);

  /**
   * Configuring Reducer class, specific to data format.
   */
  protected abstract void configureReducer(Job job) throws IOException;

  /**
   * Resolve the {@link FileSystem} from the source FS URI in the state
   * (falls back to the local file system).
   */
  protected FileSystem getFileSystem(State state) throws IOException {
    Configuration conf = HadoopUtils.getConfFromState(state);
    String uri = state.getProp(ConfigurationKeys.SOURCE_FILEBASED_FS_URI, ConfigurationKeys.LOCAL_FS_URI);
    return FileSystem.get(URI.create(uri), conf);
  }

  /**
   * Refer to {@link MRCompactorAvroKeyDedupJobRunner#setNumberOfReducers(Job)}
   * Note that this method is not format specific.
   */
  protected void setNumberOfReducers(Job job) throws IOException {
    // get input size
    long inputSize = 0;
    for (Path inputPath : this.mapReduceInputPaths) {
      inputSize += this.fs.getContentSummary(inputPath).getLength();
    }

    // get target file size
    long targetFileSize = this.state.getPropAsLong(MRCompactorAvroKeyDedupJobRunner.COMPACTION_JOB_TARGET_OUTPUT_FILE_SIZE,
        MRCompactorAvroKeyDedupJobRunner.DEFAULT_COMPACTION_JOB_TARGET_OUTPUT_FILE_SIZE);

    // get max reducers
    int maxNumReducers = state.getPropAsInt(MRCompactorAvroKeyDedupJobRunner.COMPACTION_JOB_MAX_NUM_REDUCERS,
        MRCompactorAvroKeyDedupJobRunner.DEFAULT_COMPACTION_JOB_MAX_NUM_REDUCERS);

    int numReducers = Math.min(Ints.checkedCast(inputSize / targetFileSize) + 1, maxNumReducers);

    // get use prime reducers
    boolean usePrimeReducers = state.getPropAsBoolean(MRCompactorAvroKeyDedupJobRunner.COMPACTION_JOB_USE_PRIME_REDUCERS,
        MRCompactorAvroKeyDedupJobRunner.DEFAULT_COMPACTION_JOB_USE_PRIME_REDUCERS);

    if (usePrimeReducers && numReducers != 1) {
      numReducers = Primes.nextPrime(numReducers);
    }
    job.setNumReduceTasks(numReducers);
  }

  /**
   * Push the jars listed under {@link MRCompactor#COMPACTION_JARS} onto the
   * job classpath via the distributed cache. No-op when the property is absent.
   */
  protected void addJars(Configuration conf, State state, FileSystem fs) throws IOException {
    if (!state.contains(MRCompactor.COMPACTION_JARS)) {
      return;
    }
    Path jarFileDir = new Path(state.getProp(MRCompactor.COMPACTION_JARS));
    for (FileStatus status : fs.listStatus(jarFileDir)) {
      DistributedCache.addFileToClassPath(status.getPath(), conf, fs);
    }
  }

  /**
   * Refer to MRCompactorAvroKeyDedupJobRunner#configureInputAndOutputPaths(Job).
   * @return false if no valid input paths present for MR job to process, where a path is valid if it is
   * a directory containing one or more files.
   */
  protected boolean configureInputAndOutputPaths(Job job, FileSystemDataset dataset) throws IOException {
    boolean emptyDirectoryFlag = false;

    String mrOutputBase = this.state.getProp(MRCompactor.COMPACTION_JOB_DIR);
    CompactionPathParser parser = new CompactionPathParser(this.state);
    CompactionPathParser.CompactionParserResult rst = parser.parse(dataset);
    this.mrOutputPath = concatPaths(mrOutputBase, rst.getDatasetName(), rst.getDstSubDir(), rst.getTimeString());
    if (this.state.contains(ConfigurationKeys.USE_DATASET_LOCAL_WORK_DIR)) {
      // Dataset-local work dir: stage the MR output under a tmp dir inside the destination.
      mrOutputBase = this.state.getProp(MRCompactor.COMPACTION_DEST_DIR);
      this.mrOutputPath =
          concatPaths(mrOutputBase, rst.getDatasetName(), ConfigurationKeys.TMP_DIR, rst.getDstSubDir(), rst.getTimeString());
    }

    log.info("Cleaning temporary MR output directory: " + mrOutputPath);
    this.fs.delete(mrOutputPath, true);

    this.mapReduceInputPaths = getGranularInputPaths(dataset.datasetRoot());
    if (this.mapReduceInputPaths.isEmpty()) {
      this.mapReduceInputPaths.add(dataset.datasetRoot());
      emptyDirectoryFlag = true;
    }

    this.oldFiles = new HashSet<>();
    for (Path path : mapReduceInputPaths) {
      oldFiles.add(this.fs.makeQualified(path).toString());
      FileInputFormat.addInputPath(job, path);
    }

    FileOutputFormat.setOutputPath(job, mrOutputPath);
    return emptyDirectoryFlag;
  }

  /**
   * Concatenate multiple directory or file names into one path
   *
   * @return Concatenated path or null if the parameter is empty
   */
  private Path concatPaths(String... names) {
    if (names == null || names.length == 0) {
      return null;
    }
    Path cur = new Path(names[0]);
    for (int i = 1; i < names.length; ++i) {
      cur = new Path(cur, new Path(names[i]));
    }
    return cur;
  }

  /**
   * Converts a top level input path to a group of sub-paths according to user defined granularity.
   * This may be required because if upstream application generates many sub-paths but the map-reduce
   * job only keeps track of the top level path, after the job is done, we won't be able to tell if
   * those new arriving sub-paths is processed by previous map-reduce job or not. Hence a better way
   * is to pre-define those sub-paths as input paths before we start to run MR. The implementation of
   * this method should depend on the data generation granularity controlled by upstream. Here we just
   * list the deepest level of containing folder as the smallest granularity.
   *
   * @param path top level directory needs compaction
   * @return A collection of input paths which will participate in map-reduce job
   */
  protected Collection<Path> getGranularInputPaths(Path path) throws IOException {
    boolean appendDelta = this.state.getPropAsBoolean(MRCompactor.COMPACTION_RENAME_SOURCE_DIR_ENABLED,
        MRCompactor.DEFAULT_COMPACTION_RENAME_SOURCE_DIR_ENABLED);

    Set<Path> uncompacted = Sets.newHashSet();
    Set<Path> total = Sets.newHashSet();
    for (FileStatus fileStatus : FileListUtils.listFilesRecursively(fs, path)) {
      if (appendDelta) {
        // use source dir suffix to identify the delta input paths
        if (!fileStatus.getPath().getParent().toString().endsWith(MRCompactor.COMPACTION_RENAME_SOURCE_DIR_SUFFIX)) {
          uncompacted.add(fileStatus.getPath().getParent());
        }
        total.add(fileStatus.getPath().getParent());
      } else {
        uncompacted.add(fileStatus.getPath().getParent());
      }
    }

    if (appendDelta) {
      // When the output record count from mr counter doesn't match
      // the record count from input file names, we prefer file names because
      // it will be used to calculate the difference of count in next run.
      this.fileNameRecordCount = new InputRecordCountHelper(this.state).calculateRecordCount(total);
      log.info("{} has total input record count (based on file name) {}", path, this.fileNameRecordCount);
    }
    return uncompacted;
  }

  /**
   * Drain all task completion events of a finished job, paging from offset 0.
   * Best-effort: an IOException while paging stops the drain and returns what
   * was collected so far.
   */
  private static List<TaskCompletionEvent> getAllTaskCompletionEvent(Job completedJob) {
    List<TaskCompletionEvent> completionEvents = new LinkedList<>();

    while (true) {
      try {
        TaskCompletionEvent[] bunchOfEvents;
        bunchOfEvents = completedJob.getTaskCompletionEvents(completionEvents.size());
        if (bunchOfEvents == null || bunchOfEvents.length == 0) {
          break;
        }
        completionEvents.addAll(Arrays.asList(bunchOfEvents));
      } catch (IOException e) {
        // Deliberate best-effort: stop paging on failure and keep the events gathered so far.
        break;
      }
    }

    return completionEvents;
  }

  /** @return completion events of attempts that did not finish with SUCCEEDED status. */
  private static List<TaskCompletionEvent> getUnsuccessfulTaskCompletionEvent(Job completedJob) {
    return getAllTaskCompletionEvent(completedJob).stream()
        .filter(te -> te.getStatus() != TaskCompletionEvent.Status.SUCCEEDED)
        .collect(Collectors.toList());
  }

  /**
   * A path is "failed" if it still lives under _temporary or belongs to a
   * failed/killed task attempt (its attempt id appears as a path segment).
   */
  private static boolean isFailedPath(Path path, List<TaskCompletionEvent> failedEvents) {
    return path.toString().contains("_temporary") || failedEvents.stream()
        .anyMatch(
            event -> path.toString().contains(Path.SEPARATOR + event.getTaskAttemptId().toString() + Path.SEPARATOR));
  }

  /**
   * Get good files
   * The problem happens when speculative task attempt initialized but then killed in the middle of processing.
   * Some partial file was generated at {tmp_output}/_temporary/1/_temporary/attempt_xxx_xxx/xxxx(Avro file
   * might have .avro as extension file name), without being committed to its final destination
   * at {tmp_output}/xxxx.
   *
   * @param job Completed MR job
   * @param tmpPath Temporary output directory of the completed job
   * @param fs File system holding the output files
   * @param acceptableExtension file extension acceptable as "good files".
   * @return all successful files that has been committed
   */
  public static List<Path> getGoodFiles(Job job, Path tmpPath, FileSystem fs, List<String> acceptableExtension)
      throws IOException {
    List<TaskCompletionEvent> failedEvents = getUnsuccessfulTaskCompletionEvent(job);

    List<Path> allFilePaths = DatasetHelper.getApplicableFilePaths(fs, tmpPath, acceptableExtension);
    List<Path> goodPaths = new ArrayList<>();
    for (Path filePath : allFilePaths) {
      if (isFailedPath(filePath, failedEvents)) {
        // Leftover of a killed speculative attempt: remove it so it is never published.
        fs.delete(filePath, false);
        log.error("{} is a bad path so it was deleted", filePath);
      } else {
        goodPaths.add(filePath);
      }
    }

    return goodPaths;
  }
}
package com.gentics.mesh.core.webroot; import static com.gentics.mesh.assertj.MeshAssertions.assertThat; import static com.gentics.mesh.test.ClientHelper.call; import static com.gentics.mesh.test.TestDataProvider.PROJECT_NAME; import static com.gentics.mesh.test.TestSize.FULL; import static io.netty.handler.codec.http.HttpResponseStatus.CONFLICT; import static io.netty.handler.codec.http.HttpResponseStatus.NOT_FOUND; import org.junit.Test; import com.gentics.mesh.FieldUtil; import com.gentics.mesh.core.rest.node.NodeCreateRequest; import com.gentics.mesh.core.rest.node.NodeResponse; import com.gentics.mesh.core.rest.node.NodeUpdateRequest; import com.gentics.mesh.core.rest.schema.impl.SchemaCreateRequest; import com.gentics.mesh.core.rest.schema.impl.SchemaResponse; import com.gentics.mesh.parameter.impl.VersioningParametersImpl; import com.gentics.mesh.test.MeshTestSetting; import com.gentics.mesh.test.context.AbstractMeshTest; @MeshTestSetting(testSize = FULL, startServer = true) public class WebRootEndpointUrlPathTest extends AbstractMeshTest { private void setupSchema(boolean addSegmentField) { SchemaCreateRequest request = new SchemaCreateRequest(); request.setUrlFields("shortUrl", "shortUrlList"); request.setName("dummySchema"); if (addSegmentField) { request.setSegmentField("slug"); } request.addField(FieldUtil.createStringFieldSchema("slug")); request.addField(FieldUtil.createStringFieldSchema("shortUrl")); request.addField(FieldUtil.createListFieldSchema("shortUrlList", "string")); SchemaResponse schemaResponse = call(() -> client().createSchema(request)); call(() -> client().assignSchemaToProject(PROJECT_NAME, schemaResponse.getUuid())); } @Test public void testUrlPathResolving() { final String niceUrlPath = "/some/wonderful/short/url"; setupSchema(true); NodeCreateRequest nodeCreateRequest = new NodeCreateRequest(); nodeCreateRequest.setSchemaName("dummySchema"); nodeCreateRequest.setLanguage("en"); nodeCreateRequest.setParentNodeUuid(tx(() -> 
project().getBaseNode().getUuid())); nodeCreateRequest.getFields().put("slug", FieldUtil.createStringField("slugValue")); nodeCreateRequest.getFields().put("shortUrl", FieldUtil.createStringField(niceUrlPath)); NodeResponse nodeResponse = call(() -> client().createNode(PROJECT_NAME, nodeCreateRequest)); String uuid = nodeResponse.getUuid(); assertThat(call(() -> client().webroot(PROJECT_NAME, niceUrlPath))).hasUuid(uuid); // Now verify that no published node can be found call(() -> client().webroot(PROJECT_NAME, niceUrlPath, new VersioningParametersImpl().published()), NOT_FOUND, "node_not_found_for_path", niceUrlPath); } /** * Test list resolving. */ @Test public void testUrlPathListResolving() { final String niceUrlPath = "/some/wonderful/short/url"; setupSchema(true); NodeCreateRequest nodeCreateRequest = new NodeCreateRequest(); nodeCreateRequest.setSchemaName("dummySchema"); nodeCreateRequest.setLanguage("en"); nodeCreateRequest.setParentNodeUuid(tx(() -> project().getBaseNode().getUuid())); nodeCreateRequest.getFields().put("slug", FieldUtil.createStringField("slugValue")); nodeCreateRequest.getFields().put("shortUrl", FieldUtil.createStringField(niceUrlPath)); nodeCreateRequest.getFields().put("shortUrlList", FieldUtil.createStringListField("/some/other/url", "/middle", "/last/segment")); NodeResponse nodeResponse = call(() -> client().createNode(PROJECT_NAME, nodeCreateRequest)); String uuid = nodeResponse.getUuid(); assertThat(call(() -> client().webroot(PROJECT_NAME, niceUrlPath))).hasUuid(uuid); assertThat(call(() -> client().webroot(PROJECT_NAME, "/some/other/url"))).hasUuid(uuid); assertThat(call(() -> client().webroot(PROJECT_NAME, "/middle"))).hasUuid(uuid); assertThat(call(() -> client().webroot(PROJECT_NAME, "/last/segment"))).hasUuid(uuid); assertThat(call(() -> client().webroot(PROJECT_NAME, "/slugValue"))).hasUuid(uuid); call(() -> client().webroot(PROJECT_NAME, "/not_found"), NOT_FOUND, "node_not_found_for_path", "/not_found"); } /** * Assert 
that no problems occur when saving a node which has multiple url fields which share the same value. */ @Test public void testDuplicateFieldValueInSameNode() { setupSchema(true); NodeCreateRequest nodeCreateRequest = new NodeCreateRequest(); nodeCreateRequest.setSchemaName("dummySchema"); nodeCreateRequest.setLanguage("en"); nodeCreateRequest.setParentNodeUuid(tx(() -> project().getBaseNode().getUuid())); nodeCreateRequest.getFields().put("slug", FieldUtil.createStringField("slugValue")); nodeCreateRequest.getFields().put("shortUrl", FieldUtil.createStringField("/some/other/url")); nodeCreateRequest.getFields().put("shortUrlList", FieldUtil.createStringListField("/some/other/url", "/middle", "/some/other/url")); NodeResponse nodeResponse = call(() -> client().createNode(PROJECT_NAME, nodeCreateRequest)); String uuid = nodeResponse.getUuid(); assertThat(call(() -> client().webroot(PROJECT_NAME, "/some/other/url"))).hasUuid(uuid); } /** * Assert that a conflict is detected when updating a node which causes a conflict with the url fields value of the second node. 
*/ @Test public void testConflictWithExistingNode() { setupSchema(true); NodeCreateRequest nodeCreateRequest = new NodeCreateRequest(); nodeCreateRequest.setSchemaName("dummySchema"); nodeCreateRequest.setLanguage("en"); nodeCreateRequest.setParentNodeUuid(tx(() -> project().getBaseNode().getUuid())); nodeCreateRequest.getFields().put("slug", FieldUtil.createStringField("slugValue")); nodeCreateRequest.getFields().put("shortUrl", FieldUtil.createStringField("/some/other/url")); nodeCreateRequest.getFields().put("shortUrlList", FieldUtil.createStringListField("/some/other/url", "/middle", "/some/other/url")); NodeResponse nodeResponse = call(() -> client().createNode(PROJECT_NAME, nodeCreateRequest)); String uuid = nodeResponse.getUuid(); assertThat(call(() -> client().webroot(PROJECT_NAME, "/some/other/url"))).hasUuid(uuid); NodeCreateRequest nodeCreateRequest2 = new NodeCreateRequest(); nodeCreateRequest2.setSchemaName("dummySchema"); nodeCreateRequest2.setLanguage("en"); nodeCreateRequest2.setParentNodeUuid(tx(() -> project().getBaseNode().getUuid())); nodeCreateRequest2.getFields().put("slug", FieldUtil.createStringField("slugValue2")); nodeCreateRequest2.getFields().put("shortUrl", FieldUtil.createStringField("/some/other/url")); nodeCreateRequest2.getFields().put("shortUrlList", FieldUtil.createStringListField("/some/other/url2", "/middle3", "/some/other/url4")); call(() -> client().createNode(PROJECT_NAME, nodeCreateRequest2), CONFLICT, "node_conflicting_urlfield_update", "/some/other/url", uuid, "en"); } @Test public void testConflictDuringPublish() { setupSchema(true); NodeCreateRequest nodeCreateRequest = new NodeCreateRequest(); nodeCreateRequest.setSchemaName("dummySchema"); nodeCreateRequest.setLanguage("en"); nodeCreateRequest.setParentNodeUuid(tx(() -> project().getBaseNode().getUuid())); nodeCreateRequest.getFields().put("slug", FieldUtil.createStringField("slugValue")); nodeCreateRequest.getFields().put("shortUrl", 
FieldUtil.createStringField("/some/other/url")); nodeCreateRequest.getFields().put("shortUrlList", FieldUtil.createStringListField("/some/other/url", "/middle", "/some/other/url")); NodeResponse nodeResponse = call(() -> client().createNode(PROJECT_NAME, nodeCreateRequest)); String uuid = nodeResponse.getUuid(); assertThat(call(() -> client().webroot(PROJECT_NAME, "/some/other/url"))).hasUuid(uuid); call(() -> client().publishNode(PROJECT_NAME, uuid)); // Update the draft values in order to prevent a conflict when creating the second node NodeUpdateRequest nodeUpdateRequest = new NodeUpdateRequest(); nodeUpdateRequest.setLanguage("en"); nodeUpdateRequest.setVersion(nodeResponse.getVersion()); nodeUpdateRequest.getFields().put("slug", FieldUtil.createStringField("slugValue2")); nodeUpdateRequest.getFields().put("shortUrl", FieldUtil.createStringField("/some/other/url2")); nodeUpdateRequest.getFields().put("shortUrlList", FieldUtil.createStringListField("/some/other/url2", "/middle2", "/some/other/url2")); call(() -> client().updateNode(PROJECT_NAME, uuid, nodeUpdateRequest)); // Now create the second node NodeCreateRequest nodeCreateRequest2 = new NodeCreateRequest(); nodeCreateRequest2.setSchemaName("dummySchema"); nodeCreateRequest2.setLanguage("en"); nodeCreateRequest2.setParentNodeUuid(tx(() -> project().getBaseNode().getUuid())); nodeCreateRequest2.getFields().put("slug", FieldUtil.createStringField("slugValue1")); nodeCreateRequest2.getFields().put("shortUrl", FieldUtil.createStringField("/some/other/url1")); nodeCreateRequest2.getFields().put("shortUrlList", FieldUtil.createStringListField("/some/other/url1", "/middle", "/some/other/url")); NodeResponse secondNode = call(() -> client().createNode(PROJECT_NAME, nodeCreateRequest2)); // Now publish the second node - The published fields of the second node should cause a conflict with the first published node call(() -> client().publishNode(PROJECT_NAME, secondNode.getUuid()), CONFLICT, 
"node_conflicting_urlfield_update", "/some/other/url,/middle", uuid, "en"); } /** * Assert that the webroot resolving still works even if the node has only a url field and no segment field value. */ @Test public void testNodeWithOnlyUrlField() { setupSchema(false); NodeCreateRequest nodeCreateRequest = new NodeCreateRequest(); nodeCreateRequest.setSchemaName("dummySchema"); nodeCreateRequest.setLanguage("en"); nodeCreateRequest.setParentNodeUuid(tx(() -> project().getBaseNode().getUuid())); nodeCreateRequest.getFields().put("slug", FieldUtil.createStringField("slugValue")); nodeCreateRequest.getFields().put("shortUrl", FieldUtil.createStringField("/some/other/url")); nodeCreateRequest.getFields().put("shortUrlList", FieldUtil.createStringListField("/some/other/url", "/middle", "/some/other/url")); NodeResponse nodeResponse = call(() -> client().createNode(PROJECT_NAME, nodeCreateRequest)); String uuid = nodeResponse.getUuid(); assertThat(call(() -> client().webroot(PROJECT_NAME, "/some/other/url"))).hasUuid(uuid); } /** * Assert that publishing and taking offline a node which only has a url field (no segment field) works. 
*/ @Test public void testPublishUrlFieldNode() { setupSchema(false); NodeCreateRequest nodeCreateRequest = new NodeCreateRequest(); nodeCreateRequest.setSchemaName("dummySchema"); nodeCreateRequest.setLanguage("en"); nodeCreateRequest.setParentNodeUuid(tx(() -> project().getBaseNode().getUuid())); nodeCreateRequest.getFields().put("slug", FieldUtil.createStringField("slugValue")); nodeCreateRequest.getFields().put("shortUrl", FieldUtil.createStringField("/some/other/url")); nodeCreateRequest.getFields().put("shortUrlList", FieldUtil.createStringListField("/some/other/url", "/middle", "/some/other/url")); NodeResponse nodeResponse = call(() -> client().createNode(PROJECT_NAME, nodeCreateRequest)); String uuid = nodeResponse.getUuid(); // Publish it and test with takeOffline (single language) call(() -> client().publishNode(PROJECT_NAME, uuid)); assertThat(call(() -> client().webroot(PROJECT_NAME, "/some/other/url"))).hasUuid(uuid); assertThat(call(() -> client().webroot(PROJECT_NAME, "/some/other/url", new VersioningParametersImpl().published()))).hasUuid(uuid); call(() -> client().takeNodeLanguageOffline(PROJECT_NAME, uuid, "en")); assertThat(call(() -> client().webroot(PROJECT_NAME, "/some/other/url"))).hasUuid(uuid); call(() -> client().webroot(PROJECT_NAME, "/some/other/url", new VersioningParametersImpl().published()), NOT_FOUND, "node_not_found_for_path", "/some/other/url"); // Publish it again and test with takeOffline (all languages) call(() -> client().publishNode(PROJECT_NAME, uuid)); assertThat(call(() -> client().webroot(PROJECT_NAME, "/some/other/url"))).hasUuid(uuid); assertThat(call(() -> client().webroot(PROJECT_NAME, "/some/other/url", new VersioningParametersImpl().published()))).hasUuid(uuid); call(() -> client().takeNodeOffline(PROJECT_NAME, uuid)); assertThat(call(() -> client().webroot(PROJECT_NAME, "/some/other/url"))).hasUuid(uuid); call(() -> client().webroot(PROJECT_NAME, "/some/other/url", new VersioningParametersImpl().published()), 
NOT_FOUND, "node_not_found_for_path", "/some/other/url"); } /** * Assert that the short url works also fine when having a node which has different short urls for different languages. */ @Test public void testMultiLanguageFieldHandling() { setupSchema(false); NodeCreateRequest nodeCreateRequest = new NodeCreateRequest(); nodeCreateRequest.setSchemaName("dummySchema"); nodeCreateRequest.setLanguage("en"); nodeCreateRequest.setParentNodeUuid(tx(() -> project().getBaseNode().getUuid())); nodeCreateRequest.getFields().put("slug", FieldUtil.createStringField("slugValue")); nodeCreateRequest.getFields().put("shortUrl", FieldUtil.createStringField("/some/other/url")); nodeCreateRequest.getFields().put("shortUrlList", FieldUtil.createStringListField("/some/other/url", "/middle", "/some/other/url")); NodeResponse nodeResponse = call(() -> client().createNode(PROJECT_NAME, nodeCreateRequest)); String uuid = nodeResponse.getUuid(); NodeUpdateRequest nodeUpdateRequest = new NodeUpdateRequest(); nodeUpdateRequest.setLanguage("de"); nodeUpdateRequest.getFields().put("slug", FieldUtil.createStringField("slugValue")); nodeUpdateRequest.getFields().put("shortUrl", FieldUtil.createStringField("/some/other/url2")); nodeUpdateRequest.getFields().put("shortUrlList", FieldUtil.createStringListField("/some/other/url2", "/middle2", "/some/other/url2")); NodeResponse updateResponse = call(() -> client().updateNode(PROJECT_NAME, uuid, nodeUpdateRequest)); assertThat(call(() -> client().webroot(PROJECT_NAME, "/some/other/url2"))).hasUuid(uuid); assertThat(call(() -> client().webroot(PROJECT_NAME, "/some/other/url"))).hasUuid(uuid); // Now update [de] again and assert that a conflict with [en] is detected NodeUpdateRequest nodeUpdateRequest2 = new NodeUpdateRequest(); nodeUpdateRequest2.setLanguage("de"); nodeUpdateRequest2.setVersion(updateResponse.getVersion()); nodeUpdateRequest2.getFields().put("slug", FieldUtil.createStringField("slugValue")); 
nodeUpdateRequest2.getFields().put("shortUrl", FieldUtil.createStringField("/some/other/url2")); // Conflict with /middle nodeUpdateRequest2.getFields().put("shortUrlList", FieldUtil.createStringListField("/some/other/url2", "/middle", "/some/other/url2")); call(() -> client().updateNode(PROJECT_NAME, uuid, nodeUpdateRequest2), CONFLICT, "node_conflicting_urlfield_update", "/middle", uuid, "en"); } }
package play.server; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBufferInputStream; import org.jboss.netty.buffer.ChannelBuffers; import org.jboss.netty.channel.*; import org.jboss.netty.handler.codec.http.*; import org.jboss.netty.handler.stream.ChunkedFile; import org.jboss.netty.handler.stream.ChunkedStream; import play.Invoker; import play.Logger; import play.Play; import play.PlayPlugin; import play.exceptions.PlayException; import play.exceptions.UnexpectedException; import play.i18n.Messages; import play.libs.MimeTypes; import play.mvc.ActionInvoker; import play.mvc.Http; import play.mvc.Http.Request; import play.mvc.Http.Response; import play.mvc.Router; import play.mvc.Scope; import play.mvc.results.NotFound; import play.mvc.results.RenderStatic; import play.templates.JavaExtensions; import play.templates.TemplateLoader; import play.utils.Utils; import play.vfs.VirtualFile; import java.io.*; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.net.URLDecoder; import java.net.URLEncoder; import java.text.ParseException; import java.util.*; import javax.mail.internet.InternetAddress; import play.data.validation.Validation; import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.*; public class PlayHandler extends SimpleChannelUpstreamHandler { private final static String signature = "Play! 
Framework;" + Play.version + ";" + Play.mode.name().toLowerCase(); public Request processRequest(Request request) { return request; } @Override public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) throws Exception { Logger.trace("messageReceived: begin"); final Object msg = e.getMessage(); if (msg instanceof HttpRequest) { final HttpRequest nettyRequest = (HttpRequest) msg; try { Request request = parseRequest(ctx, nettyRequest); request = processRequest(request); final Response response = new Response(); Http.Response.current.set(response); response.out = new ByteArrayOutputStream(); boolean raw = false; for (PlayPlugin plugin : Play.plugins) { if (plugin.rawInvocation(request, response)) { raw = true; break; } } if (raw) { copyResponse(ctx, request, response, nettyRequest); } else { Invoker.invoke(new NettyInvocation(request, response, ctx, nettyRequest, e)); } } catch (Exception ex) { serve500(ex, ctx, nettyRequest); } } Logger.trace("messageReceived: end"); } private static Map<String, RenderStatic> staticPathsCache = new HashMap<String, RenderStatic>(); public class NettyInvocation extends Invoker.Invocation { private final ChannelHandlerContext ctx; private final Request request; private final Response response; private final HttpRequest nettyRequest; private final MessageEvent e; public NettyInvocation(Request request, Response response, ChannelHandlerContext ctx, HttpRequest nettyRequest, MessageEvent e) { this.ctx = ctx; this.request = request; this.response = response; this.nettyRequest = nettyRequest; this.e = e; } @Override public boolean init() { Logger.trace("init: begin"); Request.current.set(request); Response.current.set(response); super.init(); if (Play.mode == Play.Mode.PROD && staticPathsCache.containsKey(request.path)) { RenderStatic rs = null; synchronized (staticPathsCache) { rs = staticPathsCache.get(request.path); } serveStatic(rs, ctx, request, response, nettyRequest, e); Logger.trace("init: end false"); return false; } 
try { Router.routeOnlyStatic(request); } catch (NotFound e) { serve404(e, ctx, request, nettyRequest); Logger.trace("init: end false"); return false; } catch (RenderStatic e) { if (Play.mode == Play.Mode.PROD) { synchronized (staticPathsCache) { staticPathsCache.put(request.path, e); } } serveStatic(e, ctx, request, response, nettyRequest, this.e); Logger.trace("init: end false"); return false; } Logger.trace("init: end true"); return true; } @Override public void run() { try { Logger.trace("run: begin"); super.run(); } catch (Exception e) { serve500(e, ctx, nettyRequest); } Logger.trace("run: end"); } @Override public void execute() throws Exception { if (!ctx.getChannel().isConnected()) { try { ctx.getChannel().close(); } catch (Throwable e) { // Ignore } return; } // Check the exceeded size before re rendering so we can render the error if the size is exceeded saveExceededSizeError(nettyRequest, request, response); ActionInvoker.invoke(request, response); } @Override public void onSuccess() throws Exception { super.onSuccess(); copyResponse(ctx, request, response, nettyRequest); Logger.trace("execute: end"); } } void saveExceededSizeError(HttpRequest nettyRequest, Request request, Response response) { String warning = nettyRequest.getHeader(HttpHeaders.Names.WARNING); String length = nettyRequest.getHeader(HttpHeaders.Names.CONTENT_LENGTH); if (warning != null) { Logger.trace("saveExceededSizeError: begin"); try { StringBuilder error = new StringBuilder(); error.append("\u0000"); // Cannot put warning which is play.netty.content.length.exceeded // as Key as it will result error when printing error error.append("play.netty.maxContentLength"); error.append(":"); String size = null; try { size = JavaExtensions.formatSize(Long.parseLong(length)); } catch (Exception e) { size = length + " bytes"; } error.append(Messages.get(warning, size)); error.append("\u0001"); error.append(size); error.append("\u0000"); if (request.cookies.get(Scope.COOKIE_PREFIX + "_ERRORS") != 
null && request.cookies.get(Scope.COOKIE_PREFIX + "_ERRORS").value != null) {
            // Preserve any errors already flashed into the cookie by appending to them.
            error.append(request.cookies.get(Scope.COOKIE_PREFIX + "_ERRORS").value);
        }
        // Store the serialized errors back into the "<prefix>_ERRORS" cookie so that
        // Play's Scope layer can restore them on the next request.
        String errorData = URLEncoder.encode(error.toString(), "utf-8");
        Http.Cookie c = new Http.Cookie();
        c.value = errorData;
        c.name = Scope.COOKIE_PREFIX + "_ERRORS";
        request.cookies.put(Scope.COOKIE_PREFIX + "_ERRORS", c);
        Logger.trace("saveExceededSizeError: end");
    } catch (Exception e) {
        // Serialization/encoding failure is unexpected; surface it as a Play error.
        throw new UnexpectedException("Error serialization problem", e);
    }
}
}

/**
 * Copies the Play {@link Response} headers and cookies onto the Netty response.
 * Multi-valued headers are emitted once per value; each cookie is encoded into
 * its own Set-Cookie header (server-side CookieEncoder, one encoder per cookie).
 * Adds a default "Cache-Control: no-cache" when the response did not set one.
 */
protected static void addToResponse(Response response, HttpResponse nettyResponse) {
    Map<String, Http.Header> headers = response.headers;
    for (Map.Entry<String, Http.Header> entry : headers.entrySet()) {
        Http.Header hd = entry.getValue();
        for (String value : hd.values) {
            nettyResponse.setHeader(entry.getKey(), value);
        }
    }
    Map<String, Http.Cookie> cookies = response.cookies;
    for (Http.Cookie cookie : cookies.values()) {
        CookieEncoder encoder = new CookieEncoder(true);
        Cookie c = new DefaultCookie(cookie.name, cookie.value);
        c.setSecure(cookie.secure);
        c.setPath(cookie.path);
        if (cookie.domain != null) {
            c.setDomain(cookie.domain);
        }
        if (cookie.maxAge != null) {
            c.setMaxAge(cookie.maxAge);
        }
        c.setHttpOnly(cookie.httpOnly);
        encoder.addCookie(c);
        nettyResponse.addHeader(SET_COOKIE, encoder.encode());
    }
    if (!response.headers.containsKey(CACHE_CONTROL)) {
        nettyResponse.setHeader(CACHE_CONTROL, "no-cache");
    }
}

/**
 * Writes the buffered response body to the channel. For HEAD requests the body
 * is empty. Content-Length is only set on keep-alive connections; otherwise the
 * connection close delimits the body, and the channel is closed once the write
 * completes.
 */
protected static void writeResponse(ChannelHandlerContext ctx, Response response, HttpResponse nettyResponse, HttpRequest nettyRequest) {
    Logger.trace("writeResponse: begin");
    byte[] content = null;
    final boolean keepAlive = isKeepAlive(nettyRequest);
    if (nettyRequest.getMethod().equals(HttpMethod.HEAD)) {
        // HEAD responses carry headers only, never a body.
        content = new byte[0];
    } else {
        content = response.out.toByteArray();
    }
    ChannelBuffer buf = ChannelBuffers.copiedBuffer(content);
    nettyResponse.setContent(buf);
    if (keepAlive) {
        // Add 'Content-Length' header only for a keep-alive connection.
        Logger.trace("writeResponse: content length [" + response.out.size() + "]");
        setContentLength(nettyResponse, response.out.size());
    }
    ChannelFuture f = ctx.getChannel().write(nettyResponse);
    // Decide whether to close the connection or not.
    if (!keepAlive) {
        // Close the connection when the whole content is written out.
        f.addListener(ChannelFutureListener.CLOSE);
    }
    Logger.trace("writeResponse: end");
}

/**
 * Sends the Play response over the channel, choosing a strategy based on
 * {@code response.direct}: a File is streamed chunked (with ETag/304 handling),
 * an InputStream is streamed chunked, anything else falls through to the
 * in-memory {@link #writeResponse}. A text/* content type without an explicit
 * charset gets "; charset=utf-8" appended.
 */
public static void copyResponse(ChannelHandlerContext ctx, Request request, Response response, HttpRequest nettyRequest) throws Exception {
    Logger.trace("copyResponse: begin");
    //response.out.flush();
    // Decide whether to close the connection or not.
    HttpResponse nettyResponse = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.valueOf(response.status));
    nettyResponse.setHeader(SERVER, signature);
    if (response.contentType != null) {
        nettyResponse.setHeader(CONTENT_TYPE, response.contentType + (response.contentType.startsWith("text/") && !response.contentType.contains("charset") ? "; charset=utf-8" : ""));
    } else {
        nettyResponse.setHeader(CONTENT_TYPE, "text/plain; charset=utf-8");
    }
    addToResponse(response, nettyResponse);
    final Object obj = response.direct;
    File file = null;
    InputStream is = null;
    if (obj instanceof File) {
        file = (File) obj;
    } else if (obj instanceof InputStream) {
        is = (InputStream) obj;
    }
    final boolean keepAlive = isKeepAlive(nettyRequest);
    if (file != null && file.isFile()) {
        // NOTE(review): catch (Exception e) { throw e; } below is a no-op, and the
        // RandomAccessFile is not closed if a write fails before ChunkedFile takes
        // ownership (or on the HEAD branch) — potential fd leak; verify.
        try {
            nettyResponse = addEtag(nettyRequest, nettyResponse, file);
            if (nettyResponse.getStatus().equals(HttpResponseStatus.NOT_MODIFIED)) {
                Channel ch = ctx.getChannel();
                // Write the initial line and the header.
                ChannelFuture writeFuture = ch.write(nettyResponse);
                if (!keepAlive) {
                    // Close the connection when the whole content is written out.
                    writeFuture.addListener(ChannelFutureListener.CLOSE);
                }
            } else {
                nettyResponse.setHeader(CONTENT_TYPE, MimeTypes.getContentType(file.getName(), "text/plain"));
                RandomAccessFile raf = new RandomAccessFile(file, "r");
                long fileLength = raf.length();
                if (keepAlive) {
                    // Add 'Content-Length' header only for a keep-alive connection.
                    Logger.trace("file length is [" + fileLength + "]");
                    setContentLength(nettyResponse, fileLength);
                }
                Channel ch = ctx.getChannel();
                // Write the initial line and the header.
                ChannelFuture writeFuture = ch.write(nettyResponse);
                // Write the content.
                // If it is not a HEAD
                if (!nettyRequest.getMethod().equals(HttpMethod.HEAD)) {
                    writeFuture = ch.write(new ChunkedFile(raf, 0, fileLength, 8192));
                }
                if (!keepAlive) {
                    // Close the connection when the whole content is written out.
                    writeFuture.addListener(ChannelFutureListener.CLOSE);
                }
            }
        } catch (Exception e) {
            throw e;
        }
    } else if (is != null) {
        ChannelFuture writeFuture = ctx.getChannel().write(nettyResponse);
        if (!nettyRequest.getMethod().equals(HttpMethod.HEAD) && !nettyResponse.getStatus().equals(HttpResponseStatus.NOT_MODIFIED)) {
            writeFuture = ctx.getChannel().write(new ChunkedStream(is));
        }
        if (!keepAlive) {
            writeFuture.addListener(ChannelFutureListener.CLOSE);
        }
    } else {
        writeResponse(ctx, response, nettyResponse, nettyRequest);
    }
    Logger.trace("copyResponse: end");
}

/**
 * Extracts the bare IP of the remote peer, stripping the leading "/" and any
 * ":port" suffix that InetSocketAddress formatting may include.
 */
static String getRemoteIPAddress(ChannelHandlerContext ctx) {
    String fullAddress = ((InetSocketAddress) ctx.getChannel().getRemoteAddress()).getAddress().getHostAddress();
    // Address resolves to /x.x.x.x:zzzz we only want x.x.x.x
    if (fullAddress.startsWith("/")) {
        fullAddress = fullAddress.substring(1);
    }
    int i = fullAddress.indexOf(":");
    if (i != -1) {
        fullAddress = fullAddress.substring(0, i);
    }
    return fullAddress;
}

/**
 * Builds a Play {@link Request} from the raw Netty request: URI/query split,
 * method (honoring X-HTTP-Method-Override), content type, body (file-backed
 * bodies are size-capped by play.netty.maxContentLength), host/port/domain,
 * and — when XForwardedSupport is configured — trusted-proxy header handling.
 *
 * @throws RuntimeException if an X-Forwarded-For request comes from a proxy
 *         that is not whitelisted in XForwardedSupport
 */
public static Request parseRequest(ChannelHandlerContext ctx, HttpRequest nettyRequest) throws Exception {
    Logger.trace("parseRequest: begin");
    Logger.trace("parseRequest: URI = " + nettyRequest.getUri());
    int index = nettyRequest.getUri().indexOf("?");
    String querystring = "";
    String path = URLDecoder.decode(nettyRequest.getUri(), "UTF-8");
    if (index != -1) {
        // Only the path portion is URL-decoded; the query string is kept raw.
        path = URLDecoder.decode(nettyRequest.getUri().substring(0, index), "UTF-8");
        querystring = nettyRequest.getUri().substring(index + 1);
    }
    final Request request = new Request();
    request.remoteAddress = getRemoteIPAddress(ctx);
    request.method = nettyRequest.getMethod().getName();
    request.path = path;
    request.querystring = querystring;
    final String contentType = nettyRequest.getHeader(CONTENT_TYPE);
    if (contentType != null) {
        // Strip any ";charset=..." parameter; keep only the bare media type.
        request.contentType = contentType.split(";")[0].trim().toLowerCase();
    } else {
        request.contentType = "text/html";
    }
    if (nettyRequest.getHeader("X-HTTP-Method-Override") != null) {
        request.method = nettyRequest.getHeader("X-HTTP-Method-Override").intern();
    }
    ChannelBuffer b = nettyRequest.getContent();
    if (b instanceof FileChannelBuffer) {
        FileChannelBuffer buffer = (FileChannelBuffer) b;
        // An error occurred
        Integer max = Integer.valueOf(Play.configuration.getProperty("play.netty.maxContentLength", "-1"));
        request.body = buffer.getInputStream();
        if (!(max == -1 || request.body.available() < max)) {
            // Body exceeds the configured cap: replace it with an empty stream.
            request.body = new ByteArrayInputStream(new byte[0]);
        }
    } else {
        // Small bodies: copy the channel buffer into memory.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        IOUtils.copy(new ChannelBufferInputStream(b), out);
        byte[] n = out.toByteArray();
        request.body = new ByteArrayInputStream(n);
    }
    request.url = nettyRequest.getUri();
    request.host = nettyRequest.getHeader(HOST);
    // NOTE(review): request.host may still be null here (null check is below);
    // this only avoids an NPE when the && short-circuits on a non-loopback
    // address. A loopback connection without a Host header would NPE — verify.
    request.isLoopback = ((InetSocketAddress) ctx.getChannel().getRemoteAddress()).getAddress().isLoopbackAddress() && request.host.matches("^127\\.0\\.0\\.1:?[0-9]*$");
    if (request.host == null) {
        request.host = "";
        request.port = 80;
        request.domain = "";
    } else {
        if (request.host.contains(":")) {
            final String[] host = request.host.split(":");
            request.port = Integer.parseInt(host[1]);
            request.domain = host[0];
        } else {
            request.port = 80;
            request.domain = request.host;
        }
    }
    if (Play.configuration.containsKey("XForwardedSupport") && nettyRequest.getHeader("X-Forwarded-For") != null) {
        // Only honor forwarded headers from explicitly whitelisted proxy addresses.
        if (!Arrays.asList(Play.configuration.getProperty("XForwardedSupport", "127.0.0.1").split(",")).contains(request.remoteAddress)) {
            throw new RuntimeException("This proxy request is not authorized: " + request.remoteAddress);
        } else {
            request.secure = ("https".equals(Play.configuration.get("XForwardedProto")) || "https".equals(nettyRequest.getHeader("X-Forwarded-Proto")) || "on".equals(nettyRequest.getHeader("X-Forwarded-Ssl")));
            if (Play.configuration.containsKey("XForwardedHost")) {
                request.host = (String) Play.configuration.get("XForwardedHost");
            } else if (nettyRequest.getHeader("X-Forwarded-Host") != null) {
                request.host = nettyRequest.getHeader("X-Forwarded-Host");
            }
            if (nettyRequest.getHeader("X-Forwarded-For") != null) {
                request.remoteAddress = nettyRequest.getHeader("X-Forwarded-For");
            }
        }
    }
    addToRequest(nettyRequest, request);
    request.resolveFormat();
    request._init();
    Logger.trace("parseRequest: end");
    return request;
}

/**
 * Copies HTTP headers (lower-cased names, multi-valued) and decoded cookies
 * from the Netty request into the Play request.
 */
protected static void addToRequest(HttpRequest nettyRequest, Request request) {
    for (String key : nettyRequest.getHeaderNames()) {
        Http.Header hd = new Http.Header();
        hd.name = key.toLowerCase();
        hd.values = new ArrayList<String>();
        for (String next : nettyRequest.getHeaders(key)) {
            hd.values.add(next);
        }
        request.headers.put(hd.name, hd);
    }
    String value = nettyRequest.getHeader(COOKIE);
    if (value != null) {
        Set<Cookie> cookies = new CookieDecoder().decode(value);
        if (cookies != null) {
            for (Cookie cookie : cookies) {
                Http.Cookie playCookie = new Http.Cookie();
                playCookie.name = cookie.getName();
                playCookie.path = cookie.getPath();
                playCookie.domain = cookie.getDomain();
                playCookie.secure = cookie.isSecure();
                playCookie.value = cookie.getValue();
                playCookie.httpOnly = cookie.isHttpOnly();
                request.cookies.put(playCookie.name, playCookie);
            }
        }
    }
}

/**
 * Last-resort channel error handler: silently drops the connection.
 * The exception itself is deliberately not logged here.
 */
@Override
public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception {
    e.getChannel().close();
}

/**
 * Renders the errors/404.{format} template and writes a 404 response,
 * closing the connection afterwards. Falls back to "txt" format when none
 * is resolvable.
 */
public static void serve404(NotFound e, ChannelHandlerContext ctx, Request request, HttpRequest nettyRequest) {
    Logger.trace("serve404: begin");
    HttpResponse nettyResponse = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.NOT_FOUND);
    nettyResponse.setHeader(SERVER, signature);
    nettyResponse.setHeader(CONTENT_TYPE, "text/html");
    Map<String, Object> binding = getBindingForErrors(e, false);
    String format = Request.current().format;
    // NOTE(review): headers.get("x-requested-with") yields an Http.Header, so this
    // String.equals comparison is presumably always false — confirm intent.
    if (format == null || ("XMLHttpRequest".equals(request.headers.get("x-requested-with")) && "html".equals(format))) {
        format = "txt";
    }
    nettyResponse.setHeader(CONTENT_TYPE, (MimeTypes.getContentType("404." + format, "text/plain")));
    String errorHtml = TemplateLoader.load("errors/404." + format).render(binding);
    try {
        ChannelBuffer buf = ChannelBuffers.copiedBuffer(errorHtml.getBytes("utf-8"));
        nettyResponse.setContent(buf);
        ChannelFuture writeFuture = ctx.getChannel().write(nettyResponse);
        writeFuture.addListener(ChannelFutureListener.CLOSE);
    } catch (UnsupportedEncodingException fex) {
        Logger.error(fex, "(utf-8 ?)");
    }
    Logger.trace("serve404: end");
}

/**
 * Builds the template binding used by the error pages (404/500): the exception
 * (as "result" for non-errors, "exception" for errors) plus the current
 * session, request, flash, params, play instance and validation errors.
 */
protected static Map<String, Object> getBindingForErrors(Exception e, boolean isError) {
    Map<String, Object> binding = new HashMap<String, Object>();
    if (!isError) {
        binding.put("result", e);
    } else {
        binding.put("exception", e);
    }
    binding.put("session", Scope.Session.current());
    binding.put("request", Http.Request.current());
    binding.put("flash", Scope.Flash.current());
    binding.put("params", Scope.Params.current());
    binding.put("play", new Play());
    try {
        binding.put("errors", Validation.errors());
    } catch (Exception ex) {
        // Best effort: validation errors may be unavailable outside a request scope.
        //Logger.error(ex, "Error when getting Validation errors");
    }
    return binding;
}

/**
 * Renders the errors/500.{format} template and writes a 500 response, flushing
 * any pending cookies first so session/flash state survives the error. Falls
 * back to a plain "Internal Error (check logs)" body if template rendering
 * fails, and rethrows only truly unexpected failures.
 */
// TODO: add request and response as parameter
public static void serve500(Exception e, ChannelHandlerContext ctx, HttpRequest nettyRequest) {
    Logger.trace("serve500: begin");
    HttpResponse nettyResponse = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.INTERNAL_SERVER_ERROR);
    nettyResponse.setHeader(SERVER, signature);
    Request request = Request.current();
    Response response = Response.current();
    try {
        if (!(e instanceof PlayException)) {
            e = new play.exceptions.UnexpectedException(e);
        }
        // Flush some cookies
        try {
            Map<String, Http.Cookie> cookies = response.cookies;
            for (Http.Cookie cookie : cookies.values()) {
                CookieEncoder encoder = new CookieEncoder(true);
                Cookie c = new DefaultCookie(cookie.name, cookie.value);
                c.setSecure(cookie.secure);
                c.setPath(cookie.path);
                if (cookie.domain != null) {
                    c.setDomain(cookie.domain);
                }
                if (cookie.maxAge != null) {
                    c.setMaxAge(cookie.maxAge);
                }
                c.setHttpOnly(cookie.httpOnly);
                encoder.addCookie(c);
                nettyResponse.addHeader(SET_COOKIE, encoder.encode());
            }
        } catch (Exception exx) {
            // NOTE(review): logs the original exception 'e', not 'exx' — the cookie
            // flush failure itself is never recorded; likely a wrong variable.
            Logger.error(e, "Trying to flush cookies");
            // humm ?
        }
        Map<String, Object> binding = getBindingForErrors(e, true);
        String format = request.format;
        // NOTE(review): same Http.Header-vs-String comparison issue as serve404.
        if (format == null || ("XMLHttpRequest".equals(request.headers.get("x-requested-with")) && "html".equals(format))) {
            format = "txt";
        }
        nettyResponse.setHeader("Content-Type", (MimeTypes.getContentType("500." + format, "text/plain")));
        try {
            String errorHtml = TemplateLoader.load("errors/500." + format).render(binding);
            ChannelBuffer buf = ChannelBuffers.copiedBuffer(errorHtml.getBytes("utf-8"));
            nettyResponse.setContent(buf);
            ChannelFuture writeFuture = ctx.getChannel().write(nettyResponse);
            writeFuture.addListener(ChannelFutureListener.CLOSE);
            Logger.error(e, "Internal Server Error (500) for request %s", request.method + " " + request.url);
        } catch (Throwable ex) {
            // Template rendering itself failed: log both errors and fall back to a
            // minimal plain-text body.
            Logger.error(e, "Internal Server Error (500) for request %s", request.method + " " + request.url);
            Logger.error(ex, "Error during the 500 response generation");
            try {
                ChannelBuffer buf = ChannelBuffers.copiedBuffer("Internal Error (check logs)".getBytes("utf-8"));
                nettyResponse.setContent(buf);
                ChannelFuture writeFuture = ctx.getChannel().write(nettyResponse);
                writeFuture.addListener(ChannelFutureListener.CLOSE);
            } catch (UnsupportedEncodingException fex) {
                Logger.error(fex, "(utf-8 ?)");
            }
        }
    } catch (Throwable exxx) {
        // Even the error path failed: best-effort minimal response, then rethrow.
        try {
            ChannelBuffer buf = ChannelBuffers.copiedBuffer("Internal Error (check logs)".getBytes("utf-8"));
            nettyResponse.setContent(buf);
            ChannelFuture writeFuture = ctx.getChannel().write(nettyResponse);
            writeFuture.addListener(ChannelFutureListener.CLOSE);
        } catch (Exception fex) {
            Logger.error(fex, "(utf-8 ?)");
        }
        if (exxx instanceof RuntimeException) {
            throw (RuntimeException) exxx;
        }
        throw new RuntimeException(exxx);
    }
    Logger.trace("serve500: end");
}

/**
 * Serves a static file resolved via Play's VirtualFile layer: directories fall
 * back to index.html, missing files become a 404, plugins may take over the
 * response entirely, and otherwise the file is streamed chunked with
 * ETag/If-Modified-Since (304) support.
 */
public static void serveStatic(RenderStatic renderStatic, ChannelHandlerContext ctx, Request request, Response response, HttpRequest nettyRequest, MessageEvent e) {
    Logger.trace("serveStatic: begin");
    HttpResponse nettyResponse = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.valueOf(response.status));
    nettyResponse.setHeader("Server", signature);
    try {
        VirtualFile file = Play.getVirtualFile(renderStatic.file);
        if (file != null && file.exists() && file.isDirectory()) {
            // Directory request: fall back to its index.html.
            file = file.child("index.html");
            if (file != null) {
                renderStatic.file = file.relativePath();
            }
        }
        if ((file == null || !file.exists())) {
            serve404(new NotFound("The file " + renderStatic.file + " does not exist"), ctx, request, nettyRequest);
        } else {
            // Give plugins a chance to serve the file themselves (e.g. templating).
            boolean raw = false;
            for (PlayPlugin plugin : Play.plugins) {
                if (plugin.serveStatic(file, Request.current(), Response.current())) {
                    raw = true;
                    break;
                }
            }
            if (raw) {
                copyResponse(ctx, request, response, nettyRequest);
            } else {
                final File localFile = file.getRealFile();
                final boolean keepAlive = isKeepAlive(nettyRequest);
                nettyResponse = addEtag(nettyRequest, nettyResponse, localFile);
                if (nettyResponse.getStatus().equals(HttpResponseStatus.NOT_MODIFIED)) {
                    Channel ch = e.getChannel();
                    // Write the initial line and the header.
                    ChannelFuture writeFuture = ch.write(nettyResponse);
                    if (!keepAlive) {
                        // Write the content.
                        writeFuture.addListener(ChannelFutureListener.CLOSE);
                    }
                } else {
                    // NOTE(review): raf is presumably closed by ChunkedFile on
                    // completion, but leaks if the header write throws — verify.
                    RandomAccessFile raf;
                    raf = new RandomAccessFile(localFile, "r");
                    long fileLength = raf.length();
                    Logger.trace("keep alive " + keepAlive);
                    Logger.trace("content type " + (MimeTypes.getContentType(localFile.getName(), "text/plain")));
                    if (keepAlive && !nettyResponse.getStatus().equals(HttpResponseStatus.NOT_MODIFIED)) {
                        // Add 'Content-Length' header only for a keep-alive connection.
                        Logger.trace("file length " + fileLength);
                        setContentLength(nettyResponse, fileLength);
                    }
                    nettyResponse.setHeader(CONTENT_TYPE, (MimeTypes.getContentType(localFile.getName(), "text/plain")));
                    Channel ch = e.getChannel();
                    // Write the initial line and the header.
                    ch.write(nettyResponse);
                    // Write the content.
                    ChannelFuture writeFuture = ch.write(new ChunkedFile(raf, 0, fileLength, 8192));
                    if (!keepAlive) {
                        // Close the connection when the whole content is written out.
                        writeFuture.addListener(ChannelFutureListener.CLOSE);
                    }
                }
            }
        }
    } catch (Exception ez) {
        Logger.error(ez, "serveStatic for request %s", request.method + " " + request.url);
        try {
            ChannelBuffer buf = ChannelBuffers.copiedBuffer("Internal Error (check logs)".getBytes("utf-8"));
            nettyResponse.setContent(buf);
            ChannelFuture future = ctx.getChannel().write(nettyResponse);
            future.addListener(ChannelFutureListener.CLOSE);
        } catch (Exception ex) {
            Logger.error(ez, "serveStatic for request %s", request.method + " " + request.url);
        }
    }
    Logger.trace("serveStatic: end");
}

/**
 * Conditional-request check. If-None-Match wins over If-Modified-Since:
 * a matching ETag means "not modified". Otherwise an If-Modified-Since date
 * at or after {@code last} means "not modified". Any parse failure or absent
 * header is treated as modified.
 */
public static boolean isModified(String etag, long last, HttpRequest nettyRequest) {
    if (nettyRequest.containsHeader(IF_NONE_MATCH)) {
        final String browserEtag = nettyRequest.getHeader(IF_NONE_MATCH);
        if (browserEtag.equals(etag)) {
            return false;
        }
        return true;
    }
    if (nettyRequest.containsHeader(IF_MODIFIED_SINCE)) {
        final String ifModifiedSince = nettyRequest.getHeader(IF_MODIFIED_SINCE);
        if (!StringUtils.isEmpty(ifModifiedSince)) {
            try {
                Date browserDate = Utils.getHttpDateFormatter().parse(ifModifiedSince);
                if (browserDate.getTime() >= last) {
                    return false;
                }
            } catch (ParseException ex) {
                Logger.warn("Can't parse HTTP date", ex);
            }
            return true;
        }
    }
    return true;
}

/**
 * Applies caching headers to a file response: no-cache in DEV mode, otherwise
 * a configurable max-age (http.cacheControl, default 3600s). Computes an ETag
 * from last-modified time + File.hashCode(), downgrades matching GETs to 304,
 * and sets Last-Modified/ETag as appropriate. ETags can be disabled via
 * http.useETag=false.
 */
private static HttpResponse addEtag(HttpRequest nettyRequest, HttpResponse httpResponse, File file) {
    if (Play.mode == Play.Mode.DEV) {
        httpResponse.setHeader(CACHE_CONTROL, "no-cache");
    } else {
        String maxAge = Play.configuration.getProperty("http.cacheControl", "3600");
        if (maxAge.equals("0")) {
            httpResponse.setHeader(CACHE_CONTROL, "no-cache");
        } else {
            httpResponse.setHeader(CACHE_CONTROL, "max-age=" + maxAge);
        }
    }
    boolean useEtag = Play.configuration.getProperty("http.useETag", "true").equals("true");
    long last = file.lastModified();
    final String etag = "\"" + last + "-" + file.hashCode() + "\"";
    if (!isModified(etag, last, nettyRequest)) {
        if (nettyRequest.getMethod().equals(HttpMethod.GET)) {
            // Only GET requests are downgraded to 304 Not Modified.
            httpResponse.setStatus(HttpResponseStatus.NOT_MODIFIED);
        }
        if (useEtag) {
            httpResponse.setHeader(ETAG, etag);
        }
    } else {
        httpResponse.setHeader(LAST_MODIFIED, Utils.getHttpDateFormatter().format(new Date(last)));
        if (useEtag) {
            httpResponse.setHeader(ETAG, etag);
        }
    }
    return httpResponse;
}

/**
 * True only for HTTP/1.1 messages whose headers request keep-alive.
 * HTTP/1.0 keep-alive is deliberately not honored here.
 */
public static boolean isKeepAlive(HttpMessage message) {
    return HttpHeaders.isKeepAlive(message) && message.getProtocolVersion().equals(HttpVersion.HTTP_1_1);
}

/** Sets the Content-Length header on the given message. */
public static void setContentLength(HttpMessage message, long contentLength) {
    message.setHeader(HttpHeaders.Names.CONTENT_LENGTH, String.valueOf(contentLength));
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.sysml.runtime.matrix.data;

import java.util.ArrayList;
import java.util.concurrent.Callable;

import org.apache.sysml.api.DMLScript;
import org.apache.sysml.runtime.matrix.data.LibMatrixDNNIm2ColHelper.Im2colWorker;
import org.apache.sysml.utils.NativeHelper;

/**
 * This class contains the set of operators used for performing conv2d
 */
public class LibMatrixDNNConv2dHelper {

    /**
     * Performs convolution via: partialCopy1(filter %*% im2col(input)) = output.
     * This operator has less memory pressure than LoopedIm2ColConv2dAllChannels.
     */
    public static class LoopedIm2ColConv2dOneChan implements Callable<Long> {
        // Row range [_rl, _ru) of input images this task is responsible for.
        protected final int _rl, _ru;
        protected final ConvolutionParameters _params;
        // Per-channel filter slices; _filters.get(c) is the [K x RS] filter for channel c.
        protected final ArrayList<MatrixBlock> _filters;

        public LoopedIm2ColConv2dOneChan(int rl, int ru, ConvolutionParameters params, ArrayList<MatrixBlock> filters) {
            _rl = rl;
            _ru = ru;
            _params = params;
            _filters = filters;
        }

        /**
         * For each image n and channel c: im2col one channel into a reused
         * [RS x PQ] buffer, multiply with that channel's filter, and accumulate
         * the [K x PQ] result (plus optional bias) into the shared dense output.
         *
         * @return number of non-zeros in the rows this task produced
         */
        @Override
        public Long call() throws Exception {
            int PQ = _params.P*_params.Q;
            int K = _params.K;
            int RS = _params.R*_params.S;
            // Single reused im2col buffer for all (n, c) iterations.
            MatrixBlock im2ColOutBlock = new MatrixBlock(RS, PQ, false);
            Im2colWorker im2ColWorker = Im2colWorker.getWorker( _params.input1, im2ColOutBlock, _params, false, false);
            long time1 = 0;
            long time2 = 0;
            for(int n = _rl; n < _ru; n++) {
                for(int c = 0; c < _params.C; c++) {
                    // im2col(input) => _im2ColOutBlock
                    long t1 = DMLScript.FINEGRAINED_STATISTICS ? System.nanoTime() : 0;
                    im2ColWorker.execute(n, c);
                    long t2 = DMLScript.FINEGRAINED_STATISTICS ? System.nanoTime() : 0;

                    // filter %*% _im2ColOutBlock => matMultOutBlock
                    MatrixBlock matMultOutBlock = new MatrixBlock(K, PQ, false);
                    LibMatrixDNNHelper.singleThreadedMatMult(_filters.get(c), im2ColOutBlock, matMultOutBlock, false, true, _params);
                    long t3 = DMLScript.FINEGRAINED_STATISTICS ? System.nanoTime() : 0;

                    if(DMLScript.FINEGRAINED_STATISTICS) {
                        time1 += t2 - t1;
                        time2 += t3 - t2;
                    }

                    // Add the matrix matMultOutBlock of shape [K X PQ] to params.output.denseBlock + destPos
                    add(matMultOutBlock, _params.output.getDenseBlock(), n*K*PQ, K, PQ);
                }
                // Add bias to current row if necessary, always dense
                if(_params.bias != null)
                    LibMatrixDNNHelper.addBias(n, _params.output.getDenseBlock(), _params.bias.getDenseBlock(), K, PQ);
            }
            if(DMLScript.FINEGRAINED_STATISTICS) {
                LibMatrixDNN.loopedConvIm2ColTime.addAndGet(time1);
                LibMatrixDNN.loopedConvMatMultTime.addAndGet(time2);
            }

            //multi-threaded nnz maintenance of current working set
            return _params.output.recomputeNonZeros(_rl, _ru-1);
        }

        // Copy the matrix src of shape [K X PQ] to params.output.denseBlock + destPos
        // Note: this ACCUMULATES (+=) since per-channel partial results must sum up.
        private static void add(MatrixBlock src, double [] dest, int destPos, int K, int PQ) {
            // Copying is required as LibMatrixMult.matrixMult (and/or Java) is not pointer aware.
            // This is not required in Native implementation
            if(!src.isEmptyBlock()) {
                if(src.isInSparseFormat()) {
                    // Copy the sparse matrix matMultOutBlock of shape [K X PQ] to
                    // params.output.denseBlock + destPos
                    for(int k = 0; k < src.getNumRows(); k++) {
                        if( !src.sparseBlock.isEmpty(k) ) {
                            int apos = src.sparseBlock.pos(k);
                            int alen = src.sparseBlock.size(k);
                            int[] aix = src.sparseBlock.indexes(k);
                            double[] avals = src.sparseBlock.values(k);
                            int desPosK = destPos + k*PQ;
                            for(int j = apos; j < apos+alen; j++) {
                                int pqIndex = aix[j];
                                dest[desPosK + pqIndex ] += avals[j];
                            }
                        }
                    }
                }
                else {
                    LibMatrixMult.vectAdd(src.denseBlock, dest, 0, destPos, K*PQ);
                }
            }
        }
    }

    /**
     * Performs convolution via: partialCopy1(filter %*% im2col(input)) = output
     */
    public static class LoopedIm2ColConv2dAllChan implements Callable<Long> {
        // Row range [_rl, _ru) of input images this task is responsible for.
        protected final int _rl, _ru;
        protected final ConvolutionParameters _params;

        public LoopedIm2ColConv2dAllChan(int rl, int ru, ConvolutionParameters params) {
            _rl = rl;
            _ru = ru;
            _params = params;
        }

        /**
         * For each image n: im2col all channels at once into a reused
         * [CRS x PQ] buffer, do one filter multiplication, and copy the
         * [K x PQ] result (plus optional bias) into the shared dense output.
         *
         * @return number of non-zeros in the rows this task produced
         */
        @Override
        public Long call() throws Exception {
            final int PQ = _params.P*_params.Q, K = _params.K, CRS = _params.C*_params.R*_params.S;
            // Both intermediates are allocated once and reused across images.
            MatrixBlock outIm2col = new MatrixBlock(CRS, PQ, false);
            MatrixBlock outMM = new MatrixBlock(K, PQ, false);
            Im2colWorker im2ColWorker = Im2colWorker.getWorker( _params.input1, outIm2col, _params, true, false);
            long time1 = 0;
            long time2 = 0;
            for(int n = _rl; n < _ru; n++) {
                // im2col(input) => _im2ColOutBlock
                long t1 = DMLScript.FINEGRAINED_STATISTICS ? System.nanoTime() : 0;
                im2ColWorker.execute(n);
                long t2 = DMLScript.FINEGRAINED_STATISTICS ? System.nanoTime() : 0;

                // filter %*% _im2ColOutBlock => matMultOutBlock
                outMM.reset(outMM.rlen, outMM.clen, false);
                LibMatrixDNNHelper.singleThreadedMatMult(_params.input2, outIm2col, outMM, false, true, _params);
                long t3 = DMLScript.FINEGRAINED_STATISTICS ? System.nanoTime() : 0;

                if(DMLScript.FINEGRAINED_STATISTICS) {
                    time1 += t2 - t1;
                    time2 += t3 - t2;
                }

                // Copy the matrix matMultOutBlock of shape [K X PQ] to params.output.denseBlock + destPos
                partialCopy1(outMM, _params.output.getDenseBlock(), n*K*PQ, K, PQ);

                // Add bias to current row if necessary, always dense
                if(_params.bias != null)
                    LibMatrixDNNHelper.addBias(n, _params.output.getDenseBlock(), _params.bias.getDenseBlock(), K, PQ);
            }
            if(DMLScript.FINEGRAINED_STATISTICS) {
                LibMatrixDNN.loopedConvIm2ColTime.addAndGet(time1);
                LibMatrixDNN.loopedConvMatMultTime.addAndGet(time2);
            }

            //multi-threaded nnz maintenance of current working set
            return _params.output.recomputeNonZeros(_rl, _ru-1);
        }

        // Copy the matrix src of shape [K X PQ] to params.output.denseBlock + destPos
        // Note: unlike LoopedIm2ColConv2dOneChan.add, this OVERWRITES (=) the target
        // region because a single all-channel multiplication produces the full result.
        private static void partialCopy1(MatrixBlock src, double [] dest, int destPos, int K, int PQ) {
            // Copying is required as LibMatrixMult.matrixMult (and/or Java) is not pointer aware.
            // This is not required in Native implementation
            if( src.isEmptyBlock() )
                return;
            if(src.isInSparseFormat()) {
                SparseBlock sblock = src.sparseBlock;
                for(int k = 0; k < src.getNumRows(); k++) {
                    if( sblock.isEmpty(k) )
                        continue;
                    int apos = sblock.pos(k);
                    int alen = sblock.size(k);
                    int[] aix = sblock.indexes(k);
                    double[] avals = sblock.values(k);
                    int desPosK = destPos + k*PQ;
                    for(int j = apos; j < apos+alen; j++)
                        dest[desPosK+aix[j]] = avals[j];
                }
            }
            else
                System.arraycopy(src.denseBlock, 0, dest, destPos, K * PQ);
        }
    }

    /**
     * This implementation is similar to LoopedIm2ColConv2dAllChan, except for using a
     * sparse-dense matrix multiplication with t(t(Xi) %*% t(F)) instead of a
     * dense-sparse matrix multiplication with Xi %*% F.
     *
     * NOTE: this implementation assumes that the filter is passed in transposed form
     * in order to share this temporary matrix (and its creation cost) across threads.
     */
    public static class LoopedIm2ColConv2dTransAllChan extends LoopedIm2ColConv2dAllChan {

        public LoopedIm2ColConv2dTransAllChan(int rl, int ru, ConvolutionParameters params) {
            super(rl, ru, params);
        }

        /**
         * Same per-image loop as the parent, but operates on transposed
         * intermediates ([PQ x CRS] im2col, [PQ x K] matmult) and transposes the
         * result back while copying it into the output row.
         *
         * @return number of non-zeros in the rows this task produced
         */
        @Override
        public Long call() throws Exception {
            final int PQ = _params.P*_params.Q, K = _params.K, CRS = _params.C*_params.R*_params.S;
            MatrixBlock outIm2col = new MatrixBlock(PQ, CRS, false);
            MatrixBlock outMM = new MatrixBlock(PQ, K, false);
            Im2colWorker im2ColWorker = Im2colWorker.getWorker( _params.input1, outIm2col, _params, true, true);
            for(int n = _rl; n < _ru; n++) {
                // im2col(input) => _im2ColOutBlock
                im2ColWorker.execute(n);

                // t(_im2ColOutBlock) %*% t(filter) => t(matMultOutBlock)
                outMM.reset(outMM.rlen, outMM.clen, false);
                LibMatrixDNNHelper.singleThreadedMatMult(outIm2col, _params.input2, outMM, false, false, _params);

                // Copy the matrix matMultOutBlock of shape [K X PQ] to params.output.denseBlock + destPos
                partialCopyTrans(outMM, _params.output, n*K*PQ, K, PQ);

                // Add bias to current row if necessary, always dense
                if(_params.bias != null)
                    LibMatrixDNNHelper.addBias(n, _params.output.getDenseBlock(), _params.bias.getDenseBlock(), K, PQ);
            }

            //multi-threaded nnz maintenance of current working set
            return _params.output.recomputeNonZeros(_rl, _ru-1);
        }

        // Transpose-copy src [PQ x K] into the [K x PQ] region of the output row.
        private static void partialCopyTrans(MatrixBlock src, MatrixBlock dest, int destPos, int K, int PQ) {
            if( src.isEmptyBlock() )
                return;
            //copy src into its destination row w/ piggybacked transpose
            //src is [PQ x K] -> [K x PQ] -> [1 x KPQ]
            if(src.isInSparseFormat()) {
                SparseBlock sblock = src.sparseBlock;
                double[] c = dest.denseBlock;
                for(int i = 0; i < src.getNumRows(); i++) {
                    if( sblock.isEmpty(i) )
                        continue;
                    int apos = sblock.pos(i);
                    int alen = sblock.size(i);
                    int[] aix = sblock.indexes(i);
                    double[] avals = sblock.values(i);
                    int desPosK = destPos + i;
                    // Sparse entry (i, aix[j]) lands at row aix[j], column i of the
                    // transposed [K x PQ] layout, i.e. offset aix[j]*PQ + i.
                    for(int j = apos; j < apos+alen; j++)
                        c[desPosK+aix[j]*PQ] = avals[j];
                }
            }
            else {
                double[] a = src.denseBlock;
                double[] c = dest.denseBlock;
                final int blocksizeIJ = 128; //128KB for L2
                //cache-conscious blocked execution
                for( int bi = 0; bi < PQ; bi+=blocksizeIJ )
                    for( int bj = 0; bj < K; bj+=blocksizeIJ ) {
                        int bimin = Math.min(bi+blocksizeIJ, PQ);
                        int bjmin = Math.min(bj+blocksizeIJ, K);
                        //core transpose operation
                        for(int i=bi, aix=bi*K+bj, cix=bj*PQ+bi; i<bimin; i++, aix+=K, cix++)
                            LibMatrixReorg.transposeRow(a, c, aix, destPos+cix, PQ, bjmin-bj);
                    }
            }
        }
    }

    /**
     * This operator is used only if native is enabled, filter is dense and input is sparse
     */
    public static class SparseNativeConv2d implements Callable<Long> {
        // Row range [_rl, _ru) of input images this task is responsible for.
        public int _rl;
        public int _ru;
        private final ConvolutionParameters _params;

        public SparseNativeConv2d(int rl, int ru, ConvolutionParameters params) {
            _rl = rl;
            _ru = ru;
            _params = params;
        }

        /**
         * Delegates each non-empty sparse input row to the native conv2d kernel,
         * writing its [1 x KPQ] result through a reused temp buffer into the
         * shared dense output. Empty input rows are skipped (their output stays 0).
         *
         * @return number of non-zeros in the rows this task produced
         */
        @Override
        public Long call() throws Exception {
            int KPQ = _params.K*_params.P*_params.Q;
            double[] temp = new double[KPQ];
            for(int n = _rl; n < _ru; n++)  {
                if( !_params.input1.getSparseBlock().isEmpty(n) ) {
                    int apos = _params.input1.getSparseBlock().pos(n);
                    int alen = _params.input1.getSparseBlock().size(n);
                    int[] aix = _params.input1.getSparseBlock().indexes(n);
                    double[] avals = _params.input1.getSparseBlock().values(n);
                    NativeHelper.conv2dSparse(apos, alen, aix, avals, _params.input2.getDenseBlock(), temp,
                        1, _params.C, _params.H, _params.W, _params.K, _params.R, _params.S,
                        _params.stride_h, _params.stride_w, _params.pad_h, _params.pad_w, _params.P, _params.Q, 1);
                    System.arraycopy(temp, 0, _params.output.denseBlock, n*KPQ, KPQ);
                }
            }
            //multi-threaded nnz maintenance of current working set
            return _params.output.recomputeNonZeros(_rl, _ru-1);
        }
    }
}
package com.haskforce.utils;

import com.google.common.collect.Lists;
import com.haskforce.index.HaskellModuleIndex;
import com.haskforce.psi.*;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementResolveResult;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiNamedElement;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

/**
 * General util class. Provides methods for finding named nodes in the Psi tree.
 */
public class HaskellUtil {
    /**
     * Finds name definition across all Haskell files in the project. All
     * definitions are found when name is null.
     *
     * Searches the element's own file first, then every file belonging to a
     * module that the element's file imports (and those modules' re-exports).
     */
    @NotNull
    public static List<FoundDefinition> findDefinitionNode(@NotNull Project project, @Nullable String name, @NotNull PsiNamedElement e) {
        // Guess where the name could be defined by looking up potential modules.
        // TODO: This removes duplicates, for example importing the same module twice. Fair enough.
        final List<HaskellPsiUtil.Import> potentialModules =
                getPotentialDefinitionModuleNames(e, HaskellPsiUtil.parseImports(e.getContainingFile()));
        final Set<String> potentialModuleNames = new HashSet<String>();
        for (HaskellPsiUtil.Import i : potentialModules) {
            potentialModuleNames.add(i.module);
        }
        List<FoundDefinition> results = ContainerUtil.newArrayList();
        final String qPrefix = getQualifiedPrefix(e);
        final PsiFile psiFile = e.getContainingFile().getOriginalFile();
        // Local definitions in the element's own file (no import needed).
        if (psiFile instanceof HaskellFile) {
            List<PsiNamedElement> result = ContainerUtil.newArrayList();
            findDefinitionNode((HaskellFile)psiFile, name, e, result);
            addFoundDefinition(result, null, results);
        }
        for (HaskellPsiUtil.Import potentialModule : potentialModules) {
            List<PsiNamedElement> result = ContainerUtil.newArrayList();
            List<HaskellFile> files = HaskellModuleIndex.getFilesByModuleName(project, potentialModule.module, GlobalSearchScope.allScope(project));
            for (HaskellFile f : files) {
                final boolean returnAllReferences = name == null;
                final boolean inLocalModule = f != null && qPrefix == null && f.equals(psiFile);
                final boolean inImportedModule = f != null && potentialModuleNames.contains(f.getModuleName());
                if (returnAllReferences || inLocalModule || inImportedModule) {
                    findDefinitionNode(f, name, e, result);
                    // Also chase definitions the module merely re-exports.
                    findDefinitionNodeInExport(project, f, name, e, result);
                }
            }
            addFoundDefinition(result, potentialModule, results);
        }
        return results;
    }

    /**
     * Find definitions that have been re-exported.
     *
     * <code>
     *     module Foo (module Bar, foo) where
     *     import Bar
     *     import Baz (foo)
     * </code>
     */
    private static void findDefinitionNodeInExport(@NotNull Project project, HaskellFile f, @Nullable String name,
                                                   @Nullable PsiNamedElement e, List<PsiNamedElement> result) {
        List<HaskellPsiUtil.Import> imports = HaskellPsiUtil.parseImports(f);
        for (HaskellExport export : PsiTreeUtil.findChildrenOfType(f, HaskellExport.class)) {
            // Either a single exported function matching `name`, or a whole-module export.
            boolean exportFn = export.getQvar() != null && export.getQvar().getQvarid() != null
                    && export.getQvar().getQvarid().getVarid().getName().equals(name);
            String moduleName =
                    exportFn ? getModule(export.getQvar().getQvarid().getConidList())
                             : export.getModuletoken() != null && export.getQconid() != null ? export.getQconid().getText() : null;
            if (!exportFn && moduleName == null) continue;
            for (HaskellPsiUtil.Import imprt : imports) {
                if (moduleName != null && !moduleName.equals(imprt.module) && !moduleName.equals(imprt.alias)) continue;
                // Skip names hidden by `hiding (...)` or absent from an explicit import list.
                boolean hidden = imprt.getHidingNames() != null && ArrayUtil.contains(name, imprt.getHidingNames());
                boolean notImported = imprt.getImportedNames() != null && !ArrayUtil.contains(name, imprt.getImportedNames());
                if (hidden || notImported) continue;
                for (HaskellFile f2 : HaskellModuleIndex.getFilesByModuleName(project, imprt.module, GlobalSearchScope.allScope(project))) {
                    // Recurse: the imported module may itself re-export the name.
                    findDefinitionNode(f2, name, e, result);
                    findDefinitionNodeInExport(project, f2, name, e, result);
                }
            }
        }
    }

    /**
     * Finds a name definition inside a Haskell file. All definitions are found when name
     * is null.
     *
     * Restricts the scanned node class to the element's own kind (varid/conid)
     * so that e.g. a constructor does not resolve to a variable of the same name.
     */
    public static void findDefinitionNode(@Nullable HaskellFile file, @Nullable String name, @Nullable PsiNamedElement e,
                                          @NotNull List<PsiNamedElement> result) {
        if (file == null) return;
        // We only want to look for classes that match the element we are resolving (e.g. varid, conid, etc.)
        final Class<? extends PsiNamedElement> elementClass;
        if (e instanceof HaskellVarid) {
            elementClass = HaskellVarid.class;
        } else if (e instanceof HaskellConid) {
            elementClass = HaskellConid.class;
        } else {
            elementClass = PsiNamedElement.class;
        }
        // Elements under a type signature declaration are treated as type usages.
        final boolean isType = PsiTreeUtil.getParentOfType(e, HaskellGendecl.class) != null;
        Collection<PsiNamedElement> namedElements = PsiTreeUtil.findChildrenOfType(file, elementClass);
        for (PsiNamedElement namedElement : namedElements) {
            if ((name == null || name.equals(namedElement.getName())) && definitionNode(namedElement)) {
                result.add(namedElement);
            } else if (isType && name != null && name.equals(namedElement.getName()) && typeNode(name, namedElement)) {
                result.add(namedElement);
            }
        }
    }

    /**
     * Returns true when the element is the defining occurrence of the type
     * `name` in a data/newtype/type/class declaration.
     */
    private static boolean typeNode(@NotNull String name, @NotNull PsiNamedElement e) {
        HaskellDatadecl datadecl = PsiTreeUtil.getParentOfType(e, HaskellDatadecl.class);
        if (datadecl != null) {
            return datadecl.getTypeeList().get(0).getAtypeList().get(0).getText().equals(name);
        }
        HaskellNewtypedecl newtypedecl = PsiTreeUtil.getParentOfType(e, HaskellNewtypedecl.class);
        if (newtypedecl != null && newtypedecl.getTycon() != null) {
            return name.equals(newtypedecl.getTycon().getConid().getName());
        }
        HaskellTypedecl typedecl = PsiTreeUtil.getParentOfType(e, HaskellTypedecl.class);
        if (typedecl != null) {
            return name.equals(typedecl.getTypeeList().get(0).getAtypeList().get(0).getText());
        }
        HaskellClassdecl classdecl = PsiTreeUtil.getParentOfType(e, HaskellClassdecl.class);
        if (classdecl != null && classdecl.getCtype() != null) {
            // Walk to the innermost ctype (past any class context constraints).
            HaskellCtype ctype = classdecl.getCtype();
            while (ctype.getCtype() != null) {
                ctype = ctype.getCtype();
            }
            if (ctype.getTypee() == null) return false;
            HaskellAtype haskellAtype = ctype.getTypee().getAtypeList().get(0);
            return haskellAtype.getOqtycon() != null
                    && haskellAtype.getOqtycon().getQtycon() != null
                    && name.equals(haskellAtype.getOqtycon().getQtycon().getTycon().getConid().getName());
        }
        return false;
    }

    /**
     * Finds a name definition inside a Haskell file. All definitions are found when name
     * is null.
     */
    @NotNull
    public static List<PsiNamedElement> findDefinitionNodes(@Nullable HaskellFile haskellFile, @Nullable String name) {
        List<PsiNamedElement> ret = ContainerUtil.newArrayList();
        findDefinitionNode(haskellFile, name, null, ret);
        return ret;
    }

    /**
     * Finds name definitions that are within the scope of a file, including imports (to some degree).
     */
    @NotNull
    public static List<PsiNamedElement> findDefinitionNodes(@NotNull HaskellFile psiFile) {
        return findDefinitionNodes(psiFile, null);
    }

    /**
     * Tells whether a named node is a definition node based on its context.
     *
     * Precondition: Element is in a Haskell file.
     */
    public static boolean definitionNode(@NotNull PsiNamedElement e) {
        if (e instanceof HaskellVarid) return definitionNode((HaskellVarid)e);
        if (e instanceof HaskellConid) return definitionNode((HaskellConid)e);
        return false;
    }

    /**
     * A conid is a definition only when it is the constructor name of a
     * data (constr) or newtype (newconstr) declaration.
     */
    public static boolean definitionNode(@NotNull HaskellConid e) {
        final HaskellConstr constr = PsiTreeUtil.getParentOfType(e, HaskellConstr.class);
        final HaskellCon con;
        if (constr != null) {
            con = constr.getCon();
        } else {
            final HaskellNewconstr newconstr = PsiTreeUtil.getParentOfType(e, HaskellNewconstr.class);
            con = newconstr == null ? null : newconstr.getCon();
        }
        final HaskellConid conid = con == null ? null : con.getConid();
        return e.equals(conid);
    }

    /**
     * A varid is a definition when it sits in a type-signed variable
     * declaration, or when it is the first top-level binding of that name in
     * the file (infix operator definitions and pattern bindings included).
     */
    public static boolean definitionNode(@NotNull HaskellVarid e) {
        final PsiElement parent = e.getParent();
        if (parent == null) return false;
        // If we are in a variable declaration (which has a type signature), return true.
        if (HaskellPsiUtil.isType(parent, HaskellTypes.VARS)) return true;
        // Now we have to figure out if the current varid, e, is the first top-level declaration in the file.
        // Check each top-level declaration. When we find the first one that matches our element's name we'll return
        // true if the elements are equal, false otherwise.
        final String name = e.getName();
        final PsiFile file = e.getContainingFile();
        if (!(file instanceof HaskellFile)) return false;
        final HaskellBody body = ((HaskellFile)file).getBody();
        if (body == null) return false;
        for (PsiElement child : body.getChildren()) {
            // If we hit a declaration with a type signature, this shouldn't match our element's name.
            if (child instanceof HaskellGendecl) {
                final HaskellVars vars = ((HaskellGendecl)child).getVars();
                if (vars == null) continue;
                // If it matches our elements name, return false.
                for (HaskellVarid varid : vars.getVaridList()) {
                    if (name.equals(varid.getName())) return false;
                }
            } else if (child instanceof HaskellFunorpatdecl) {
                final HaskellFunorpatdecl f = (HaskellFunorpatdecl)child;
                final HaskellVarop varop = f.getVarop();
                // Check if the function is defined as infix.
                if (varop != null) {
                    final HaskellVarid varid = varop.getVarid();
                    if (varid != null && name.equals(varid.getName())) {
                        return e.equals(varid);
                    }
                } else {
                    // If there is a pat in the declaration then there should only be one since the only case of having
                    // more than one is when using a varop, which was already accounted for above.
                    List<HaskellPat> pats = f.getPatList();
                    if (pats.size() == 1 && pats.get(0).getVaridList().contains(e)) return true;
                    // There can be multiple varids in a declaration, so we'll need to grab the first one.
                    List<HaskellVarid> varids = f.getVaridList();
                    if (varids.size() > 0) {
                        final HaskellVarid varid = varids.get(0);
                        if (name.equals(varid.getName())) {
                            return e.equals(varid);
                        }
                    }
                }
            }
        }
        return false;
    }

    /**
     * Tells whether a node is a definition node based on its context.
*/ public static boolean definitionNode(@NotNull ASTNode node) { final PsiElement element = node.getPsi(); return element instanceof PsiNamedElement && definitionNode((PsiNamedElement)element); } @Nullable public static String getQualifiedPrefix(@NotNull PsiElement e) { final PsiElement q = PsiTreeUtil.getParentOfType(e, HaskellQcon.class, HaskellQvar.class); if (q == null) { return null; } final String qText = q.getText(); final int lastDotPos = qText.lastIndexOf('.'); if (lastDotPos == -1) { return null; } return qText.substring(0, lastDotPos); } @NotNull public static List<HaskellPsiUtil.Import> getPotentialDefinitionModuleNames(@NotNull PsiElement e, @NotNull List<HaskellPsiUtil.Import> imports) { final String qPrefix = getQualifiedPrefix(e); if (qPrefix == null) { return imports; } List<HaskellPsiUtil.Import> result = new ArrayList<HaskellPsiUtil.Import>(2); for (HaskellPsiUtil.Import anImport : imports) { if (qPrefix.equals(anImport.module) || qPrefix.equals(anImport.alias)) { result.add(anImport); } } return result; } public static @Nullable PsiElement lookForFunOrPatDeclWithCorrectName( @NotNull PsiElement element, @NotNull String matcher){ /** * A FunOrPatDecl with as parent haskellbody is one of the 'leftmost' function declarations. * Those should not be taken into account, the definition will already be found from the stub. * It will cause problems if we also start taking those into account over here. */ if (element instanceof HaskellFunorpatdecl && ! 
(element.getParent() instanceof HaskellBody)) { PsiElement[] children = element.getChildren(); for (PsiElement child : children) { if (child instanceof HaskellVarid) { PsiElement psiElement = checkForMatchingVariable(child,matcher); if (psiElement != null){ return psiElement; } } if (child instanceof HaskellPat){ HaskellPat pat = (HaskellPat)child; List<HaskellVarid> varIds = extractAllHaskellVarids(pat); for (HaskellVarid varId : varIds) { if (varId.getName().matches(matcher)){ return varId; }; } } } } return null; } public static List<HaskellVarid> extractAllHaskellVarids(HaskellPat pat) { List<HaskellVarid> varidList = pat.getVaridList(); List<HaskellPat> patList = pat.getPatList(); for (HaskellPat haskellPat : patList) { varidList.addAll(haskellPat.getVaridList()); } return varidList; } private static PsiElement checkForMatchingVariable(PsiElement child, String matcher) { HaskellVarid haskellVarid = (HaskellVarid) child; if (haskellVarid.getName().matches(matcher)) { return child; } else { return null; } } public static boolean isInsideBody(@NotNull PsiElement position) { HaskellGendecl haskellGendecl = PsiTreeUtil.getParentOfType(position, HaskellGendecl.class); return haskellGendecl != null; } public static @NotNull List<PsiElement> matchWhereClausesInScope( @NotNull PsiNamedElement myElement, String name) { return checkWhereClausesInScopeForVariableDeclaration(myElement, name); } public static @NotNull List<PsiElement> getAllDefinitionsInWhereClausesInScope( @NotNull PsiElement myElement) { return checkWhereClausesInScopeForVariableDeclaration(myElement, ".+"); } private static @NotNull List<PsiElement> checkWhereClausesInScopeForVariableDeclaration( @NotNull PsiElement myElement, String matcher) { List<PsiElement> results = Lists.newArrayList(); PsiElement parent = myElement.getParent(); do { if (parent instanceof HaskellRhs) { HaskellRhs rhs = (HaskellRhs) parent; PsiElement where = rhs.getWhere(); if (where == null) { parent = parent.getParent(); 
continue; } else { PsiElement psiElement = checkWhereClause(where, matcher); if (psiElement != null) { results.add(psiElement); } } } parent = parent.getParent(); } while (! (parent instanceof HaskellBody) && ! (parent == null)); return results; } private static @Nullable PsiElement checkWhereClause(@NotNull PsiElement where, String matcher) { PsiElement nextSibling = where.getNextSibling(); while(nextSibling != null){ if(nextSibling instanceof HaskellFunorpatdecl) { PsiElement psiElement = HaskellUtil.lookForFunOrPatDeclWithCorrectName(nextSibling, matcher); if (psiElement != null){ return psiElement; } } nextSibling = nextSibling.getNextSibling(); } return null; } public static @NotNull List<PsiElement> matchLocalDefinitionsInScope(PsiElement element, String name){ return checkLocalDefinitionsForVariableDeclarations(element,name); } public static @NotNull List<PsiElement> getAllDefinitionsInScope(PsiElement element){ return checkLocalDefinitionsForVariableDeclarations(element,".+"); } private static @NotNull List<PsiElement> checkLocalDefinitionsForVariableDeclarations(PsiElement element, String matcher){ List<PsiElement> results = Lists.newArrayList(); PsiElement parent = element; do { /** * This whole function needs to be re-evaluated, it's getting too much if,if,if. The logic * is getting extremely unclear. There should be tests for all (identified) cases so the refactor * should be feasible. 
*/ if (parent instanceof HaskellNewtypedecl){ HaskellNewtypedecl haskellNewtypedecl = (HaskellNewtypedecl) parent; List<HaskellTyvar> tyvarList = haskellNewtypedecl.getTyvarList(); for (HaskellTyvar haskellTyvar : tyvarList) { HaskellVarid varId = haskellTyvar.getVarid(); if (varId.getName().matches(matcher)){ results.add(varId); } } } PsiElement prevSibling = parent.getPrevSibling(); while (prevSibling != null) { PsiElement possibleMatch = HaskellUtil.lookForFunOrPatDeclWithCorrectName(prevSibling, matcher); if (possibleMatch != null) { results.add(possibleMatch); } if (prevSibling instanceof HaskellPat && parent instanceof HaskellExp) { List<HaskellVarid> varIds = HaskellUtil.extractAllHaskellVarids((HaskellPat) prevSibling); for (HaskellVarid varId : varIds) { if (varId.getName().matches(matcher)) { results.add(varId); } } } if (prevSibling instanceof HaskellVarid){ HaskellVarid varId = (HaskellVarid) prevSibling; if (varId.getName().matches(matcher)){ results.add(varId); } } prevSibling = prevSibling.getPrevSibling(); } parent = parent.getParent(); } while(! 
(parent instanceof PsiFile)); return results; } public static List<PsiElement> matchGlobalNamesUnqualified(List<FoundDefinition> namedElements) { List<PsiElement> results = Lists.newArrayList(); for (FoundDefinition possibleReferences : namedElements) { if (possibleReferences.imprt == null || !possibleReferences.imprt.isQualified) { //noinspection ObjectAllocationInLoop results.add(possibleReferences.element); } } return results; } public static List<PsiElementResolveResult> matchGlobalNamesQualified( List<FoundDefinition> namedElements, String qualifiedCallName){ List<PsiElementResolveResult> results = Lists.newArrayList(); for (FoundDefinition possibleReference : namedElements) { if(possibleReference.imprt != null && possibleReference.imprt.alias != null && possibleReference.imprt.alias.equals(qualifiedCallName)){ results.add(new PsiElementResolveResult(possibleReference.element)); } } return results; } public static @NotNull String getModuleName(@NotNull PsiElement element) { HaskellFile containingFile = (HaskellFile)element.getContainingFile(); if (containingFile == null){ return ""; } String moduleName = containingFile.getModuleName(); if(moduleName != null){ return moduleName; } else { return ""; } } private static void addFoundDefinition(List<PsiNamedElement> result, HaskellPsiUtil.Import imprt, List<FoundDefinition> results) { for (PsiNamedElement element : result) { results.add(new FoundDefinition(element, imprt)); } } /** * Returns the textual representation of a qualified module. * * eg. 
From {@code A.B.C.d} return {@code A.B.C} */ @Nullable private static String getModule(@NotNull List<HaskellConid> conids) { if (conids.isEmpty()) return null; StringBuilder b = new StringBuilder(); for (HaskellConid cid : conids) { b.append(cid.getName()); b.append("."); } b.setLength(b.length() - 1); return b.toString(); } public static class FoundDefinition { @NotNull public PsiNamedElement element; @Nullable public HaskellPsiUtil.Import imprt; public FoundDefinition(@NotNull PsiNamedElement element, @Nullable HaskellPsiUtil.Import imprt) { this.element = element; this.imprt = imprt; } } }
/** * Copyright (c) 2012, University of Konstanz, Distributed Systems Group * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the University of Konstanz nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ //Cleversafe open-source code header - Version 1.1 - December 1, 2006 // //Cleversafe Dispersed Storage(TM) is software for secure, private and //reliable storage of the world's data using information dispersal. // //Copyright (C) 2005-2007 Cleversafe, Inc. 
//
//This program is free software; you can redistribute it and/or
//modify it under the terms of the GNU General Public License
//as published by the Free Software Foundation; either version 2
//of the License, or (at your option) any later version.
//
//This program is distributed in the hope that it will be useful,
//but WITHOUT ANY WARRANTY; without even the implied warranty of
//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//GNU General Public License for more details.
//
//You should have received a copy of the GNU General Public License
//along with this program; if not, write to the Free Software
//Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
//USA.
//
//Contact Information:
// Cleversafe, 10 W. 35th Street, 16th Floor #84,
// Chicago IL 60616
// email: licensing@cleversafe.org
//
//END-OF-HEADER
//-----------------------
//@author: John Quigley <jquigley@cleversafe.com>
//@date: January 1, 2008
//---------------------

package org.jscsi.scsi.protocol.mode;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.nio.BufferUnderflowException;
import java.util.Collection;

import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Tests {@link ModePageRegistry} lookup behavior (contains/get by page code and
 * sub-page code) against a small fixed registry of stub pages.
 */
public class ModePageRegistryTest {

    /**
     * Minimal ModePage stub: encode/decode are no-ops; identity is the
     * (pageCode, subPageCode) pair so registry lookups can be checked with equals().
     */
    private static class TestModePage extends ModePage {

        public TestModePage(byte pageCode, int subPageCode, int pageLength) {
            super(pageCode, subPageCode, pageLength);
        }

        public TestModePage(byte pageCode, int pageLength) {
            super(pageCode, pageLength);
        }

        @Override
        protected void decodeModeParameters(int dataLength, DataInputStream inputStream)
                throws BufferUnderflowException, IllegalArgumentException {
            // does nothing
        }

        @Override
        protected void encodeModeParameters(DataOutputStream output) {
            // does nothing
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) return true;
            if (obj == null) return false;
            if (getClass() != obj.getClass()) return false;
            final TestModePage other = (TestModePage) obj;
            if (this.getPageCode() != other.getPageCode()) return false;
            if (this.getSubPageCode() != other.getSubPageCode()) return false;
            return true;
        }

        @Override
        public int hashCode() {
            // Fix: equals() was overridden without hashCode(), violating the
            // Object contract. Hash exactly the fields equals() compares.
            return 31 * getPageCode() + getSubPageCode();
        }

        @Override
        public String toString() {
            return "<TestModePage (" + this.getPageCode() + "," + this.getSubPageCode() + ")>";
        }
    }

    /**
     * Registry stub that starts empty; pages are added explicitly by the tests.
     */
    private static class TestModePageRegistry extends ModePageRegistry {
        @Override
        protected void populateModePages() {
            // does nothing
        }
    }

    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
        // no shared fixture needed
    }

    @AfterClass
    public static void tearDownAfterClass() throws Exception {
        // no shared fixture needed
    }

    @Before
    public void setUp() throws Exception {
        // each test builds its own registry
    }

    @After
    public void tearDown() throws Exception {
        // nothing to clean up
    }

    /** Registers a zero-length stub page under (pageCode, subPageCode). */
    private static void register(ModePageRegistry registry, byte pageCode, int subPageCode) {
        registry.register(pageCode, subPageCode, new TestModePage(pageCode, subPageCode, 0));
    }

    /** Fails the test unless {@code pages} contains the stub page (pageCode, subPageCode). */
    private static void check(Collection<ModePage> pages, byte pageCode, int subPageCode) {
        if (!pages.contains(new TestModePage(pageCode, subPageCode, 0))) {
            fail("Returned mode page list did not contain page: (" + pageCode + "," + subPageCode + ")");
        }
    }

    /**
     * Builds the shared fixture registry.
     */
    private static ModePageRegistry getFixedRegistry() {
        /*
         * Contents
         * ----------------
         *
         * 0x00, 0x00
         * 0x01, 0x00
         * 0x01, 0x01
         * 0x02, 0x02
         * 0x03, 0x00
         * 0x04, 0x00
         * 0x04, 0x01
         * 0x05, 0x00
         */
        ModePageRegistry registry = new TestModePageRegistry();
        register(registry, (byte) 0x00, 0x00);
        register(registry, (byte) 0x01, 0x00);
        register(registry, (byte) 0x01, 0x01);
        register(registry, (byte) 0x02, 0x02);
        register(registry, (byte) 0x03, 0x00);
        register(registry, (byte) 0x04, 0x00);
        register(registry, (byte) 0x04, 0x01);
        register(registry, (byte) 0x05, 0x00);
        return registry;
    }

    @Test
    public void testContainsByte() {
        /*
         * Test with fixed test registry.
         *
         * Input            Output
         * ---------------- --------------------
         * 0x00             true
         * 0x01             true
         * 0x02             true
         * 0x03             true
         * 0x04             true
         * 0x05             true
         * 0x06             false
         * 0x07             false
         */
        ModePageRegistry registry = getFixedRegistry();
        assertEquals("Content query failed", true, registry.contains((byte) 0x00));
        assertEquals("Content query failed", true, registry.contains((byte) 0x01));
        assertEquals("Content query failed", true, registry.contains((byte) 0x02));
        assertEquals("Content query failed", true, registry.contains((byte) 0x03));
        assertEquals("Content query failed", true, registry.contains((byte) 0x04));
        assertEquals("Content query failed", true, registry.contains((byte) 0x05));
        assertEquals("Content query failed", false, registry.contains((byte) 0x06));
        assertEquals("Content query failed", false, registry.contains((byte) 0x07));
    }

    @Test
    public void testContainsByteInt() {
        /*
         * Test with fixed test registry.
         *
         * Input            Output
         * ---------------- --------------------
         * 0x00, 0x00       true
         * 0x00, 0x01       false
         * 0x01, 0x00       true
         * 0x01, 0x01       true
         * 0x01, 0x02       false
         * 0x02, 0x00       false
         * 0x02, 0x02       true
         * 0x03, 0x00       true
         * 0x04, 0x00       true
         * 0x04, 0x01       true
         * 0x05, 0x00       true
         * 0x06, 0x00       false
         * 0x06, 0x01       false
         */
        ModePageRegistry registry = getFixedRegistry();
        assertEquals("Content query failed", true, registry.contains((byte) 0x00, 0x00));
        assertEquals("Content query failed", false, registry.contains((byte) 0x00, 0x01));
        assertEquals("Content query failed", true, registry.contains((byte) 0x01, 0x00));
        assertEquals("Content query failed", true, registry.contains((byte) 0x01, 0x01));
        assertEquals("Content query failed", false, registry.contains((byte) 0x01, 0x02));
        assertEquals("Content query failed", false, registry.contains((byte) 0x02, 0x00));
        assertEquals("Content query failed", true, registry.contains((byte) 0x02, 0x02));
        assertEquals("Content query failed", true, registry.contains((byte) 0x03, 0x00));
        assertEquals("Content query failed", true, registry.contains((byte) 0x04, 0x00));
        assertEquals("Content query failed", true, registry.contains((byte) 0x04, 0x01));
        assertEquals("Content query failed", true, registry.contains((byte) 0x05, 0x00));
        assertEquals("Content query failed", false, registry.contains((byte) 0x06, 0x00));
        assertEquals("Content query failed", false, registry.contains((byte) 0x06, 0x01));
    }

    @Test
    public void testGetBoolean() {
        /*
         * Test with fixed test registry.
         *
         * Input            Output
         * ---------------- --------------------
         * true             [(0,0), (1,0), (1,1), (2,2), (3,0), (4,0), (4,1), (5,0)]
         * false            [(0,0), (1,0), (3,0), (4,0), (5,0)]
         */
        ModePageRegistry registry = getFixedRegistry();

        Collection<ModePage> pages = registry.get(true);
        // Fix: removed leftover debug output (System.out.println(pages)).
        assertEquals("Too many returned pages", 8, pages.size());
        check(pages, (byte) 0, 0);
        check(pages, (byte) 1, 0);
        check(pages, (byte) 1, 1);
        check(pages, (byte) 2, 2);
        check(pages, (byte) 3, 0);
        check(pages, (byte) 4, 0);
        check(pages, (byte) 4, 1);
        check(pages, (byte) 5, 0);

        pages = registry.get(false);
        assertEquals("Too many returned pages", 5, pages.size());
        check(pages, (byte) 0, 0);
        check(pages, (byte) 1, 0);
        check(pages, (byte) 3, 0);
        check(pages, (byte) 4, 0);
        check(pages, (byte) 5, 0);
    }

    @Test
    public void testGetByte() {
        /*
         * Test with fixed test registry.
         *
         * Input            Output
         * ---------------- --------------------
         * 0x00             [(0,0)]
         * 0x01             [(1,0), (1,1)]
         * 0x02             [(2,2)]
         * 0x03             [(3,0)]
         * 0x04             [(4,0), (4,1)]
         * 0x05             [(5,0)]
         * 0x06             []
         */
        ModePageRegistry registry = getFixedRegistry();

        Collection<ModePage> pages = registry.get((byte) 0x00);
        assertEquals("Too many returned pages", 1, pages.size());
        check(pages, (byte) 0, 0);

        pages = registry.get((byte) 0x01);
        assertEquals("Too many returned pages", 2, pages.size());
        check(pages, (byte) 1, 0);
        check(pages, (byte) 1, 1);

        pages = registry.get((byte) 0x02);
        assertEquals("Too many returned pages", 1, pages.size());
        check(pages, (byte) 2, 2);

        pages = registry.get((byte) 0x03);
        assertEquals("Too many returned pages", 1, pages.size());
        check(pages, (byte) 3, 0);

        pages = registry.get((byte) 0x04);
        assertEquals("Too many returned pages", 2, pages.size());
        check(pages, (byte) 4, 0);
        check(pages, (byte) 4, 1);

        // NOTE(review): this 0x01 re-query duplicates the one above (likely copy-paste);
        // retained so earlier lookups are shown not to disturb later ones.
        pages = registry.get((byte) 0x01);
        assertEquals("Too many returned pages", 2, pages.size());
        check(pages, (byte) 1, 0);
        check(pages, (byte) 1, 1);

        pages = registry.get((byte) 0x05);
        assertEquals("Too many returned pages", 1, pages.size());
        check(pages, (byte) 5, 0);

        // Unregistered page code: registry is expected to return null, not an empty collection.
        pages = registry.get((byte) 0x06);
        assertEquals("Too many returned pages", null, pages);
    }

    @Test
    public void testGetByteInt() {
        /*
         * Test with fixed test registry.
         *
         * Input            Output
         * ---------------- --------------------
         * 0x00, 0x00       <obj>
         * 0x00, 0x01       null
         * 0x01, 0x00       <obj>
         * 0x01, 0x01       <obj>
         * 0x01, 0x02       null
         * 0x02, 0x00       null
         * 0x02, 0x02       <obj>
         * 0x03, 0x00       <obj>
         * 0x04, 0x00       <obj>
         * 0x04, 0x01       <obj>
         * 0x05, 0x00       <obj>
         * 0x06, 0x00       null
         * 0x06, 0x01       null
         */
        ModePageRegistry registry = getFixedRegistry();
        assertTrue("Returned invalid mode page", registry.get((byte) 0x00, 0x00) != null);
        assertTrue("Returned invalid mode page", registry.get((byte) 0x00, 0x01) == null);
        assertTrue("Returned invalid mode page", registry.get((byte) 0x01, 0x00) != null);
        assertTrue("Returned invalid mode page", registry.get((byte) 0x01, 0x01) != null);
        assertTrue("Returned invalid mode page", registry.get((byte) 0x01, 0x02) == null);
        assertTrue("Returned invalid mode page", registry.get((byte) 0x02, 0x00) == null);
        assertTrue("Returned invalid mode page", registry.get((byte) 0x02, 0x02) != null);
        assertTrue("Returned invalid mode page", registry.get((byte) 0x03, 0x00) != null);
        assertTrue("Returned invalid mode page", registry.get((byte) 0x04, 0x00) != null);
        assertTrue("Returned invalid mode page", registry.get((byte) 0x04, 0x01) != null);
        assertTrue("Returned invalid mode page", registry.get((byte) 0x05, 0x00) != null);
        assertTrue("Returned invalid mode page", registry.get((byte) 0x06, 0x00) == null);
        assertTrue("Returned invalid mode page", registry.get((byte) 0x06, 0x01) == null);
    }
}
/* Generated by camel build tools - do NOT edit this file! */ package org.apache.camel.main; import java.util.Map; import org.apache.camel.CamelContext; import org.apache.camel.spi.ExtendedPropertyConfigurerGetter; import org.apache.camel.spi.PropertyConfigurerGetter; import org.apache.camel.spi.ConfigurerStrategy; import org.apache.camel.spi.GeneratedPropertyConfigurer; import org.apache.camel.util.CaseInsensitiveMap; import org.apache.camel.main.RestConfigurationProperties; /** * Generated by camel build tools - do NOT edit this file! */ @SuppressWarnings("unchecked") public class RestConfigurationPropertiesConfigurer extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter { @Override public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) { org.apache.camel.main.RestConfigurationProperties target = (org.apache.camel.main.RestConfigurationProperties) obj; switch (ignoreCase ? 
name.toLowerCase() : name) { case "apicomponent": case "ApiComponent": target.setApiComponent(property(camelContext, java.lang.String.class, value)); return true; case "apicontextpath": case "ApiContextPath": target.setApiContextPath(property(camelContext, java.lang.String.class, value)); return true; case "apihost": case "ApiHost": target.setApiHost(property(camelContext, java.lang.String.class, value)); return true; case "apiproperties": case "ApiProperties": target.setApiProperties(property(camelContext, java.util.Map.class, value)); return true; case "apivendorextension": case "ApiVendorExtension": target.setApiVendorExtension(property(camelContext, boolean.class, value)); return true; case "bindingmode": case "BindingMode": target.setBindingMode(property(camelContext, java.lang.String.class, value)); return true; case "clientrequestvalidation": case "ClientRequestValidation": target.setClientRequestValidation(property(camelContext, boolean.class, value)); return true; case "component": case "Component": target.setComponent(property(camelContext, java.lang.String.class, value)); return true; case "componentproperties": case "ComponentProperties": target.setComponentProperties(property(camelContext, java.util.Map.class, value)); return true; case "consumerproperties": case "ConsumerProperties": target.setConsumerProperties(property(camelContext, java.util.Map.class, value)); return true; case "contextpath": case "ContextPath": target.setContextPath(property(camelContext, java.lang.String.class, value)); return true; case "corsheaders": case "CorsHeaders": target.setCorsHeaders(property(camelContext, java.util.Map.class, value)); return true; case "dataformatproperties": case "DataFormatProperties": target.setDataFormatProperties(property(camelContext, java.util.Map.class, value)); return true; case "enablecors": case "EnableCORS": target.setEnableCORS(property(camelContext, boolean.class, value)); return true; case "endpointproperties": case 
"EndpointProperties": target.setEndpointProperties(property(camelContext, java.util.Map.class, value)); return true; case "host": case "Host": target.setHost(property(camelContext, java.lang.String.class, value)); return true; case "hostnameresolver": case "HostNameResolver": target.setHostNameResolver(property(camelContext, java.lang.String.class, value)); return true; case "jsondataformat": case "JsonDataFormat": target.setJsonDataFormat(property(camelContext, java.lang.String.class, value)); return true; case "port": case "Port": target.setPort(property(camelContext, int.class, value)); return true; case "producerapidoc": case "ProducerApiDoc": target.setProducerApiDoc(property(camelContext, java.lang.String.class, value)); return true; case "producercomponent": case "ProducerComponent": target.setProducerComponent(property(camelContext, java.lang.String.class, value)); return true; case "scheme": case "Scheme": target.setScheme(property(camelContext, java.lang.String.class, value)); return true; case "skipbindingonerrorcode": case "SkipBindingOnErrorCode": target.setSkipBindingOnErrorCode(property(camelContext, boolean.class, value)); return true; case "usexforwardheaders": case "UseXForwardHeaders": target.setUseXForwardHeaders(property(camelContext, boolean.class, value)); return true; case "xmldataformat": case "XmlDataFormat": target.setXmlDataFormat(property(camelContext, java.lang.String.class, value)); return true; default: return false; } } @Override public Class<?> getOptionType(String name, boolean ignoreCase) { switch (ignoreCase ? 
name.toLowerCase() : name) { case "apicomponent": case "ApiComponent": return java.lang.String.class; case "apicontextpath": case "ApiContextPath": return java.lang.String.class; case "apihost": case "ApiHost": return java.lang.String.class; case "apiproperties": case "ApiProperties": return java.util.Map.class; case "apivendorextension": case "ApiVendorExtension": return boolean.class; case "bindingmode": case "BindingMode": return java.lang.String.class; case "clientrequestvalidation": case "ClientRequestValidation": return boolean.class; case "component": case "Component": return java.lang.String.class; case "componentproperties": case "ComponentProperties": return java.util.Map.class; case "consumerproperties": case "ConsumerProperties": return java.util.Map.class; case "contextpath": case "ContextPath": return java.lang.String.class; case "corsheaders": case "CorsHeaders": return java.util.Map.class; case "dataformatproperties": case "DataFormatProperties": return java.util.Map.class; case "enablecors": case "EnableCORS": return boolean.class; case "endpointproperties": case "EndpointProperties": return java.util.Map.class; case "host": case "Host": return java.lang.String.class; case "hostnameresolver": case "HostNameResolver": return java.lang.String.class; case "jsondataformat": case "JsonDataFormat": return java.lang.String.class; case "port": case "Port": return int.class; case "producerapidoc": case "ProducerApiDoc": return java.lang.String.class; case "producercomponent": case "ProducerComponent": return java.lang.String.class; case "scheme": case "Scheme": return java.lang.String.class; case "skipbindingonerrorcode": case "SkipBindingOnErrorCode": return boolean.class; case "usexforwardheaders": case "UseXForwardHeaders": return boolean.class; case "xmldataformat": case "XmlDataFormat": return java.lang.String.class; default: return null; } } @Override public Object getOptionValue(Object obj, String name, boolean ignoreCase) { 
org.apache.camel.main.RestConfigurationProperties target = (org.apache.camel.main.RestConfigurationProperties) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "apicomponent": case "ApiComponent": return target.getApiComponent(); case "apicontextpath": case "ApiContextPath": return target.getApiContextPath(); case "apihost": case "ApiHost": return target.getApiHost(); case "apiproperties": case "ApiProperties": return target.getApiProperties(); case "apivendorextension": case "ApiVendorExtension": return target.isApiVendorExtension(); case "bindingmode": case "BindingMode": return target.getBindingMode(); case "clientrequestvalidation": case "ClientRequestValidation": return target.isClientRequestValidation(); case "component": case "Component": return target.getComponent(); case "componentproperties": case "ComponentProperties": return target.getComponentProperties(); case "consumerproperties": case "ConsumerProperties": return target.getConsumerProperties(); case "contextpath": case "ContextPath": return target.getContextPath(); case "corsheaders": case "CorsHeaders": return target.getCorsHeaders(); case "dataformatproperties": case "DataFormatProperties": return target.getDataFormatProperties(); case "enablecors": case "EnableCORS": return target.isEnableCORS(); case "endpointproperties": case "EndpointProperties": return target.getEndpointProperties(); case "host": case "Host": return target.getHost(); case "hostnameresolver": case "HostNameResolver": return target.getHostNameResolver(); case "jsondataformat": case "JsonDataFormat": return target.getJsonDataFormat(); case "port": case "Port": return target.getPort(); case "producerapidoc": case "ProducerApiDoc": return target.getProducerApiDoc(); case "producercomponent": case "ProducerComponent": return target.getProducerComponent(); case "scheme": case "Scheme": return target.getScheme(); case "skipbindingonerrorcode": case "SkipBindingOnErrorCode": return target.isSkipBindingOnErrorCode(); case 
"usexforwardheaders": case "UseXForwardHeaders": return target.isUseXForwardHeaders(); case "xmldataformat": case "XmlDataFormat": return target.getXmlDataFormat(); default: return null; } } @Override public Object getCollectionValueType(Object target, String name, boolean ignoreCase) { switch (ignoreCase ? name.toLowerCase() : name) { case "apiproperties": case "ApiProperties": return java.lang.Object.class; case "componentproperties": case "ComponentProperties": return java.lang.Object.class; case "consumerproperties": case "ConsumerProperties": return java.lang.Object.class; case "corsheaders": case "CorsHeaders": return java.lang.String.class; case "dataformatproperties": case "DataFormatProperties": return java.lang.Object.class; case "endpointproperties": case "EndpointProperties": return java.lang.Object.class; default: return null; } } }
/*
 * Jigasi, the JItsi GAteway to SIP.
 *
 * Copyright @ 2015 Atlassian Pty Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jitsi.jigasi;

import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.service.protocol.event.*;
import net.java.sip.communicator.util.Logger;

import org.jitsi.util.*;
import org.jivesoftware.smack.packet.*;

import java.text.*;

/**
 * Class represents gateway session which manages single SIP call instance
 * (outgoing or incoming).
 *
 * @author Pawel Domas
 */
public class GatewaySession
    implements OperationSetJitsiMeetTools.JitsiMeetRequestListener,
               DTMFListener
{
    /**
     * The logger.
     */
    private final static Logger logger
        = Logger.getLogger(GatewaySession.class);

    /**
     * The <tt>SipGateway</tt> that manages this session.
     */
    private SipGateway sipGateway;

    /**
     * The {@link OperationSetJitsiMeetTools} for SIP leg.
     */
    private final OperationSetJitsiMeetTools jitsiMeetTools;

    /**
     * The <tt>JvbConference</tt> that handles current JVB conference.
     */
    private JvbConference jvbConference;

    /**
     * The SIP call instance if any SIP call is active.
     */
    private Call call;

    /**
     * Stores JVB call instance that will be merged into single conference with
     * SIP call.
     */
    private Call jvbConferenceCall;

    /**
     * Object listens for SIP call state changes.
     */
    private final SipCallStateListener callStateListener
        = new SipCallStateListener();

    /**
     * Peers state listener that publishes peer state in MUC presence status.
     */
    private CallPeerListener peerStateListener;

    /**
     * IF we work in outgoing connection mode then this field contains the SIP
     * number to dial.
     */
    private String destination;

    /**
     * The call resource assigned by {@link CallsControl} for the current call.
     */
    private String callResource;

    /**
     * SIP protocol provider instance.
     */
    private ProtocolProviderService sipProvider;

    /**
     * FIXME: to be removed ?
     */
    // Monitor used by WaitForJvbRoomNameThread for its timed wait/cancel
    // handshake.
    private final Object waitLock = new Object();

    /**
     * FIXME: JVB room name property is not available at the moment when call
     * is created, because header is not parsed yet
     */
    private WaitForJvbRoomNameThread waitThread;

    /**
     * Gateway session listener.
     */
    private GatewaySessionListener listener;

    /**
     * Creates new <tt>GatewaySession</tt> for given <tt>callResource</tt>
     * and <tt>sipCall</tt>. We already have SIP call instance, so this session
     * can be considered "incoming" SIP session(was created after incoming call
     * had been received).
     *
     * @param gateway the <tt>SipGateway</tt> instance that will control this
     *                session.
     * @param callResource the call resource/URI that identifies this session.
     * @param sipCall the incoming SIP call instance which will be handled by
     *                this session.
     */
    public GatewaySession(SipGateway gateway,
                          String     callResource,
                          Call       sipCall)
    {
        this(gateway);
        this.callResource = callResource;
        this.call = sipCall;
    }

    /**
     * Creates new <tt>GatewaySession</tt> that can be used to initiate outgoing
     * SIP gateway session by using
     * {@link #createOutgoingCall(String, String, String, String)} method.
     *
     * @param gateway the {@link SipGateway} the <tt>SipGateway</tt> instance
     *                that will control this session.
     */
    public GatewaySession(SipGateway gateway)
    {
        this.sipGateway = gateway;
        this.sipProvider = gateway.getSipProvider();
        this.jitsiMeetTools
            = sipProvider.getOperationSet(
                    OperationSetJitsiMeetTools.class);
    }

    /**
     * Called once both legs (SIP and JVB) are gone: clears the session state
     * and tells the gateway this call resource is free.
     */
    private void allCallsEnded()
    {
        String resource = callResource;

        destination = null;
        callResource = null;

        sipGateway.notifyCallEnded(resource);
    }

    /**
     * Cancels {@link #waitThread} if one is currently waiting for the JVB
     * room name. Safe to call when no wait thread exists.
     */
    private void cancelWaitThread()
    {
        if (waitThread != null)
        {
            waitThread.cancel();
        }
    }

    /**
     * Starts new outgoing session by dialing given SIP number and joining JVB
     * conference held in given MUC room.
     * @param destination the destination SIP number that will be called.
     * @param jvbRoomName the name of MUC that holds JVB conference that will be
     *                    joined.
     * @param roomPass optional password required to enter MUC room.
     * @param callResource the call resource that will identify new call.
     */
    public void createOutgoingCall(String destination,
                                   String jvbRoomName,
                                   String roomPass,
                                   String callResource)
    {
        // A session handles exactly one SIP call and one JVB conference;
        // refuse to start another while either is active.
        if (jvbConference != null)
        {
            throw new IllegalStateException("Conference in progress");
        }

        if (call != null)
        {
            throw new IllegalStateException("SIP call in progress");
        }

        this.destination = destination;
        this.callResource = callResource;

        // The SIP call itself is created later, from
        // onConferenceCallStarted(), once the JVB conference is up.
        jvbConference = new JvbConference(this, jvbRoomName, roomPass);

        jvbConference.start();
    }

    /**
     * Returns the call resource/URI for currently active call.
     * @return the call resource/URI for currently active call.
     */
    public String getCallResource()
    {
        return callResource;
    }

    /**
     * Returns the <tt>CallsControl</tt> that manages this instance.
     * @return the <tt>CallsControl</tt> that manages this instance.
     */
    public CallsControl getCallsControl()
    {
        return sipGateway.getCallsControl();
    }

    /**
     * Returns the name of the chat room that holds current JVB conference or
     * <tt>null</tt> we're not in any room.
     *
     * @return the name of the chat room that holds current JVB conference or
     *         <tt>null</tt> we're not in any room.
     */
    public String getJvbRoomName()
    {
        return jvbConference != null ? jvbConference.getRoomName() : null;
    }

    /**
     * Returns <tt>ChatRoom</tt> that hosts JVB conference of this session
     * if we're already/still in this room or <tt>null</tt> otherwise.
     */
    public ChatRoom getJvbChatRoom()
    {
        return jvbConference != null ? jvbConference.getJvbRoom() : null;
    }

    /**
     * Returns SIP destination address for outgoing SIP call.
     * @return SIP destination address for outgoing SIP call.
     */
    public String getDestination()
    {
        return destination;
    }

    /**
     * Returns the instance of SIP call if any is currently in progress.
     * @return the instance of SIP call if any is currently in progress.
     */
    public Call getSipCall()
    {
        return call;
    }

    /**
     * Returns name of the XMPP server that hosts JVB conference room.
     */
    public String getXmppServerName()
    {
        return sipGateway.getXmppServerName();
    }

    /**
     * Cancels current session with no specific hangup reason.
     */
    public void hangUp()
    {
        hangUp(-1, null);
    }

    /**
     * Cancels current session.
     *
     * @param reasonCode optional SIP hangup reason code, or -1 for none.
     * @param reason optional human-readable hangup reason.
     */
    public void hangUp(int reasonCode, String reason)
    {
        cancelWaitThread();

        // Stopping the JVB conference also tears down the SIP call via
        // onJvbConferenceStopped(); only hang up the SIP call directly when
        // no conference exists.
        if (jvbConference != null)
        {
            jvbConference.stop();
        }
        else if (call != null)
        {
            if (reasonCode != -1)
                CallManager.hangupCall(call, reasonCode, reason);
            else
                CallManager.hangupCall(call);
        }
    }

    /**
     * Joins the given MUC room that hosts the JVB conference, cancelling any
     * pending wait for an INVITE-provided room name first.
     */
    private void joinJvbConference(String conferenceRoomName, String password)
    {
        cancelWaitThread();

        jvbConference
            = new JvbConference(this, conferenceRoomName, password);

        jvbConference.start();
    }

    /*private void joinSipWithJvbCalls()
    {
        List<Call> calls = new ArrayList<Call>();
        calls.add(call);
        calls.add(jvbConferenceCall);

        CallManager.mergeExistingCalls(
            jvbConferenceCall.getConference(), calls);

        sendPresenceExtension(
            createPresenceExtension(
                SipGatewayExtension.STATE_IN_PROGRESS, null));

        jvbConference.setPresenceStatus(
            SipGatewayExtension.STATE_IN_PROGRESS);
    }*/

    /**
     * Called when the JVB side invites us to its conference call.
     *
     * @param incomingCall the conference call on the JVB leg.
     */
    void onConferenceCallInvited(Call incomingCall)
    {
        // Incoming SIP connection mode sets common conference here
        if (destination == null)
        {
            call.setConference(incomingCall.getConference());
        }
    }

    /**
     * Method called by <tt>JvbConference</tt> to notify that JVB conference
     * call has started.
     * @param jvbConferenceCall JVB call instance.
     * @return any <tt>Exception</tt> that might occurred during handling of the
     *         event. FIXME: is this still needed ?
     */
    Exception onConferenceCallStarted(Call jvbConferenceCall)
    {
        this.jvbConferenceCall = jvbConferenceCall;

        if (destination == null)
        {
            // Incoming mode: the SIP call already exists - just accept it.
            CallManager.acceptCall(call);
        }
        else
        {
            //sendPresenceExtension(
            //  createPresenceExtension(
            //    SipGatewayExtension.STATE_RINGING, null));
            //if (jvbConference != null)
            //{
            //  jvbConference.setPresenceStatus(
            //    SipGatewayExtension.STATE_RINGING);
            //}

            // Make an outgoing call
            OperationSetBasicTelephony tele
                = sipProvider.getOperationSet(
                        OperationSetBasicTelephony.class);

            try
            {
                this.call = tele.createCall(destination);

                peerStateListener = new CallPeerListener(this.call);

                // Outgoing SIP connection mode sets common conference object
                // just after the call has been created
                call.setConference(jvbConferenceCall.getConference());

                logger.info(
                    "Created outgoing call to " + destination + " " + call);

                this.call.addCallChangeListener(callStateListener);

                //FIXME: It might be already in progress or ended ?!
                if (!CallState.CALL_INITIALIZATION.equals(call.getCallState()))
                {
                    callStateListener.handleCallState(call, null);
                }
            }
            catch (OperationFailedException e)
            {
                return e;
            }
            catch (ParseException e)
            {
                return e;
            }
        }

        return null;
    }

    /**
     * Called by <tt>JvbConference</tt> to notify that JVB call has ended.
     * @param jvbConference <tt>JvbConference</tt> instance.
     * @param reasonCode optional hangup reason code forwarded to the SIP leg.
     * @param reason optional hangup reason string forwarded to the SIP leg.
     */
    void onJvbConferenceStopped(JvbConference jvbConference,
                                int reasonCode, String reason)
    {
        this.jvbConference = null;

        if (call != null)
        {
            hangUp(reasonCode, reason);
        }
        else
        {
            allCallsEnded();
        }
    }

    /**
     * Publishes given packet extension in our MUC presence, if we are
     * currently in a JVB conference room.
     */
    private void sendPresenceExtension(PacketExtension extension)
    {
        if (jvbConference != null)
        {
            jvbConference.sendPresenceExtension(extension);
        }
        else
        {
            logger.error(
                "JVB conference unavailable. Failed to send: "
                    + extension.toXML());
        }
    }

    /**
     * Cleans up after the SIP call has ended: detaches the call state
     * listener and stops the JVB leg (or ends the whole session if there is
     * no JVB conference).
     */
    private void sipCallEnded()
    {
        if (call == null)
            return;

        logger.info("Sip call ended: " + call.toString());

        call.removeCallChangeListener(callStateListener);

        call = null;

        if (jvbConference != null)
        {
            jvbConference.stop();
        }
        else
        {
            allCallsEnded();
        }
    }

    /**
     * {@inheritDoc}
     *
     * Triggered when the SIP INVITE carried a Jitsi Meet room request; joins
     * the requested room (incoming connection mode).
     */
    @Override
    public void onJoinJitsiMeetRequest(Call call, String room, String pass)
    {
        if (jvbConference == null && this.call == call)
        {
            if (room != null)
            {
                joinJvbConference(room, pass);
            }
        }
    }

    /**
     * Initializes this instance for incoming call which was passed to the
     * constructor {@link #GatewaySession(SipGateway, String, Call)}.
     */
    void initIncomingCall()
    {
        call.addCallChangeListener(callStateListener);

        peerStateListener = new CallPeerListener(call);

        if (jvbConference != null)
        {
            // Reject incoming call
            CallManager.hangupCall(call);
        }
        else
        {
            waitForRoomName();
        }
    }

    /**
     * Spawns {@link WaitForJvbRoomNameThread} which waits for the JVB room
     * name to arrive via {@link #onJoinJitsiMeetRequest} (the room name is
     * carried in an INVITE header that may not be parsed yet).
     */
    private void waitForRoomName()
    {
        if (waitThread != null)
        {
            throw new IllegalStateException("Wait thread exists");
        }

        waitThread = new WaitForJvbRoomNameThread();

        jitsiMeetTools.addRequestListener(this);

        waitThread.start();
    }

    /**
     * Returns {@link Call} instance for JVB leg of the conference.
     */
    public Call getJvbCall()
    {
        return jvbConferenceCall;
    }

    /**
     * Returns {@link GatewaySessionListener} currently bound to this instance.
     */
    public GatewaySessionListener getListener()
    {
        return listener;
    }

    /**
     * Sets new {@link GatewaySessionListener} on this instance.
     * @param listener sets new {@link GatewaySessionListener} that will
     *                 receive updates from this instance.
     */
    public void setListener(GatewaySessionListener listener)
    {
        this.listener = listener;
    }

    /**
     * Notifies {@link GatewaySessionListener}(if any) that we have just joined
     * the conference room(call is not started yet - just the MUC).
     */
    void notifyJvbRoomJoined()
    {
        if (listener != null)
        {
            listener.onJvbRoomJoined(this);
        }
    }

    /**
     * {@inheritDoc}
     *
     * Forwards DTMF tones received on the JVB leg to the SIP peer.
     */
    @Override
    public void toneReceived(DTMFReceivedEvent dtmfReceivedEvent)
    {
        if (dtmfReceivedEvent != null
                && dtmfReceivedEvent.getSource() == jvbConferenceCall)
        {
            OperationSetDTMF opSet
                = sipProvider.getOperationSet(OperationSetDTMF.class);
            if (opSet != null && dtmfReceivedEvent.getStart() != null)
            {
                if (dtmfReceivedEvent.getStart())
                {
                    try
                    {
                        opSet.startSendingDTMF(
                                peerStateListener.thePeer,
                                dtmfReceivedEvent.getValue());
                    }
                    catch (OperationFailedException ofe)
                    {
                        logger.info("Failed to forward a DTMF tone: " + ofe);
                    }
                }
                else
                {
                    opSet.stopSendingDTMF(peerStateListener.thePeer);
                }
            }
        }
    }

    /**
     * Listens for state changes of the SIP call and reacts once the call
     * goes IN_PROGRESS or ENDED.
     */
    class SipCallStateListener
        implements CallChangeListener
    {
        @Override
        public void callPeerAdded(CallPeerEvent evt) { }

        @Override
        public void callPeerRemoved(CallPeerEvent evt)
        {
            //if (evt.getSourceCall().getCallPeerCount() == 0)
            //  sipCallEnded();
        }

        @Override
        public void callStateChanged(CallChangeEvent evt)
        {
            //logger.info("SIP call " + evt);
            handleCallState(evt.getSourceCall(), evt.getCause());
        }

        /**
         * Reacts to the current state of given SIP <tt>call</tt>.
         *
         * @param call the SIP call whose state is examined.
         * @param cause the peer change event that caused the state change,
         *              or <tt>null</tt> when invoked directly (not from a
         *              call change event).
         */
        public void handleCallState(Call call, CallPeerChangeEvent cause)
        {
            // Once call is started notify SIP gateway
            if (call.getCallState() == CallState.CALL_IN_PROGRESS)
            {
                logger.info("Sip call IN_PROGRESS: " + call);
                //sendPresenceExtension(
                //  createPresenceExtension(
                //    SipGatewayExtension.STATE_IN_PROGRESS, null));

                //jvbConference.setPresenceStatus(
                //  SipGatewayExtension.STATE_IN_PROGRESS);

                logger.info("SIP call format used: "
                                + Util.getFirstPeerMediaFormat(call));
            }
            else if(call.getCallState() == CallState.CALL_ENDED)
            {
                // If we have something to show and we're still in the MUC
                // then we display error reason string and leave the room with
                // 5 sec delay.
                if (cause != null
                    && jvbConference != null && jvbConference.isInTheRoom())
                {
                    // Show reason instead of disconnected
                    if (!StringUtils.isNullOrEmpty(cause.getReasonString()))
                    {
                        peerStateListener.unregister();

                        jvbConference.setPresenceStatus(
                            cause.getReasonString());
                    }

                    // Delay 5 seconds
                    new Thread(new Runnable()
                    {
                        @Override
                        public void run()
                        {
                            try
                            {
                                Thread.sleep(5000);

                                sipCallEnded();
                            }
                            catch (InterruptedException e)
                            {
                                Thread.currentThread().interrupt();
                            }
                        }
                    }).start();
                }
                else
                {
                    sipCallEnded();
                }
            }
        }
    }

    /**
     * Tracks the state of the single SIP peer and mirrors it into the MUC
     * presence status.
     */
    class CallPeerListener
        extends CallPeerAdapter
    {
        // The one and only SIP peer of the call (first from the iterator).
        CallPeer thePeer;

        CallPeerListener(Call call)
        {
            thePeer = call.getCallPeers().next();
            thePeer.addCallPeerListener(this);
        }

        @Override
        public void peerStateChanged(final CallPeerChangeEvent evt)
        {
            CallPeerState callPeerState = (CallPeerState)evt.getNewValue();
            String stateString = callPeerState.getStateString();

            logger.info(callResource + " SIP peer state: " + stateString);

            if (jvbConference != null)
                jvbConference.setPresenceStatus(stateString);

            if (CallPeerState.BUSY.equals(callPeerState))
            {
                // Hangup the call with 5 sec delay, so that we can see BUSY
                // status in jitsi-meet
                new Thread(new Runnable()
                {
                    @Override
                    public void run()
                    {
                        try
                        {
                            Thread.sleep(5000);
                        }
                        catch (InterruptedException e)
                        {
                            // NOTE(review): an interrupt here kills this
                            // helper thread with an uncaught
                            // RuntimeException and skips the hangup -
                            // confirm whether interruption is expected to
                            // abort the delayed hangup.
                            throw new RuntimeException(e);
                        }
                        CallManager.hangupCall(
                            evt.getSourceCallPeer().getCall());
                    }
                }).start();
            }
        }

        /**
         * Detaches this listener from the peer.
         */
        public void unregister()
        {
            thePeer.removeCallPeerListener(this);
        }
    }

    /**
     * FIXME: to be removed
     *
     * Waits up to 1 second for a JVB room name to arrive (via
     * {@link #onJoinJitsiMeetRequest}); falls back to the configured default
     * room, or hangs up with BUSY_HERE when no room name is available.
     */
    class WaitForJvbRoomNameThread
        extends Thread
    {
        // Set under waitLock by cancel() to abort the wait.
        private boolean cancel = false;

        @Override
        public void run()
        {
            synchronized (waitLock)
            {
                try
                {
                    waitLock.wait(1000);

                    if (cancel)
                    {
                        logger.info("Wait thread cancelled");
                        return;
                    }

                    if (getJvbRoomName() == null
                           && !CallState.CALL_ENDED.equals(call.getCallState()))
                    {
                        String defaultRoom
                            = JigasiBundleActivator
                                .getConfigurationService()
                                .getString(
                                    SipGateway.P_NAME_DEFAULT_JVB_ROOM);

                        if (defaultRoom != null)
                        {
                            logger.info(
                                "Using default JVB room name property "
                                    + defaultRoom);

                            joinJvbConference(defaultRoom, null);
                        }
                        else
                        {
                            logger.info(
                                "No JVB room name provided in INVITE header");

                            hangUp(
                                OperationSetBasicTelephony
                                    .HANGUP_REASON_BUSY_HERE,
                                "No JVB room name provided");
                        }
                    }
                }
                catch (InterruptedException e)
                {
                    Thread.currentThread().interrupt();
                }
                finally
                {
                    jitsiMeetTools.removeRequestListener(GatewaySession.this);
                }
            }
        }

        /**
         * Cancels the wait and joins the thread; when invoked from the wait
         * thread itself only clears the reference (no self-join).
         */
        public void cancel()
        {
            if (Thread.currentThread() == waitThread)
            {
                waitThread = null;
                return;
            }

            synchronized (waitLock)
            {
                cancel = true;
                waitLock.notifyAll();
            }

            try
            {
                waitThread.join();
                waitThread = null;
            }
            catch (InterruptedException e)
            {
                Thread.currentThread().interrupt();
            }
        }
    }
}
/*
 * Copyright (c) 2013-2015 by appPlant UG. All rights reserved.
 *
 * @APPPLANT_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apache License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://opensource.org/licenses/Apache-2.0/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPPLANT_LICENSE_HEADER_END@
 */

package com.datum.hotline.plugin.hlpush.notification;

import android.app.AlarmManager;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Build;
import android.support.v4.app.NotificationCompat;

import org.json.JSONException;
import org.json.JSONObject;

import java.util.Date;

/**
 * Wrapper class around OS notification class. Handles basic operations
 * like show, delete, cancel for a single local notification instance.
 */
public class Notification {

    // Used to differ notifications by their life cycle state
    public enum Type {
        ALL, SCHEDULED, TRIGGERED
    }

    // Default receiver to handle the trigger event
    private static Class<?> defaultReceiver = TriggerReceiver.class;

    // Key for private preferences
    static final String PREF_KEY = "LocalNotification";

    // Tag used when posting/cancelling notifications with the
    // NotificationManager.
    static final String TAG = "HotlineNotifications";

    // Application context passed by constructor
    private final Context context;

    // Notification options passed by JS
    private final Options options;

    // Builder with full configuration
    private final NotificationCompat.Builder builder;

    // Receiver to handle the trigger event
    private Class<?> receiver = defaultReceiver;

    /**
     * Constructor
     *
     * @param context
     *      Application context
     * @param options
     *      Parsed notification options
     * @param builder
     *      Pre-configured notification builder
     * @param receiver
     *      Broadcast receiver fired on trigger; falls back to the static
     *      default receiver when null.
     */
    protected Notification (Context context, Options options,
                    NotificationCompat.Builder builder, Class<?> receiver) {

        this.context = context;
        this.options = options;
        this.builder = builder;

        this.receiver = receiver != null ? receiver : defaultReceiver;
    }

    /**
     * Get application context.
     */
    public Context getContext () {
        return context;
    }

    /**
     * Get notification options.
     */
    public Options getOptions () {
        return options;
    }

    /**
     * Get notification ID.
     */
    public int getId () {
        return options.getId();
    }

    /**
     * If it's a repeating notification.
     */
    public boolean isRepeating () {
        return getOptions().getRepeatInterval() > 0;
    }

    /**
     * If the notification was in the past.
     */
    public boolean wasInThePast () {
        return new Date().after(options.getTriggerDate());
    }

    /**
     * If the notification is scheduled.
     */
    public boolean isScheduled () {
        return isRepeating() || !wasInThePast();
    }

    /**
     * If the notification is triggered.
     */
    public boolean isTriggered () {
        return wasInThePast();
    }

    /**
     * If the notification is an update.
     */
    protected boolean isUpdate () {

        if (!options.getDict().has("updatedAt"))
            return false;

        long now = new Date().getTime();

        long updatedAt = options.getDict().optLong("updatedAt", now);

        // Counts as an "update" only when the updatedAt timestamp is less
        // than one second old.
        return (now - updatedAt) < 1000;
    }

    /**
     * Notification type can be one of pending or scheduled.
     */
    public Type getType () {
        return isTriggered() ? Type.TRIGGERED : Type.SCHEDULED;
    }

    /**
     * Schedule the local notification.
     */
    public void schedule() {
        long triggerTime = options.getTriggerTime();

        // Persist first so the alarm can be restored after reboot.
        persist();

        // Intent gets called when the Notification gets fired
        Intent intent = new Intent(context, receiver)
                .setAction(options.getIdStr())
                .putExtra(Options.EXTRA, options.toString());

        PendingIntent pi = PendingIntent.getBroadcast(
                context, 0, intent, PendingIntent.FLAG_CANCEL_CURRENT);

        if (isRepeating()) {
            getAlarmMgr().setRepeating(AlarmManager.RTC_WAKEUP,
                    triggerTime, options.getRepeatInterval(), pi);
        } else {
            getAlarmMgr().set(AlarmManager.RTC_WAKEUP, triggerTime, pi);
        }
    }

    /**
     * Clear the local notification without canceling repeating alarms.
     *
     */
    public void clear () {
        // NOTE(review): a one-shot notification already in the past is only
        // unpersisted here (its status-bar entry is NOT cancelled), while
        // every other notification is cancelled but stays persisted. Other
        // forks of this plugin do both unconditionally - confirm this
        // either/or split is intentional.
        if (!isRepeating() && wasInThePast()) {
            unpersist();
        } else {
            getNotMgr().cancel(TAG,getId());
        }
    }

    /**
     * Cancel the local notification.
     *
     * Create an intent that looks similar, to the one that was registered
     * using schedule. Making sure the notification id in the action is the
     * same. Now we can search for such an intent using the 'getService'
     * method and cancel it.
     */
    public void cancel() {
        Intent intent = new Intent(context, receiver)
                .setAction(options.getIdStr());

        PendingIntent pi = PendingIntent.
                getBroadcast(context, 0, intent, 0);

        getAlarmMgr().cancel(pi);
        getNotMgr().cancel(TAG,options.getId());

        unpersist();
    }

    /**
     * Present the local notification to user.
     */
    public void show () {
        // TODO Show dialog when in foreground
        showNotification();
    }

    /**
     * Show as local notification when in background.
     */
    @SuppressWarnings("deprecation")
    private void showNotification () {
        int id = getOptions().getId();

        if (Build.VERSION.SDK_INT <= 15) {
            // Notification for HoneyComb to ICS
            //getNotMgr().notify(id, builder.getNotification());
            getNotMgr().notify(TAG,id, builder.getNotification());
        } else {
            // Notification for Jellybean and above
            //getNotMgr().notify(id, builder.build());
            getNotMgr().notify(TAG,id, builder.build());
        }
    }

    /**
     * Show as modal dialog when in foreground.
     */
    private void showDialog () {
        // TODO
    }

    /**
     * Count of triggers since schedule.
     */
    public int getTriggerCountSinceSchedule() {
        long now = System.currentTimeMillis();
        long triggerTime = options.getTriggerTime();

        if (!wasInThePast())
            return 0;

        if (!isRepeating())
            return 1;

        // NOTE(review): integer division - a repeating notification that has
        // fired once but not yet reached the second interval reports 0, not
        // 1; confirm whether "+ 1" was intended.
        return (int) ((now - triggerTime) / options.getRepeatInterval());
    }

    /**
     * Encode options to JSON.
     */
    public String toString() {
        JSONObject dict = options.getDict();
        JSONObject json = new JSONObject();

        try {
            json = new JSONObject(dict.toString());
        } catch (JSONException e) {
            e.printStackTrace();
        }

        // Strip transient/derived keys before persisting.
        json.remove("firstAt");
        json.remove("updatedAt");
        json.remove("soundUri");
        json.remove("iconUri");

        return json.toString();
    }

    /**
     * Persist the information of this notification to the Android Shared
     * Preferences. This will allow the application to restore the notification
     * upon device reboot, app restart, retrieve notifications, aso.
     */
    private void persist () {
        SharedPreferences.Editor editor = getPrefs().edit();

        editor.putString(options.getIdStr(), options.toString());

        // apply() is unavailable before API 9.
        if (Build.VERSION.SDK_INT < 9) {
            editor.commit();
        } else {
            editor.apply();
        }
    }

    /**
     * Remove the notification from the Android shared Preferences.
     */
    private void unpersist () {
        SharedPreferences.Editor editor = getPrefs().edit();

        editor.remove(options.getIdStr());

        // apply() is unavailable before API 9.
        if (Build.VERSION.SDK_INT < 9) {
            editor.commit();
        } else {
            editor.apply();
        }
    }

    /**
     * Shared private preferences for the application.
     */
    private SharedPreferences getPrefs () {
        return context.getSharedPreferences(PREF_KEY, Context.MODE_PRIVATE);
    }

    /**
     * Notification manager for the application.
     */
    private NotificationManager getNotMgr () {
        return (NotificationManager) context
                .getSystemService(Context.NOTIFICATION_SERVICE);
    }

    /**
     * Alarm manager for the application.
     */
    private AlarmManager getAlarmMgr () {
        return (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
    }

    /**
     * Set default receiver to handle the trigger event.
     *
     * @param receiver
     *      broadcast receiver
     */
    public static void setDefaultTriggerReceiver (Class<?> receiver) {
        defaultReceiver = receiver;
    }

}
// Copyright (c) 2013 Richard Long & HexBeerium
//
// Released under the MIT license ( http://opensource.org/licenses/MIT )
//

package jsonbroker.library.server.http;

import java.io.InputStream;
import java.util.StringTokenizer;

import jsonbroker.library.common.auxiliary.DataHelper;
import jsonbroker.library.common.auxiliary.InputStreamHelper;
import jsonbroker.library.common.auxiliary.MutableData;
import jsonbroker.library.common.auxiliary.NumericUtilities;
import jsonbroker.library.common.http.Entity;
import jsonbroker.library.common.http.StreamEntity;
import jsonbroker.library.common.log.Log;

/**
 * Parses an {@link HttpRequest} (request line, headers, optional body) from
 * an input stream, enforcing hard limits on line length and header count to
 * guard against malformed or hostile input.
 */
public class HttpRequestReader {

    // Maximum number of bytes allowed on a single request/header line.
    private static final int LINE_LENGTH_UPPER_BOUND = 512;

    // Maximum number of header lines accepted per request.
    private static final int NUMBER_HEADERS_UPPER_BOUND = 32;

    // Lookup table: true for bytes that may NOT appear in a request/header
    // line. Valid chars are 'cr', 'nl', and all the chars between 'space'
    // and '~'.
    private static final boolean[] INVALID_CHARS = new boolean[256];

    static {
        for( int i = 0; i < 256; i++ ) {
            INVALID_CHARS[i] = true;
        }

        INVALID_CHARS[0x0d] = false; // 0x0d = 'cr'
        INVALID_CHARS[0x0a] = false; // 0x0a = 'nl'

        for( int i = 0x20; i <= 0x7e; i++ ) { // 0x20 = 'space'; 0x7e = '~'
            INVALID_CHARS[i] = false;
        }
    }

    private static final Log log = Log.getLog(HttpRequestReader.class);

    /**
     * Parses the HTTP request line ("[METHOD] [Encoded URL] HTTP/1.?") and
     * stores method and request-uri on {@code request}.
     *
     * @throws the 400/501 runtime exceptions produced by
     *         {@link HttpErrorHelper} on a malformed line or an unsupported
     *         method.
     */
    private static void setOperationDetailsForRequest(HttpRequest request, String line) {

        // inherited from c-sharp ... not sure this will happen in the java world
        if (line == null) {
            log.error( "line == null");
            throw HttpErrorHelper.badRequest400FromOriginator(HttpRequestReader.class);
        }

        StringTokenizer tokenizer = new StringTokenizer( line );

        if( 3 != tokenizer.countTokens() ) {
            log.errorFormat("3 != tokenizer.countTokens(); tokenizer.countTokens() = %d; line = '%s'", tokenizer.countTokens(), line);
            throw HttpErrorHelper.badRequest400FromOriginator(HttpRequestReader.class);
        }

        /*
         * HTTP method ...
         */
        String method = tokenizer.nextToken();

        if( HttpMethod.GET.matches( method ) ) {
            request.setMethod( HttpMethod.GET );
        } else if( HttpMethod.POST.matches( method ) ) {
            request.setMethod( HttpMethod.POST );
        } else if (HttpMethod.OPTIONS.matches(method)) {
            request.setMethod( HttpMethod.OPTIONS );
        } else {
            log.errorFormat( "unknown HTTP method; method = '%s'; line = '%s'" , method, line );
            throw HttpErrorHelper.methodNotImplemented501FromOriginator( HttpRequestReader.class);
        }

        /*
         * HTTP request-uri ...
         */
        String requestUri = tokenizer.nextToken();
        request.setRequestUri( requestUri );
    }

    /**
     * Reads one line (terminated by '\n'; any '\r' is dropped) into a UTF-8
     * string, appending raw bytes to {@code buffer}.
     *
     * @return the line, or null at the end of the stream (before any byte
     *         was read).
     * @throws the 400 runtime exception from {@link HttpErrorHelper} on an
     *         invalid character or a line longer than
     *         {@link #LINE_LENGTH_UPPER_BOUND}.
     */
    private static String readLine(InputStream inputStream, MutableData buffer) {

        int byteRead = InputStreamHelper.readByte( inputStream, HttpRequestReader.class);

        // null corresponds to the end of a stream
        if( -1 == byteRead ) {
            return null;
        }

        int i = 0;
        do {

            if( -1 != byteRead ) {
                if( INVALID_CHARS[ byteRead ] ) {
                    log.errorFormat( "INVALID_CHARS[ byteRead ]; byteRead = 0x%x" , byteRead );
                    // unexpected character
                    throw HttpErrorHelper.badRequest400FromOriginator(HttpRequestReader.class);
                }
            }

            // end of stream or end of the line
            if( -1 == byteRead || '\n' == byteRead ) {
                return DataHelper.toUtf8String( buffer );
            }

            // filter out '\r'
            if( '\r' != byteRead ) {
                buffer.append( (byte)byteRead );
            }

            byteRead = InputStreamHelper.readByte( inputStream, HttpRequestReader.class);
            i++;

        } while( i < LINE_LENGTH_UPPER_BOUND );

        log.errorFormat( "line too long; i = %d", i);

        // line is too long
        throw HttpErrorHelper.badRequest400FromOriginator(HttpRequestReader.class);
    }

    /**
     * Parses a "Name: value" header line and stores it on {@code request}
     * with a lower-cased name (header names are case insensitive).
     *
     * @throws the 400 runtime exception from {@link HttpErrorHelper} when
     *         the line contains no ':'.
     */
    private static void addHeader(String header, HttpRequest request) {

        String name;
        String value;

        int firstColon = header.indexOf(':');
        if (-1 == firstColon) {
            log.errorFormat( "-1 == firstColon; header = '%s'" , header);
            throw HttpErrorHelper.badRequest400FromOriginator(HttpRequestReader.class);
        }

        name = header.substring( 0, firstColon).toLowerCase(); // headers are case insensitive
        value = header.substring(firstColon + 1).trim();

        if( Log.isDebugEnabled() ) {
            // NOTE(review): this logs the raw Authorization header value
            // (credentials) at debug level - confirm this is acceptable.
            if( "authorization".equals( name ) ) {
                log.debug( value, name);
            }
        }

        request.setHttpHeader( name, value);
    }

    /**
     * Reads a complete HTTP request (request line, headers and, when a
     * content-length header is present, a streamed body entity).
     *
     * @return the parsed request, or null when the stream is already at the
     *         end (no request pending).
     * @throws the 400/501 runtime exceptions produced by
     *         {@link HttpErrorHelper} on malformed input.
     */
    public static HttpRequest readRequest(InputStream inputStream) {

        MutableData buffer = new MutableData();

        String firstLine = readLine(inputStream,buffer);
        // log.debug(firstLine, "firstLine");

        // null corresponds to the end of a stream
        if( null == firstLine ) {
            return null;
        }

        HttpRequest answer = new HttpRequest();

        setOperationDetailsForRequest(answer, firstLine);

        int i = 0;

        do {

            buffer.clear();
            String line = readLine(inputStream,buffer);

            // BUGFIX: readLine() returns null on end of stream; previously
            // this fell through to line.length() and threw an NPE on a
            // truncated request. Treat it as a malformed (400) request.
            if (null == line) {
                log.error("unexpected end of stream while reading headers");
                throw HttpErrorHelper.badRequest400FromOriginator(HttpRequestReader.class);
            }

            // blank line terminates the header section
            if (0 == line.length()) {
                break;
            } else {
                addHeader(line, answer);
            }
            i++;

        } while( i < NUMBER_HEADERS_UPPER_BOUND );

        // BUGFIX: was 'i > NUMBER_HEADERS_UPPER_BOUND', which is unreachable
        // because the loop condition caps i at exactly the bound. When the
        // loop exits with i == bound the blank line was never consumed, so
        // the request must be rejected or the stream is left desynchronized.
        if( i >= NUMBER_HEADERS_UPPER_BOUND ) {
            log.errorFormat( "i >= NUMBER_HEADERS_UPPER_BOUND; i = %d", i);
            throw HttpErrorHelper.badRequest400FromOriginator(HttpRequestReader.class);
        }

        String contentLengthString = null;

        if( answer.getHeaders().containsKey( "content-length") ) {
            contentLengthString = answer.getHeaders().get( "content-length" );
        }

        // no body ?
        if (null == contentLengthString) {
            // log.debug("null == contentLengthString");
            return answer;
        }

        long contentLength = NumericUtilities.parseLong(contentLengthString);

        // Body is not read eagerly; it is exposed as a bounded stream entity.
        Entity body = new StreamEntity( inputStream, contentLength );
        answer.setEntity( body );

        return answer;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.server.initialization;

import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.servlet.GuiceFilter;
import org.apache.druid.guice.annotations.Self;
import org.apache.druid.java.util.common.lifecycle.Lifecycle;
import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.java.util.http.client.HttpClientConfig;
import org.apache.druid.java.util.http.client.HttpClientInit;
import org.apache.druid.server.DruidNode;
import org.apache.druid.server.initialization.jetty.JettyServerInitUtils;
import org.apache.druid.server.initialization.jetty.JettyServerInitializer;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.HandlerList;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.joda.time.Duration;
import org.junit.After;
import org.junit.Before;

import javax.net.ssl.SSLContext;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.zip.Deflater;

/**
 * Base harness for Jetty-related integration tests. Subclasses supply an
 * {@link Injector} via {@link #setupInjector()}; {@link #setup()} then starts
 * the injected {@link Lifecycle} (which boots the Jetty {@link Server}) and
 * wires up an {@link HttpClient} for issuing requests against it.
 *
 * <p>Also hosts a set of small JAX-RS resources and a servlet filter used by
 * the concrete test classes to exercise slow responses, latched responses,
 * plain CRUD endpoints, mid-response exceptions, and fake authentication.
 */
public abstract class BaseJettyTest
{
  protected static final String DEFAULT_RESPONSE_CONTENT = "hello";

  protected Lifecycle lifecycle;
  protected HttpClient client;
  protected Server server;
  // Ports are resolved from the injected DruidNode in setup(); -1 until then.
  protected int port = -1;
  protected int tlsPort = -1;

  /**
   * Sets the system properties that configure the embedded Jetty server and
   * HTTP client timeouts. Subclasses may override to adjust the configuration
   * before the injector is created.
   */
  protected void setProperties()
  {
    System.setProperty("druid.server.http.numThreads", "20");
    System.setProperty("druid.server.http.maxIdleTime", "PT1S");
    System.setProperty("druid.global.http.readTimeout", "PT1S");
  }

  /**
   * Builds the injector, starts the lifecycle (and therefore the Jetty
   * server), and captures the server, client, and port assignments.
   */
  @Before
  public void setup() throws Exception
  {
    setProperties();
    Injector injector = setupInjector();
    final DruidNode node = injector.getInstance(Key.get(DruidNode.class, Self.class));
    port = node.getPlaintextPort();
    tlsPort = node.getTlsPort();
    lifecycle = injector.getInstance(Lifecycle.class);
    lifecycle.start();
    ClientHolder holder = injector.getInstance(ClientHolder.class);
    server = injector.getInstance(Server.class);
    client = holder.getClient();
  }

  /** Supplies the Guice injector that configures the server under test. */
  protected abstract Injector setupInjector();

  @After
  public void teardown()
  {
    // Guard against setup() having failed before lifecycle was assigned;
    // otherwise this @After method throws NPE and masks the real failure.
    if (lifecycle != null) {
      lifecycle.stop();
    }
  }

  /**
   * Holds an {@link HttpClient} whose connection pool size is configurable,
   * so tests can constrain client-side concurrency.
   */
  public static class ClientHolder
  {
    HttpClient client;

    ClientHolder()
    {
      this(1);
    }

    ClientHolder(int maxClientConnections)
    {
      final Lifecycle druidLifecycle = new Lifecycle();
      try {
        this.client = HttpClientInit.createClient(
            HttpClientConfig.builder()
                            .withNumConnections(maxClientConnections)
                            .withSslContext(SSLContext.getDefault())
                            .withReadTimeout(Duration.ZERO)
                            .build(),
            druidLifecycle
        );
      }
      catch (Exception e) {
        throw new RuntimeException(e);
      }
    }

    public HttpClient getClient()
    {
      return client;
    }
  }

  /**
   * Standard Jetty initializer for these tests: default servlet, extension
   * filters, Guice filter, and a gzip handler wrapping the context root.
   */
  public static class JettyServerInit implements JettyServerInitializer
  {
    @Override
    public void initialize(Server server, Injector injector)
    {
      final ServletContextHandler root = new ServletContextHandler(ServletContextHandler.SESSIONS);
      root.addServlet(new ServletHolder(new DefaultServlet()), "/*");
      JettyServerInitUtils.addExtensionFilters(root, injector);
      root.addFilter(GuiceFilter.class, "/*", null);

      final HandlerList handlerList = new HandlerList();
      handlerList.setHandlers(
          new Handler[]{JettyServerInitUtils.wrapWithDefaultGzipHandler(
              root,
              ServerConfig.DEFAULT_GZIP_INFLATE_BUFFER_SIZE,
              Deflater.DEFAULT_COMPRESSION
          )}
      );
      server.setHandler(handlerList);
    }
  }

  /** Resource that sleeps 500-2100 ms before responding, to simulate slow requests. */
  @Path("/slow")
  public static class SlowResource
  {
    @GET
    @Path("/hello")
    @Produces(MediaType.APPLICATION_JSON)
    public Response hello()
    {
      try {
        TimeUnit.MILLISECONDS.sleep(500 + ThreadLocalRandom.current().nextInt(1600));
      }
      catch (InterruptedException e) {
        // Restore the interrupt flag so callers up the stack can observe it.
        Thread.currentThread().interrupt();
      }
      return Response.ok(DEFAULT_RESPONSE_CONTENT).build();
    }
  }

  /**
   * Resource whose response is gated by a {@link LatchedRequestStateHolder},
   * letting a test pause a request server-side until the client is ready.
   */
  @Path("/latched")
  public static class LatchedResource
  {
    private final LatchedRequestStateHolder state;

    @Inject
    public LatchedResource(LatchedRequestStateHolder state)
    {
      this.state = state;
    }

    @GET
    @Path("/hello")
    @Produces(MediaType.APPLICATION_JSON)
    public Response hello()
    {
      state.serverStartRequest();
      try {
        state.serverWaitForClientReadyToFinishRequest();
      }
      catch (InterruptedException e) {
        // Restore the interrupt flag rather than silently dropping it.
        Thread.currentThread().interrupt();
      }
      return Response.ok(DEFAULT_RESPONSE_CONTENT).build();
    }
  }

  /**
   * Pair of latches coordinating a client thread and the server-side request
   * thread: the client waits for the request to start, the server waits for
   * the client's go-ahead to finish. Both waits time out after 10 seconds.
   */
  public static class LatchedRequestStateHolder
  {
    private static final int TIMEOUT_MILLIS = 10_000;
    private CountDownLatch requestStartLatch;
    private CountDownLatch requestEndLatch;

    public LatchedRequestStateHolder()
    {
      reset();
    }

    /** Re-arms both latches so the holder can be reused across requests. */
    public void reset()
    {
      requestStartLatch = new CountDownLatch(1);
      requestEndLatch = new CountDownLatch(1);
    }

    public void clientWaitForServerToStartRequest() throws InterruptedException
    {
      requestStartLatch.await(TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);
    }

    public void serverStartRequest()
    {
      requestStartLatch.countDown();
    }

    public void serverWaitForClientReadyToFinishRequest() throws InterruptedException
    {
      requestEndLatch.await(TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);
    }

    public void clientReadyToFinishRequest()
    {
      requestEndLatch.countDown();
    }
  }

  /** Trivial resource answering GET/POST/DELETE with the default content. */
  @Path("/default")
  public static class DefaultResource
  {
    @DELETE
    @Path("{resource}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response delete()
    {
      return Response.ok(DEFAULT_RESPONSE_CONTENT).build();
    }

    @GET
    @Produces(MediaType.APPLICATION_JSON)
    public Response get()
    {
      return Response.ok(DEFAULT_RESPONSE_CONTENT).build();
    }

    @POST
    @Produces(MediaType.APPLICATION_JSON)
    public Response post()
    {
      return Response.ok(DEFAULT_RESPONSE_CONTENT).build();
    }
  }

  /** Echo resource: returns the posted text body unchanged. */
  @Path("/return")
  public static class DirectlyReturnResource
  {
    @POST
    @Consumes(MediaType.TEXT_PLAIN)
    @Produces(MediaType.TEXT_PLAIN)
    public Response postText(String text)
    {
      return Response.ok(text).build();
    }
  }

  /**
   * Resource that writes and flushes partial output, then throws, so tests
   * can observe how the stack behaves when a response fails mid-stream.
   */
  @Path("/exception")
  public static class ExceptionResource
  {
    @GET
    @Path("/exception")
    @Produces(MediaType.APPLICATION_JSON)
    public Response exception(
        @Context HttpServletResponse resp
    ) throws IOException
    {
      final ServletOutputStream outputStream = resp.getOutputStream();
      outputStream.println("hello");
      outputStream.flush();
      try {
        TimeUnit.MILLISECONDS.sleep(200);
      }
      catch (InterruptedException e) {
        // Restore the interrupt flag before propagating the IOException.
        Thread.currentThread().interrupt();
      }
      throw new IOException();
    }
  }

  /**
   * Fake authentication filter: requests with no {@link #AUTH_HDR} header, or
   * with the header set to {@link #SECRET_USER}, pass through; a header with
   * any other value is rejected with 401.
   */
  public static class DummyAuthFilter implements Filter
  {
    public static final String AUTH_HDR = "secretUser";
    public static final String SECRET_USER = "bob";

    @Override
    public void init(FilterConfig filterConfig)
    {
    }

    @Override
    public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain)
        throws IOException, ServletException
    {
      HttpServletRequest request = (HttpServletRequest) req;
      if (request.getHeader(AUTH_HDR) == null || request.getHeader(AUTH_HDR).equals(SECRET_USER)) {
        chain.doFilter(req, resp);
      }
      else {
        HttpServletResponse response = (HttpServletResponse) resp;
        response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Failed even fake authentication.");
      }
    }

    @Override
    public void destroy()
    {
    }
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.memory;

import com.facebook.presto.execution.LocationFactory;
import com.facebook.presto.execution.QueryExecution;
import com.facebook.presto.execution.QueryIdGenerator;
import com.facebook.presto.memory.LowMemoryKiller.QueryMemoryInfo;
import com.facebook.presto.metadata.InternalNodeManager;
import com.facebook.presto.server.ServerConfig;
import com.facebook.presto.spi.Node;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.QueryId;
import com.facebook.presto.spi.memory.ClusterMemoryPoolManager;
import com.facebook.presto.spi.memory.MemoryPoolId;
import com.facebook.presto.spi.memory.MemoryPoolInfo;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Streams;
import io.airlift.http.client.HttpClient;
import io.airlift.json.JsonCodec;
import io.airlift.log.Logger;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import org.weakref.jmx.JmxException;
import org.weakref.jmx.MBeanExporter;
import org.weakref.jmx.Managed;
import org.weakref.jmx.ObjectNames;

import javax.annotation.PreDestroy;
import javax.annotation.concurrent.GuardedBy;
import javax.inject.Inject;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;

import static com.facebook.presto.ExceededMemoryLimitException.exceededGlobalLimit;
import static com.facebook.presto.SystemSessionProperties.RESOURCE_OVERCOMMIT;
import static com.facebook.presto.SystemSessionProperties.getQueryMaxMemory;
import static com.facebook.presto.SystemSessionProperties.resourceOvercommit;
import static com.facebook.presto.memory.LocalMemoryManager.GENERAL_POOL;
import static com.facebook.presto.memory.LocalMemoryManager.RESERVED_POOL;
import static com.facebook.presto.spi.NodeState.ACTIVE;
import static com.facebook.presto.spi.NodeState.SHUTTING_DOWN;
import static com.facebook.presto.spi.StandardErrorCode.CLUSTER_OUT_OF_MEMORY;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static com.google.common.collect.MoreCollectors.toOptional;
import static com.google.common.collect.Sets.difference;
import static io.airlift.units.DataSize.succinctBytes;
import static io.airlift.units.Duration.nanosSince;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;

/**
 * Coordinator-side manager of cluster-wide memory. On each call to
 * {@link #process}, it enforces per-query and cluster memory limits (failing
 * queries that exceed them), optionally invokes a {@link LowMemoryKiller}
 * when the cluster has been out of memory for longer than the configured
 * delay, recomputes memory-pool assignments, and pushes those assignments to
 * every known worker node. Pool state is exported via JMX.
 *
 * <p>Thread-safety: mutable state is either atomic or guarded by {@code this};
 * the main entry points ({@code process}, {@code updatePools},
 * {@code updateAssignments}, {@code destroy}) are {@code synchronized}.
 */
public class ClusterMemoryManager
        implements ClusterMemoryPoolManager
{
    private static final Logger log = Logger.get(ClusterMemoryManager.class);

    // Single-threaded executor used to fire pool-change listener callbacks
    // outside of this manager's lock.
    private final ExecutorService listenerExecutor = Executors.newSingleThreadExecutor();
    private final InternalNodeManager nodeManager;
    private final LocationFactory locationFactory;
    private final HttpClient httpClient;
    private final MBeanExporter exporter;
    private final JsonCodec<MemoryInfo> memoryInfoCodec;
    private final JsonCodec<MemoryPoolAssignmentsRequest> assignmentsRequestJsonCodec;
    private final DataSize maxQueryMemory;
    // Memory management is only active on the coordinator.
    private final boolean enabled;
    private final LowMemoryKiller lowMemoryKiller;
    // How long the cluster must stay out of memory before the killer may act.
    private final Duration killOnOutOfMemoryDelay;
    private final String coordinatorId;
    // Monotonically increasing version stamped on each assignments request,
    // so workers can detect stale updates.
    private final AtomicLong memoryPoolAssignmentsVersion = new AtomicLong();
    private final AtomicLong clusterMemoryUsageBytes = new AtomicLong();
    private final AtomicLong clusterMemoryBytes = new AtomicLong();
    private final AtomicLong queriesKilledDueToOutOfMemory = new AtomicLong();

    // Known worker nodes keyed by node identifier; refreshed in updateNodes().
    private final Map<String, RemoteNodeMemory> nodes = new HashMap<>();

    @GuardedBy("this")
    private final Map<MemoryPoolId, List<Consumer<MemoryPoolInfo>>> changeListeners = new HashMap<>();

    @GuardedBy("this")
    private final Map<MemoryPoolId, ClusterMemoryPool> pools = new HashMap<>();

    // Timestamp (nanos) of the last process() call where the cluster was NOT
    // out of memory; drives the kill-on-OOM delay.
    @GuardedBy("this")
    private long lastTimeNotOutOfMemory = System.nanoTime();

    // Last query killed by the low-memory killer; we wait for it to disappear
    // from the general pool before killing another (see isLastKilledQueryGone).
    @GuardedBy("this")
    private QueryId lastKilledQuery;

    @Inject
    public ClusterMemoryManager(
            @ForMemoryManager HttpClient httpClient,
            InternalNodeManager nodeManager,
            LocationFactory locationFactory,
            MBeanExporter exporter,
            JsonCodec<MemoryInfo> memoryInfoCodec,
            JsonCodec<MemoryPoolAssignmentsRequest> assignmentsRequestJsonCodec,
            QueryIdGenerator queryIdGenerator,
            LowMemoryKiller lowMemoryKiller,
            ServerConfig serverConfig,
            MemoryManagerConfig config)
    {
        requireNonNull(config, "config is null");
        this.nodeManager = requireNonNull(nodeManager, "nodeManager is null");
        this.locationFactory = requireNonNull(locationFactory, "locationFactory is null");
        this.httpClient = requireNonNull(httpClient, "httpClient is null");
        this.exporter = requireNonNull(exporter, "exporter is null");
        this.memoryInfoCodec = requireNonNull(memoryInfoCodec, "memoryInfoCodec is null");
        this.assignmentsRequestJsonCodec = requireNonNull(assignmentsRequestJsonCodec, "assignmentsRequestJsonCodec is null");
        this.lowMemoryKiller = requireNonNull(lowMemoryKiller, "lowMemoryKiller is null");
        this.maxQueryMemory = config.getMaxQueryMemory();
        this.coordinatorId = queryIdGenerator.getCoordinatorId();
        this.enabled = serverConfig.isCoordinator();
        this.killOnOutOfMemoryDelay = config.getKillOnOutOfMemoryDelay();
    }

    /** Registers a listener to be notified whenever the given pool's info changes. */
    @Override
    public synchronized void addChangeListener(MemoryPoolId poolId, Consumer<MemoryPoolInfo> listener)
    {
        changeListeners.computeIfAbsent(poolId, id -> new ArrayList<>()).add(listener);
    }

    /**
     * Main periodic entry point. Enforces memory limits on the given queries,
     * possibly kills a query when the cluster is persistently out of memory,
     * then refreshes pool state and pushes assignments to all worker nodes.
     */
    public synchronized void process(Iterable<QueryExecution> queries)
    {
        if (!enabled) {
            return;
        }
        boolean outOfMemory = isClusterOutOfMemory();
        if (!outOfMemory) {
            lastTimeNotOutOfMemory = System.nanoTime();
        }
        boolean queryKilled = false;
        long totalBytes = 0;
        for (QueryExecution query : queries) {
            long bytes = query.getTotalMemoryReservation();
            DataSize sessionMaxQueryMemory = getQueryMaxMemory(query.getSession());
            // The effective limit is the smaller of the system-wide and
            // session-level query memory limits.
            long queryMemoryLimit = Math.min(maxQueryMemory.toBytes(), sessionMaxQueryMemory.toBytes());
            totalBytes += bytes;
            if (resourceOvercommit(query.getSession()) && outOfMemory) {
                // If a query has requested resource overcommit, only kill it if the cluster has run out of memory
                DataSize memory = succinctBytes(bytes);
                query.fail(new PrestoException(CLUSTER_OUT_OF_MEMORY,
                        format("The cluster is out of memory and %s=true, so this query was killed. It was using %s of memory", RESOURCE_OVERCOMMIT, memory)));
                queryKilled = true;
            }
            if (!resourceOvercommit(query.getSession()) && bytes > queryMemoryLimit) {
                DataSize maxMemory = succinctBytes(queryMemoryLimit);
                query.fail(exceededGlobalLimit(maxMemory));
                queryKilled = true;
            }
        }
        clusterMemoryUsageBytes.set(totalBytes);

        // Invoke the low-memory killer only when: a killer is configured, the
        // cluster is out of memory, nothing was killed above this round, the
        // OOM condition has persisted past the configured delay, and the
        // previously killed query has actually released its memory.
        if (!(lowMemoryKiller instanceof NoneLowMemoryKiller) &&
                outOfMemory &&
                !queryKilled &&
                nanosSince(lastTimeNotOutOfMemory).compareTo(killOnOutOfMemoryDelay) > 0 &&
                isLastKilledQueryGone()) {
            List<QueryMemoryInfo> queryMemoryInfoList = Streams.stream(queries)
                    .map(query -> new QueryMemoryInfo(query.getQueryId(), query.getMemoryPool().getId(), query.getTotalMemoryReservation()))
                    .collect(toImmutableList());
            List<MemoryInfo> nodeMemoryInfos = nodes.values().stream()
                    .map(RemoteNodeMemory::getInfo)
                    .filter(Optional::isPresent)
                    .map(Optional::get)
                    .collect(toImmutableList());
            Optional<QueryId> chosenQueryId = lowMemoryKiller.chooseQueryToKill(queryMemoryInfoList, nodeMemoryInfos);
            if (chosenQueryId.isPresent()) {
                Optional<QueryExecution> chosenQuery = Streams.stream(queries).filter(query -> chosenQueryId.get().equals(query.getQueryId())).collect(toOptional());
                if (chosenQuery.isPresent()) {
                    // See comments in isLastKilledQueryGone for why chosenQuery might be absent.
                    chosenQuery.get().fail(new PrestoException(CLUSTER_OUT_OF_MEMORY, "Query killed because the cluster is out of memory. Please try again in a few minutes."));
                    queriesKilledDueToOutOfMemory.incrementAndGet();
                    lastKilledQuery = chosenQueryId.get();
                    logQueryKill(chosenQueryId.get(), nodeMemoryInfos);
                }
            }
        }

        // Count running queries per pool, then refresh pool state and push the
        // (possibly updated) pool assignments to all nodes.
        Map<MemoryPoolId, Integer> countByPool = new HashMap<>();
        for (QueryExecution query : queries) {
            MemoryPoolId id = query.getMemoryPool().getId();
            countByPool.put(id, countByPool.getOrDefault(id, 0) + 1);
        }
        updatePools(countByPool);
        updateNodes(updateAssignments(queries));
    }

    /**
     * Returns true if the last query killed by the low-memory killer has
     * released its memory reservation (or no query has been killed yet),
     * meaning it is safe to consider killing another.
     */
    @GuardedBy("this")
    private boolean isLastKilledQueryGone()
    {
        if (lastKilledQuery == null) {
            return true;
        }
        // pools fields is updated based on nodes field.
        // Therefore, if the query is gone from pools field, it should also be gone from nodes field.
        // However, since nodes can be updated asynchronously, it has the potential of coming back after being gone.
        // Therefore, even if the query appears to be gone here, it might be back when one inspects nodes later.
        ClusterMemoryPool generalPool = pools.get(GENERAL_POOL);
        if (generalPool == null) {
            return false;
        }
        return !generalPool.getQueryMemoryReservations().containsKey(lastKilledQuery);
    }

    /**
     * Logs a per-node snapshot of the general pool (max/free bytes and
     * per-query reservations) explaining why {@code killedQueryId} was killed.
     */
    private void logQueryKill(QueryId killedQueryId, List<MemoryInfo> nodes)
    {
        if (!log.isInfoEnabled()) {
            return;
        }
        StringBuilder nodeDescription = new StringBuilder();
        nodeDescription.append("Query Kill Decision: Killed ").append(killedQueryId).append("\n");
        for (MemoryInfo node : nodes) {
            MemoryPoolInfo memoryPoolInfo = node.getPools().get(GENERAL_POOL);
            if (memoryPoolInfo == null) {
                continue;
            }
            nodeDescription.append("Query Kill Scenario: ");
            nodeDescription.append("MaxBytes ").append(memoryPoolInfo.getMaxBytes()).append(' ');
            nodeDescription.append("FreeBytes ").append(memoryPoolInfo.getFreeBytes() + memoryPoolInfo.getReservedRevocableBytes()).append(' ');
            nodeDescription.append("Queries ");
            Joiner.on(",").withKeyValueSeparator("=").appendTo(nodeDescription, memoryPoolInfo.getQueryMemoryReservations());
            nodeDescription.append('\n');
        }
        log.info(nodeDescription.toString());
    }

    /** Snapshot of the current cluster memory pools, for tests. */
    @VisibleForTesting
    synchronized Map<MemoryPoolId, ClusterMemoryPool> getPools()
    {
        return ImmutableMap.copyOf(pools);
    }

    /**
     * The cluster is considered out of memory when the reserved pool is
     * already occupied and some node's general pool has blocked queries.
     */
    private synchronized boolean isClusterOutOfMemory()
    {
        ClusterMemoryPool reservedPool = pools.get(RESERVED_POOL);
        ClusterMemoryPool generalPool = pools.get(GENERAL_POOL);
        return reservedPool != null &&
                generalPool != null &&
                reservedPool.getAssignedQueries() > 0 &&
                generalPool.getBlockedNodes() > 0;
    }

    /**
     * Recomputes pool assignments: if the reserved pool is free and the
     * general pool is blocked, promotes the largest non-overcommit query to
     * the reserved pool. Returns the versioned assignments request to send to
     * all worker nodes.
     */
    private synchronized MemoryPoolAssignmentsRequest updateAssignments(Iterable<QueryExecution> queries)
    {
        ClusterMemoryPool reservedPool = pools.get(RESERVED_POOL);
        ClusterMemoryPool generalPool = pools.get(GENERAL_POOL);
        long version = memoryPoolAssignmentsVersion.incrementAndGet();
        // Check that all previous assignments have propagated to the visible nodes. This doesn't account for temporary network issues,
        // and is more of a safety check than a guarantee
        if (reservedPool != null && generalPool != null && allAssignmentsHavePropagated(queries)) {
            if (reservedPool.getAssignedQueries() == 0 && generalPool.getBlockedNodes() > 0) {
                QueryExecution biggestQuery = null;
                long maxMemory = -1;
                for (QueryExecution queryExecution : queries) {
                    if (resourceOvercommit(queryExecution.getSession())) {
                        // Don't promote queries that requested resource overcommit to the reserved pool,
                        // since their memory usage is unbounded.
                        continue;
                    }
                    long bytesUsed = queryExecution.getTotalMemoryReservation();
                    if (bytesUsed > maxMemory) {
                        biggestQuery = queryExecution;
                        maxMemory = bytesUsed;
                    }
                }
                if (biggestQuery != null) {
                    biggestQuery.setMemoryPool(new VersionedMemoryPoolId(RESERVED_POOL, version));
                }
            }
        }

        ImmutableList.Builder<MemoryPoolAssignment> assignments = ImmutableList.builder();
        for (QueryExecution queryExecution : queries) {
            assignments.add(new MemoryPoolAssignment(queryExecution.getQueryId(), queryExecution.getMemoryPool().getId()));
        }
        return new MemoryPoolAssignmentsRequest(coordinatorId, version, assignments.build());
    }

    /**
     * Returns true if every visible node has acknowledged an assignments
     * version at least as new as the oldest assignment among the given
     * queries (i.e. previous assignments have reached all nodes).
     */
    private boolean allAssignmentsHavePropagated(Iterable<QueryExecution> queries)
    {
        if (nodes.isEmpty()) {
            // Assignments can't have propagated, if there are no visible nodes.
            return false;
        }

        long newestAssignment = ImmutableList.copyOf(queries).stream()
                .map(QueryExecution::getMemoryPool)
                .mapToLong(VersionedMemoryPoolId::getVersion)
                .min()
                .orElse(-1);

        long mostOutOfDateNode = nodes.values().stream()
                .mapToLong(RemoteNodeMemory::getCurrentAssignmentVersion)
                .min()
                .orElse(Long.MAX_VALUE);

        return newestAssignment <= mostOutOfDateNode;
    }

    /**
     * Synchronizes the tracked node set with the node manager's view (ACTIVE
     * and SHUTTING_DOWN nodes), then asynchronously pushes the given
     * assignments to every node.
     */
    private void updateNodes(MemoryPoolAssignmentsRequest assignments)
    {
        ImmutableSet.Builder<Node> builder = ImmutableSet.builder();
        Set<Node> aliveNodes = builder
                .addAll(nodeManager.getNodes(ACTIVE))
                .addAll(nodeManager.getNodes(SHUTTING_DOWN))
                .build();

        ImmutableSet<String> aliveNodeIds = aliveNodes.stream()
                .map(Node::getNodeIdentifier)
                .collect(toImmutableSet());

        // Remove nodes that don't exist anymore
        // Make a copy to materialize the set difference
        Set<String> deadNodes = ImmutableSet.copyOf(difference(nodes.keySet(), aliveNodeIds));
        nodes.keySet().removeAll(deadNodes);

        // Add new nodes
        for (Node node : aliveNodes) {
            if (!nodes.containsKey(node.getNodeIdentifier())) {
                nodes.put(node.getNodeIdentifier(), new RemoteNodeMemory(httpClient, memoryInfoCodec, assignmentsRequestJsonCodec, locationFactory.createMemoryInfoLocation(node)));
            }
        }

        // Schedule refresh
        for (RemoteNodeMemory node : nodes.values()) {
            node.asyncRefresh(assignments);
        }
    }

    /**
     * Rebuilds the cluster-wide pool view from the latest per-node memory
     * reports: updates total cluster memory, drops pools no longer reported
     * by any node (unexporting their MBeans and notifying listeners with an
     * empty MemoryPoolInfo), creates/exports newly seen pools, and fires
     * change listeners with the refreshed pool info.
     */
    private synchronized void updatePools(Map<MemoryPoolId, Integer> queryCounts)
    {
        // Update view of cluster memory and pools
        List<MemoryInfo> nodeMemoryInfos = nodes.values().stream()
                .map(RemoteNodeMemory::getInfo)
                .filter(Optional::isPresent)
                .map(Optional::get)
                .collect(toImmutableList());

        long totalClusterMemory = nodeMemoryInfos.stream()
                .map(MemoryInfo::getTotalNodeMemory)
                .mapToLong(DataSize::toBytes)
                .sum();
        clusterMemoryBytes.set(totalClusterMemory);

        Set<MemoryPoolId> activePoolIds = nodeMemoryInfos.stream()
                .flatMap(info -> info.getPools().keySet().stream())
                .collect(toImmutableSet());

        // Make a copy to materialize the set difference
        Set<MemoryPoolId> removedPools = ImmutableSet.copyOf(difference(pools.keySet(), activePoolIds));
        for (MemoryPoolId removed : removedPools) {
            unexport(pools.get(removed));
            pools.remove(removed);
            if (changeListeners.containsKey(removed)) {
                for (Consumer<MemoryPoolInfo> listener : changeListeners.get(removed)) {
                    listenerExecutor.execute(() -> listener.accept(new MemoryPoolInfo(0, 0, 0, ImmutableMap.of(), ImmutableMap.of())));
                }
            }
        }
        for (MemoryPoolId id : activePoolIds) {
            ClusterMemoryPool pool = pools.computeIfAbsent(id, poolId -> {
                ClusterMemoryPool newPool = new ClusterMemoryPool(poolId);
                String objectName = ObjectNames.builder(ClusterMemoryPool.class, newPool.getId().toString()).build();
                try {
                    exporter.export(objectName, newPool);
                }
                catch (JmxException e) {
                    log.error(e, "Error exporting memory pool %s", poolId);
                }
                return newPool;
            });
            pool.update(nodeMemoryInfos, queryCounts.getOrDefault(pool.getId(), 0));
            if (changeListeners.containsKey(id)) {
                MemoryPoolInfo info = pool.getInfo();
                for (Consumer<MemoryPoolInfo> listener : changeListeners.get(id)) {
                    listenerExecutor.execute(() -> listener.accept(info));
                }
            }
        }
    }

    /** Unexports all pool MBeans and shuts down the listener executor. */
    @PreDestroy
    public synchronized void destroy()
    {
        try {
            for (ClusterMemoryPool pool : pools.values()) {
                unexport(pool);
            }
            pools.clear();
        }
        finally {
            listenerExecutor.shutdownNow();
        }
    }

    /** Best-effort unexport of a pool's MBean; JMX failures are logged, not thrown. */
    private void unexport(ClusterMemoryPool pool)
    {
        try {
            String objectName = ObjectNames.builder(ClusterMemoryPool.class, pool.getId().toString()).build();
            exporter.unexport(objectName);
        }
        catch (JmxException e) {
            log.error(e, "Failed to unexport pool %s", pool.getId());
        }
    }

    @Managed
    public long getClusterMemoryUsageBytes()
    {
        return clusterMemoryUsageBytes.get();
    }

    @Managed
    public long getClusterMemoryBytes()
    {
        return clusterMemoryBytes.get();
    }

    @Managed
    public long getQueriesKilledDueToOutOfMemory()
    {
        return queriesKilledDueToOutOfMemory.get();
    }
}
/** * Copyright (C) 2015 Valkyrie RCP * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.valkyriercp.form.binding.swing; import org.springframework.beans.support.PropertyComparator; import org.springframework.util.Assert; import org.valkyriercp.binding.form.ConfigurableFormModel; import org.valkyriercp.binding.form.FormModel; import org.valkyriercp.binding.form.support.DefaultFormModel; import org.valkyriercp.binding.value.ObservableList; import org.valkyriercp.binding.value.ValueModel; import org.valkyriercp.binding.value.support.BufferedCollectionValueModel; import org.valkyriercp.binding.value.support.ValueHolder; import org.valkyriercp.component.ShuttleList; import org.valkyriercp.form.binding.Binder; import org.valkyriercp.form.binding.Binding; import org.valkyriercp.form.binding.support.AbstractBindingFactory; import org.valkyriercp.list.BeanPropertyValueComboBoxEditor; import org.valkyriercp.list.BeanPropertyValueListRenderer; import org.valkyriercp.rules.closure.Closure; import javax.swing.*; import java.util.Collections; import java.util.HashMap; import java.util.Map; /** * A convenient implementation of <code>BindingFactory</code>. Provides a set * of methods that address the typical binding requirements of Swing based * forms. 
* * @author Oliver Hutchison */ public class SwingBindingFactory extends AbstractBindingFactory { public SwingBindingFactory(FormModel formModel) { super(formModel); } public Binding createBoundTextField(String formProperty) { return createBinding(JTextField.class, formProperty); } public Binding createBoundTextArea(String formProperty) { return createBinding(JTextArea.class, formProperty); } public Binding createBoundTextArea(String formProperty, int rows, int columns) { Map context = createContext(TextAreaBinder.ROWS_KEY, new Integer(rows)); context.put(TextAreaBinder.COLUMNS_KEY, new Integer(columns)); return createBinding(JTextArea.class, formProperty, context); } public Binding createBoundFormattedTextField(String formProperty) { return createBinding(JFormattedTextField.class, formProperty); } public Binding createBoundFormattedTextField(String formProperty, JFormattedTextField.AbstractFormatterFactory formatterFactory) { Map context = createContext(FormattedTextFieldBinder.FORMATTER_FACTORY_KEY, formatterFactory); return createBinding(JFormattedTextField.class, formProperty, context); } public Binding createBoundSpinner(String formProperty) { return createBinding(JSpinner.class, formProperty); } public Binding createBoundLabel(String formProperty) { return createBinding(JLabel.class, formProperty); } public Binding createBoundToggleButton(String formProperty) { return createBinding(JToggleButton.class, formProperty); } public Binding createBoundCheckBox(String formProperty) { return createBinding(JCheckBox.class, formProperty); } public Binding createBoundComboBox(String formProperty) { return createBinding(JComboBox.class, formProperty); } /** * * @param formProperty the property to be bound * @param selectableItems a Collection or array containing the list of items * that may be selected */ public Binding createBoundComboBox(String formProperty, Object selectableItems) { Map context = createContext(ComboBoxBinder.SELECTABLE_ITEMS_KEY, selectableItems); 
return createBinding(JComboBox.class, formProperty, context); } public Binding createBoundComboBox(String formProperty, ValueModel selectableItemsHolder) { return createBoundComboBox(formProperty, (Object)selectableItemsHolder); } public Binding createBoundComboBox(String formProperty, String selectableItemsProperty, String renderedItemProperty) { return createBoundComboBox(formProperty, getFormModel().getValueModel(selectableItemsProperty), renderedItemProperty); } public Binding createBoundComboBox(String formProperty, Object selectableItems, String renderedProperty) { Map context = createContext(ComboBoxBinder.SELECTABLE_ITEMS_KEY, selectableItems); context.put(ComboBoxBinder.RENDERER_KEY, new BeanPropertyValueListRenderer(renderedProperty)); context.put(ComboBoxBinder.EDITOR_KEY, new BeanPropertyEditorClosure(renderedProperty)); context.put(ComboBoxBinder.COMPARATOR_KEY, new PropertyComparator(renderedProperty, true, true)); return createBinding(JComboBox.class, formProperty, context); } public Binding createBoundComboBox(String formProperty, ValueModel selectableItemsHolder, String renderedProperty) { return createBoundComboBox(formProperty, (Object)selectableItemsHolder, renderedProperty); } /** * This method will most likely move over to FormModel * * @deprecated */ public ObservableList createBoundListModel(String formProperty) { final ConfigurableFormModel formModel = ((ConfigurableFormModel)getFormModel()); ValueModel valueModel = formModel.getValueModel(formProperty); if (!(valueModel instanceof BufferedCollectionValueModel)) { // XXX: HACK! 
valueModel = new BufferedCollectionValueModel((((DefaultFormModel) formModel).getFormObjectPropertyAccessStrategy()).getPropertyValueModel( formProperty), formModel.getFieldMetadata(formProperty).getPropertyType()); formModel.add(formProperty, valueModel); } return (ObservableList)valueModel.getValue(); } public Binding createBoundList(String formProperty) { Map context = createContext(ListBinder.SELECTABLE_ITEMS_KEY, createBoundListModel(formProperty)); return createBinding(JList.class, formProperty, context); } /** * Binds the values specified in the collection contained within * <code>selectableItems</code> to a {@link JList}, with any * user selection being placed in the form property referred to by * <code>selectionFormProperty</code>. Each item in the list will be * rendered as a String. Note that the selection in the * bound list will track any changes to the * <code>selectionFormProperty</code>. This is especially useful to * preselect items in the list - if <code>selectionFormProperty</code> * is not empty when the list is bound, then its content will be used * for the initial selection. This method uses default behavior to * determine the selection mode of the resulting <code>JList</code>: * if <code>selectionFormProperty</code> refers to a * {@link java.util.Collection} type property, then * {@link javax.swing.ListSelectionModel#MULTIPLE_INTERVAL_SELECTION} will * be used, otherwise * {@link javax.swing.ListSelectionModel#SINGLE_SELECTION} will be used. * * @param selectionFormProperty form property to hold user's selection. * This property must either be compatible * with the item objects contained in * <code>selectableItemsHolder</code> (in * which case only single selection makes * sense), or must be a * <code>Collection</code> type, which allows * for multiple selection. * @param selectableItems a Collection or array containing the items * with which to populate the list. 
* @return the constructed list {@link Binding}
     */
    public Binding createBoundList(String selectionFormProperty, Object selectableItems) {
        return createBoundList(selectionFormProperty, new ValueHolder(selectableItems));
    }

    /**
     * Convenience variant of
     * {@link #createBoundList(String, ValueModel, String)} that wraps
     * <code>selectableItems</code> in a new {@link ValueHolder}.
     *
     * @param selectionFormProperty form property that receives the user's selection
     * @param selectableItems       item(s) used to populate the list
     * @param renderedProperty      item property whose value is rendered for each row
     * @return the constructed list {@link Binding}
     */
    public Binding createBoundList(String selectionFormProperty, Object selectableItems, String renderedProperty) {
        return createBoundList(selectionFormProperty, new ValueHolder(selectableItems), renderedProperty);
    }

    /**
     * Binds the items held by <code>selectableItemsHolder</code> to a {@link JList};
     * the user's selection is pushed into <code>selectionFormProperty</code>, and
     * changes to that form property are reflected back as the list selection (so a
     * non-empty property at bind time pre-selects those items). Each item is
     * rendered by reading <code>renderedProperty</code> from it. The selection mode
     * defaults to MULTIPLE_INTERVAL_SELECTION when the form property is a
     * {@link java.util.Collection} type, SINGLE_SELECTION otherwise.
     *
     * @param selectionFormProperty form property that receives the user's selection;
     *                              must either be compatible with the individual items
     *                              (single selection) or be a <code>Collection</code>
     *                              type (multiple selection)
     * @param selectableItemsHolder <code>ValueModel</code> holding the items to show
     * @param renderedProperty      item property whose value is rendered for each row
     * @return the constructed list {@link Binding}
     */
    public Binding createBoundList(String selectionFormProperty, ValueModel selectableItemsHolder, String renderedProperty) {
        return createBoundList(selectionFormProperty, selectableItemsHolder, renderedProperty, null);
    }

    /**
     * Same contract as {@link #createBoundList(String, ValueModel, String)} but each
     * item is rendered with its default <code>String</code> representation (no
     * rendered property, no comparator).
     *
     * @param selectionFormProperty form property that receives the user's selection
     * @param selectableItemsHolder <code>ValueModel</code> holding the items to show
     * @return the constructed list {@link Binding}
     */
    public Binding createBoundList(String selectionFormProperty, ValueModel selectableItemsHolder) {
        return createBoundList(selectionFormProperty, selectableItemsHolder, null, null);
    }

    /**
     * Fully-parameterized list binding: populates a {@link JList} from
     * <code>selectableItems</code> and ties the selection to
     * <code>selectionFormProperty</code>.
     *
     * @param selectionFormProperty form property that receives the user's selection
     * @param selectableItems       source of items; may be a Collection, Object[],
     *                              a ValueModel or a single Object
     * @param renderedProperty      item property rendered for each row; may be null,
     *                              in which case items are rendered as strings
     * @param forceSelectMode       forces the list selection mode; must be one of the
     *                              {@link javax.swing.ListSelectionModel} constants, or
     *                              <code>null</code> for the default behavior
     *                              (MULTIPLE_INTERVAL_SELECTION for Collection-typed
     *                              form properties, SINGLE_SELECTION otherwise)
     * @return the constructed list {@link Binding}
     */
    public Binding createBoundList(String selectionFormProperty, Object selectableItems, String renderedProperty, Integer forceSelectMode) {
        // Assemble the binder context consumed by ListBinder; note the raw Map —
        // this code predates generics in the binder API.
        final Map context = new HashMap();
        if (forceSelectMode != null) {
            context.put(ListBinder.SELECTION_MODE_KEY, forceSelectMode);
        }
        context.put(ListBinder.SELECTABLE_ITEMS_KEY, selectableItems);
        if (renderedProperty != null) {
            // Render each row from the named bean property, and sort rows on it
            // (the two boolean flags configure PropertyComparator; presumably
            // ascending/case-insensitive — confirm against its constructor).
            context.put(ListBinder.RENDERER_KEY, new BeanPropertyValueListRenderer(renderedProperty));
            context.put(ListBinder.COMPARATOR_KEY, new PropertyComparator(renderedProperty, true, true));
        }
        return createBinding(JList.class, selectionFormProperty, context);
    }

    /**
     * Binds the items held by <code>selectableItemsHolder</code> to a
     * {@link ShuttleList} ("chosen"/"available" dual-list control); the chosen
     * items are pushed into <code>selectionFormProperty</code>, and a non-empty
     * property at bind time pre-selects those items.
     *
     * @param selectionFormProperty form property that receives the user's selection;
     *                              must be a <code>Collection</code> or array type
     * @param selectableItemsHolder <code>ValueModel</code> holding the items to show
     * @param renderedProperty      item property rendered for each row; may be null,
     *                              in which case items are rendered as strings
     * @return constructed {@link Binding}; the bound control is a {@link ShuttleList},
     *         which can be accessed to set display properties
     */
    public Binding createBoundShuttleList(String selectionFormProperty, ValueModel selectableItemsHolder, String renderedProperty) {
        Map context = ShuttleListBinder.createBindingContext(getFormModel(), selectionFormProperty, selectableItemsHolder, renderedProperty);
        return createBinding(ShuttleList.class, selectionFormProperty, context);
    }

    /**
     * Convenience variant of
     * {@link #createBoundShuttleList(String, ValueModel, String)} that wraps
     * <code>selectableItems</code> in a new {@link ValueHolder}.
     *
     * @param selectionFormProperty form property that receives the user's selection;
     *                              must be a <code>Collection</code> or array type
     * @param selectableItems       Collection or array of items to show
     * @param renderedProperty      item property rendered for each row; may be null
     * @return constructed {@link Binding}; the bound control is a {@link ShuttleList}
     */
    public Binding createBoundShuttleList(String selectionFormProperty, Object selectableItems, String renderedProperty) {
        return createBoundShuttleList(selectionFormProperty, new ValueHolder(selectableItems), renderedProperty);
    }

    /**
     * Convenience variant of
     * {@link #createBoundShuttleList(String, ValueModel, String)} that wraps
     * <code>selectableItems</code> in a new {@link ValueHolder} and renders
     * each item as a String.
     *
     * @param selectionFormProperty form property that receives the user's selection;
     *                              must be a <code>Collection</code> or array type
     * @param selectableItems       Collection or array of items to show
     * @return constructed {@link Binding}; the bound control is a {@link ShuttleList}
     */
    public Binding createBoundShuttleList(String selectionFormProperty, Object selectableItems) {
        return createBoundShuttleList(selectionFormProperty, new ValueHolder(selectableItems), null);
    }

    /**
     * Binds with an empty context.
     *
     * @see #createBinding(String, String, Map)
     */
    public Binding createBinding(String propertyPath, String binderId) {
        return this.createBinding(propertyPath, binderId, Collections.EMPTY_MAP);
    }

    /**
     * Create a binding based on a specific binder id.
     *
     * @param propertyPath path to the form property to bind
     * @param binderId     id of the binder, resolved through the
     *                     {@link SwingBinderSelectionStrategy}
     * @param context      context data for the binder (can be an empty map,
     *                     must not be null)
     * @return the constructed (and intercepted) {@link Binding}
     */
    public Binding createBinding(String propertyPath, String binderId, Map context) {
        Assert.notNull(context, "Context must not be null");
        // The binder-selection strategy is cast because id-based lookup is only
        // offered by the Swing implementation.
        Binder binder = ((SwingBinderSelectionStrategy)getBinderSelectionStrategy()).getIdBoundBinder(binderId);
        Binding binding = binder.bind(getFormModel(), propertyPath, context);
        interceptBinding(binding);
        return binding;
    }

    /**
     * {@link Closure} that wraps a plain {@link ComboBoxEditor} in a
     * {@link BeanPropertyValueComboBoxEditor} so the editor displays the value
     * of the configured bean property rather than the item's toString().
     */
    protected static class BeanPropertyEditorClosure implements Closure {

        // Name of the bean property whose value the editor should display.
        private final String renderedProperty;

        public BeanPropertyEditorClosure(String renderedProperty) {
            this.renderedProperty = renderedProperty;
        }

        /**
         * Wraps the given {@link ComboBoxEditor} argument.
         *
         * @param argument must be a {@link ComboBoxEditor}
         * @return a {@link BeanPropertyValueComboBoxEditor} delegating to it
         */
        public Object call(Object argument) {
            Assert.isInstanceOf(ComboBoxEditor.class, argument);
            return new BeanPropertyValueComboBoxEditor((ComboBoxEditor) argument, renderedProperty);
        }

        String getRenderedProperty() {
            return renderedProperty;
        }
    }
}
package ezvcard.io.json; import static ezvcard.util.IOUtils.utf8Writer; import java.io.File; import java.io.Flushable; import java.io.IOException; import java.io.OutputStream; import java.io.Writer; import java.util.List; import ezvcard.VCard; import ezvcard.VCardDataType; import ezvcard.VCardVersion; import ezvcard.io.EmbeddedVCardException; import ezvcard.io.SkipMeException; import ezvcard.io.StreamWriter; import ezvcard.io.scribe.VCardPropertyScribe; import ezvcard.parameter.VCardParameters; import ezvcard.property.VCardProperty; /* Copyright (c) 2012-2015, Michael Angstadt All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project. */ /** * <p> * Writes {@link VCard} objects to a JSON data stream (jCard format). * </p> * <p> * <b>Example:</b> * * <pre class="brush:java"> * VCard vcard1 = ... * VCard vcard2 = ... * File file = new File("vcard.json"); * JCardWriter writer = null; * try { * writer = new JCardWriter(file); * writer.write(vcard1); * writer.write(vcard2); * } finally { * if (writer != null) writer.close(); * } * </pre> * * </p> * @author Michael Angstadt * @see <a href="http://tools.ietf.org/html/rfc7095">RFC 7095</a> */ public class JCardWriter extends StreamWriter implements Flushable { private final JCardRawWriter writer; private final VCardVersion targetVersion = VCardVersion.V4_0; /** * @param out the output stream to write to (UTF-8 encoding will be used) */ public JCardWriter(OutputStream out) { this(utf8Writer(out)); } /** * @param out the output stream to write to (UTF-8 encoding will be used) * @param wrapInArray true to enclose all written vCards in a JSON array, * false not to */ public JCardWriter(OutputStream out, boolean wrapInArray) { this(utf8Writer(out), wrapInArray); } /** * @param file the file to write to (UTF-8 encoding will be used) * @throws IOException if there's a problem opening the file */ public JCardWriter(File file) throws IOException { this(utf8Writer(file)); } /** * @param file the file to write to (UTF-8 encoding will be used) * @param wrapInArray true to enclose all written vCards in a JSON array, * false not to * @throws IOException if there's a problem opening the file */ public JCardWriter(File file, boolean wrapInArray) throws IOException { this(utf8Writer(file), wrapInArray); } /** * @param writer the writer to write to */ public JCardWriter(Writer writer) { this(writer, false); } /** * @param writer the writer to write to * @param 
wrapInArray true to enclose all written vCards in a JSON array, * false not to */ public JCardWriter(Writer writer, boolean wrapInArray) { this.writer = new JCardRawWriter(writer, wrapInArray); } /** * Writes a vCard to the stream. * @param vcard the vCard that is being written * @param properties the properties to write * @throws IOException if there's a problem writing to the output stream * @throws IllegalArgumentException if a scribe hasn't been registered for a * custom property class (see: {@link #registerScribe registerScribe}) */ @Override @SuppressWarnings({ "rawtypes", "unchecked" }) protected void _write(VCard vcard, List<VCardProperty> properties) throws IOException { writer.writeStartVCard(); writer.writeProperty("version", VCardDataType.TEXT, JCardValue.single(targetVersion.getVersion())); for (VCardProperty property : properties) { VCardPropertyScribe scribe = index.getPropertyScribe(property); //marshal the value JCardValue value; try { value = scribe.writeJson(property); } catch (SkipMeException e) { //property has requested not to be written continue; } catch (EmbeddedVCardException e) { //don't write because jCard does not support embedded vCards continue; } String group = property.getGroup(); String name = scribe.getPropertyName().toLowerCase(); VCardParameters parameters = scribe.prepareParameters(property, targetVersion, vcard); VCardDataType dataType = scribe.dataType(property, targetVersion); writer.writeProperty(group, name, parameters, dataType, value); } writer.writeEndVCard(); } @Override protected VCardVersion getTargetVersion() { return targetVersion; } /** * Gets whether or not the JSON will be pretty-printed. * @return true if it will be pretty-printed, false if not (defaults to * false) */ public boolean isIndent() { return writer.isIndent(); } /** * Sets whether or not to pretty-print the JSON. 
* @param indent true to pretty-print it, false not to (defaults to false) */ public void setIndent(boolean indent) { writer.setIndent(indent); } /** * Flushes the jCard data stream. * @throws IOException if there's a problem flushing the stream */ public void flush() throws IOException { writer.flush(); } /** * Ends the jCard data stream, but does not close the underlying writer. * @throws IOException if there's a problem closing the stream */ public void closeJsonStream() throws IOException { writer.closeJsonStream(); } /** * Ends the jCard data stream and closes the underlying writer. * @throws IOException if there's a problem closing the stream */ public void close() throws IOException { writer.close(); } }
package com.raweng.rawchat;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Vector;

import javax.microedition.rms.RecordStore;
import javax.microedition.rms.RecordStoreException;
import javax.microedition.rms.RecordStoreFullException;
import javax.microedition.rms.RecordStoreNotFoundException;

/**
 * Persistent application state backed by two J2ME RMS record stores:
 * <ul>
 * <li>"options" - record 1 holds the connection-retry and font options
 * (boolean autoRetry, int retryDelay, int retryLimit, int fontSize);</li>
 * <li>"userinfo" - record 1 holds the saved login, prefixed by a boolean
 * "saved" marker that destroyUserInfo() clears.</li>
 * </ul>
 * Fix over the previous revision: every method now closes its RecordStore in
 * a finally block, so stores are no longer leaked when a read/write fails
 * (RMS keeps stores open per-MIDlet until explicitly closed).
 */
public class AppSavedData {

	// Active chat session handle; set elsewhere in the app, never persisted here.
	public static BChat bchat;

	// User-tunable options, persisted in the "options" store. Defaults below
	// are also what resetData() restores.
	public static boolean autoRetry = true;
	public static int retryDelay = 10;
	public static int retryLimit = 10;
	public static int fontSize = 18;

	/**
	 * Restores all options to their defaults, wipes the saved login
	 * information, and persists the default options.
	 */
	public static void resetData() {
		autoRetry = true;
		retryDelay = 10;
		retryLimit = 10;
		fontSize = 18;

		// clean login info
		destroyUserInfo();

		// set default options
		saveOptions();
	}

	/**
	 * Persists the current option values into record 1 of the "options" store,
	 * creating the store and the record on first use.
	 */
	public static void saveOptions() {
		RecordStore store = null;
		try {
			store = RecordStore.openRecordStore("options", true);

			ByteArrayOutputStream baos = new ByteArrayOutputStream();
			DataOutputStream os = new DataOutputStream(baos);
			os.writeBoolean(autoRetry);
			os.writeInt(retryDelay);
			os.writeInt(retryLimit);
			os.writeInt(fontSize);
			os.close();

			byte[] data = baos.toByteArray();
			if (store.getNumRecords() == 0) {
				store.addRecord(data, 0, data.length);
			} else {
				store.setRecord(1, data, 0, data.length);
			}
		} catch (RecordStoreException e) {
			// covers RecordStoreFullException and RecordStoreNotFoundException,
			// which are subclasses
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			closeQuietly(store);
		}
	}

	/**
	 * Loads the options from record 1 of the "options" store into the static
	 * fields. Fields keep their current (default) values when the store is
	 * empty or the read fails. The read order must mirror the write order in
	 * {@link #saveOptions()}.
	 */
	public static void readOptions() {
		RecordStore store = null;
		try {
			store = RecordStore.openRecordStore("options", true);
			if (store.getNumRecords() > 0) {
				DataInputStream is = new DataInputStream(new ByteArrayInputStream(store.getRecord(1)));
				autoRetry = is.readBoolean();
				retryDelay = is.readInt();
				retryLimit = is.readInt();
				fontSize = is.readInt();
			}
		} catch (RecordStoreException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			closeQuietly(store);
		}
	}

	/**
	 * Saves the user's login information into record 1 of the "userinfo"
	 * store. The record starts with a boolean marker (true = saved) so that
	 * {@link #destroyUserInfo()} can invalidate it without deleting the store.
	 *
	 * @param username    account name
	 * @param password    account password (stored in plain text by the
	 *                    original design; RMS is per-MIDlet private storage)
	 * @param server      XMPP server host
	 * @param port        XMPP server port (as a string)
	 * @param isBosh      true to connect via BOSH
	 * @param boshUrl     BOSH endpoint URL
	 * @param usessl      true to use SSL
	 * @param networkType network/transport selector understood by the caller
	 */
	public static void setUserInfo(String username, String password, String server, String port,
			boolean isBosh, String boshUrl, boolean usessl, int networkType) {
		RecordStore store = null;
		try {
			store = RecordStore.openRecordStore("userinfo", true);

			// convert user info into a byte array; order must match getUserInfo()
			ByteArrayOutputStream baos = new ByteArrayOutputStream();
			DataOutputStream os = new DataOutputStream(baos);
			os.writeBoolean(true); // "credentials saved" marker
			os.writeUTF(username);
			os.writeUTF(password);
			os.writeInt(networkType);
			os.writeUTF(server);
			os.writeUTF(port);
			os.writeBoolean(isBosh);
			os.writeUTF(boshUrl);
			os.writeBoolean(usessl);
			os.close();

			byte[] data = baos.toByteArray();
			if (store.getNumRecords() == 0) {
				store.addRecord(data, 0, data.length);
			} else {
				store.setRecord(1, data, 0, data.length);
			}
		} catch (RecordStoreException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			closeQuietly(store);
		}
	}

	/**
	 * Reads the saved login information back from the "userinfo" store.
	 *
	 * @return a Vector of { String username, String password,
	 *         String networkType, ServerModel serverDef }, or null when
	 *         nothing is saved, the saved marker is false, or the read fails
	 */
	public static Vector getUserInfo() {
		RecordStore store = null;
		try {
			store = RecordStore.openRecordStore("userinfo", true);
			if (store.getNumRecords() == 0) {
				// empty record store
				return null;
			}

			DataInputStream is = new DataInputStream(new ByteArrayInputStream(store.getRecord(1)));
			if (!is.readBoolean()) {
				// marker cleared by destroyUserInfo()
				return null;
			}

			// field order mirrors setUserInfo()
			Vector v = new Vector();
			v.addElement(is.readUTF());                 // username
			v.addElement(is.readUTF());                 // password
			v.addElement(String.valueOf(is.readInt())); // networkType

			ServerModel serverDef = new ServerModel();
			serverDef.server = is.readUTF();
			serverDef.port = is.readUTF();
			serverDef.useBosh = is.readBoolean();
			serverDef.boshUrl = is.readUTF();
			serverDef.usessl = is.readBoolean();
			v.addElement(serverDef);

			return v;
		} catch (RecordStoreException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			closeQuietly(store);
		}
		return null;
	}

	/**
	 * Invalidates the saved login by rewriting record 1 with a false marker.
	 * Does nothing when the store has no records yet.
	 */
	public static void destroyUserInfo() {
		RecordStore store = null;
		try {
			store = RecordStore.openRecordStore("userinfo", true);
			if (store.getNumRecords() != 0) {
				ByteArrayOutputStream baos = new ByteArrayOutputStream();
				DataOutputStream os = new DataOutputStream(baos);
				os.writeBoolean(false);
				os.close();

				byte[] data = baos.toByteArray();
				store.setRecord(1, data, 0, data.length);
			}
		} catch (RecordStoreException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			closeQuietly(store);
		}
	}

	/**
	 * Closes the store if it was opened; close failures are logged but not
	 * propagated so they cannot mask the original error.
	 */
	private static void closeQuietly(RecordStore store) {
		if (store != null) {
			try {
				store.closeRecordStore();
			} catch (RecordStoreException e) {
				e.printStackTrace();
			}
		}
	}
}
/** */
package com.github.lbroudoux.dsl.eip.presentation;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.StringTokenizer;

import org.eclipse.emf.common.CommonPlugin;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EClassifier;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.xmi.XMLResource;
import org.eclipse.emf.edit.ui.provider.ExtendedImageRegistry;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.wizard.Wizard;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.ui.INewWizard;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.actions.WorkspaceModifyOperation;
import org.eclipse.ui.dialogs.WizardNewFileCreationPage;
import org.eclipse.ui.part.FileEditorInput;
import org.eclipse.ui.part.ISetSelectionTarget;

import com.github.lbroudoux.dsl.eip.EipFactory;
import com.github.lbroudoux.dsl.eip.EipPackage;
import com.github.lbroudoux.dsl.eip.provider.EipEditPlugin;

import org.eclipse.core.runtime.Path;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.IWorkbenchPart;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.PartInitException;

/**
 * This is a simple wizard for creating a new model file.
 * <!-- begin-user-doc -->
 * NOTE: this is EMF-generated code; edits outside the user-doc regions will be
 * lost on regeneration. The wizard has two pages: a file-creation page and a
 * root-object/encoding selection page.
 * <!-- end-user-doc -->
 * @generated
 */
public class EipModelWizard extends Wizard implements INewWizard {
	/**
	 * The supported extensions for created files.
	 * <!-- begin-user-doc -->
	 * Parsed from the comma-separated "_UI_EipEditorFilenameExtensions" resource string.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public static final List<String> FILE_EXTENSIONS = Collections.unmodifiableList(Arrays.asList(EipEditorPlugin.INSTANCE.getString("_UI_EipEditorFilenameExtensions").split("\\s*,\\s*")));

	/**
	 * A formatted list of supported file extensions, suitable for display.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public static final String FORMATTED_FILE_EXTENSIONS = EipEditorPlugin.INSTANCE.getString("_UI_EipEditorFilenameExtensions").replaceAll("\\s*,\\s*", ", ");

	/**
	 * This caches an instance of the model package.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected EipPackage eipPackage = EipPackage.eINSTANCE;

	/**
	 * This caches an instance of the model factory.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected EipFactory eipFactory = eipPackage.getEipFactory();

	/**
	 * This is the file creation page.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected EipModelWizardNewFileCreationPage newFileCreationPage;

	/**
	 * This is the initial object creation page.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected EipModelWizardInitialObjectCreationPage initialObjectCreationPage;

	/**
	 * Remember the selection during initialization for populating the default container.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected IStructuredSelection selection;

	/**
	 * Remember the workbench during initialization.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected IWorkbench workbench;

	/**
	 * Caches the names of the types that can be created as the root object.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected List<String> initialObjectNames;

	/**
	 * This just records the information.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void init(IWorkbench workbench, IStructuredSelection selection) {
		this.workbench = workbench;
		this.selection = selection;
		setWindowTitle(EipEditorPlugin.INSTANCE.getString("_UI_Wizard_label"));
		setDefaultPageImageDescriptor(ExtendedImageRegistry.INSTANCE.getImageDescriptor(EipEditorPlugin.INSTANCE.getImage("full/wizban/NewEip")));
	}

	/**
	 * Returns the names of the types that can be created as the root object.
	 * <!-- begin-user-doc -->
	 * Lazily computed: every concrete (non-abstract) EClass of the EIP package,
	 * sorted with the platform comparator.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected Collection<String> getInitialObjectNames() {
		if (initialObjectNames == null) {
			initialObjectNames = new ArrayList<String>();
			for (EClassifier eClassifier : eipPackage.getEClassifiers()) {
				if (eClassifier instanceof EClass) {
					EClass eClass = (EClass)eClassifier;
					if (!eClass.isAbstract()) {
						initialObjectNames.add(eClass.getName());
					}
				}
			}
			Collections.sort(initialObjectNames, CommonPlugin.INSTANCE.getComparator());
		}
		return initialObjectNames;
	}

	/**
	 * Create a new model.
	 * <!-- begin-user-doc -->
	 * Instantiates the root EClass chosen on the second wizard page.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected EObject createInitialModel() {
		EClass eClass = (EClass)eipPackage.getEClassifier(initialObjectCreationPage.getInitialObjectName());
		EObject rootObject = eipFactory.create(eClass);
		return rootObject;
	}

	/**
	 * Do the work after everything is specified.
	 * <!-- begin-user-doc -->
	 * Creates the resource, saves it with the selected XML encoding, reveals
	 * the new file in the active view, then opens its default editor.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean performFinish() {
		try {
			// Remember the file.
			//
			final IFile modelFile = getModelFile();

			// Do the work within an operation.
			//
			WorkspaceModifyOperation operation =
				new WorkspaceModifyOperation() {
					@Override
					protected void execute(IProgressMonitor progressMonitor) {
						try {
							// Create a resource set
							//
							ResourceSet resourceSet = new ResourceSetImpl();

							// Get the URI of the model file.
							//
							URI fileURI = URI.createPlatformResourceURI(modelFile.getFullPath().toString(), true);

							// Create a resource for this file.
							//
							Resource resource = resourceSet.createResource(fileURI);

							// Add the initial model object to the contents.
							//
							EObject rootObject = createInitialModel();
							if (rootObject != null) {
								resource.getContents().add(rootObject);
							}

							// Save the contents of the resource to the file system.
							//
							Map<Object, Object> options = new HashMap<Object, Object>();
							options.put(XMLResource.OPTION_ENCODING, initialObjectCreationPage.getEncoding());
							resource.save(options);
						}
						catch (Exception exception) {
							EipEditorPlugin.INSTANCE.log(exception);
						}
						finally {
							progressMonitor.done();
						}
					}
				};

			getContainer().run(false, false, operation);

			// Select the new file resource in the current view.
			//
			IWorkbenchWindow workbenchWindow = workbench.getActiveWorkbenchWindow();
			IWorkbenchPage page = workbenchWindow.getActivePage();
			final IWorkbenchPart activePart = page.getActivePart();
			if (activePart instanceof ISetSelectionTarget) {
				final ISelection targetSelection = new StructuredSelection(modelFile);
				getShell().getDisplay().asyncExec
					(new Runnable() {
						 public void run() {
							 ((ISetSelectionTarget)activePart).selectReveal(targetSelection);
						 }
					 });
			}

			// Open an editor on the new file.
			//
			try {
				page.openEditor
					(new FileEditorInput(modelFile),
					 workbench.getEditorRegistry().getDefaultEditor(modelFile.getFullPath().toString()).getId());
			}
			catch (PartInitException exception) {
				MessageDialog.openError(workbenchWindow.getShell(), EipEditorPlugin.INSTANCE.getString("_UI_OpenEditorError_label"), exception.getMessage());
				return false;
			}

			return true;
		}
		catch (Exception exception) {
			EipEditorPlugin.INSTANCE.log(exception);
			return false;
		}
	}

	/**
	 * This is the one page of the wizard.
	 * <!-- begin-user-doc -->
	 * File-creation page that additionally validates the chosen file extension.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public class EipModelWizardNewFileCreationPage extends WizardNewFileCreationPage {
		/**
		 * Pass in the selection.
		 * <!-- begin-user-doc -->
		 * <!-- end-user-doc -->
		 * @generated
		 */
		public EipModelWizardNewFileCreationPage(String pageId, IStructuredSelection selection) {
			super(pageId, selection);
		}

		/**
		 * The framework calls this to see if the file is correct.
		 * <!-- begin-user-doc -->
		 * Rejects file names whose extension is not in FILE_EXTENSIONS.
		 * <!-- end-user-doc -->
		 * @generated
		 */
		@Override
		protected boolean validatePage() {
			if (super.validatePage()) {
				String extension = new Path(getFileName()).getFileExtension();
				if (extension == null || !FILE_EXTENSIONS.contains(extension)) {
					String key = FILE_EXTENSIONS.size() > 1 ? "_WARN_FilenameExtensions" : "_WARN_FilenameExtension";
					setErrorMessage(EipEditorPlugin.INSTANCE.getString(key, new Object [] { FORMATTED_FILE_EXTENSIONS }));
					return false;
				}
				return true;
			}
			return false;
		}

		/**
		 * <!-- begin-user-doc -->
		 * <!-- end-user-doc -->
		 * @generated
		 */
		public IFile getModelFile() {
			return ResourcesPlugin.getWorkspace().getRoot().getFile(getContainerFullPath().append(getFileName()));
		}
	}

	/**
	 * This is the page where the type of object to create is selected.
	 * <!-- begin-user-doc -->
	 * Offers two combos: the root object's EClass and the XML file encoding.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public class EipModelWizardInitialObjectCreationPage extends WizardPage {
		/**
		 * <!-- begin-user-doc -->
		 * <!-- end-user-doc -->
		 * @generated
		 */
		protected Combo initialObjectField;

		/**
		 * @generated
		 * <!-- begin-user-doc -->
		 * <!-- end-user-doc -->
		 */
		protected List<String> encodings;

		/**
		 * <!-- begin-user-doc -->
		 * <!-- end-user-doc -->
		 * @generated
		 */
		protected Combo encodingField;

		/**
		 * Pass in the selection.
		 * <!-- begin-user-doc -->
		 * <!-- end-user-doc -->
		 * @generated
		 */
		public EipModelWizardInitialObjectCreationPage(String pageId) {
			super(pageId);
		}

		/**
		 * <!-- begin-user-doc -->
		 * Builds the page UI: a single-column grid with the object combo and
		 * the encoding combo, both revalidating the page on modification.
		 * <!-- end-user-doc -->
		 * @generated
		 */
		public void createControl(Composite parent) {
			Composite composite = new Composite(parent, SWT.NONE);
			{
				GridLayout layout = new GridLayout();
				layout.numColumns = 1;
				layout.verticalSpacing = 12;
				composite.setLayout(layout);

				GridData data = new GridData();
				data.verticalAlignment = GridData.FILL;
				data.grabExcessVerticalSpace = true;
				data.horizontalAlignment = GridData.FILL;
				composite.setLayoutData(data);
			}

			Label containerLabel = new Label(composite, SWT.LEFT);
			{
				containerLabel.setText(EipEditorPlugin.INSTANCE.getString("_UI_ModelObject"));

				GridData data = new GridData();
				data.horizontalAlignment = GridData.FILL;
				containerLabel.setLayoutData(data);
			}

			initialObjectField = new Combo(composite, SWT.BORDER);
			{
				GridData data = new GridData();
				data.horizontalAlignment = GridData.FILL;
				data.grabExcessHorizontalSpace = true;
				initialObjectField.setLayoutData(data);
			}

			for (String objectName : getInitialObjectNames()) {
				initialObjectField.add(getLabel(objectName));
			}

			if (initialObjectField.getItemCount() == 1) {
				initialObjectField.select(0);
			}
			initialObjectField.addModifyListener(validator);

			Label encodingLabel = new Label(composite, SWT.LEFT);
			{
				encodingLabel.setText(EipEditorPlugin.INSTANCE.getString("_UI_XMLEncoding"));

				GridData data = new GridData();
				data.horizontalAlignment = GridData.FILL;
				encodingLabel.setLayoutData(data);
			}
			encodingField = new Combo(composite, SWT.BORDER);
			{
				GridData data = new GridData();
				data.horizontalAlignment = GridData.FILL;
				data.grabExcessHorizontalSpace = true;
				encodingField.setLayoutData(data);
			}

			for (String encoding : getEncodings()) {
				encodingField.add(encoding);
			}

			encodingField.select(0);
			encodingField.addModifyListener(validator);

			setPageComplete(validatePage());
			setControl(composite);
		}

		/**
		 * <!-- begin-user-doc -->
		 * Shared listener: re-runs page validation whenever either combo changes.
		 * <!-- end-user-doc -->
		 * @generated
		 */
		protected ModifyListener validator =
			new ModifyListener() {
				public void modifyText(ModifyEvent e) {
					setPageComplete(validatePage());
				}
			};

		/**
		 * <!-- begin-user-doc -->
		 * <!-- end-user-doc -->
		 * @generated
		 */
		protected boolean validatePage() {
			return getInitialObjectName() != null && getEncodings().contains(encodingField.getText());
		}

		/**
		 * <!-- begin-user-doc -->
		 * Focuses the field the user still has to fill in when the page shows.
		 * <!-- end-user-doc -->
		 * @generated
		 */
		@Override
		public void setVisible(boolean visible) {
			super.setVisible(visible);
			if (visible) {
				if (initialObjectField.getItemCount() == 1) {
					initialObjectField.clearSelection();
					encodingField.setFocus();
				}
				else {
					encodingField.clearSelection();
					initialObjectField.setFocus();
				}
			}
		}

		/**
		 * <!-- begin-user-doc -->
		 * Maps the displayed label back to the EClass name it was derived from.
		 * <!-- end-user-doc -->
		 * @generated
		 */
		public String getInitialObjectName() {
			String label = initialObjectField.getText();

			for (String name : getInitialObjectNames()) {
				if (getLabel(name).equals(label)) {
					return name;
				}
			}
			return null;
		}

		/**
		 * <!-- begin-user-doc -->
		 * <!-- end-user-doc -->
		 * @generated
		 */
		public String getEncoding() {
			return encodingField.getText();
		}

		/**
		 * Returns the label for the specified type name.
		 * <!-- begin-user-doc -->
		 * Falls back to the raw type name when no resource string exists.
		 * <!-- end-user-doc -->
		 * @generated
		 */
		protected String getLabel(String typeName) {
			try {
				return EipEditPlugin.INSTANCE.getString("_UI_" + typeName + "_type");
			}
			catch(MissingResourceException mre) {
				EipEditorPlugin.INSTANCE.log(mre);
			}
			return typeName;
		}

		/**
		 * <!-- begin-user-doc -->
		 * Lazily tokenizes the "_UI_XMLEncodingChoices" resource string.
		 * <!-- end-user-doc -->
		 * @generated
		 */
		protected Collection<String> getEncodings() {
			if (encodings == null) {
				encodings = new ArrayList<String>();
				for (StringTokenizer stringTokenizer = new StringTokenizer(EipEditorPlugin.INSTANCE.getString("_UI_XMLEncodingChoices")); stringTokenizer.hasMoreTokens(); ) {
					encodings.add(stringTokenizer.nextToken());
				}
			}
			return encodings;
		}
	}

	/**
	 * The framework calls this to create the contents of the wizard.
	 * <!-- begin-user-doc -->
	 * Adds both pages and seeds the file page's container/filename from the
	 * current workbench selection, making the default filename unique.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void addPages() {
		// Create a page, set the title, and the initial model file name.
		//
		newFileCreationPage = new EipModelWizardNewFileCreationPage("Whatever", selection);
		newFileCreationPage.setTitle(EipEditorPlugin.INSTANCE.getString("_UI_EipModelWizard_label"));
		newFileCreationPage.setDescription(EipEditorPlugin.INSTANCE.getString("_UI_EipModelWizard_description"));
		newFileCreationPage.setFileName(EipEditorPlugin.INSTANCE.getString("_UI_EipEditorFilenameDefaultBase") + "." + FILE_EXTENSIONS.get(0));
		addPage(newFileCreationPage);

		// Try and get the resource selection to determine a current directory for the file dialog.
		//
		if (selection != null && !selection.isEmpty()) {
			// Get the resource...
			//
			Object selectedElement = selection.iterator().next();
			if (selectedElement instanceof IResource) {
				// Get the resource parent, if its a file.
				//
				IResource selectedResource = (IResource)selectedElement;
				if (selectedResource.getType() == IResource.FILE) {
					selectedResource = selectedResource.getParent();
				}

				// This gives us a directory...
				//
				if (selectedResource instanceof IFolder || selectedResource instanceof IProject) {
					// Set this for the container.
					//
					newFileCreationPage.setContainerFullPath(selectedResource.getFullPath());

					// Make up a unique new name here.
					//
					String defaultModelBaseFilename = EipEditorPlugin.INSTANCE.getString("_UI_EipEditorFilenameDefaultBase");
					String defaultModelFilenameExtension = FILE_EXTENSIONS.get(0);
					String modelFilename = defaultModelBaseFilename + "." + defaultModelFilenameExtension;
					for (int i = 1; ((IContainer)selectedResource).findMember(modelFilename) != null; ++i) {
						modelFilename = defaultModelBaseFilename + i + "." + defaultModelFilenameExtension;
					}
					newFileCreationPage.setFileName(modelFilename);
				}
			}
		}
		initialObjectCreationPage = new EipModelWizardInitialObjectCreationPage("Whatever2");
		initialObjectCreationPage.setTitle(EipEditorPlugin.INSTANCE.getString("_UI_EipModelWizard_label"));
		initialObjectCreationPage.setDescription(EipEditorPlugin.INSTANCE.getString("_UI_Wizard_initial_object_description"));
		addPage(initialObjectCreationPage);
	}

	/**
	 * Get the file from the page.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public IFile getModelFile() {
		return newFileCreationPage.getModelFile();
	}
}
package com.ggstudios.dialogs;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.json.JSONException;
import org.json.JSONObject;

import com.ggstudios.lolcraft.ChampionInfo;
import com.ggstudios.lolcraft.ChampionLibrary;
import com.ggstudios.lolcraft.ItemInfo;
import com.ggstudios.lolcraft.ItemLibrary;
import com.ggstudios.lolcraft.LibraryManager;
import com.ggstudios.lolcraft.LibraryUtils;
import com.ggstudios.lolcraft.LibraryUtils.OnItemLoadListener;
import com.ggstudios.lolcraft.R;

import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.Window;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.AdapterView.OnItemLongClickListener;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.EditText;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.ListAdapter;
import android.widget.ProgressBar;
import android.widget.TextView;

import timber.log.Timber;

/**
 * Dialog fragment that presents a searchable, tag-filterable grid of purchasable
 * items and reports the user's selection to the hosting activity.
 *
 * <p>The hosting {@link Activity} MUST implement {@link ItemPickerDialogListener}
 * (enforced in {@link #onAttach(Activity)}). A tap on a grid cell delivers the
 * chosen {@link ItemInfo} via {@link ItemPickerDialogListener#onItemPicked(ItemInfo)}
 * and dismisses the dialog; a long-press instead opens an
 * {@code ItemDetailDialogFragment} for the item.
 *
 * <p>Two filter inputs are combined by {@link ItemInfoAdapter}: a free-text search
 * field (matched against {@code ItemInfo.lowerName} and {@code ItemInfo.colloq})
 * and a row of stat-tag check boxes (Health, Armor, ... mapped to Riot tag strings
 * such as {@code "SpellBlock"}).
 *
 * <p>Arguments (optional): {@link #EXTRA_CHAMPION_ID} limits champion-exclusive
 * items to the given champion; {@link #EXTRA_MAP_ID} hides items whose
 * {@code notOnMap} set contains the map id.
 */
public class ItemPickerDialogFragment extends DialogFragment {
    /** Arguments key: int champion id used to filter champion-exclusive items. */
    public static final String EXTRA_CHAMPION_ID = "champId";
    /** Arguments key: int map id used to exclude items not sold on that map. */
    public static final String EXTRA_MAP_ID = "mapId";

    // Grid that displays the (filtered) items.
    private GridView content;
    // Items currently eligible for display (after champ/map filtering); null until
    // the item library has been loaded.
    private List<ItemInfo> items;
    // Free-text search box; each edit re-filters the adapter.
    private EditText searchField;
    // Champion id from arguments, or -1 when unset.
    private int champId = -1;
    // Map id from arguments, or -1 when unset.
    private int mapId = -1;

    // Tags currently selected via check boxes; an item must carry ALL of them to
    // pass the filter.
    private List<String> filterTags = new ArrayList<String>();
    // Reverse lookup tag -> check box, used to clear and enable/disable the boxes.
    private Map<String, CheckBox> tagToCheckBox = new HashMap<String, CheckBox>();

    // Shown while item data/portraits load; hidden on first progress callback.
    private ProgressBar pbar;

    /**
     * Creates the dialog with a title-less style; pre-Honeycomb devices get the
     * classic dialog theme plus a Holo-light background drawable, newer devices
     * the Holo light dialog theme.
     */
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        Dialog dialog = super.onCreateDialog(savedInstanceState);
        if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.HONEYCOMB) {
            setStyle(DialogFragment.STYLE_NO_TITLE, android.R.style.Theme_Dialog);
            dialog.getWindow().setBackgroundDrawableResource(R.drawable.dialog_full_holo_light);
        } else {
            setStyle(DialogFragment.STYLE_NO_TITLE, android.R.style.Theme_Holo_Light_Dialog);
        }
        return dialog;
    }

    /**
     * Inflates the picker layout and wires up every interactive piece: the search
     * box, item click/long-click handlers, the clear-filter button, and one shared
     * checked-change listener for all tag check boxes. Kicks off an async library
     * load when item data is not yet available.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.dialog_fragment_item_picker, container, false);

        pbar = (ProgressBar) rootView.findViewById(R.id.pbar);
        content = (GridView) rootView.findViewById(R.id.itemGrid);

        // Null when the item library hasn't been initialized yet; triggers the
        // async load below.
        items = LibraryManager.getInstance()
                .getItemLibrary().getPurchasableItemInfo();

        searchField = (EditText) rootView.findViewById(R.id.searchField);
        searchField.addTextChangedListener(new TextWatcher(){
            public void afterTextChanged(Editable s) {
                // Re-run the text filter on every keystroke.
                String str = s.toString();
                ListAdapter adapter = content.getAdapter();
                if (adapter != null) {
                    ((ItemInfoAdapter) adapter).filter(str);
                }
            }
            public void beforeTextChanged(CharSequence s, int start, int count, int after){}
            public void onTextChanged(CharSequence s, int start, int before, int count){}
        });

        Bundle args = getArguments();
        if (args != null) {
            champId = args.getInt(EXTRA_CHAMPION_ID, -1);
            mapId = args.getInt(EXTRA_MAP_ID, -1);
            Timber.d("MapId: " + mapId);
        }

        if (items == null) {
            // Library not loaded yet: load in the background, then show.
            initializeItemInfo();
        } else {
            pbar.setVisibility(View.GONE);
            filterAndShowItems();
        }

        // Long-press: show the item's detail dialog instead of picking it.
        content.setOnItemLongClickListener(new OnItemLongClickListener() {
            @Override
            public boolean onItemLongClick(AdapterView<?> parent, View view,
                    int position, long id) {
                ItemInfo info = ((ItemInfo) parent.getItemAtPosition(position));
                Timber.d(info.id + ": " + info.rawJson.toString());
                ItemDetailDialogFragment frag = ItemDetailDialogFragment.newInstance(info);
                frag.show(getFragmentManager(), "dialog");
                return true;
            }
        });

        // Tap: deliver the pick to the host activity and close the dialog.
        content.setOnItemClickListener(new OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view,
                    int position, long id) {
                ((ItemPickerDialogListener)getActivity()).onItemPicked((ItemInfo) parent.getItemAtPosition(position));
                dismiss();
            }
        });

        // Forward lookup check box -> tag, used by the shared listener below.
        final Map<CheckBox, String> checkBoxToTag = new HashMap<CheckBox, String>();
        final Button btnClearFilter = (Button) rootView.findViewById(R.id.btnClearFilter);
        final CheckBox btnConsumables = (CheckBox) rootView.findViewById(R.id.btnConsumables);
        final CheckBox cbHp = (CheckBox) rootView.findViewById(R.id.cbHp);
        final CheckBox cbAr = (CheckBox) rootView.findViewById(R.id.cbAr);
        final CheckBox cbMr = (CheckBox) rootView.findViewById(R.id.cbMr);
        final CheckBox cbTenacity = (CheckBox) rootView.findViewById(R.id.cbTenacity);
        final CheckBox cbAd = (CheckBox) rootView.findViewById(R.id.cbAd);
        final CheckBox cbCrit = (CheckBox) rootView.findViewById(R.id.cbCrit);
        final CheckBox cbAs = (CheckBox) rootView.findViewById(R.id.cbAs);
        final CheckBox cbLs = (CheckBox) rootView.findViewById(R.id.cbLs);
        final CheckBox cbAp = (CheckBox) rootView.findViewById(R.id.cbAp);
        final CheckBox cbCdr = (CheckBox) rootView.findViewById(R.id.cbCdr);
        final CheckBox cbSpellVamp = (CheckBox) rootView.findViewById(R.id.cbSpellVamp);
        final CheckBox cbMana = (CheckBox) rootView.findViewById(R.id.cbMana);
        final CheckBox cbManaRegen = (CheckBox) rootView.findViewById(R.id.cbManaRegen);
        final CheckBox cbBoots = (CheckBox) rootView.findViewById(R.id.cbBoots);
        final CheckBox cbOtherMovement = (CheckBox) rootView.findViewById(R.id.cbOtherMovement);

        // The tag strings below look like Riot item-tag identifiers
        // (e.g. "SpellBlock" = magic resist) -- NOTE(review): confirm against
        // the item data that ItemInfo.tags uses exactly these spellings.
        checkBoxToTag.put(btnConsumables, "Consumable");
        checkBoxToTag.put(cbHp, "Health");
        checkBoxToTag.put(cbAr, "Armor");
        checkBoxToTag.put(cbMr, "SpellBlock");
        checkBoxToTag.put(cbTenacity, "Tenacity");
        checkBoxToTag.put(cbAd, "Damage");
        checkBoxToTag.put(cbCrit, "CriticalStrike");
        checkBoxToTag.put(cbAs, "AttackSpeed");
        checkBoxToTag.put(cbLs, "LifeSteal");
        checkBoxToTag.put(cbAp, "SpellDamage");
        checkBoxToTag.put(cbCdr, "CooldownReduction");
        checkBoxToTag.put(cbSpellVamp, "SpellVamp");
        checkBoxToTag.put(cbMana, "Mana");
        checkBoxToTag.put(cbManaRegen, "ManaRegen");
        checkBoxToTag.put(cbBoots, "Boots");
        checkBoxToTag.put(cbOtherMovement, "NonbootsMovement");

        // Mirror mapping, kept in the field so other methods (clear button,
        // enable/disable on filter) can reach the boxes by tag.
        tagToCheckBox.put("Consumable", btnConsumables);
        tagToCheckBox.put("Health", cbHp);
        tagToCheckBox.put("Armor", cbAr);
        tagToCheckBox.put("SpellBlock", cbMr);
        tagToCheckBox.put("Tenacity", cbTenacity);
        tagToCheckBox.put("Damage", cbAd);
        tagToCheckBox.put("CriticalStrike", cbCrit);
        tagToCheckBox.put("AttackSpeed", cbAs);
        tagToCheckBox.put("LifeSteal", cbLs);
        tagToCheckBox.put("SpellDamage", cbAp);
        tagToCheckBox.put("CooldownReduction", cbCdr);
        tagToCheckBox.put("SpellVamp", cbSpellVamp);
        tagToCheckBox.put("Mana", cbMana);
        tagToCheckBox.put("ManaRegen", cbManaRegen);
        tagToCheckBox.put("Boots", cbBoots);
        tagToCheckBox.put("NonbootsMovement", cbOtherMovement);

        // Clearing simply unchecks every box; each uncheck fires the shared
        // listener, which removes the tag and re-filters.
        btnClearFilter.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                for (Entry<String, CheckBox> entry : tagToCheckBox.entrySet()) {
                    entry.getValue().setChecked(false);
                }
            }
        });

        // One listener shared by all tag check boxes: keep filterTags in sync
        // with the checked state and re-apply the tag filter.
        OnCheckedChangeListener listener = new OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                if (isChecked) {
                    filterTags.add(checkBoxToTag.get(buttonView));
                } else {
                    filterTags.remove(checkBoxToTag.get(buttonView));
                }
                ListAdapter adapter = content.getAdapter();
                if (adapter != null) {
                    ((ItemInfoAdapter) adapter).filter(filterTags);
                }
            }
        };

        btnConsumables.setOnCheckedChangeListener(listener);
        cbHp.setOnCheckedChangeListener(listener);
        cbAr.setOnCheckedChangeListener(listener);
        cbMr.setOnCheckedChangeListener(listener);
        cbTenacity.setOnCheckedChangeListener(listener);
        cbAd.setOnCheckedChangeListener(listener);
        cbCrit.setOnCheckedChangeListener(listener);
        cbAs.setOnCheckedChangeListener(listener);
        cbLs.setOnCheckedChangeListener(listener);
        cbAp.setOnCheckedChangeListener(listener);
        cbCdr.setOnCheckedChangeListener(listener);
        cbSpellVamp.setOnCheckedChangeListener(listener);
        cbMana.setOnCheckedChangeListener(listener);
        cbManaRegen.setOnCheckedChangeListener(listener);
        cbBoots.setOnCheckedChangeListener(listener);
        cbOtherMovement.setOnCheckedChangeListener(listener);

        return rootView;
    }

    /**
     * Fails fast with a {@link ClassCastException} if the hosting activity does
     * not implement {@link ItemPickerDialogListener}, since item picks are
     * delivered through that interface.
     */
    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        Activity act = getActivity();
        if (!(act instanceof ItemPickerDialogListener)) {
            throw new ClassCastException(act.getClass()
                    + " must implement ItemPickerDialogListener");
        }
    }

    /**
     * Rebuilds {@link #items} from the full purchasable-item list, dropping items
     * excluded on the current map and champion-exclusive items that don't belong
     * to the current champion, then installs a fresh {@link ItemInfoAdapter} on
     * the grid with the current tag filter applied.
     */
    private void filterAndShowItems() {
        ChampionLibrary champLib = LibraryManager.getInstance().getChampionLibrary();
        ChampionInfo info = champLib.getChampionInfo(champId);
        items = new ArrayList<ItemInfo>();
        List<ItemInfo> fullList = LibraryManager.getInstance()
                .getItemLibrary().getPurchasableItemInfo();
        for (ItemInfo i : fullList) {
            // Skip items explicitly disabled on this map.
            if (i.notOnMap != null) {
                if (i.notOnMap.contains(mapId)) {
                    continue;
                }
            }
            if (i.requiredChamp != null) {
                // Champion-exclusive item: keep only if it belongs to the current
                // champion. Reference comparison -- assumes ChampionLibrary returns
                // a cached/canonical ChampionInfo per champion (TODO confirm).
                if (champLib.getChampionInfo(i.requiredChamp) == info) {
                    items.add(i);
                }
            } else {
                items.add(i);
            }
        }
        ItemInfoAdapter adapter = new ItemInfoAdapter(getActivity(), items);
        adapter.setOnItemFilterListener(new OnItemFilterListener() {
            @Override
            public void onItemFiltered(ItemInfoAdapter adapter) {
                // Gray out tag check boxes that can no longer match any item in
                // the current filtered result.
                Set<String> tags = adapter.getAvailableTags();
                for (Entry<String, CheckBox> entry : tagToCheckBox.entrySet()) {
                    if (tags.contains(entry.getKey())) {
                        entry.getValue().setEnabled(true);
                    } else {
                        entry.getValue().setEnabled(false);
                    }
                }
            }
        });
        adapter.filter(filterTags);
        content.setAdapter(adapter);
    }

    /**
     * Loads the item library off the UI thread. When the item list is ready the
     * grid is populated (posted back to the UI thread); as each item portrait
     * finishes loading, the corresponding visible grid cell is refreshed.
     */
    private void initializeItemInfo() {
        new AsyncTask<Void, ItemInfo, Void>() {
            @Override
            protected Void doInBackground(Void... params) {
                Activity act = getActivity();
                if (act != null) {
                    try {
                        LibraryUtils.getAllItemInfo(act, new OnItemLoadListener() {
                            @Override
                            public void onStartLoadPortrait(final List<ItemInfo> items) {
                                // Item metadata is ready (portraits still loading):
                                // initialize the library and show the grid on the
                                // UI thread.
                                final ItemLibrary itemLib =
                                        LibraryManager.getInstance().getItemLibrary();
                                itemLib.initialize(items);
                                content.post(new Runnable() {
                                    @Override
                                    public void run() {
                                        filterAndShowItems();
                                    }
                                });
                            }

                            @Override
                            public void onPortraitLoad(int position, ItemInfo info) {
                                // Route the per-item portrait event to
                                // onProgressUpdate on the UI thread.
                                publishProgress(info);
                            }

                            @Override
                            public void onCompleteLoadPortrait(List<ItemInfo> items) {
                                // Nothing to do; cells were refreshed incrementally.
                            }
                        });
                    } catch (IOException e) {
                        Timber.e("", e);
                    } catch (JSONException e) {
                        Timber.e("", e);
                    }
                }
                return null;
            }

            protected void onProgressUpdate(ItemInfo... progress) {
                // First portrait arrived: the spinner can go away.
                if (pbar.getVisibility() != View.GONE) {
                    pbar.setVisibility(View.GONE);
                }
                ItemInfo info = progress[0];
                // Re-bind only the on-screen cell that shows this item, if any.
                int start = content.getFirstVisiblePosition();
                for(int i = start, j = content.getLastVisiblePosition(); i <= j; i++) {
                    if(info == content.getItemAtPosition(i)){
                        View view = content.getChildAt(i - start);
                        // Re-running getView on the existing cell view refreshes
                        // its icon in place.
                        content.getAdapter().getView(i, view, content);
                        break;
                    }
                }
            }
        }.execute();
    }

    /** View-holder for a grid cell: item icon plus its gold cost label. */
    private class ViewHolder {
        ImageView icon;
        TextView gold;
    }

    /**
     * Grid adapter over {@link ItemInfo} entries supporting two stacked filters:
     * a lowercase substring search over name/colloquial keywords, and an
     * all-tags-must-match tag filter. The text filter narrows {@code itemInfo}
     * from {@code itemInfoAll}; the tag filter then narrows {@code filtered}
     * from {@code itemInfo}.
     */
    public class ItemInfoAdapter extends BaseAdapter {
        private Context context;
        // Full, unfiltered item list.
        private List<ItemInfo> itemInfoAll;
        // Items passing the current text filter.
        private List<ItemInfo> itemInfo;
        // Items passing both text and tag filters; what the grid displays.
        private List<ItemInfo> filtered = new ArrayList<ItemInfo>();
        private LayoutInflater inflater;
        // Tag filter most recently applied via filter(List).
        private List<String> tags;
        // Icon drawable shown while an item's real icon is still loading.
        private Drawable placeHolder;
        // Previous search string; lets a narrowed query re-filter the smaller
        // previous result instead of the full list.
        private String lastQuery;
        // Union of tags over the currently filtered items.
        private Set<String> availableTags = new HashSet<String>();
        private OnItemFilterListener listener;

        public ItemInfoAdapter(Context c, List<ItemInfo> Items) {
            context = c;
            itemInfoAll = Items;
            itemInfo = Items;
            inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        }

        /**
         * Applies the text filter for query {@code s} (case-insensitive substring
         * match on {@code lowerName}/{@code colloq}), then re-applies the current
         * tag filter. An empty/null query resets to the full list.
         */
        public void filter(String s) {
            if (s == null || s.length() == 0) {
                itemInfo = itemInfoAll;
            } else {
                List<ItemInfo> last = itemInfo;
                itemInfo = new ArrayList<ItemInfo>();
                s = s.toLowerCase(Locale.US);
                if (lastQuery != null && s.startsWith(lastQuery)) {
                    // Query extends the previous one: its matches are a subset of
                    // the previous matches, so only re-scan those.
                    for (ItemInfo i : last) {
                        if (i.lowerName.contains(s) || i.colloq.contains(s)) {
                            itemInfo.add(i);
                        }
                    }
                } else {
                    for (ItemInfo i : itemInfoAll) {
                        if (i.lowerName.contains(s) || i.colloq.contains(s)) {
                            itemInfo.add(i);
                        }
                    }
                }
            }
            lastQuery = s;
            // Chain into the tag filter so both filters stay in effect.
            filter(tags);
        }

        /**
         * Applies the tag filter on top of the current text-filter result: an
         * item survives only if it carries every tag in {@code tags} (an empty
         * list passes everything). Also recomputes {@link #availableTags},
         * notifies the filter listener, and refreshes the grid.
         */
        public void filter(List<String> tags) {
            this.tags = tags;
            filtered.clear();
            availableTags.clear();
            for (ItemInfo item : itemInfo) {
                if (tags.size() == 0) {
                    filtered.add(item);
                } else {
                    boolean add = true;
                    for (String tag : tags) {
                        if (!item.tags.contains(tag)) {
                            add = false;
                            break;
                        }
                    }
                    if (add) {
                        filtered.add(item);
                    }
                }
            }
            // Collect the union of tags still represented in the result so the
            // UI can disable check boxes that would match nothing.
            for (ItemInfo item : filtered) {
                for (String tag : item.tags) {
                    if (!availableTags.contains(tag)) {
                        availableTags.add(tag);
                    }
                }
            }
            if (listener != null) {
                listener.onItemFiltered(this);
            }
            notifyDataSetChanged();
        }

        public void setOnItemFilterListener(OnItemFilterListener listener) {
            this.listener = listener;
        }

        /** Tags present on at least one currently filtered item. */
        public Set<String> getAvailableTags() {
            return availableTags;
        }

        public int getCount() {
            return filtered.size();
        }

        public Object getItem(int position) {
            return filtered.get(position);
        }

        public long getItemId(int position) {
            // No stable per-item ids are exposed.
            return 0;
        }

        // create a new ImageView for each item referenced by the Adapter
        public View getView(int position, View convertView, ViewGroup parent) {
            ViewHolder holder;
            if (convertView == null) {
                // if it's not recycled, initialize some attributes
                holder = new ViewHolder();
                convertView = inflater.inflate(R.layout.item_item_info, parent, false);
                holder.icon = (ImageView) convertView.findViewById(R.id.icon);
                holder.gold = (TextView) convertView.findViewById(R.id.gold);
                // Remember the layout's default drawable to use as the loading
                // placeholder for all cells.
                placeHolder = holder.icon.getDrawable();
                convertView.setTag(holder);
            } else {
                holder = (ViewHolder) convertView.getTag();
            }
            ItemInfo info = filtered.get(position);
            holder.gold.setText("" + info.totalGold);
            if (info.icon != null) {
                holder.icon.setImageDrawable(info.icon);
            } else {
                // Icon not loaded yet; onProgressUpdate re-binds the cell later.
                holder.icon.setImageDrawable(placeHolder);
            }
            return convertView;
        }
    }

    /** Implemented by the hosting activity to receive the picked item. */
    public interface ItemPickerDialogListener {
        public void onItemPicked(ItemInfo item);
    }

    /** Callback fired after each (re-)filter of an {@link ItemInfoAdapter}. */
    public interface OnItemFilterListener {
        public void onItemFiltered(ItemInfoAdapter adapter);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.tools.ant.taskdefs.optional.i18n; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.util.Hashtable; import java.util.Locale; import java.util.Vector; import org.apache.tools.ant.BuildException; import org.apache.tools.ant.DirectoryScanner; import org.apache.tools.ant.Project; import org.apache.tools.ant.taskdefs.MatchingTask; import org.apache.tools.ant.types.FileSet; import org.apache.tools.ant.util.FileUtils; import org.apache.tools.ant.util.LineTokenizer; /** * Translates text embedded in files using Resource Bundle files. 
* Since ant 1.6 preserves line endings * */ public class Translate extends MatchingTask { /** * search a bundle matching the specified language, the country and the variant */ private static final int BUNDLE_SPECIFIED_LANGUAGE_COUNTRY_VARIANT = 0; /** * search a bundle matching the specified language, and the country */ private static final int BUNDLE_SPECIFIED_LANGUAGE_COUNTRY = 1; /** * search a bundle matching the specified language only */ private static final int BUNDLE_SPECIFIED_LANGUAGE = 2; /** * search a bundle matching nothing special */ private static final int BUNDLE_NOMATCH = 3; /** * search a bundle matching the language, the country and the variant * of the current locale of the computer */ private static final int BUNDLE_DEFAULT_LANGUAGE_COUNTRY_VARIANT = 4; /** * search a bundle matching the language, and the country * of the current locale of the computer */ private static final int BUNDLE_DEFAULT_LANGUAGE_COUNTRY = 5; /** * search a bundle matching the language only * of the current locale of the computer */ private static final int BUNDLE_DEFAULT_LANGUAGE = 6; /** * number of possibilities for the search */ private static final int BUNDLE_MAX_ALTERNATIVES = BUNDLE_DEFAULT_LANGUAGE + 1; /** * Family name of resource bundle */ private String bundle; /** * Locale specific language of the resource bundle */ private String bundleLanguage; /** * Locale specific country of the resource bundle */ private String bundleCountry; /** * Locale specific variant of the resource bundle */ private String bundleVariant; /** * Destination directory */ private File toDir; /** * Source file encoding scheme */ private String srcEncoding; /** * Destination file encoding scheme */ private String destEncoding; /** * Resource Bundle file encoding scheme, defaults to srcEncoding */ private String bundleEncoding; /** * Starting token to identify keys */ private String startToken; /** * Ending token to identify keys */ private String endToken; /** * Whether or not to create 
a new destination file. * Defaults to <code>false</code>. */ private boolean forceOverwrite; /** * Vector to hold source file sets. */ private Vector filesets = new Vector(); /** * Holds key value pairs loaded from resource bundle file */ private Hashtable resourceMap = new Hashtable(); /** * Used to resolve file names. */ private static final FileUtils FILE_UTILS = FileUtils.getFileUtils(); /** * Last Modified Timestamp of resource bundle file being used. */ private long[] bundleLastModified = new long[BUNDLE_MAX_ALTERNATIVES]; /** * Last Modified Timestamp of source file being used. */ private long srcLastModified; /** * Last Modified Timestamp of destination file being used. */ private long destLastModified; /** * Has at least one file from the bundle been loaded? */ private boolean loaded = false; /** * Sets Family name of resource bundle; required. * @param bundle family name of resource bundle */ public void setBundle(String bundle) { this.bundle = bundle; } /** * Sets locale specific language of resource bundle; optional. * @param bundleLanguage language of the bundle */ public void setBundleLanguage(String bundleLanguage) { this.bundleLanguage = bundleLanguage; } /** * Sets locale specific country of resource bundle; optional. * @param bundleCountry country of the bundle */ public void setBundleCountry(String bundleCountry) { this.bundleCountry = bundleCountry; } /** * Sets locale specific variant of resource bundle; optional. * @param bundleVariant locale variant of resource bundle */ public void setBundleVariant(String bundleVariant) { this.bundleVariant = bundleVariant; } /** * Sets Destination directory; required. * @param toDir destination directory */ public void setToDir(File toDir) { this.toDir = toDir; } /** * Sets starting token to identify keys; required. * @param startToken starting token to identify keys */ public void setStartToken(String startToken) { this.startToken = startToken; } /** * Sets ending token to identify keys; required. 
* @param endToken ending token to identify keys */ public void setEndToken(String endToken) { this.endToken = endToken; } /** * Sets source file encoding scheme; optional, * defaults to encoding of local system. * @param srcEncoding source file encoding */ public void setSrcEncoding(String srcEncoding) { this.srcEncoding = srcEncoding; } /** * Sets destination file encoding scheme; optional. Defaults to source file * encoding * @param destEncoding destination file encoding scheme */ public void setDestEncoding(String destEncoding) { this.destEncoding = destEncoding; } /** * Sets Resource Bundle file encoding scheme; optional. Defaults to source file * encoding * @param bundleEncoding bundle file encoding scheme */ public void setBundleEncoding(String bundleEncoding) { this.bundleEncoding = bundleEncoding; } /** * Whether or not to overwrite existing file irrespective of * whether it is newer than the source file as well as the * resource bundle file. * Defaults to false. * @param forceOverwrite whether or not to overwrite existing files */ public void setForceOverwrite(boolean forceOverwrite) { this.forceOverwrite = forceOverwrite; } /** * Adds a set of files to translate as a nested fileset element. 
* @param set the fileset to be added */ public void addFileset(FileSet set) { filesets.addElement(set); } /** * Check attributes values, load resource map and translate * @throws BuildException if the required attributes are not set * Required : <ul> * <li>bundle</li> * <li>starttoken</li> * <li>endtoken</li> * </ul> */ public void execute() throws BuildException { if (bundle == null) { throw new BuildException("The bundle attribute must be set.", getLocation()); } if (startToken == null) { throw new BuildException("The starttoken attribute must be set.", getLocation()); } if (endToken == null) { throw new BuildException("The endtoken attribute must be set.", getLocation()); } if (bundleLanguage == null) { Locale l = Locale.getDefault(); bundleLanguage = l.getLanguage(); } if (bundleCountry == null) { bundleCountry = Locale.getDefault().getCountry(); } if (bundleVariant == null) { Locale l = new Locale(bundleLanguage, bundleCountry); bundleVariant = l.getVariant(); } if (toDir == null) { throw new BuildException("The todir attribute must be set.", getLocation()); } if (!toDir.exists()) { toDir.mkdirs(); } else if (toDir.isFile()) { throw new BuildException(toDir + " is not a directory"); } if (srcEncoding == null) { srcEncoding = System.getProperty("file.encoding"); } if (destEncoding == null) { destEncoding = srcEncoding; } if (bundleEncoding == null) { bundleEncoding = srcEncoding; } loadResourceMaps(); translate(); } /** * Load resource maps based on resource bundle encoding scheme. 
* The resource bundle lookup searches for resource files with various * suffixes on the basis of (1) the desired locale and (2) the default * locale (basebundlename), in the following order from lower-level * (more specific) to parent-level (less specific): * * basebundlename + "_" + language1 + "_" + country1 + "_" + variant1 * basebundlename + "_" + language1 + "_" + country1 * basebundlename + "_" + language1 * basebundlename * basebundlename + "_" + language2 + "_" + country2 + "_" + variant2 * basebundlename + "_" + language2 + "_" + country2 * basebundlename + "_" + language2 * * To the generated name, a ".properties" string is appeneded and * once this file is located, it is treated just like a properties file * but with bundle encoding also considered while loading. */ private void loadResourceMaps() throws BuildException { Locale locale = new Locale(bundleLanguage, bundleCountry, bundleVariant); String language = locale.getLanguage().length() > 0 ? "_" + locale.getLanguage() : ""; String country = locale.getCountry().length() > 0 ? "_" + locale.getCountry() : ""; String variant = locale.getVariant().length() > 0 ? "_" + locale.getVariant() : ""; String bundleFile = bundle + language + country + variant; processBundle(bundleFile, BUNDLE_SPECIFIED_LANGUAGE_COUNTRY_VARIANT, false); bundleFile = bundle + language + country; processBundle(bundleFile, BUNDLE_SPECIFIED_LANGUAGE_COUNTRY, false); bundleFile = bundle + language; processBundle(bundleFile, BUNDLE_SPECIFIED_LANGUAGE, false); bundleFile = bundle; processBundle(bundleFile, BUNDLE_NOMATCH, false); //Load default locale bundle files //using default file encoding scheme. locale = Locale.getDefault(); language = locale.getLanguage().length() > 0 ? "_" + locale.getLanguage() : ""; country = locale.getCountry().length() > 0 ? "_" + locale.getCountry() : ""; variant = locale.getVariant().length() > 0 ? 
"_" + locale.getVariant() : ""; bundleEncoding = System.getProperty("file.encoding"); bundleFile = bundle + language + country + variant; processBundle(bundleFile, BUNDLE_DEFAULT_LANGUAGE_COUNTRY_VARIANT, false); bundleFile = bundle + language + country; processBundle(bundleFile, BUNDLE_DEFAULT_LANGUAGE_COUNTRY, false); bundleFile = bundle + language; processBundle(bundleFile, BUNDLE_DEFAULT_LANGUAGE, true); } /** * Process each file that makes up this bundle. */ private void processBundle(final String bundleFile, final int i, final boolean checkLoaded) throws BuildException { final File propsFile = getProject().resolveFile(bundleFile + ".properties"); FileInputStream ins = null; try { ins = new FileInputStream(propsFile); loaded = true; bundleLastModified[i] = propsFile.lastModified(); log("Using " + propsFile, Project.MSG_DEBUG); loadResourceMap(ins); } catch (IOException ioe) { log(propsFile + " not found.", Project.MSG_DEBUG); //if all resource files associated with this bundle //have been scanned for and still not able to //find a single resrouce file, throw exception if (!loaded && checkLoaded) { throw new BuildException(ioe.getMessage(), getLocation()); } } } /** * Load resourceMap with key value pairs. Values of existing keys * are not overwritten. Bundle's encoding scheme is used. */ private void loadResourceMap(FileInputStream ins) throws BuildException { try { BufferedReader in = null; InputStreamReader isr = new InputStreamReader(ins, bundleEncoding); in = new BufferedReader(isr); String line = null; while ((line = in.readLine()) != null) { //So long as the line isn't empty and isn't a comment... if (line.trim().length() > 1 && '#' != line.charAt(0) && '!' != line.charAt(0)) { //Legal Key-Value separators are :, = and white space. 
int sepIndex = line.indexOf('='); if (-1 == sepIndex) { sepIndex = line.indexOf(':'); } if (-1 == sepIndex) { for (int k = 0; k < line.length(); k++) { if (Character.isSpaceChar(line.charAt(k))) { sepIndex = k; break; } } } //Only if we do have a key is there going to be a value if (-1 != sepIndex) { String key = line.substring(0, sepIndex).trim(); String value = line.substring(sepIndex + 1).trim(); //Handle line continuations, if any while (value.endsWith("\\")) { value = value.substring(0, value.length() - 1); line = in.readLine(); if (line != null) { value = value + line.trim(); } else { break; } } if (key.length() > 0) { //Has key already been loaded into resourceMap? if (resourceMap.get(key) == null) { resourceMap.put(key, value); } } } } } if (in != null) { in.close(); } } catch (IOException ioe) { throw new BuildException(ioe.getMessage(), getLocation()); } } /** * Reads source file line by line using the source encoding and * searches for keys that are sandwiched between the startToken * and endToken. The values for these keys are looked up from * the hashtable and substituted. If the hashtable doesn't * contain the key, they key itself is used as the value. * Detination files and directories are created as needed. * The destination file is overwritten only if * the forceoverwritten attribute is set to true if * the source file or any associated bundle resource file is * newer than the destination file. */ private void translate() throws BuildException { int filesProcessed = 0; final int size = filesets.size(); for (int i = 0; i < size; i++) { FileSet fs = (FileSet) filesets.elementAt(i); DirectoryScanner ds = fs.getDirectoryScanner(getProject()); String[] srcFiles = ds.getIncludedFiles(); for (int j = 0; j < srcFiles.length; j++) { try { File dest = FILE_UTILS.resolveFile(toDir, srcFiles[j]); //Make sure parent dirs exist, else, create them. 
try { File destDir = new File(dest.getParent()); if (!destDir.exists()) { destDir.mkdirs(); } } catch (Exception e) { log("Exception occurred while trying to check/create " + " parent directory. " + e.getMessage(), Project.MSG_DEBUG); } destLastModified = dest.lastModified(); File src = FILE_UTILS.resolveFile(ds.getBasedir(), srcFiles[j]); srcLastModified = src.lastModified(); //Check to see if dest file has to be recreated boolean needsWork = forceOverwrite || destLastModified < srcLastModified; if (!needsWork) { for (int icounter = 0; icounter < BUNDLE_MAX_ALTERNATIVES; icounter++) { needsWork = (destLastModified < bundleLastModified[icounter]); if (needsWork) { break; } } } if (needsWork) { log("Processing " + srcFiles[j], Project.MSG_DEBUG); translateOneFile(src, dest); ++filesProcessed; } else { log("Skipping " + srcFiles[j] + " as destination file is up to date", Project.MSG_VERBOSE); } } catch (IOException ioe) { throw new BuildException(ioe.getMessage(), getLocation()); } } } log("Translation performed on " + filesProcessed + " file(s).", Project.MSG_DEBUG); } private void translateOneFile(File src, File dest) throws IOException { BufferedWriter out = null; BufferedReader in = null; try { FileOutputStream fos = new FileOutputStream(dest); out = new BufferedWriter(new OutputStreamWriter(fos, destEncoding)); FileInputStream fis = new FileInputStream(src); in = new BufferedReader(new InputStreamReader(fis, srcEncoding)); String line; LineTokenizer lineTokenizer = new LineTokenizer(); lineTokenizer.setIncludeDelims(true); line = lineTokenizer.getToken(in); while ((line) != null) { // 2003-02-21 new replace algorithm by tbee (tbee@tbee.org) // because it wasn't able to replace something like "@aaa;@bbb;" // is there a startToken // and there is still stuff following the startToken int startIndex = line.indexOf(startToken); while (startIndex >= 0 && (startIndex + startToken.length()) <= line.length()) { // the new value, this needs to be here // because it is 
required to calculate the next position to // search from at the end of the loop String replace = null; // we found a starttoken, is there an endtoken following? // start at token+tokenlength because start and end // token may be indentical int endIndex = line.indexOf(endToken, startIndex + startToken.length()); if (endIndex < 0) { startIndex += 1; } else { // grab the token String token = line.substring(startIndex + startToken.length(), endIndex); // If there is a white space or = or :, then // it isn't to be treated as a valid key. boolean validToken = true; for (int k = 0; k < token.length() && validToken; k++) { char c = token.charAt(k); if (c == ':' || c == '=' || Character.isSpaceChar(c)) { validToken = false; } } if (!validToken) { startIndex += 1; } else { // find the replace string if (resourceMap.containsKey(token)) { replace = (String) resourceMap.get(token); } else { log("Replacement string missing for: " + token, Project.MSG_VERBOSE); replace = startToken + token + endToken; } // generate the new line line = line.substring(0, startIndex) + replace + line.substring(endIndex + endToken.length()); // set start position for next search startIndex += replace.length(); } } // find next starttoken startIndex = line.indexOf(startToken, startIndex); } out.write(line); line = lineTokenizer.getToken(in); } } finally { FileUtils.close(in); FileUtils.close(out); } } }
/* * Copyright 2016 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.ospf.controller.lsdb; import com.google.common.base.Objects; import org.jboss.netty.channel.Channel; import org.onosproject.ospf.controller.LsaBin; import org.onosproject.ospf.controller.LsaWrapper; import org.onosproject.ospf.controller.LsdbAge; import org.onosproject.ospf.controller.OspfArea; import org.onosproject.ospf.controller.area.OspfAreaImpl; import org.onosproject.ospf.protocol.util.OspfParameters; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Map; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; /** * Representation of LSDB Aging process. 
*/ public class LsdbAgeImpl implements LsdbAge { private static final Logger log = LoggerFactory.getLogger(LsdbAgeImpl.class); protected int ageCounter = 0; private InternalAgeTimer dbAgeTimer; private ScheduledExecutorService exServiceage; // creating age bins of MAXAGE private Map<Integer, LsaBin> ageBins = new ConcurrentHashMap<>(OspfParameters.MAXAGE); private LsaBin maxAgeBin = new LsaBinImpl(OspfParameters.MAXAGE); private int ageCounterRollOver = 0; private Channel channel = null; private LsaQueueConsumer queueConsumer = null; private BlockingQueue<LsaWrapper> lsaQueue = new ArrayBlockingQueue(1024); private OspfArea ospfArea = null; /** * Creates an instance of LSDB age. * * @param ospfArea OSPF area instance */ public LsdbAgeImpl(OspfArea ospfArea) { // create LSBin's in the HashMap. for (int i = 0; i < OspfParameters.MAXAGE; i++) { LsaBin lsaBin = new LsaBinImpl(i); ageBins.put(i, lsaBin); } this.ospfArea = ospfArea; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } LsdbAgeImpl that = (LsdbAgeImpl) o; return Objects.equal(ageBins, that.ageBins) && Objects.equal(ageCounter, that.ageCounter) && Objects.equal(ageCounterRollOver, that.ageCounterRollOver) && Objects.equal(lsaQueue, lsaQueue); } @Override public int hashCode() { return Objects.hashCode(ageBins, ageCounter, ageCounterRollOver, lsaQueue); } /** * Adds LSA to bin. * * @param binKey key to store in bin * @param lsaBin LSA bin instance */ public void addLsaBin(Integer binKey, LsaBin lsaBin) { if (!ageBins.containsKey(binKey)) { ageBins.put(binKey, lsaBin); } } /** * Gets LSA from Bin. * * @param binKey key * @return bin instance */ public LsaBin getLsaBin(Integer binKey) { return ageBins.get(binKey); } /** * Adds the LSA to maxAge bin. 
* * @param key key * @param wrapper wrapper instance */ public void addLsaToMaxAgeBin(String key, LsaWrapper wrapper) { maxAgeBin.addOspfLsa(key, wrapper); } /** * Removes LSA from Bin. * * @param lsaWrapper wrapper instance */ public void removeLsaFromBin(LsaWrapper lsaWrapper) { if (ageBins.containsKey(lsaWrapper.binNumber())) { LsaBin lsaBin = ageBins.get(lsaWrapper.binNumber()); lsaBin.removeOspfLsa(((OspfAreaImpl) ospfArea).getLsaKey(((LsaWrapperImpl) lsaWrapper).lsaHeader()), lsaWrapper); } } /** * Starts the aging timer and queue consumer. */ public void startDbAging() { startDbAgeTimer(); queueConsumer = new LsaQueueConsumer(lsaQueue, channel, ospfArea); new Thread(queueConsumer).start(); } /** * Gets called every 1 second as part of the timer. */ public void ageLsaAndFlood() { //every 5 mins checksum validation checkAges(); //every 30 mins - flood LSA refreshLsa(); //every 60 mins - flood LSA maxAgeLsa(); if (ageCounter == OspfParameters.MAXAGE) { ageCounter = 0; ageCounterRollOver++; } else { //increment age bin ageCounter++; } } /** * If the LSA have completed the MaxAge - they are moved called stop aging and flooded. */ public void maxAgeLsa() { if (ageCounter == 0) { return; } //Get from Age Bins LsaBin lsaBin = ageBins.get(ageCounter - 1); if (lsaBin == null) { return; } Map lsaBinMap = lsaBin.listOfLsa(); for (Object key : lsaBinMap.keySet()) { LsaWrapper lsa = (LsaWrapper) lsaBinMap.get((String) key); if (lsa.currentAge() == OspfParameters.MAXAGE) { lsa.setLsaProcessing(OspfParameters.MAXAGELSA); log.debug("Lsa picked for maxage flooding. 
Age Counter: {}, AgeCounterRollover: {}, " + "AgeCounterRollover WhenAddedToDb: {}, LSA Type: {}, LSA Key: {}", ageCounter, ageCounterRollOver, lsa.currentAge(), lsa.lsaType(), key); //add it to lsaQueue for processing try { lsaQueue.put(lsa); //remove from bin lsaBin.removeOspfLsa((String) key, lsa); } catch (InterruptedException e) { log.debug("Error::LSDBAge::maxAgeLsa::{}", e.getMessage()); } } } //Get from maxAgeBin Map lsaMaxAgeBinMap = maxAgeBin.listOfLsa(); for (Object key : lsaMaxAgeBinMap.keySet()) { LsaWrapper lsa = (LsaWrapper) lsaMaxAgeBinMap.get((String) key); lsa.setLsaProcessing(OspfParameters.MAXAGELSA); log.debug("Lsa picked for maxage flooding. Age Counter: {}, LSA Type: {}, LSA Key: {}", ageCounter, lsa.lsaType(), key); //add it to lsaQueue for processing try { lsaQueue.put(lsa); //remove from bin maxAgeBin.removeOspfLsa((String) key, lsa); } catch (InterruptedException e) { log.debug("Error::LSDBAge::maxAgeLsa::{}", e.getMessage()); } } } /* * If the LSA is in age bin of 1800 - it's pushed into refresh list. */ public void refreshLsa() { int binNumber; if (ageCounter < OspfParameters.LSREFRESHTIME) { binNumber = ageCounter + OspfParameters.LSREFRESHTIME; } else { binNumber = ageCounter - OspfParameters.LSREFRESHTIME; } LsaBin lsaBin = ageBins.get(binNumber); if (lsaBin == null) { return; } Map lsaBinMap = lsaBin.listOfLsa(); for (Object key : lsaBinMap.keySet()) { LsaWrapper lsa = (LsaWrapper) lsaBinMap.get((String) key); try { if (lsa.isSelfOriginated()) { log.debug("Lsa picked for refreshLsa. binNumber: {}, LSA Type: {}, LSA Key: {}", binNumber, lsa.lsaType(), key); lsa.setLsaProcessing(OspfParameters.REFRESHLSA); lsaQueue.put(lsa); //remove from bin lsaBin.removeOspfLsa((String) key, lsa); } } catch (InterruptedException e) { log.debug("Error::LSDBAge::refreshLsa::{}", e.getMessage()); } } } /** * Verify the checksum for the LSAs who are in bins of 300 and it's multiples. 
*/ public void checkAges() { //evry 5 min age counter + multiples of 300 for (int age = OspfParameters.CHECKAGE; age < OspfParameters.MAXAGE; age += OspfParameters.CHECKAGE) { LsaBin lsaBin = ageBins.get(age2Bin(age)); if (lsaBin == null) { continue; } Map lsaBinMap = lsaBin.listOfLsa(); for (Object key : lsaBinMap.keySet()) { LsaWrapper lsa = (LsaWrapper) lsaBinMap.get((String) key); lsa.setLsaProcessing(OspfParameters.VERIFYCHECKSUM); try { lsaQueue.put(lsa); } catch (InterruptedException e) { log.debug("Error::LSDBAge::checkAges::{}", e.getMessage()); } } } } /** * Starts DB age timer method start the aging task. */ private void startDbAgeTimer() { log.debug("OSPFNbr::startWaitTimer"); dbAgeTimer = new InternalAgeTimer(); //from 1 sec exServiceage = Executors.newSingleThreadScheduledExecutor(); exServiceage.scheduleAtFixedRate(dbAgeTimer, OspfParameters.AGECOUNTER, OspfParameters.AGECOUNTER, TimeUnit.SECONDS); } /** * Stops the aging task. */ private void stopDbAgeTimer() { log.debug("OSPFNbr::stopWaitTimer "); exServiceage.shutdown(); } /** * Gets the netty channel. * * @return netty channel */ public Channel getChannel() { return channel; } /** * Sets the netty channel. * * @param channel netty channel */ public void setChannel(Channel channel) { this.channel = channel; if (queueConsumer != null) { queueConsumer.setChannel(channel); } } /** * Gets the age counter. * * @return ageCounter */ public int getAgeCounter() { return ageCounter; } /** * Gets the age counter roll over value. * * @return the age counter roll over value */ public int getAgeCounterRollOver() { return ageCounterRollOver; } /** * Gets the max age bin. * * @return lsa bin instance */ public LsaBin getMaxAgeBin() { return maxAgeBin; } /** * Gets the bin number. * * @param x Can be either age or ageCounter * @return bin number. 
*/ public int age2Bin(int x) { if (x <= ageCounter) { return (ageCounter - x); } else { return ((OspfParameters.MAXAGE - 1) + (ageCounter - x)); } } /** * Runnable task which runs every second and calls aging process. */ private class InternalAgeTimer implements Runnable { /** * Constructor. */ InternalAgeTimer() { log.debug("Starts::LsdbAge::AgeTimer...!!! "); } @Override public void run() { ageLsaAndFlood(); } } }
package org.nuxeo.ecm.restapi.server.jaxrs.firstvoices; import static ca.firstvoices.security.utils.CustomSecurityConstants.APPROVE; import static ca.firstvoices.security.utils.CustomSecurityConstants.RECORD; import static org.nuxeo.ecm.core.api.security.SecurityConstants.EVERYTHING; import ca.firstvoices.core.io.utils.DialectUtils; import ca.firstvoices.core.io.utils.PrincipalUtils; import ca.firstvoices.core.io.utils.StateUtils; import ca.firstvoices.core.io.utils.filters.NotTrashedFilter; import ca.firstvoices.operations.FVRequestToJoinDialect; import ca.firstvoices.rest.data.Site; import ca.firstvoices.rest.data.SiteList; import ca.firstvoices.rest.data.SiteMembershipRequest; import ca.firstvoices.rest.data.SiteMembershipStatus; import ca.firstvoices.rest.data.SiteMembershipUpdateRequest; import ca.firstvoices.rest.data.SiteMenu; import ca.firstvoices.rest.helpers.DialectMembershipHelper; import ca.firstvoices.rest.helpers.EtagHelper; import ca.firstvoices.rest.helpers.PageProviderHelper; import ca.firstvoices.utils.CustomSecurityConstants; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.databind.ObjectMapper; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.CacheControl; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.apache.commons.logging.Log; import 
org.apache.commons.logging.LogFactory; import org.apache.http.HttpHeaders; import org.nuxeo.ecm.automation.AutomationService; import org.nuxeo.ecm.automation.OperationContext; import org.nuxeo.ecm.automation.OperationException; import org.nuxeo.ecm.automation.features.PrincipalHelper; import org.nuxeo.ecm.core.api.CoreInstance; import org.nuxeo.ecm.core.api.CoreSession; import org.nuxeo.ecm.core.api.DocumentModel; import org.nuxeo.ecm.core.api.DocumentModelList; import org.nuxeo.ecm.core.api.DocumentRef; import org.nuxeo.ecm.core.api.DocumentSecurityException; import org.nuxeo.ecm.core.api.IdRef; import org.nuxeo.ecm.core.api.NuxeoPrincipal; import org.nuxeo.ecm.core.api.UnrestrictedSessionRunner; import org.nuxeo.ecm.core.api.model.Property; import org.nuxeo.ecm.core.api.security.ACE; import org.nuxeo.ecm.core.api.security.ACL; import org.nuxeo.ecm.core.api.security.PermissionProvider; import org.nuxeo.ecm.core.api.security.SecurityConstants; import org.nuxeo.ecm.platform.usermanager.UserManager; import org.nuxeo.ecm.webengine.model.WebObject; import org.nuxeo.ecm.webengine.model.impl.DefaultObject; import org.nuxeo.runtime.api.Framework; @WebObject(type = "site") @Produces({MediaType.APPLICATION_JSON}) public class SitesObject extends DefaultObject { private static final Log log = LogFactory.getLog(SitesObject.class); protected static final String PORTALS_LIST_WORKSPACES_PP = "PORTALS_LIST_WORKSPACES_PP"; protected static final String DIALECTS_LIST_WORKSPACES_PP = "DIALECTS_LIST_WORKSPACES_PP"; // they'll be tried in priority order, with the first one producing a result returning protected static final List<String> SITES_FIND_PPLIST = Arrays.asList("SITES_FIND_PP_PRIORITY1", "SITES_FIND_PP_PRIORITY2", "SITES_FIND_PP_PRIORITY3", "SITES_FIND_PP_PRIORITY4"); protected static final String PORTAL_FOR_DIALECT_PP = "PORTAL_FOR_DIALECT_PP"; /** * Retrieve the PageProvider results for the given PageProvider. 
* * @param doPrivileged if the query should be run in an unrestricted session */ @SuppressWarnings("java:S107") private Response simplePageProviderResponse( HttpServletRequest request, List<String> pageProviderNames, List<String> cacheCheckOnlyPageProviderNames, boolean singleResult, Integer pageSize, Integer currentPage, boolean doPrivileged, ResultFilter rf, Object... params) { ResponseGeneratingQueryRunner runner = new ResponseGeneratingQueryRunner(ctx.getCoreSession(), request, pageProviderNames, cacheCheckOnlyPageProviderNames, singleResult, pageSize, currentPage, rf, params); if (doPrivileged) { runner.runUnrestricted(); } else { runner.run(); } return runner.getResponse(); } @GET @Path("") public Response getSites( @Context HttpServletRequest request, @QueryParam(value = "pageSize") Integer pageSize, @QueryParam(value = "currentPage") Integer currentPage, @QueryParam(value = "mySites") @DefaultValue("false") boolean mySites, @QueryParam(value = "region") @DefaultValue("") String region) { if (mySites) { ResultFilter permissionsGrantedFilter = ((s, pageProviderName, d) -> { DocumentModel dialect = null; try { DocumentRef parentRef = d.getParentRef(); if (parentRef == null) { return false; } dialect = s.getDocument(parentRef); if (dialect == null) { return false; } } catch (DocumentSecurityException e) { return false; } return this.checkSitePermissions(d.getCoreSession(), dialect); }); // Return only sites from "Workspaces" // Will filter based on permissions the user has return simplePageProviderResponse(request, Collections.singletonList(PORTALS_LIST_WORKSPACES_PP), Collections.singletonList(DIALECTS_LIST_WORKSPACES_PP), false, pageSize, currentPage, false, permissionsGrantedFilter); } if (!region.isEmpty()) { // Return sites filtered by a region // Used for YNLC explore languages page return simplePageProviderResponse(request, Collections.singletonList(PORTALS_LIST_WORKSPACES_PP), Collections.singletonList(DIALECTS_LIST_WORKSPACES_PP), false, pageSize, 
currentPage, true, ((s, pageProviderName, d) -> { DocumentModel dialect = DialectUtils.getDialect(d); return region.equals(s.getDocument(dialect.getRef()).getPropertyValue("fvdialect:region")); })); } ResultFilter enabledOrPublishedFilter = ((s, pageProviderName, d) -> { if (pageProviderName.equals(PORTALS_LIST_WORKSPACES_PP)) { DocumentRef parentRef = d.getParentRef(); if (parentRef == null) { return false; } DocumentModel parent = s.getDocument(parentRef); if (parent == null) { return false; } String lcs = parent.getCurrentLifeCycleState(); if (lcs == null) { return false; } return lcs.equalsIgnoreCase("enabled") || lcs.equalsIgnoreCase("published"); } return true; }); return simplePageProviderResponse(request, Collections.singletonList(PORTALS_LIST_WORKSPACES_PP), Collections.singletonList(DIALECTS_LIST_WORKSPACES_PP), false, pageSize, currentPage, true, enabledOrPublishedFilter); } @GET @Path("{site}") public Response getSite( @Context HttpServletRequest request, @PathParam("site") String site, @QueryParam(value = "currentPage") Integer currentPage) { return simplePageProviderResponse(request, SITES_FIND_PPLIST, Collections.emptyList(), true, null, null, true, ACCEPT_ALL, site); } @GET @Path("{site}/administration/joinRequests") public Response listJoinRequests( @Context HttpServletRequest request, @PathParam("site") String site) { Optional<String> dialectId = this.resolveDialectId(request, site); if (!ctx.getPrincipal().isAdministrator() && !callingUserHasLanguageAdministratorPermissions( dialectId.get())) { return Response.status(403).build(); } return new SiteAdministrationDelegate(ctx.getCoreSession(), dialectId).listJoinRequests(); } @GET @Path("{site}/administration/joinRequests/{requestId}") public Response getJoinRequest( @Context HttpServletRequest request, @PathParam("site") String site, @PathParam("requestId") String requestId) { Optional<String> dialectId = this.resolveDialectId(request, site); if (!ctx.getPrincipal().isAdministrator() && 
!callingUserHasLanguageAdministratorPermissions( dialectId.get())) { return Response.status(403).build(); } return new SiteAdministrationDelegate(ctx.getCoreSession(), dialectId).getJoinRequest(requestId); } @POST @Path("{site}/administration/joinRequests/{requestId}") public Response updateJoinRequest( @Context HttpServletRequest request, @PathParam("site") String site, @PathParam("requestId") String requestId, SiteMembershipUpdateRequest updateRequest) { Optional<String> dialectId = this.resolveDialectId(request, site); if (!ctx.getPrincipal().isAdministrator() && !callingUserHasLanguageAdministratorPermissions( dialectId.get())) { return Response.status(403).build(); } return new SiteAdministrationDelegate(ctx.getCoreSession(), dialectId).updateJoinRequest(requestId, updateRequest); } // used for access control checks on administrative actions private boolean callingUserHasLanguageAdministratorPermissions(String dialectId) { final NuxeoPrincipal callingUser = ctx.getPrincipal(); if (dialectId == null) { throw new IllegalArgumentException("no dialect specified"); } DocumentModel dialectDocument = ctx.getCoreSession().getDocument(new IdRef(dialectId)); if (dialectDocument == null) { return false; // if we can't read it with our current permissions, we're definitely not an // admin } String languageAdminGroupName = null; // resolve admin group name for (ACE ace : dialectDocument.getACP().getACL(ACL.LOCAL_ACL).getACEs()) { String acePrincipal = ace.getUsername(); if (acePrincipal.contains(CustomSecurityConstants.LANGUAGE_ADMINS_GROUP) && ace.isGranted()) { languageAdminGroupName = acePrincipal; } } return callingUser.isMemberOf(languageAdminGroupName); } @GET @Path("{site}/membership") public Response getSiteMembership( @Context HttpServletRequest request, @PathParam("site") String site) { final Optional<String> dialectId = resolveDialectId(request, site); if (!dialectId.isPresent()) { return Response.status(404).build(); } 
DialectMembershipHelper.DialectMembershipStatus status = DialectMembershipHelper.getMembershipStatus(ctx.getCoreSession(), ctx.getPrincipal(), dialectId.get()); SiteMembershipStatus membershipStatus = new SiteMembershipStatus(status.getStatus()); return Response.ok(membershipStatus).build(); } @POST @Path("{site}/membership") public Response requestSiteMembership( @Context HttpServletRequest request, @PathParam("site") String site, SiteMembershipRequest membershipRequest) { final Optional<String> dialectId = resolveDialectId(request, site); if (!dialectId.isPresent()) { return Response.status(404).build(); } DialectMembershipHelper.DialectMembershipStatus status = DialectMembershipHelper.getMembershipStatus(ctx.getCoreSession(), ctx.getPrincipal(), dialectId.get()); boolean canProceed = status.equals(DialectMembershipHelper.DialectMembershipStatus.AVAILABLE); if (!canProceed || ctx.getPrincipal().isAdministrator()) { return Response.status(400).entity( "Preconditions for joining this dialect have not been " + "satisfied").build(); } final AutomationService automationService = Framework.getService(AutomationService.class); OperationContext operationContext = new OperationContext(ctx.getCoreSession()); Map<String, Object> params = new HashMap<>(); params.put("dialect", dialectId.get()); params.put("interestReason", membershipRequest.getInterestReason()); params.put("comment", membershipRequest.getComment()); params.put("languageTeam", membershipRequest.isLanguageTeam()); params.put("communityMember", membershipRequest.isCommunityMember()); try { automationService.run(operationContext, FVRequestToJoinDialect.ID, params); } catch (OperationException e) { return Response.status(500).entity( "Failed to request joining this dialect " + membershipRequest.toString()).build(); } return Response.status(200).entity("Your request to join this dialect is now pending").build(); } private Optional<String> resolveDialectId(HttpServletRequest request, String site) { 
ResponseGeneratingQueryRunner queryRunner = new ResponseGeneratingQueryRunner(ctx.getCoreSession(), request, SITES_FIND_PPLIST, Collections.emptyList(), true, null, null, ACCEPT_ALL, site); queryRunner.run(); if (queryRunner.getPortalId() == null || queryRunner.getDialectId() == null) { UnrestrictedSessionRunner checkIdDirectly = new UnrestrictedSessionRunner(ctx.getCoreSession()) { @Override public void run() { DocumentModel res = this.session.getDocument(new IdRef(site)); if (!res.getType().equals("FVDialect")) { throw new IllegalArgumentException("ID is not for a dialect"); } } }; try { checkIdDirectly.runUnrestricted(); return Optional.of(site); } catch (IllegalArgumentException e) { return Optional.of(null); } } return Optional.of(queryRunner.getDialectId()); } private boolean checkSitePermissions(CoreSession userSession, DocumentModel dialect) { NuxeoPrincipal currentUser = userSession.getPrincipal(); return CoreInstance.doPrivileged(dialect.getCoreSession(), s -> s.hasPermission(currentUser, dialect.getRef(), EVERYTHING) || s.hasPermission(currentUser, dialect.getRef(), APPROVE) || s.hasPermission(currentUser, dialect.getRef(), RECORD) || PrincipalUtils.isMemberOf(dialect, currentUser) ); } private static class ResponseGeneratingQueryRunner extends UnrestrictedSessionRunner { private Response response; public Response getResponse() { return response; } public String getDialectId() { return dialectId; } public String getPortalId() { return portalId; } private final HttpServletRequest request; private final List<String> pageProviderNames; private final List<String> cacheCheckOnlyPageProviderNames; private final Integer pageSize; private final Integer currentPage; private final ResultFilter resultFilter; private final boolean singleResult; private final Object[] params; private String dialectId = null; private String portalId = null; @SuppressWarnings("java:S107") ResponseGeneratingQueryRunner( CoreSession session, HttpServletRequest request, List<String> 
pageProviderNames, List<String> cacheCheckOnlyPageProviderNames, boolean singleResult, Integer pageSize, Integer currentPage, ResultFilter rf, Object... params) { super(session); this.request = request; this.pageProviderNames = pageProviderNames; this.cacheCheckOnlyPageProviderNames = cacheCheckOnlyPageProviderNames; this.pageSize = pageSize; this.currentPage = currentPage; this.resultFilter = rf; this.singleResult = singleResult; this.params = params; } /* * When finished, getResponse() can be used to retrieve the response object */ @Override public void run() { List<DocumentModel> results = new LinkedList<>(); List<DocumentModel> cacheComputationResults = new LinkedList<>(); for (String pageProviderName : pageProviderNames) { if (singleResult && !results.isEmpty()) { //we already found a match break; } List<DocumentModel> localResults = PageProviderHelper.getPageProviderResults(session, pageProviderName, pageSize, currentPage, params); localResults .stream() .filter(dm -> resultFilter.accept(session, pageProviderName, dm)) .forEach(r -> { results.add(r); cacheComputationResults.add(r); }); } for (String cacheCheckPageProviderName : cacheCheckOnlyPageProviderNames) { List<DocumentModel> localResults = PageProviderHelper.getPageProviderResults(session, cacheCheckPageProviderName, pageSize, currentPage, params); localResults.stream().filter(dm -> resultFilter.accept(session, cacheCheckPageProviderName, dm)).forEach(cacheComputationResults::add); } String etag = EtagHelper.computeEtag(cacheComputationResults, EtagHelper.DC_MODIFIED_AND_NAME_MAPPER); String ifNoneMatch = request.getHeader(HttpHeaders.IF_NONE_MATCH); if (ifNoneMatch != null && ifNoneMatch.equals(etag)) { this.response = Response.notModified().build(); return; } List<Site> sites = results.stream().map(dm -> { DocumentModel associatedDialect = null; DocumentModel portal = null; if (dm.getType().equalsIgnoreCase("fvportal")) { // We have the portal, get the dialect if (!session.exists(dm.getParentRef())) 
{ // If parent dialect does not exist, something is wrong with FVPortal, skip return null; } associatedDialect = session.getDocument(dm.getParentRef()); portal = dm; } else { // We have the dialect, find the portal associatedDialect = dm; List<DocumentModel> foundPortals = PageProviderHelper.getPageProviderResults(session, PORTAL_FOR_DIALECT_PP, null, null, dm.getId()); if (foundPortals.size() != 1) { return null; // We have an unexpected number of portals -- skip } portal = foundPortals.get(0); } if (associatedDialect == null || portal == null) { // If for whatever reason we could not resolve the portal or dialect, skip return null; } if (this.singleResult) { this.dialectId = associatedDialect.getId(); this.portalId = portal.getId(); } String logoImageId = null; if (portal.isProxy()) { logoImageId = (String) portal.getProperty("fvproxy", "proxied_logo"); } else { // Do not fetch images for private dialects when displayed in the public view logoImageId = (String) portal.getProperty("fv-portal", "logo"); if (logoImageId == null) { // Attempt to fetch from dialect if nothing on portal logoImageId = (String) associatedDialect.getProperty("fvdialect", "logo"); } } Set<String> roles = new HashSet<>(); if (associatedDialect.getACP() != null && associatedDialect.getACP().getACL("local") != null) { for (ACE ace : associatedDialect.getACP().getACL("local").getACEs()) { if (SecurityConstants.READ.equals(ace.getPermission()) && session.getPrincipal() != null && session.getPrincipal().isMemberOf(ace.getUsername())) { roles.add("Member"); } } } // Set groups so that response can be cached but we can still do // conditional presentation based on the user's groups. 
Set<String> groups = new HashSet<>(); if (associatedDialect.getACP() != null && associatedDialect.getACP().getACL("local") != null) { UserManager userManager = Framework.getService(UserManager.class); PrincipalHelper principalHelper = new PrincipalHelper(userManager, Framework.getService(PermissionProvider.class)); groups = principalHelper .getUserAndGroupIdsForPermission(associatedDialect, SecurityConstants.READ, false, false, true) .stream() .filter(id -> id.startsWith("group:")) .collect(Collectors.toSet()); } HashMap<String, String> children = new HashMap<>(); if (singleResult) { // Populate children when getting 1 site back DocumentModelList childDocs = session.getChildren(associatedDialect.getRef(), null, new NotTrashedFilter(), null); for (DocumentModel child : childDocs) { children.put(child.getName(), child.getId()); } } String joinText = (String) associatedDialect.getPropertyValue("fvdialect:join_text"); String contactEmail = (String) associatedDialect.getPropertyValue("fvdialect:contact_email"); boolean hasContactUs = contactEmail != null && contactEmail.length() > 0; SiteMenu siteMenu = null; List<String> activeFeatures = null; if (singleResult) { // Grab site menu if returning a single result String siteMenuJson = (String) associatedDialect.getPropertyValue("fvdialect:site_menu"); ObjectMapper mapper = new ObjectMapper(); if (siteMenuJson != null) { // Construct site menu from configured JSON try { JsonParser jsonParser = mapper.getFactory().createParser(siteMenuJson); siteMenu = mapper.readValue(jsonParser, SiteMenu.class); } catch (JsonParseException jpe) { log.error(String.format("Could not parse JSON for site menu for site id %s", associatedDialect.getId())); log.error(jpe.getMessage()); } catch (IOException e) { log.error(String.format("Could not convert site menu for site id %s", associatedDialect.getId())); log.error(e.getMessage()); } } else { // Use default site menu siteMenu = new SiteMenu(); } // Grab feature list if available Property 
featureProp = associatedDialect.getProperty("fv-features:features"); if (featureProp.getValue() != null && featureProp.isList() && featureProp.isContainer()) { ArrayList<HashMap<String, Object>> featurePropValue = (ArrayList<HashMap<String, Object>>) associatedDialect.getPropertyValue("fv-features:features"); activeFeatures = featurePropValue.stream().filter(f -> (Boolean) f.get("enabled")) .map(f -> f.get("feature_id").toString()).collect(Collectors.toList()); } } return new Site(associatedDialect.getPathAsString(), associatedDialect.getId(), roles, groups, String.valueOf(associatedDialect.getPropertyValue("dc:title")), (String) associatedDialect.getPropertyValue("fvdialect:short_url"), (String) associatedDialect.getPropertyValue("fvdialect:parent_language"), logoImageId, (String) associatedDialect.getProperty("fvdialect", "background_top_image"), (String) associatedDialect.getProperty("fvdialect", "background_top_video"), joinText, children, siteMenu, hasContactUs, activeFeatures, !StateUtils.isPublished(associatedDialect)); }).filter(Objects::nonNull).collect(Collectors.toList()); if (singleResult) { if (!sites.isEmpty()) { Response.ResponseBuilder responseBuilder = Response .ok() .entity(sites.get(0)) .cacheControl(CacheControl.valueOf("must-revalidate")); if (etag != null) { responseBuilder.header(HttpHeaders.ETAG, etag); } this.response = responseBuilder.build(); } else { this.response = Response.status(404).build(); } } else { SiteList mappedResults = new SiteList(sites); Response.ResponseBuilder responseBuilder = Response.ok().entity(mappedResults).cacheControl( CacheControl.valueOf("must-revalidate")); if (etag != null) { responseBuilder.header(HttpHeaders.ETAG, etag); } this.response = responseBuilder.build(); } } } private interface ResultFilter { boolean accept(final CoreSession session, final String pageProviderName, final DocumentModel d); } private static final ResultFilter ACCEPT_ALL = (s, ppName, dm) -> true; }
package org.zstack.storage.primary.local;

import org.springframework.beans.factory.annotation.Autowired;
import org.zstack.core.cloudbus.CloudBusCallBack;
import org.zstack.core.cloudbus.CloudBusListCallBack;
import org.zstack.core.db.SimpleQuery;
import org.zstack.core.db.SimpleQuery.Op;
import org.zstack.core.thread.ChainTask;
import org.zstack.core.thread.SyncTaskChain;
import org.zstack.core.workflow.FlowChainBuilder;
import org.zstack.core.workflow.ShareFlow;
import org.zstack.header.cluster.ClusterInventory;
import org.zstack.header.core.Completion;
import org.zstack.header.core.NopeCompletion;
import org.zstack.header.core.ReturnValueCompletion;
import org.zstack.header.core.validation.Validation;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.host.*;
import org.zstack.header.image.ImageConstant.ImageMediaType;
import org.zstack.header.image.ImageInventory;
import org.zstack.header.image.ImageVO;
import org.zstack.header.image.ImageVO_;
import org.zstack.header.message.MessageReply;
import org.zstack.header.storage.backup.*;
import org.zstack.header.storage.primary.*;
import org.zstack.header.storage.primary.CreateTemplateFromVolumeSnapshotOnPrimaryStorageMsg.SnapshotDownloadInfo;
import org.zstack.header.storage.snapshot.CreateTemplateFromVolumeSnapshotReply.CreateTemplateFromVolumeSnapshotResult;
import org.zstack.header.storage.snapshot.VolumeSnapshotConstant;
import org.zstack.header.storage.snapshot.VolumeSnapshotInventory;
import org.zstack.header.storage.snapshot.VolumeSnapshotVO;
import org.zstack.header.vm.VmInstanceSpec.ImageSpec;
import org.zstack.header.vm.VmInstanceState;
import org.zstack.header.vm.VmInstanceVO;
import org.zstack.header.vm.VmInstanceVO_;
import org.zstack.header.volume.VolumeInventory;
import org.zstack.header.volume.VolumeType;
import org.zstack.header.volume.VolumeVO;
import org.zstack.identity.AccountManager;
import org.zstack.kvm.KVMConstant;
import org.zstack.kvm.KVMHostAsyncHttpCallMsg;
import org.zstack.kvm.KVMHostAsyncHttpCallReply;
import org.zstack.kvm.MergeVolumeSnapshotOnKvmMsg;
import org.zstack.storage.primary.PrimaryStoragePathMaker;
import org.zstack.utils.CollectionUtils;
import org.zstack.utils.DebugUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.function.Function;
import org.zstack.utils.logging.CLogger;
import org.zstack.utils.path.PathUtil;

import java.io.File;
import java.util.*;

/**
 * KVM-specific backend for the local primary storage type.
 *
 * <p>Each operation is translated into an HTTP command sent (via the cloud bus) to the
 * local-storage agent running on a particular KVM host; the {@code *Cmd}/{@code *Rsp}
 * nested classes below are the JSON wire-format DTOs for those agent calls, keyed by the
 * {@code *_PATH} URL constants further down. Field names of the DTOs are part of the
 * agent protocol and must not be renamed.
 *
 * Created by frank on 6/30/2015.
 */
public class LocalStorageKvmBackend extends LocalStorageHypervisorBackend {
    private final static CLogger logger = Utils.getLogger(LocalStorageKvmBackend.class);

    @Autowired
    private AccountManager acntMgr;
    @Autowired
    private LocalStorageFactory localStorageFactory;

    // Marker base class for all agent commands.
    public static class AgentCommand {
    }

    /**
     * Base response for all agent calls. Carries the success/error pair plus the
     * host's total/available physical capacity, which most agent handlers report back.
     */
    public static class AgentResponse {
        private Long totalCapacity;
        private Long availableCapacity;
        // defaults to true: agent only flips it (and fills `error`) on failure
        private boolean success = true;
        private String error;

        public boolean isSuccess() {
            return success;
        }

        public void setSuccess(boolean success) {
            this.success = success;
        }

        public String getError() {
            return error;
        }

        public void setError(String error) {
            this.error = error;
        }

        public Long getTotalCapacity() {
            return totalCapacity;
        }

        public void setTotalCapacity(Long totalCapacity) {
            this.totalCapacity = totalCapacity;
        }

        public Long getAvailableCapacity() {
            return availableCapacity;
        }

        public void setAvailableCapacity(Long availableCapacity) {
            this.availableCapacity = availableCapacity;
        }
    }

    /** Command for INIT_PATH: tells the agent which directory to use as storage root. */
    public static class InitCmd extends AgentCommand {
        private String path;
        private String hostUuid;

        public String getHostUuid() {
            return hostUuid;
        }

        public void setHostUuid(String hostUuid) {
            this.hostUuid = hostUuid;
        }

        public String getPath() {
            return path;
        }

        public void setPath(String path) {
            this.path = path;
        }
    }

    /** Command for CREATE_EMPTY_VOLUME_PATH: allocate a new empty qcow2 volume. */
    public static class CreateEmptyVolumeCmd extends AgentCommand {
        private String installUrl;
        private long size;
        // continuation of CreateEmptyVolumeCmd (declaration opens on the previous line)
        private String accountUuid;
        private String name;
        private String volumeUuid;

        public String getInstallUrl() {
            return installUrl;
        }

        public void setInstallUrl(String installUrl) {
            this.installUrl = installUrl;
        }

        public long getSize() {
            return size;
        }

        public void setSize(long size) {
            this.size = size;
        }

        public String getAccountUuid() {
            return accountUuid;
        }

        public void setAccountUuid(String accountUuid) {
            this.accountUuid = accountUuid;
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public String getVolumeUuid() {
            return volumeUuid;
        }

        public void setVolumeUuid(String volumeUuid) {
            this.volumeUuid = volumeUuid;
        }
    }

    public static class CreateEmptyVolumeRsp extends AgentResponse {
    }

    /** Command for GET_PHYSICAL_CAPACITY_PATH: query a host's disk usage. */
    public static class GetPhysicalCapacityCmd extends AgentCommand {
        private String hostUuid;

        public String getHostUuid() {
            return hostUuid;
        }

        public void setHostUuid(String hostUuid) {
            this.hostUuid = hostUuid;
        }
    }

    /** Command for CREATE_VOLUME_FROM_CACHE_PATH: create a volume backed by a cached image. */
    public static class CreateVolumeFromCacheCmd extends AgentCommand {
        private String templatePathInCache;
        private String installUrl;
        private String volumeUuid;

        public String getTemplatePathInCache() {
            return templatePathInCache;
        }

        public void setTemplatePathInCache(String templatePathInCache) {
            this.templatePathInCache = templatePathInCache;
        }

        public String getInstallUrl() {
            return installUrl;
        }

        public void setInstallUrl(String installUrl) {
            this.installUrl = installUrl;
        }

        public String getVolumeUuid() {
            return volumeUuid;
        }

        public void setVolumeUuid(String volumeUuid) {
            this.volumeUuid = volumeUuid;
        }
    }

    public static class CreateVolumeFromCacheRsp extends AgentResponse {
    }

    /** Command for DELETE_BITS_PATH: remove a file from the host's local storage. */
    public static class DeleteBitsCmd extends AgentCommand {
        private String path;

        public String getPath() {
            return path;
        }

        public void setPath(String path) {
            this.path = path;
        }
    }

    public static class DeleteBitsRsp extends AgentResponse {
    }

    /** Command for CREATE_TEMPLATE_FROM_VOLUME: build an image template from a volume. */
    public static class CreateTemplateFromVolumeCmd extends AgentCommand {
        private String installPath;
        private String volumePath;

        public String getInstallPath() {
            return installPath;
        }

        public void setInstallPath(String installPath) {
            this.installPath = installPath;
        }

        public String getVolumePath() {
            return volumePath;
        }

        public void setVolumePath(String rootVolumePath) {
            this.volumePath = rootVolumePath;
        }
    }

    public static class CreateTemplateFromVolumeRsp extends AgentResponse {
    }

    /** Command for REVERT_SNAPSHOT_PATH: roll a volume back to a snapshot. */
    public static class RevertVolumeFromSnapshotCmd extends AgentCommand {
        private String snapshotInstallPath;

        public String getSnapshotInstallPath() {
            return snapshotInstallPath;
        }

        public void setSnapshotInstallPath(String snapshotInstallPath) {
            this.snapshotInstallPath = snapshotInstallPath;
        }
    }

    public static class RevertVolumeFromSnapshotRsp extends AgentResponse {
        // @Validation: framework checks this is populated on a successful response
        @Validation
        private String newVolumeInstallPath;

        public String getNewVolumeInstallPath() {
            return newVolumeInstallPath;
        }

        public void setNewVolumeInstallPath(String newVolumeInstallPath) {
            this.newVolumeInstallPath = newVolumeInstallPath;
        }
    }

    /** Command for MERGE_SNAPSHOT_PATH: flatten one snapshot into a workspace file. */
    public static class MergeSnapshotCmd extends AgentCommand {
        private String snapshotInstallPath;
        private String workspaceInstallPath;

        public String getSnapshotInstallPath() {
            return snapshotInstallPath;
        }

        public void setSnapshotInstallPath(String snapshotInstallPath) {
            this.snapshotInstallPath = snapshotInstallPath;
        }

        public String getWorkspaceInstallPath() {
            return workspaceInstallPath;
        }

        public void setWorkspaceInstallPath(String workspaceInstallPath) {
            this.workspaceInstallPath = workspaceInstallPath;
        }
    }

    public static class MergeSnapshotRsp extends AgentResponse {
        // size in bytes of the merged result file
        private long size;

        public long getSize() {
            return size;
        }

        public void setSize(long size) {
            this.size = size;
        }
    }

    /** Command for MERGE_AND_REBASE_SNAPSHOT_PATH: merge a snapshot chain into one file. */
    public static class RebaseAndMergeSnapshotsCmd extends AgentCommand {
        private List<String> snapshotInstallPaths;
        private String workspaceInstallPath;

        public List<String> getSnapshotInstallPaths() {
            return snapshotInstallPaths;
        }

        public void setSnapshotInstallPaths(List<String> snapshotInstallPaths) {
            this.snapshotInstallPaths = snapshotInstallPaths;
        }

        public String getWorkspaceInstallPath() {
            return workspaceInstallPath;
        }

        public void setWorkspaceInstallPath(String workspaceInstallPath) {
            this.workspaceInstallPath = workspaceInstallPath;
        }
    }

    public static class RebaseAndMergeSnapshotsRsp extends AgentResponse {
        // size in bytes of the merged result file
        private long size;

        public long getSize() {
            return size;
        }

        public void setSize(long size) {
            this.size = size;
        }
    }

    /** Command for OFFLINE_MERGE_PATH: merge snapshot chains while the VM is stopped. */
    public static class OfflineMergeSnapshotCmd extends AgentCommand {
        private String srcPath;
        private String destPath;
        private boolean fullRebase;

        public boolean isFullRebase() {
            return fullRebase;
        }

        public void setFullRebase(boolean fullRebase) {
            this.fullRebase = fullRebase;
        }

        public String getSrcPath() {
            return srcPath;
        }

        public void setSrcPath(String srcPath) {
            this.srcPath = srcPath;
        }

        public String getDestPath() {
            return destPath;
        }

        public void setDestPath(String destPath) {
            this.destPath = destPath;
        }
    }

    public static class OfflineMergeSnapshotRsp extends AgentResponse {
    }

    // Agent HTTP endpoints; each pairs with the Cmd/Rsp DTOs above.
    public static final String INIT_PATH = "/localstorage/init";
    public static final String GET_PHYSICAL_CAPACITY_PATH = "/localstorage/getphysicalcapacity";
    public static final String CREATE_EMPTY_VOLUME_PATH = "/localstorage/volume/createempty";
    public static final String CREATE_VOLUME_FROM_CACHE_PATH = "/localstorage/volume/createvolumefromcache";
    public static final String DELETE_BITS_PATH = "/localstorage/delete";
    public static final String CREATE_TEMPLATE_FROM_VOLUME = "/localstorage/volume/createtemplate";
    public static final String REVERT_SNAPSHOT_PATH = "/localstorage/snapshot/revert";
    public static final String MERGE_SNAPSHOT_PATH = "/localstorage/snapshot/merge";
    public static final String MERGE_AND_REBASE_SNAPSHOT_PATH = "/localstorage/snapshot/mergeandrebase";
    public static final String OFFLINE_MERGE_PATH = "/localstorage/snapshot/offlinemerge";

    public LocalStorageKvmBackend(PrimaryStorageVO self) {
        super(self);
    }

    /** Absolute install URL for a root volume under this primary storage's root. */
    public String makeRootVolumeInstallUrl(VolumeInventory vol) {
        return PathUtil.join(self.getUrl(),
PrimaryStoragePathMaker.makeRootVolumeInstallPath(vol));
    }

    /** Absolute install URL for a data volume under this primary storage's root. */
    public String makeDataVolumeInstallUrl(String volUuid) {
        return PathUtil.join(self.getUrl(), PrimaryStoragePathMaker.makeDataVolumeInstallPath(volUuid));
    }

    /** Absolute path of an image in this primary storage's image cache. */
    public String makeCachedImageInstallUrl(ImageInventory iminv) {
        return PathUtil.join(self.getUrl(), PrimaryStoragePathMaker.makeCachedImageInstallPath(iminv));
    }

    /** Workspace path used while building a template image from a volume. */
    public String makeTemplateFromVolumeInWorkspacePath(String imageUuid) {
        return PathUtil.join(self.getUrl(), "templateWorkspace", String.format("image-%s", imageUuid), String.format("%s.qcow2", imageUuid));
    }

    /**
     * Snapshot file path: a "snapshots" directory sibling to the volume's own file,
     * with one qcow2 per snapshot uuid.
     */
    public String makeSnapshotInstallPath(VolumeInventory vol, VolumeSnapshotInventory snapshot) {
        String volPath;
        if (VolumeType.Data.toString().equals(vol.getType())) {
            volPath = makeDataVolumeInstallUrl(vol.getUuid());
        } else {
            volPath = makeRootVolumeInstallUrl(vol);
        }
        File volDir = new File(volPath).getParentFile();
        return PathUtil.join(volDir.getAbsolutePath(), "snapshots", String.format("%s.qcow2", snapshot.getUuid()));
    }

    /** Workspace path used while building an image from a snapshot chain. */
    public String makeSnapshotWorkspacePath(String imageUuid) {
        return PathUtil.join(
                self.getUrl(),
                PrimaryStoragePathMaker.makeImageFromSnapshotWorkspacePath(imageUuid),
                String.format("%s.qcow2", imageUuid)
        );
    }

    /**
     * Sums physical capacity over every host in the given clusters by querying each
     * host's local-storage agent; per-host failures are logged and skipped so one bad
     * host does not fail the whole aggregation.
     */
    @Override
    void syncPhysicalCapacityInCluster(List<ClusterInventory> clusters, final ReturnValueCompletion<PhysicalCapacityUsage> completion) {
        List<String> clusterUuids = CollectionUtils.transformToList(clusters, new Function<String, ClusterInventory>() {
            @Override
            public String call(ClusterInventory arg) {
                return arg.getUuid();
            }
        });

        final PhysicalCapacityUsage ret = new PhysicalCapacityUsage();
        SimpleQuery<HostVO> q = dbf.createQuery(HostVO.class);
        q.select(HostVO_.uuid);
        q.add(HostVO_.clusterUuid, Op.IN, clusterUuids);
        final List<String> hostUuids = q.listValue();
        if (hostUuids.isEmpty()) {
            completion.success(ret);
            return;
        }

        List<KVMHostAsyncHttpCallMsg> msgs = CollectionUtils.transformToList(hostUuids, new Function<KVMHostAsyncHttpCallMsg, String>() {
            @Override
            public KVMHostAsyncHttpCallMsg call(String arg) {
                GetPhysicalCapacityCmd cmd = new GetPhysicalCapacityCmd();
                cmd.setHostUuid(arg);

                KVMHostAsyncHttpCallMsg msg = new KVMHostAsyncHttpCallMsg();
                msg.setHostUuid(arg);
                msg.setCommand(cmd);
                msg.setPath(GET_PHYSICAL_CAPACITY_PATH);
                bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, arg);
                return msg;
            }
        });

        bus.send(msgs, new CloudBusListCallBack(completion) {
            @Override
            public void run(List<MessageReply> replies) {
                // FIX: replies arrive positionally aligned with the messages sent (the
                // original code's hostUuids.get(replies.indexOf(reply)) already relied on
                // that alignment). indexOf() was O(n^2) and, worse, pairs the wrong host
                // when two replies compare equal; use the explicit index instead.
                for (int i = 0; i < replies.size(); i++) {
                    MessageReply reply = replies.get(i);
                    String hostUuid = hostUuids.get(i);

                    if (!reply.isSuccess()) {
                        logger.warn(String.format("cannot get the physical capacity of local storage on the host[uuid:%s], %s", hostUuid, reply.getError()));
                        continue;
                    }

                    KVMHostAsyncHttpCallReply r = reply.castReply();
                    AgentResponse rsp = r.toResponse(AgentResponse.class);
                    if (!rsp.isSuccess()) {
                        logger.warn(String.format("cannot get the physical capacity of local storage on the host[uuid:%s], %s", hostUuid, rsp.getError()));
                        continue;
                    }

                    ret.totalPhysicalSize += rsp.getTotalCapacity();
                    ret.availablePhysicalSize += rsp.getAvailableCapacity();
                }

                completion.success(ret);
            }
        });
    }

    /** Dispatch: template-backed root volumes need the image cache; everything else is empty. */
    @Override
    void handle(InstantiateVolumeMsg msg, ReturnValueCompletion<InstantiateVolumeReply> completion) {
        if (msg instanceof InstantiateRootVolumeFromTemplateMsg) {
            createRootVolume((InstantiateRootVolumeFromTemplateMsg) msg, completion);
        } else {
            createEmptyVolume(msg, completion);
        }
    }

    /** Asks the destination host's agent to allocate an empty volume file. */
    private void createEmptyVolume(final InstantiateVolumeMsg msg, final ReturnValueCompletion<InstantiateVolumeReply> completion) {
        String hostUuid = msg.getDestHost().getUuid();
        final CreateEmptyVolumeCmd cmd = new CreateEmptyVolumeCmd();
        cmd.setAccountUuid(acntMgr.getOwnerAccountUuidOfResource(msg.getVolume().getUuid()));
        if (VolumeType.Root.toString().equals(msg.getVolume().getType())) {
            cmd.setInstallUrl(makeRootVolumeInstallUrl(msg.getVolume()));
        } else {
            cmd.setInstallUrl(makeDataVolumeInstallUrl(msg.getVolume().getUuid()));
        }
cmd.setName(msg.getVolume().getName()); cmd.setSize(msg.getVolume().getSize()); cmd.setVolumeUuid(msg.getVolume().getUuid()); KVMHostAsyncHttpCallMsg kmsg = new KVMHostAsyncHttpCallMsg(); kmsg.setHostUuid(hostUuid); kmsg.setPath(CREATE_EMPTY_VOLUME_PATH); kmsg.setCommand(cmd); bus.makeTargetServiceIdByResourceUuid(kmsg, HostConstant.SERVICE_ID, hostUuid); final String finalHostUuid = hostUuid; bus.send(kmsg, new CloudBusCallBack(msg) { @Override public void run(MessageReply reply) { InstantiateVolumeReply r = new InstantiateVolumeReply(); if (!reply.isSuccess()) { completion.fail(reply.getError()); return; } KVMHostAsyncHttpCallReply kr = reply.castReply(); CreateEmptyVolumeRsp rsp = kr.toResponse(CreateEmptyVolumeRsp.class); if (!rsp.isSuccess()) { completion.fail(errf.stringToOperationError( String.format("unable to create an empty volume[uuid:%s, name:%s] on the kvm host[uuid:%s], %s", msg.getVolume().getUuid(), msg.getVolume().getName(), finalHostUuid, rsp.getError()) )); return; } VolumeInventory vol = msg.getVolume(); vol.setInstallPath(cmd.getInstallUrl()); r.setVolume(vol); completion.success(r); } }); } private String getHostUuidByResourceUuid(String resUuid, String resType) { SimpleQuery<LocalStorageResourceRefVO> q = dbf.createQuery(LocalStorageResourceRefVO.class); q.select(LocalStorageResourceRefVO_.hostUuid); q.add(LocalStorageResourceRefVO_.primaryStorageUuid, Op.EQ, self.getUuid()); q.add(LocalStorageResourceRefVO_.resourceUuid, Op.EQ, resUuid); String hostUuid = q.findValue(); if (hostUuid == null) { throw new CloudRuntimeException(String.format("resource[uuid:%s, type:%s] is not any on any host of local primary storage[uuid:%s]", resUuid, resType, self.getUuid())); } return hostUuid; } class CacheInstallPath { String fullPath; String hostUuid; String installPath; CacheInstallPath disassemble() { DebugUtils.Assert(fullPath != null, "fullPath cannot be null"); String[] pair = fullPath.split(";"); installPath = pair[0].replaceFirst("file://", ""); 
hostUuid = pair[1].replaceFirst("hostUuid://", ""); return this; } String makeFullPath() { DebugUtils.Assert(installPath != null, "installPath cannot be null"); DebugUtils.Assert(hostUuid != null, "hostUuid cannot be null"); fullPath = String.format("file://%s;hostUuid://%s", installPath, hostUuid); return fullPath; } } class ImageCache { ImageInventory image; BackupStorageInventory backupStorage; String hostUuid; String primaryStorageInstallPath; String backupStorageInstallPath; void download(final ReturnValueCompletion<String> completion) { DebugUtils.Assert(image != null, "image cannot be null"); DebugUtils.Assert(backupStorage != null, "backup storage cannot be null"); DebugUtils.Assert(hostUuid != null, "host uuid cannot be null"); DebugUtils.Assert(primaryStorageInstallPath != null, "primaryStorageInstallPath cannot be null"); DebugUtils.Assert(backupStorageInstallPath != null, "backupStorageInstallPath cannot be null"); thdf.chainSubmit(new ChainTask(completion) { @Override public String getSyncSignature() { return String.format("download-image-%s-to-localstorage-%s-cache", image.getUuid(), self.getUuid()); } @Override public void run(final SyncTaskChain chain) { SimpleQuery<ImageCacheVO> q = dbf.createQuery(ImageCacheVO.class); q.select(ImageCacheVO_.installUrl); q.add(ImageCacheVO_.primaryStorageUuid, Op.EQ, self.getUuid()); q.add(ImageCacheVO_.imageUuid, Op.EQ, image.getUuid()); q.add(ImageCacheVO_.installUrl, Op.LIKE, String.format("%%hostUuid://%s%%", hostUuid)); String fullPath = q.findValue(); if (fullPath != null) { CacheInstallPath path = new CacheInstallPath(); path.fullPath = fullPath; String installPath = path.disassemble().installPath; logger.debug(String.format("found image[uuid: %s, name: %s] in the image cache of local primary storage[uuid:%s, installPath: %s]", image.getUuid(), image.getName(), self.getUuid(), installPath)); completion.success(installPath); chain.next(); return; } LocalStorageBackupStorageMediator m = 
localStorageFactory.getBackupStorageMediator(KVMConstant.KVM_HYPERVISOR_TYPE, backupStorage.getType());
                    // cache miss: pull the image from backup storage onto the host,
                    // then record the cache entry in the DB
                    m.downloadBits(getSelfInventory(), backupStorage, backupStorageInstallPath, primaryStorageInstallPath, hostUuid, new Completion(completion, chain) {
                        @Override
                        public void success() {
                            ImageCacheVO vo = new ImageCacheVO();
                            vo.setState(ImageCacheState.ready);
                            vo.setMediaType(ImageMediaType.valueOf(image.getMediaType()));
                            vo.setImageUuid(image.getUuid());
                            vo.setPrimaryStorageUuid(self.getUuid());
                            vo.setSize(image.getSize());
                            // md5 is intentionally not computed here (placeholder value)
                            vo.setMd5sum("not calculated");
                            CacheInstallPath path = new CacheInstallPath();
                            path.installPath = primaryStorageInstallPath;
                            path.hostUuid = hostUuid;
                            vo.setInstallUrl(path.makeFullPath());
                            dbf.persist(vo);

                            logger.debug(String.format("downloaded image[uuid:%s, name:%s] to the image cache of local primary storage[uuid: %s, installPath: %s] on host[uuid: %s]",
                                    image.getUuid(), image.getName(), self.getUuid(), primaryStorageInstallPath, hostUuid));
                            completion.success(primaryStorageInstallPath);
                            chain.next();
                        }

                        @Override
                        public void fail(ErrorCode errorCode) {
                            completion.fail(errorCode);
                            chain.next();
                        }
                    });
                }

                @Override
                public String getName() {
                    return getSyncSignature();
                }
            });
        }
    }

    /**
     * Creates a root volume from a template image: first ensures the template is in the
     * host's image cache, then asks the agent to create the volume backed by the cached
     * template. Non-template media fall back to an empty volume.
     */
    private void createRootVolume(InstantiateRootVolumeFromTemplateMsg msg, final ReturnValueCompletion<InstantiateVolumeReply> completion) {
        final ImageSpec ispec = msg.getTemplateSpec();
        final ImageInventory image = ispec.getInventory();

        if (!ImageMediaType.RootVolumeTemplate.toString().equals(image.getMediaType())) {
            createEmptyVolume(msg, completion);
            return;
        }

        SimpleQuery<BackupStorageVO> q = dbf.createQuery(BackupStorageVO.class);
        q.add(BackupStorageVO_.uuid, Op.EQ, ispec.getSelectedBackupStorage().getBackupStorageUuid());
        BackupStorageVO bs = q.find();

        final BackupStorageInventory bsInv = BackupStorageInventory.valueOf(bs);
        final VolumeInventory volume = msg.getVolume();
        final String hostUuid = msg.getDestHost().getUuid();

        FlowChain chain = FlowChainBuilder.newShareFlowChain();
        chain.setName(String.format("kvm-localstorage-create-root-volume-from-image-%s", image.getUuid()));
        chain.then(new ShareFlow() {
            String pathInCache = makeCachedImageInstallUrl(image);
            String installPath;

            @Override
            public void setup() {
                flow(new NoRollbackFlow() {
                    String __name__ = "download-image-to-cache";

                    @Override
                    public void run(final FlowTrigger trigger, Map data) {
                        ImageCache cache = new ImageCache();
                        cache.backupStorage = bsInv;
                        cache.backupStorageInstallPath = ispec.getSelectedBackupStorage().getInstallPath();
                        cache.primaryStorageInstallPath = pathInCache;
                        cache.hostUuid = hostUuid;
                        cache.image = image;
                        cache.download(new ReturnValueCompletion<String>(trigger) {
                            @Override
                            public void success(String returnValue) {
                                // cache may return an existing path different from the guess above
                                pathInCache = returnValue;
                                trigger.next();
                            }

                            @Override
                            public void fail(ErrorCode errorCode) {
                                trigger.fail(errorCode);
                            }
                        });
                    }
                });

                flow(new NoRollbackFlow() {
                    @Override
                    public void run(final FlowTrigger trigger, Map data) {
                        installPath = makeRootVolumeInstallUrl(volume);

                        CreateVolumeFromCacheCmd cmd = new CreateVolumeFromCacheCmd();
                        cmd.setInstallUrl(installPath);
                        cmd.setTemplatePathInCache(pathInCache);
                        cmd.setVolumeUuid(volume.getUuid());

                        KVMHostAsyncHttpCallMsg kmsg = new KVMHostAsyncHttpCallMsg();
                        kmsg.setCommand(cmd);
                        kmsg.setHostUuid(hostUuid);
                        kmsg.setPath(CREATE_VOLUME_FROM_CACHE_PATH);
                        bus.makeTargetServiceIdByResourceUuid(kmsg, HostConstant.SERVICE_ID, hostUuid);
                        bus.send(kmsg, new CloudBusCallBack(trigger) {
                            @Override
                            public void run(MessageReply reply) {
                                if (!reply.isSuccess()) {
                                    trigger.fail(reply.getError());
                                    return;
                                }

                                KVMHostAsyncHttpCallReply kr = reply.castReply();
                                CreateVolumeFromCacheRsp rsp = kr.toResponse(CreateVolumeFromCacheRsp.class);
                                if (!rsp.isSuccess()) {
                                    trigger.fail(errf.stringToOperationError(rsp.getError()));
                                    return;
                                }

                                trigger.next();
                            }
                        });
                    }
                });

                done(new FlowDoneHandler(completion) {
                    @Override
                    public void handle(Map data) {
                        InstantiateVolumeReply reply = new InstantiateVolumeReply();
                        volume.setInstallPath(installPath);
                        reply.setVolume(volume);
                        completion.success(reply);
                    }
                });

                error(new FlowErrorHandler(completion) {
                    @Override
                    public void handle(ErrorCode errCode, Map data) {
                        completion.fail(errCode);
                    }
                });
            }
        }).start();
    }

    /** Asks a host's agent to delete a file; shared by volume/snapshot/bits deletion. */
    private void deleteBits(String path, String hostUuid, final Completion completion) {
        DeleteBitsCmd cmd = new DeleteBitsCmd();
        cmd.setPath(path);
        KVMHostAsyncHttpCallMsg kmsg = new KVMHostAsyncHttpCallMsg();
        kmsg.setHostUuid(hostUuid);
        kmsg.setCommand(cmd);
        kmsg.setPath(DELETE_BITS_PATH);
        bus.makeTargetServiceIdByResourceUuid(kmsg, HostConstant.SERVICE_ID, hostUuid);
        bus.send(kmsg, new CloudBusCallBack(completion) {
            @Override
            public void run(MessageReply reply) {
                if (!reply.isSuccess()) {
                    completion.fail(reply.getError());
                    return;
                }

                KVMHostAsyncHttpCallReply kr = reply.castReply();
                DeleteBitsRsp rsp = kr.toResponse(DeleteBitsRsp.class);
                if (!rsp.isSuccess()) {
                    completion.fail(errf.stringToOperationError(rsp.getError()));
                    return;
                }

                completion.success();
            }
        });
    }

    /** Deletes a volume's file on whichever host owns it. */
    @Override
    void handle(DeleteVolumeOnPrimaryStorageMsg msg, final ReturnValueCompletion<DeleteVolumeOnPrimaryStorageReply> completion) {
        String hostUuid = getHostUuidByResourceUuid(msg.getVolume().getUuid(), VolumeVO.class.getSimpleName());
        deleteBits(msg.getVolume().getInstallPath(), hostUuid, new Completion(completion) {
            @Override
            public void success() {
                DeleteVolumeOnPrimaryStorageReply dreply = new DeleteVolumeOnPrimaryStorageReply();
                completion.success(dreply);
            }

            @Override
            public void fail(ErrorCode errorCode) {
                completion.fail(errorCode);
            }
        });
    }

    /** Downloads a data volume's bits from backup storage onto the target host. */
    @Override
    void handle(final DownloadDataVolumeToPrimaryStorageMsg msg, final ReturnValueCompletion<DownloadDataVolumeToPrimaryStorageReply> completion) {
        BackupStorageVO bsvo = dbf.findByUuid(msg.getBackupStorageRef().getBackupStorageUuid(), BackupStorageVO.class);
        LocalStorageBackupStorageMediator m = localStorageFactory.getBackupStorageMediator(KVMConstant.KVM_HYPERVISOR_TYPE, bsvo.getType());
final String installPath = makeDataVolumeInstallUrl(msg.getVolumeUuid()); m.downloadBits(getSelfInventory(), BackupStorageInventory.valueOf(bsvo), msg.getBackupStorageRef().getInstallPath(), installPath, msg.getHostUuid(), new Completion(completion) { @Override public void success() { DownloadDataVolumeToPrimaryStorageReply reply = new DownloadDataVolumeToPrimaryStorageReply(); reply.setFormat(msg.getImage().getFormat()); reply.setInstallPath(installPath); completion.success(reply); } @Override public void fail(ErrorCode errorCode) { completion.fail(errorCode); } }); } @Override void handle(DeleteBitsOnPrimaryStorageMsg msg, final ReturnValueCompletion<DeleteBitsOnPrimaryStorageReply> completion) { String hostUuid = getHostUuidByResourceUuid(msg.getBitsUuid(), msg.getBitsType()); deleteBits(msg.getInstallPath(), hostUuid, new Completion(completion) { @Override public void success() { DeleteBitsOnPrimaryStorageReply reply = new DeleteBitsOnPrimaryStorageReply(); completion.success(reply); } @Override public void fail(ErrorCode errorCode) { completion.fail(errorCode); } }); } @Override void handle(DownloadIsoToPrimaryStorageMsg msg, final ReturnValueCompletion<DownloadIsoToPrimaryStorageReply> completion) { ImageSpec ispec = msg.getIsoSpec(); SimpleQuery<BackupStorageVO> q = dbf.createQuery(BackupStorageVO.class); q.add(BackupStorageVO_.uuid, Op.EQ, ispec.getSelectedBackupStorage().getBackupStorageUuid()); BackupStorageVO bsvo = q.find(); BackupStorageInventory bsinv = BackupStorageInventory.valueOf(bsvo); ImageCache cache = new ImageCache(); cache.image = ispec.getInventory(); cache.hostUuid = msg.getDestHostUuid(); cache.primaryStorageInstallPath = makeCachedImageInstallUrl(ispec.getInventory()); cache.backupStorage = bsinv; cache.backupStorageInstallPath = ispec.getSelectedBackupStorage().getInstallPath(); cache.download(new ReturnValueCompletion<String>(completion) { @Override public void success(String returnValue) { DownloadIsoToPrimaryStorageReply reply = new 
DownloadIsoToPrimaryStorageReply(); reply.setInstallPath(returnValue); completion.success(reply); } @Override public void fail(ErrorCode errorCode) { completion.fail(errorCode); } }); } @Override void handle(DeleteIsoFromPrimaryStorageMsg msg, ReturnValueCompletion<DeleteIsoFromPrimaryStorageReply> completion) { // The ISO is in the image cache, no need to delete it DeleteIsoFromPrimaryStorageReply reply = new DeleteIsoFromPrimaryStorageReply(); completion.success(reply); } @Override void handle(InitPrimaryStorageOnHostConnectedMsg msg, final ReturnValueCompletion<PhysicalCapacityUsage> completion) { InitCmd cmd = new InitCmd(); cmd.setHostUuid(msg.getHostUuid()); cmd.setPath(self.getUrl()); KVMHostAsyncHttpCallMsg kmsg = new KVMHostAsyncHttpCallMsg(); kmsg.setCommand(cmd); kmsg.setHostUuid(msg.getHostUuid()); kmsg.setPath(INIT_PATH); kmsg.setCommand(cmd); kmsg.setNoStatusCheck(true); bus.makeTargetServiceIdByResourceUuid(kmsg, HostConstant.SERVICE_ID, msg.getHostUuid()); bus.send(kmsg, new CloudBusCallBack(completion) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { completion.fail(reply.getError()); return; } KVMHostAsyncHttpCallReply kr = reply.castReply(); AgentResponse rsp = kr.toResponse(AgentResponse.class); if (!rsp.isSuccess()) { completion.fail(errf.stringToOperationError(rsp.getError())); return; } PhysicalCapacityUsage usage = new PhysicalCapacityUsage(); usage.totalPhysicalSize = rsp.getTotalCapacity(); usage.availablePhysicalSize = rsp.getAvailableCapacity(); completion.success(usage); } }); } @Override void handle(final TakeSnapshotMsg msg, String hostUuid, final ReturnValueCompletion<TakeSnapshotReply> completion) { final VolumeSnapshotInventory sp = msg.getStruct().getCurrent(); VolumeInventory vol = VolumeInventory.valueOf(dbf.findByUuid(sp.getVolumeUuid(), VolumeVO.class)); TakeSnapshotOnHypervisorMsg hmsg = new TakeSnapshotOnHypervisorMsg(); hmsg.setHostUuid(hostUuid); hmsg.setVmUuid(vol.getVmInstanceUuid()); 
        hmsg.setVolume(vol);
        // hypervisor snapshot is named after the snapshot uuid
        hmsg.setSnapshotName(msg.getStruct().getCurrent().getUuid());
        hmsg.setFullSnapshot(msg.getStruct().isFullSnapshot());
        String installPath = makeSnapshotInstallPath(vol, sp);
        hmsg.setInstallPath(installPath);
        bus.makeTargetServiceIdByResourceUuid(hmsg, HostConstant.SERVICE_ID, hostUuid);
        bus.send(hmsg, new CloudBusCallBack(completion) {
            @Override
            public void run(MessageReply reply) {
                if (!reply.isSuccess()) {
                    completion.fail(reply.getError());
                    return;
                }

                // copy the hypervisor's results onto the snapshot inventory
                TakeSnapshotOnHypervisorReply treply = (TakeSnapshotOnHypervisorReply)reply;
                sp.setSize(treply.getSize());
                sp.setPrimaryStorageUuid(self.getUuid());
                sp.setPrimaryStorageInstallPath(treply.getSnapshotInstallPath());
                sp.setType(VolumeSnapshotConstant.HYPERVISOR_SNAPSHOT_TYPE.toString());

                TakeSnapshotReply ret = new TakeSnapshotReply();
                ret.setNewVolumeInstallPath(treply.getNewVolumeInstallPath());
                ret.setInventory(sp);
                completion.success(ret);
            }
        });
    }

    /** Deletes a snapshot's file on the given host. */
    @Override
    void handle(DeleteSnapshotOnPrimaryStorageMsg msg, String hostUuid, final ReturnValueCompletion<DeleteSnapshotOnPrimaryStorageReply> completion) {
        deleteBits(msg.getSnapshot().getPrimaryStorageInstallPath(), hostUuid, new Completion(completion) {
            @Override
            public void success() {
                DeleteSnapshotOnPrimaryStorageReply reply = new DeleteSnapshotOnPrimaryStorageReply();
                completion.success(reply);
            }

            @Override
            public void fail(ErrorCode errorCode) {
                completion.fail(errorCode);
            }
        });
    }

    /**
     * Reverts a volume to a snapshot via the agent; the agent returns the path of the
     * new top volume file created by the revert.
     */
    @Override
    void handle(RevertVolumeFromSnapshotOnPrimaryStorageMsg msg, String hostUuid, final ReturnValueCompletion<RevertVolumeFromSnapshotOnPrimaryStorageReply> completion) {
        VolumeSnapshotInventory sp = msg.getSnapshot();
        RevertVolumeFromSnapshotCmd cmd = new RevertVolumeFromSnapshotCmd();
        cmd.setSnapshotInstallPath(sp.getPrimaryStorageInstallPath());

        KVMHostAsyncHttpCallMsg kmsg = new KVMHostAsyncHttpCallMsg();
        kmsg.setHostUuid(hostUuid);
        kmsg.setPath(REVERT_SNAPSHOT_PATH);
        kmsg.setCommand(cmd);
        bus.makeTargetServiceIdByResourceUuid(kmsg, HostConstant.SERVICE_ID, hostUuid);
        bus.send(kmsg, new CloudBusCallBack(completion) {
            @Override
            public void run(MessageReply reply) {
                if (!reply.isSuccess()) {
                    completion.fail(reply.getError());
                    return;
                }

                KVMHostAsyncHttpCallReply kr = reply.castReply();
                RevertVolumeFromSnapshotRsp rsp = kr.toResponse(RevertVolumeFromSnapshotRsp.class);
                if (!rsp.isSuccess()) {
                    completion.fail(errf.stringToOperationError(rsp.getError()));
                    return;
                }

                RevertVolumeFromSnapshotOnPrimaryStorageReply ret = new RevertVolumeFromSnapshotOnPrimaryStorageReply();
                ret.setNewVolumeInstallPath(rsp.getNewVolumeInstallPath());
                completion.success(ret);
            }
        });
    }

    /**
     * Uploads a snapshot's bits from the host to backup storage. The destination path
     * is asked from the backup storage first (synchronous bus call), then the mediator
     * streams the bits up.
     */
    @Override
    void handle(BackupVolumeSnapshotFromPrimaryStorageToBackupStorageMsg msg, String hostUuid, final ReturnValueCompletion<BackupVolumeSnapshotFromPrimaryStorageToBackupStorageReply> completion) {
        VolumeSnapshotInventory sp = msg.getSnapshot();
        LocalStorageBackupStorageMediator m = localStorageFactory.getBackupStorageMediator(KVMConstant.KVM_HYPERVISOR_TYPE, msg.getBackupStorage().getType());

        BackupStorageAskInstallPathMsg bmsg = new BackupStorageAskInstallPathMsg();
        bmsg.setImageMediaType(VolumeSnapshotVO.class.getSimpleName());
        bmsg.setBackupStorageUuid(msg.getBackupStorage().getUuid());
        bmsg.setImageUuid(sp.getUuid());
        bus.makeTargetServiceIdByResourceUuid(bmsg, BackupStorageConstant.SERVICE_ID, msg.getBackupStorage().getUuid());
        MessageReply br = bus.call(bmsg);
        if (!br.isSuccess()) {
            completion.fail(br.getError());
            return;
        }

        final String installPath = ((BackupStorageAskInstallPathReply)br).getInstallPath();
        m.uploadBits(getSelfInventory(), msg.getBackupStorage(), installPath, sp.getPrimaryStorageInstallPath(), hostUuid, new Completion(completion) {
            @Override
            public void success() {
                BackupVolumeSnapshotFromPrimaryStorageToBackupStorageReply reply = new BackupVolumeSnapshotFromPrimaryStorageToBackupStorageReply();
                reply.setBackupStorageInstallPath(installPath);
                completion.success(reply);
            }

            @Override
            public void fail(ErrorCode errorCode) {
completion.fail(errorCode); } }); } class CreateTemplateOrVolumeFromSnapshots { List<SnapshotDownloadInfo> infos; String hostUuid; boolean needDownload; String primaryStorageInstallPath; private void createTemplateWithDownload(final ReturnValueCompletion<Long> completion) { FlowChain c = FlowChainBuilder.newShareFlowChain(); c.setName("download-snapshots-and-create-template"); c.then(new ShareFlow() { long totalSnapshotSize; List<String> snapshotInstallPaths; long templateSize; @Override public void setup() { for (SnapshotDownloadInfo i : infos) { totalSnapshotSize += i.getSnapshot().getSize(); } flow(new Flow() { String __name__ = "reserve-capacity-for-downloading-snapshots"; @Override public void run(FlowTrigger trigger, Map data) { reserveCapacityOnHost(hostUuid, totalSnapshotSize); trigger.next(); } @Override public void rollback(FlowTrigger trigger, Map data) { returnCapacityToHost(hostUuid, totalSnapshotSize); trigger.rollback(); } }); flow(new Flow() { String __name__ = "download-snapshots"; @Override public void run(final FlowTrigger trigger, Map data) { download(infos.iterator(), new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } private void download(final Iterator<SnapshotDownloadInfo> it, final Completion completion) { if (!it.hasNext()) { Collections.reverse(snapshotInstallPaths); completion.success(); return; } SnapshotDownloadInfo i = it.next(); BackupStorageVO bsvo = dbf.findByUuid(i.getBackupStorageUuid(), BackupStorageVO.class); LocalStorageBackupStorageMediator m = localStorageFactory.getBackupStorageMediator(KVMConstant.KVM_HYPERVISOR_TYPE, bsvo.getType()); final String pinstallPath = makeSnapshotWorkspacePath(i.getSnapshot().getUuid()); m.downloadBits(getSelfInventory(), BackupStorageInventory.valueOf(bsvo), i.getBackupStorageInstallPath(), pinstallPath, hostUuid, new Completion(completion) { @Override public void success() { 
snapshotInstallPaths.add(pinstallPath); download(it, completion); } @Override public void fail(ErrorCode errorCode) { completion.fail(errorCode); } }); } @Override public void rollback(FlowTrigger trigger, Map data) { for (String path : snapshotInstallPaths) { //TODO deleteBits(path, hostUuid, new NopeCompletion()); } trigger.rollback(); } }); flow(new NoRollbackFlow() { String __name__ = "rebase-and-merge-snapshots-on-host"; @Override public void run(final FlowTrigger trigger, Map data) { RebaseAndMergeSnapshotsCmd cmd = new RebaseAndMergeSnapshotsCmd(); cmd.setSnapshotInstallPaths(snapshotInstallPaths); cmd.setWorkspaceInstallPath(primaryStorageInstallPath); KVMHostAsyncHttpCallMsg kmsg = new KVMHostAsyncHttpCallMsg(); kmsg.setCommand(cmd); kmsg.setPath(MERGE_AND_REBASE_SNAPSHOT_PATH); kmsg.setHostUuid(hostUuid); bus.makeTargetServiceIdByResourceUuid(kmsg, HostConstant.SERVICE_ID, hostUuid); bus.send(kmsg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { trigger.fail(reply.getError()); return; } KVMHostAsyncHttpCallReply kr = reply.castReply(); RebaseAndMergeSnapshotsRsp rsp = kr.toResponse(RebaseAndMergeSnapshotsRsp.class); if (!rsp.isSuccess()) { trigger.fail(errf.stringToOperationError(rsp.getError())); return; } templateSize = rsp.getSize(); trigger.next(); } }); } }); flow(new NoRollbackFlow() { String __name__ = "delete-temporary-snapshot-in-workspace"; @Override public void run(FlowTrigger trigger, Map data) { for (String installPath : snapshotInstallPaths) { deleteBits(installPath, hostUuid, new NopeCompletion()); } trigger.next(); } }); done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { completion.success(templateSize); } }); error(new FlowErrorHandler(completion) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(errCode); } }); } }).start(); } private void createTemplateWithoutDownload(final ReturnValueCompletion<Long> completion) { 
VolumeSnapshotInventory latest = infos.get(infos.size()-1).getSnapshot(); MergeSnapshotCmd cmd = new MergeSnapshotCmd(); cmd.setSnapshotInstallPath(latest.getPrimaryStorageInstallPath()); cmd.setWorkspaceInstallPath(primaryStorageInstallPath); KVMHostAsyncHttpCallMsg kmsg = new KVMHostAsyncHttpCallMsg(); kmsg.setCommand(cmd); kmsg.setPath(MERGE_SNAPSHOT_PATH); kmsg.setHostUuid(hostUuid); bus.makeTargetServiceIdByResourceUuid(kmsg, HostConstant.SERVICE_ID, hostUuid); bus.send(kmsg, new CloudBusCallBack(completion) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { completion.fail(reply.getError()); return; } MergeSnapshotRsp rsp = ((KVMHostAsyncHttpCallReply) reply).toResponse(MergeSnapshotRsp.class); if (!rsp.isSuccess()) { completion.fail(errf.stringToOperationError(rsp.getError())); return; } completion.success(rsp.getSize()); } }); } void create(ReturnValueCompletion<Long> completion) { DebugUtils.Assert(infos != null, "infos cannot be null"); DebugUtils.Assert(hostUuid != null, "hostUuid cannot be null"); DebugUtils.Assert(primaryStorageInstallPath != null, "workSpaceInstallPath cannot be null"); if (needDownload) { createTemplateWithDownload(completion); } else { createTemplateWithoutDownload(completion); } } } @Override void handle(final CreateTemplateFromVolumeSnapshotOnPrimaryStorageMsg msg, final String hostUuid, final ReturnValueCompletion<CreateTemplateFromVolumeSnapshotOnPrimaryStorageReply> completion) { final List<SnapshotDownloadInfo> infos = msg.getSnapshotsDownloadInfo(); SimpleQuery<ImageVO> q = dbf.createQuery(ImageVO.class); q.select(ImageVO_.mediaType); q.add(ImageVO_.uuid, Op.EQ, msg.getImageUuid()); final String mediaType = q.findValue().toString(); FlowChain chain = FlowChainBuilder.newShareFlowChain(); chain.setName(String.format("create-template-%s-from-snapshots", msg.getImageUuid())); chain.then(new ShareFlow() { String workSpaceInstallPath = makeSnapshotWorkspacePath(msg.getImageUuid()); long templateSize; class 
Result { BackupStorageInventory backupStorageInventory; String installPath; } List<Result> successBackupStorage = new ArrayList<Result>(); @Override public void setup() { flow(new Flow() { String __name__ = "create-template-on-primary-storage"; @Override public void run(final FlowTrigger trigger, Map data) { CreateTemplateOrVolumeFromSnapshots c = new CreateTemplateOrVolumeFromSnapshots(); c.infos = infos; c.primaryStorageInstallPath = workSpaceInstallPath; c.needDownload = msg.isNeedDownload(); c.hostUuid = hostUuid; c.create(new ReturnValueCompletion<Long>(trigger) { @Override public void success(Long returnValue) { templateSize = returnValue; trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } @Override public void rollback(final FlowTrigger trigger, Map data) { deleteBits(workSpaceInstallPath, hostUuid, new Completion(trigger) { @Override public void success() { trigger.rollback(); } @Override public void fail(ErrorCode errorCode) { //TODO logger.warn(String.format("failed to delete %s on local primary storage[uuid: %s], %s; continue to rollback", workSpaceInstallPath, self.getUuid(), errorCode)); trigger.rollback(); } }); } }); flow(new NoRollbackFlow() { String __name__ = "upload-template-to-backup-storage"; @Override public void run(final FlowTrigger trigger, Map data) { upload(msg.getBackupStorage().iterator(), new Completion() { @Override public void success() { if (successBackupStorage.isEmpty()) { trigger.fail(errf.stringToInternalError("failed to upload the template to all backup storage")); } else { trigger.next(); } } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } private void upload(final Iterator<BackupStorageInventory> it, final Completion completion) { if (!it.hasNext()) { completion.success(); return; } final BackupStorageInventory bs = it.next(); BackupStorageAskInstallPathMsg bmsg = new BackupStorageAskInstallPathMsg(); bmsg.setImageMediaType(mediaType); 
bmsg.setImageUuid(msg.getImageUuid()); bmsg.setBackupStorageUuid(bs.getUuid()); bus.makeTargetServiceIdByResourceUuid(bmsg, BackupStorageConstant.SERVICE_ID, bs.getUuid()); MessageReply br = bus.call(bmsg); if (!br.isSuccess()) { logger.warn(String.format("failed to get install path on backup storage[uuid: %s] for image[uuid:%s]", bs.getUuid(), msg.getImageUuid())); upload(it, completion); return; } final String backupStorageInstallPath = ((BackupStorageAskInstallPathReply) br).getInstallPath(); LocalStorageBackupStorageMediator m = localStorageFactory.getBackupStorageMediator(KVMConstant.KVM_HYPERVISOR_TYPE, bs.getType()); m.uploadBits(getSelfInventory(), bs, backupStorageInstallPath, workSpaceInstallPath, hostUuid, new Completion(completion) { @Override public void success() { Result ret = new Result(); ret.backupStorageInventory = bs; ret.installPath = backupStorageInstallPath; successBackupStorage.add(ret); upload(it, completion); } @Override public void fail(ErrorCode errorCode) { //TODO logger.warn(String.format("failed to upload template[%s] from local primary storage[uuid: %s] to the backup storage[uuid: %s, path: %s]", workSpaceInstallPath, self.getUuid(), bs.getUuid(), backupStorageInstallPath)); upload(it, completion); } }); } }); flow(new NoRollbackFlow() { String __name__ = "delete-temporary-template-from-primary-storage"; @Override public void run(final FlowTrigger trigger, Map data) { deleteBits(workSpaceInstallPath, hostUuid, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { //TODO logger.warn(String.format("failed to delete temporary template[%s] from primary storage[uuid:%s], %s; need a cleanup", workSpaceInstallPath, self.getUuid(), errorCode)); trigger.next(); } }); } }); done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { CreateTemplateFromVolumeSnapshotOnPrimaryStorageReply reply = new CreateTemplateFromVolumeSnapshotOnPrimaryStorageReply(); 
List<CreateTemplateFromVolumeSnapshotResult> ret = CollectionUtils.transformToList(successBackupStorage, new Function<CreateTemplateFromVolumeSnapshotResult, Result>() { @Override public CreateTemplateFromVolumeSnapshotResult call(Result arg) { CreateTemplateFromVolumeSnapshotResult r = new CreateTemplateFromVolumeSnapshotResult(); r.setBackupStorageUuid(arg.backupStorageInventory.getUuid()); r.setInstallPath(arg.installPath); return r; } }); reply.setResults(ret); reply.setSize(templateSize); completion.success(reply); } }); error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(errCode); } }); } }).start(); } @Override void handle(CreateVolumeFromVolumeSnapshotOnPrimaryStorageMsg msg, String hostUuid, final ReturnValueCompletion<CreateVolumeFromVolumeSnapshotOnPrimaryStorageReply> completion) { final CreateTemplateOrVolumeFromSnapshots c = new CreateTemplateOrVolumeFromSnapshots(); c.hostUuid = hostUuid; c.needDownload = msg.isNeedDownload(); c.primaryStorageInstallPath = makeDataVolumeInstallUrl(msg.getVolumeUuid()); c.infos = msg.getSnapshots(); c.create(new ReturnValueCompletion<Long>(completion) { @Override public void success(Long returnValue) { CreateVolumeFromVolumeSnapshotOnPrimaryStorageReply reply = new CreateVolumeFromVolumeSnapshotOnPrimaryStorageReply(); reply.setInstallPath(c.primaryStorageInstallPath); reply.setSize(returnValue); completion.success(reply); } @Override public void fail(ErrorCode errorCode) { completion.fail(errorCode); } }); } @Override void handle(MergeVolumeSnapshotOnPrimaryStorageMsg msg, String hostUuid, final ReturnValueCompletion<MergeVolumeSnapshotOnPrimaryStorageReply> completion) { boolean offline = true; VolumeInventory volume = msg.getTo(); VolumeSnapshotInventory sp = msg.getFrom(); if (volume.getVmInstanceUuid() != null) { SimpleQuery<VmInstanceVO> q = dbf.createQuery(VmInstanceVO.class); q.select(VmInstanceVO_.state); q.add(VmInstanceVO_.uuid, Op.EQ, 
volume.getVmInstanceUuid()); VmInstanceState state = q.findValue(); offline = (state == VmInstanceState.Stopped); } final MergeVolumeSnapshotOnPrimaryStorageReply ret = new MergeVolumeSnapshotOnPrimaryStorageReply(); if (offline) { OfflineMergeSnapshotCmd cmd = new OfflineMergeSnapshotCmd(); cmd.setFullRebase(msg.isFullRebase()); cmd.setSrcPath(sp.getPrimaryStorageInstallPath()); cmd.setDestPath(volume.getInstallPath()); KVMHostAsyncHttpCallMsg kmsg = new KVMHostAsyncHttpCallMsg(); kmsg.setCommand(cmd); kmsg.setPath(OFFLINE_MERGE_PATH); kmsg.setHostUuid(hostUuid); bus.makeTargetServiceIdByResourceUuid(kmsg, HostConstant.SERVICE_ID, hostUuid); bus.send(kmsg, new CloudBusCallBack(completion) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { completion.fail(reply.getError()); return; } OfflineMergeSnapshotRsp rsp = ((KVMHostAsyncHttpCallReply)reply).toResponse(OfflineMergeSnapshotRsp.class); if (!rsp.isSuccess()) { completion.fail(errf.stringToOperationError(rsp.getError())); return; } completion.success(ret); } }); } else { MergeVolumeSnapshotOnKvmMsg kmsg = new MergeVolumeSnapshotOnKvmMsg(); kmsg.setFullRebase(msg.isFullRebase()); kmsg.setHostUuid(hostUuid); kmsg.setFrom(sp); kmsg.setTo(volume); bus.makeTargetServiceIdByResourceUuid(kmsg, HostConstant.SERVICE_ID, hostUuid); bus.send(kmsg, new CloudBusCallBack(completion) { @Override public void run(MessageReply reply) { if (reply.isSuccess()) { completion.success(ret); } else { completion.fail(reply.getError()); } } }); } } @Override public void detachHook(String clusterUuid, final Completion completion) { SimpleQuery<HostVO> q = dbf.createQuery(HostVO.class); q.select(HostVO_.uuid); q.add(HostVO_.clusterUuid, Op.EQ, clusterUuid); final List<String> hostUuids = q.listValue(); if (hostUuids.isEmpty()) { completion.success(); return; } SimpleQuery<LocalStorageHostRefVO> refq = dbf.createQuery(LocalStorageHostRefVO.class); refq.add(LocalStorageHostRefVO_.primaryStorageUuid, Op.EQ, 
self.getUuid()); refq.add(LocalStorageHostRefVO_.hostUuid, Op.IN, hostUuids); List<LocalStorageHostRefVO> refs = refq.list(); if (!refs.isEmpty()) { dbf.removeCollection(refs, LocalStorageHostRefVO.class); long total = 0; for (LocalStorageHostRefVO ref : refs) { total += ref.getTotalCapacity(); } // after detaching, total capacity on those hosts should be deducted // from both total and available capacity of the primary storage decreaseCapacity(total, total, null, null); } syncPhysicalCapacity(new ReturnValueCompletion<PhysicalCapacityUsage>(completion) { @Override public void success(PhysicalCapacityUsage returnValue) { setCapacity(null, null, returnValue.totalPhysicalSize, returnValue.availablePhysicalSize); completion.success(); } @Override public void fail(ErrorCode errorCode) { logger.warn(String.format("failed to sync the physical capacity on the local primary storage[uuid:%s], %s", self.getUuid(), errorCode)); completion.success(); } }); } @Override public void attachHook(String clusterUuid, final Completion completion) { SimpleQuery<HostVO> q = dbf.createQuery(HostVO.class); q.select(HostVO_.uuid); q.add(HostVO_.clusterUuid, Op.EQ, clusterUuid); final List<String> hostUuids = q.listValue(); if (hostUuids.isEmpty()) { completion.success(); return; } List<KVMHostAsyncHttpCallMsg> msgs = CollectionUtils.transformToList(hostUuids, new Function<KVMHostAsyncHttpCallMsg, String>() { @Override public KVMHostAsyncHttpCallMsg call(String arg) { InitCmd cmd = new InitCmd(); cmd.path = self.getUrl(); cmd.hostUuid = arg; KVMHostAsyncHttpCallMsg msg = new KVMHostAsyncHttpCallMsg(); msg.setCommand(cmd); msg.setPath(INIT_PATH); msg.setHostUuid(arg); bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, arg); return msg; } }); bus.send(msgs, new CloudBusListCallBack(completion) { @Override public void run(List<MessageReply> replies) { long total = 0; long avail = 0; List<LocalStorageHostRefVO> refs = new ArrayList<LocalStorageHostRefVO>(); for (MessageReply 
reply : replies) { String hostUuid = hostUuids.get(replies.indexOf(reply)); if (!reply.isSuccess()) { logger.warn(String.format("cannot get the physical capacity of local storage on the host[uuid:%s], %s", hostUuid, reply.getError())); continue; } KVMHostAsyncHttpCallReply r = reply.castReply(); AgentResponse rsp = r.toResponse(AgentResponse.class); if (!rsp.isSuccess()) { logger.warn(String.format("cannot get the physical capacity of local storage on the host[uuid:%s], %s", hostUuid, rsp.getError())); continue; } if (dbf.isExist(hostUuid, LocalStorageHostRefVO.class)) { logger.debug(String.format("host[uuid :%s] is already in the local primary storage[uuid: %s]", hostUuid, self.getUuid())); continue; } total += rsp.getTotalCapacity(); avail += rsp.getAvailableCapacity(); LocalStorageHostRefVO ref = new LocalStorageHostRefVO(); ref.setPrimaryStorageUuid(self.getUuid()); ref.setHostUuid(hostUuid); ref.setAvailablePhysicalCapacity(rsp.getAvailableCapacity()); ref.setAvailableCapacity(rsp.getAvailableCapacity()); ref.setTotalCapacity(rsp.getTotalCapacity()); ref.setTotalPhysicalCapacity(rsp.getTotalCapacity()); refs.add(ref); } dbf.persistCollection(refs); increaseCapacity(total, avail, total, avail); completion.success(); } }); } @Override protected void handle(final CreateTemplateFromVolumeOnPrimaryStorageMsg msg) { final LocalStorageResourceRefVO ref = dbf.findByUuid(msg.getVolumeInventory().getUuid(), LocalStorageResourceRefVO.class); final CreateTemplateFromVolumeOnPrimaryStorageReply reply = new CreateTemplateFromVolumeOnPrimaryStorageReply(); FlowChain chain = FlowChainBuilder.newShareFlowChain(); chain.setName(String.format("create-image-%s-from-volume-%s", msg.getImageInventory().getUuid(), msg.getVolumeInventory().getUuid())); chain.then(new ShareFlow() { String temporaryTemplatePath = makeTemplateFromVolumeInWorkspacePath(msg.getImageInventory().getUuid()); String backupStorageInstallPath; @Override public void setup() { flow(new Flow() { String __name__ = 
"reserve-capacity-on-the-host-for-template"; @Override public void run(FlowTrigger trigger, Map data) { reserveCapacityOnHost(ref.getHostUuid(), msg.getVolumeInventory().getSize()); trigger.next(); } @Override public void rollback(FlowTrigger trigger, Map data) { returnCapacityToHost(ref.getHostUuid(), msg.getVolumeInventory().getSize()); trigger.rollback(); } }); flow(new Flow() { String __name__ = "create-temporary-template"; @Override public void run(final FlowTrigger trigger, Map data) { CreateTemplateFromVolumeCmd cmd = new CreateTemplateFromVolumeCmd(); cmd.setInstallPath(temporaryTemplatePath); cmd.setVolumePath(msg.getVolumeInventory().getInstallPath()); KVMHostAsyncHttpCallMsg kmsg = new KVMHostAsyncHttpCallMsg(); kmsg.setHostUuid(ref.getHostUuid()); kmsg.setPath(CREATE_TEMPLATE_FROM_VOLUME); kmsg.setCommand(cmd); bus.makeTargetServiceIdByResourceUuid(kmsg, HostConstant.SERVICE_ID, ref.getHostUuid()); bus.send(kmsg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { trigger.fail(reply.getError()); return; } KVMHostAsyncHttpCallReply kr = reply.castReply(); CreateTemplateFromVolumeRsp rsp = kr.toResponse(CreateTemplateFromVolumeRsp.class); if (!rsp.isSuccess()) { trigger.fail(errf.stringToOperationError(rsp.getError())); return; } trigger.next(); } }); } @Override public void rollback(final FlowTrigger trigger, Map data) { deleteBits(temporaryTemplatePath, ref.getHostUuid(), new Completion(trigger) { @Override public void success() { trigger.rollback(); } @Override public void fail(ErrorCode errorCode) { logger.warn(String.format("failed to delete %s on primary storage[uuid: %s], %s; continue to rollback", temporaryTemplatePath, self.getUuid(), errorCode)); trigger.rollback(); } }); } }); flow(new NoRollbackFlow() { String __name__ = "upload-template-to-backup-storage"; @Override public void run(final FlowTrigger trigger, Map data) { BackupStorageAskInstallPathMsg bmsg = new 
BackupStorageAskInstallPathMsg(); bmsg.setBackupStorageUuid(msg.getBackupStorageUuid()); bmsg.setImageMediaType(msg.getImageInventory().getMediaType()); bmsg.setImageUuid(msg.getImageInventory().getUuid()); bus.makeTargetServiceIdByResourceUuid(bmsg, BackupStorageConstant.SERVICE_ID, msg.getBackupStorageUuid()); MessageReply br = bus.call(bmsg); if (!br.isSuccess()) { trigger.fail(br.getError()); return; } backupStorageInstallPath = ((BackupStorageAskInstallPathReply) br).getInstallPath(); BackupStorageVO bsvo = dbf.findByUuid(msg.getBackupStorageUuid(), BackupStorageVO.class); LocalStorageBackupStorageMediator m = localStorageFactory.getBackupStorageMediator(KVMConstant.KVM_HYPERVISOR_TYPE, bsvo.getType()); m.uploadBits(getSelfInventory(), BackupStorageInventory.valueOf(bsvo), backupStorageInstallPath, temporaryTemplatePath, ref.getHostUuid(), new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }); flow(new NoRollbackFlow() { String __name__ = "delete-temporary-template-on-primary-storage"; @Override public void run(final FlowTrigger trigger, Map data) { deleteBits(temporaryTemplatePath, ref.getHostUuid(), new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { //TODO: cleanup logger.warn(String.format("failed to delete %s on local primary storage[uuid: %s], %s; need a cleanup", temporaryTemplatePath, self.getUuid(), errorCode)); trigger.next(); } }); } }); flow(new NoRollbackFlow() { String __name__ = "return-capacity-of-temporary-template-to-host"; @Override public void run(FlowTrigger trigger, Map data) { returnCapacityToHost(ref.getHostUuid(), msg.getVolumeInventory().getSize()); trigger.next(); } }); done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { reply.setFormat(msg.getVolumeInventory().getFormat()); 
reply.setTemplateBackupStorageInstallPath(backupStorageInstallPath); bus.reply(msg, reply); } }); error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { reply.setError(errCode); bus.reply(msg, reply); } }); } }).start(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.accumulo.harness;

import static com.google.common.base.Preconditions.checkState;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.Assert.fail;

import java.io.IOException;

import org.apache.accumulo.cluster.AccumuloCluster;
import org.apache.accumulo.cluster.ClusterControl;
import org.apache.accumulo.cluster.ClusterUser;
import org.apache.accumulo.cluster.ClusterUsers;
import org.apache.accumulo.cluster.standalone.StandaloneAccumuloCluster;
import org.apache.accumulo.core.client.ClientConfiguration;
import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.admin.SecurityOperations;
import org.apache.accumulo.core.client.admin.TableOperations;
import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
import org.apache.accumulo.core.client.security.tokens.KerberosToken;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.security.TablePermission;
import org.apache.accumulo.harness.conf.AccumuloClusterConfiguration;
import org.apache.accumulo.harness.conf.AccumuloClusterPropertyConfiguration;
import org.apache.accumulo.harness.conf.AccumuloMiniClusterConfiguration;
import org.apache.accumulo.harness.conf.StandaloneAccumuloClusterConfiguration;
import org.apache.accumulo.minicluster.impl.MiniAccumuloClusterImpl;
import org.apache.accumulo.minicluster.impl.MiniAccumuloConfigImpl;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.Before;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * General Integration-Test base class that provides access to an Accumulo instance for testing. This instance could be MAC or a standalone instance.
 *
 * <p>
 * Lifecycle: {@link #setUp()} (per-class) resolves the configured cluster type and, for MINI with Kerberos enabled, starts a {@link TestingKdc};
 * {@link #setupCluster()} (per-test) instantiates/starts (or cleans up) the cluster; {@link #teardownCluster()} stops dynamic clusters or removes
 * test-created tables/users on standalone ones. Cluster state is held in static fields, so all tests in a JVM share one cluster/KDC.
 */
public abstract class AccumuloClusterHarness extends AccumuloITBase implements MiniClusterConfigurationCallback, ClusterUsers {
  private static final Logger log = LoggerFactory.getLogger(AccumuloClusterHarness.class);
  // Canonical string form of boolean true, compared against the kerberos-for-IT system property below.
  private static final String TRUE = Boolean.toString(true);

  /**
   * The flavor of Accumulo cluster the tests run against: a MiniAccumuloCluster (MINI) or a pre-existing standalone deployment (STANDALONE).
   */
  public static enum ClusterType {
    MINI, STANDALONE;

    /**
     * @return true when the harness owns the cluster lifecycle (start/stop per test), i.e. MINI.
     */
    public boolean isDynamic() {
      return this == MINI;
    }
  }

  // Set true once setUp() has run; guards the static accessors via checkState.
  private static boolean initialized = false;

  // Shared (per-JVM) cluster under test; assigned in setupCluster().
  protected static AccumuloCluster cluster;
  // Cluster flavor resolved from configuration in setUp().
  protected static ClusterType type;
  // Property-driven configuration describing how to obtain/construct the cluster.
  protected static AccumuloClusterPropertyConfiguration clusterConf;
  // Mini KDC used only for MINI + kerberos runs; null otherwise.
  protected static TestingKdc krb;

  /**
   * Resolves cluster configuration and, when running MINI with the kerberos-for-IT system property set, starts a shared {@link TestingKdc}.
   */
  @BeforeClass
  public static void setUp() throws Exception {
    clusterConf = AccumuloClusterPropertyConfiguration.get();
    type = clusterConf.getClusterType();

    if (ClusterType.MINI == type && TRUE.equals(System.getProperty(MiniClusterHarness.USE_KERBEROS_FOR_IT_OPTION))) {
      krb = new TestingKdc();
      krb.start();
      log.info("MiniKdc started");
    }

    initialized = true;
  }

  /**
   * Stops the shared KDC, if one was started in {@link #setUp()}.
   */
  @AfterClass
  public static void tearDownKdc() throws Exception {
    if (null != krb) {
      krb.stop();
    }
  }

  /**
   * The {@link TestingKdc} used for this {@link AccumuloCluster}. Might be null.
   */
  public static TestingKdc getKdc() {
    return krb;
  }

  /**
   * Builds (MINI) or adopts (STANDALONE) the cluster for the current test, performing any Kerberos logins and one-time setup required.
   *
   * <p>
   * Skips the test (via JUnit Assume) when {@link #canRunTest(ClusterType)} returns false for the configured cluster type.
   */
  @Before
  public void setupCluster() throws Exception {
    // Before we try to instantiate the cluster, check to see if the test even wants to run against this type of cluster
    Assume.assumeTrue(canRunTest(type));

    switch (type) {
      case MINI:
        MiniClusterHarness miniClusterHarness = new MiniClusterHarness();
        // Intrinsically performs the callback to let tests alter MiniAccumuloConfig and core-site.xml
        MiniAccumuloClusterImpl impl = miniClusterHarness.create(this, getAdminToken(), krb);
        cluster = impl;
        // MAC makes a ClientConf for us, just set it
        ((AccumuloMiniClusterConfiguration) clusterConf).setClientConf(impl.getClientConfig());
        // Login as the "root" user
        if (null != krb) {
          ClusterUser rootUser = krb.getRootUser();
          // Log in the 'client' user
          UserGroupInformation.loginUserFromKeytab(rootUser.getPrincipal(), rootUser.getKeytab().getAbsolutePath());
        }
        break;
      case STANDALONE:
        StandaloneAccumuloClusterConfiguration conf = (StandaloneAccumuloClusterConfiguration) clusterConf;
        ClientConfiguration clientConf = conf.getClientConf();
        StandaloneAccumuloCluster standaloneCluster = new StandaloneAccumuloCluster(conf.getInstance(), clientConf, conf.getTmpDirectory(), conf.getUsers(),
            conf.getAccumuloServerUser());
        // If these are provided in the configuration, pass them into the cluster
        standaloneCluster.setAccumuloHome(conf.getAccumuloHome());
        standaloneCluster.setClientAccumuloConfDir(conf.getClientAccumuloConfDir());
        standaloneCluster.setServerAccumuloConfDir(conf.getServerAccumuloConfDir());
        standaloneCluster.setHadoopConfDir(conf.getHadoopConfDir());

        // For SASL, we need to get the Hadoop configuration files as well otherwise UGI will log in as SIMPLE instead of KERBEROS
        Configuration hadoopConfiguration = standaloneCluster.getHadoopConfiguration();
        if (clientConf.getBoolean(ClientProperty.INSTANCE_RPC_SASL_ENABLED.getKey(), false)) {
          UserGroupInformation.setConfiguration(hadoopConfiguration);
          // Login as the admin user to start the tests
          UserGroupInformation.loginUserFromKeytab(conf.getAdminPrincipal(), conf.getAdminKeytab().getAbsolutePath());
        }

        // Set the implementation
        cluster = standaloneCluster;
        break;
      default:
        throw new RuntimeException("Unhandled type");
    }

    if (type.isDynamic()) {
      // MINI: the harness owns the lifecycle, so start the cluster fresh.
      cluster.start();
    } else {
      // STANDALONE: the cluster is shared and long-lived, so scrub leftovers from earlier runs instead.
      log.info("Removing tables which appear to be from a previous test run");
      cleanupTables();
      log.info("Removing users which appear to be from a previous test run");
      cleanupUsers();
    }

    switch (type) {
      case MINI:
        if (null != krb) {
          final String traceTable = Property.TRACE_TABLE.getDefaultValue();
          final ClusterUser systemUser = krb.getAccumuloServerUser(), rootUser = krb.getRootUser();
          // Login as the trace user
          UserGroupInformation.loginUserFromKeytab(systemUser.getPrincipal(), systemUser.getKeytab().getAbsolutePath());
          // Open a connector as the system user (ensures the user will exist for us to assign permissions to)
          Connector conn = cluster.getConnector(systemUser.getPrincipal(), new KerberosToken(systemUser.getPrincipal(), systemUser.getKeytab(), true));
          // Then, log back in as the "root" user and do the grant
          UserGroupInformation.loginUserFromKeytab(rootUser.getPrincipal(), rootUser.getKeytab().getAbsolutePath());
          conn = getConnector();
          // Create the trace table
          conn.tableOperations().create(traceTable);
          // Trace user (which is the same kerberos principal as the system user, but using a normal KerberosToken) needs
          // to have the ability to read, write and alter the trace table
          conn.securityOperations().grantTablePermission(systemUser.getPrincipal(), traceTable, TablePermission.READ);
          conn.securityOperations().grantTablePermission(systemUser.getPrincipal(), traceTable, TablePermission.WRITE);
          conn.securityOperations().grantTablePermission(systemUser.getPrincipal(), traceTable, TablePermission.ALTER_TABLE);
        }
        break;
      default:
        // do nothing
    }
  }

  /**
   * Deletes every table whose name starts with this test class's simple name plus an underscore (the naming convention used by these ITs).
   */
  public void cleanupTables() throws Exception {
    final String tablePrefix = this.getClass().getSimpleName() + "_";
    final TableOperations tops = getConnector().tableOperations();
    for (String table : tops.list()) {
      if (table.startsWith(tablePrefix)) {
        log.debug("Removing table {}", table);
        tops.delete(table);
      }
    }
  }

  /**
   * Drops every local user whose name starts with this test class's simple name.
   */
  public void cleanupUsers() throws Exception {
    final String userPrefix = this.getClass().getSimpleName();
    final SecurityOperations secOps = getConnector().securityOperations();
    for (String user : secOps.listLocalUsers()) {
      if (user.startsWith(userPrefix)) {
        log.info("Dropping local user {}", user);
        secOps.dropLocalUser(user);
      }
    }
  }

  /**
   * Stops a harness-owned (MINI) cluster, or removes test-created tables/users from a standalone one.
   */
  @After
  public void teardownCluster() throws Exception {
    if (null != cluster) {
      if (type.isDynamic()) {
        cluster.stop();
      } else {
        log.info("Removing tables which appear to be from the current test");
        cleanupTables();
        log.info("Removing users which appear to be from the current test");
        cleanupUsers();
      }
    }
  }

  /**
   * @return the cluster under test; only valid after {@link #setUp()} has run.
   */
  public static AccumuloCluster getCluster() {
    checkState(initialized);
    return cluster;
  }

  /**
   * @return the control handle for the cluster under test; only valid after {@link #setUp()} has run.
   */
  public static ClusterControl getClusterControl() {
    checkState(initialized);
    return cluster.getClusterControl();
  }

  /**
   * @return the configured cluster type; only valid after {@link #setUp()} has run.
   */
  public static ClusterType getClusterType() {
    checkState(initialized);
    return type;
  }

  /**
   * @return the admin principal from the cluster configuration; only valid after {@link #setUp()} has run.
   */
  public static String getAdminPrincipal() {
    checkState(initialized);
    return clusterConf.getAdminPrincipal();
  }

  /**
   * @return the admin authentication token from the cluster configuration; only valid after {@link #setUp()} has run.
   */
  public static AuthenticationToken getAdminToken() {
    checkState(initialized);
    return clusterConf.getAdminToken();
  }

  /**
   * Returns the administrative user for this cluster: password-based for non-kerberos MINI, keytab-based otherwise.
   */
  @Override
  public ClusterUser getAdminUser() {
    switch (type) {
      case MINI:
        if (null == krb) {
          PasswordToken passwordToken = (PasswordToken) getAdminToken();
          return new ClusterUser(getAdminPrincipal(), new String(passwordToken.getPassword(), UTF_8));
        }
        return krb.getRootUser();
      case STANDALONE:
        return new ClusterUser(getAdminPrincipal(), ((StandaloneAccumuloClusterConfiguration) clusterConf).getAdminKeytab());
      default:
        throw new RuntimeException("Unknown cluster type");
    }
  }

  /**
   * Returns the unprivileged test user at the given offset; kerberized MINI clusters defer to the KDC, other MINI clusters synthesize
   * a name from the test class/method, and standalone clusters delegate to their configured user list.
   */
  @Override
  public ClusterUser getUser(int offset) {
    switch (type) {
      case MINI:
        if (null != krb) {
          // Defer to the TestingKdc when kerberos is on so we can get the keytab instead of a password
          return krb.getClientPrincipal(offset);
        } else {
          // Come up with a mostly unique name
          String principal = getClass().getSimpleName() + "_" + testName.getMethodName() + "_" + offset;
          // Username and password are the same
          return new ClusterUser(principal, principal);
        }
      case STANDALONE:
        return ((StandaloneAccumuloCluster) cluster).getUser(offset);
      default:
        throw new RuntimeException("Unknown cluster type");
    }
  }

  /**
   * @return the FileSystem backing the cluster under test; only valid after {@link #setUp()} has run.
   */
  public static FileSystem getFileSystem() throws IOException {
    checkState(initialized);
    return cluster.getFileSystem();
  }

  /**
   * @return the cluster configuration object; only valid after {@link #setUp()} has run.
   */
  public static AccumuloClusterConfiguration getClusterConfiguration() {
    checkState(initialized);
    return clusterConf;
  }

  /**
   * Opens a {@link Connector} to the cluster as the admin user, failing the test (and throwing) if the connection cannot be made.
   */
  public Connector getConnector() {
    try {
      String princ = getAdminPrincipal();
      AuthenticationToken token = getAdminToken();
      log.debug("Creating connector as {} with {}", princ, token);
      return cluster.getConnector(princ, token);
    } catch (Exception e) {
      log.error("Could not connect to Accumulo", e);
      fail("Could not connect to Accumulo: " + e.getMessage());
      // fail() already throws; this line only exists to satisfy the compiler's definite-return analysis.
      throw new RuntimeException("Could not connect to Accumulo", e);
    }
  }

  // TODO Really don't want this here. Will ultimately need to abstract configuration method away from MAConfig
  // and change over to something more generic
  @Override
  public void configureMiniCluster(MiniAccumuloConfigImpl cfg, Configuration hadoopCoreSite) {}

  /**
   * A test may not be capable of running against a given AccumuloCluster. Implementations can override this method to advertise that they cannot (or perhaps do
   * not) want to run the test.
   */
  public boolean canRunTest(ClusterType type) {
    return true;
  }

  /**
   * Tries to give a reasonable directory which can be used to create temporary files for the test. Makes a basic attempt to create the directory if it does not
   * already exist.
   *
   * @return A directory which can be expected to exist on the Cluster's FileSystem
   */
  public Path getUsableDir() throws IllegalArgumentException, IOException {
    return cluster.getTemporaryPath();
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.lang3; import java.io.Serializable; import java.util.Comparator; /** * <p>An immutable range of objects from a minimum to maximum point inclusive.</p> * * <p>The objects need to either be implementations of {@code Comparable} * or you need to supply a {@code Comparator}. </p> * * <p>#ThreadSafe# if the objects and comparator are thread-safe</p> * * @since 3.0 * @version $Id$ */ public final class Range<T> implements Serializable { /** * Serialization version. * @see java.io.Serializable */ private static final long serialVersionUID = 1L; /** * The ordering scheme used in this range. */ private final Comparator<T> comparator; /** * The minimum value in this range (inclusive). */ private final T minimum; /** * The maximum value in this range (inclusive). */ private final T maximum; /** * Cached output hashCode (class is immutable). */ private transient int hashCode; /** * Cached output toString (class is immutable). 
*/ private transient String toString; /** * <p>Obtains a range using the specified element as both the minimum * and maximum in this range.</p> * * <p>The range uses the natural ordering of the elements to determine where * values lie in the range.</p> * * @param <T> the type of the elements in this range * @param element the value to use for this range, not null * @return the range object, not null * @throws IllegalArgumentException if the element is null * @throws ClassCastException if the element is not {@code Comparable} */ public static <T extends Comparable<T>> Range<T> is(final T element) { return between(element, element, null); } /** * <p>Obtains a range using the specified element as both the minimum * and maximum in this range.</p> * * <p>The range uses the specified {@code Comparator} to determine where * values lie in the range.</p> * * @param <T> the type of the elements in this range * @param element the value to use for this range, must not be {@code null} * @param comparator the comparator to be used, null for natural ordering * @return the range object, not null * @throws IllegalArgumentException if the element is null * @throws ClassCastException if using natural ordering and the elements are not {@code Comparable} */ public static <T> Range<T> is(final T element, final Comparator<T> comparator) { return between(element, element, comparator); } /** * <p>Obtains a range with the specified minimum and maximum values (both inclusive).</p> * * <p>The range uses the natural ordering of the elements to determine where * values lie in the range.</p> * * <p>The arguments may be passed in the order (min,max) or (max,min). 
* The getMinimum and getMaximum methods will return the correct values.</p> * * @param <T> the type of the elements in this range * @param fromInclusive the first value that defines the edge of the range, inclusive * @param toInclusive the second value that defines the edge of the range, inclusive * @return the range object, not null * @throws IllegalArgumentException if either element is null * @throws ClassCastException if the elements are not {@code Comparable} */ public static <T extends Comparable<T>> Range<T> between(final T fromInclusive, final T toInclusive) { return between(fromInclusive, toInclusive, null); } /** * <p>Obtains a range with the specified minimum and maximum values (both inclusive).</p> * * <p>The range uses the specified {@code Comparator} to determine where * values lie in the range.</p> * * <p>The arguments may be passed in the order (min,max) or (max,min). * The getMinimum and getMaximum methods will return the correct values.</p> * * @param <T> the type of the elements in this range * @param fromInclusive the first value that defines the edge of the range, inclusive * @param toInclusive the second value that defines the edge of the range, inclusive * @param comparator the comparator to be used, null for natural ordering * @return the range object, not null * @throws IllegalArgumentException if either element is null * @throws ClassCastException if using natural ordering and the elements are not {@code Comparable} */ public static <T> Range<T> between(final T fromInclusive, final T toInclusive, final Comparator<T> comparator) { return new Range<T>(fromInclusive, toInclusive, comparator); } /** * Creates an instance. 
* * @param element1 the first element, not null * @param element2 the second element, not null * @param comp the comparator to be used, null for natural ordering */ @SuppressWarnings("unchecked") private Range(final T element1, final T element2, final Comparator<T> comp) { if (element1 == null || element2 == null) { throw new IllegalArgumentException("Elements in a range must not be null: element1=" + element1 + ", element2=" + element2); } if (comp == null) { this.comparator = ComparableComparator.INSTANCE; } else { this.comparator = comp; } if (this.comparator.compare(element1, element2) < 1) { this.minimum = element1; this.maximum = element2; } else { this.minimum = element2; this.maximum = element1; } } // Accessors //-------------------------------------------------------------------- /** * <p>Gets the minimum value in this range.</p> * * @return the minimum value in this range, not null */ public T getMinimum() { return minimum; } /** * <p>Gets the maximum value in this range.</p> * * @return the maximum value in this range, not null */ public T getMaximum() { return maximum; } /** * <p>Gets the comparator being used to determine if objects are within the range.</p> * * <p>Natural ordering uses an internal comparator implementation, thus this * method never returns null. 
See {@link #isNaturalOrdering()}.</p> * * @return the comparator being used, not null */ public Comparator<T> getComparator() { return comparator; } /** * <p>Whether or not the Range is using the natural ordering of the elements.</p> * * <p>Natural ordering uses an internal comparator implementation, thus this * method is the only way to check if a null comparator was specified.</p> * * @return true if using natural ordering */ public boolean isNaturalOrdering() { return comparator == ComparableComparator.INSTANCE; } // Element tests //-------------------------------------------------------------------- /** * <p>Checks whether the specified element occurs within this range.</p> * * @param element the element to check for, null returns false * @return true if the specified element occurs within this range */ public boolean contains(final T element) { if (element == null) { return false; } return comparator.compare(element, minimum) > -1 && comparator.compare(element, maximum) < 1; } /** * <p>Checks whether this range is after the specified element.</p> * * @param element the element to check for, null returns false * @return true if this range is entirely after the specified element */ public boolean isAfter(final T element) { if (element == null) { return false; } return comparator.compare(element, minimum) < 0; } /** * <p>Checks whether this range starts with the specified element.</p> * * @param element the element to check for, null returns false * @return true if the specified element occurs within this range */ public boolean isStartedBy(final T element) { if (element == null) { return false; } return comparator.compare(element, minimum) == 0; } /** * <p>Checks whether this range starts with the specified element.</p> * * @param element the element to check for, null returns false * @return true if the specified element occurs within this range */ public boolean isEndedBy(final T element) { if (element == null) { return false; } return 
comparator.compare(element, maximum) == 0; } /** * <p>Checks whether this range is before the specified element.</p> * * @param element the element to check for, null returns false * @return true if this range is entirely before the specified element */ public boolean isBefore(final T element) { if (element == null) { return false; } return comparator.compare(element, maximum) > 0; } /** * <p>Checks where the specified element occurs relative to this range.</p> * * <p>The API is reminiscent of the Comparable interface returning {@code -1} if * the element is before the range, {@code 0} if contained within the range and * {@code 1} if the element is after the range. </p> * * @param element the element to check for, not null * @return -1, 0 or +1 depending on the element's location relative to the range */ public int elementCompareTo(final T element) { if (element == null) { // Comparable API says throw NPE on null throw new NullPointerException("Element is null"); } if (isAfter(element)) { return -1; } else if (isBefore(element)) { return 1; } else { return 0; } } // Range tests //-------------------------------------------------------------------- /** * <p>Checks whether this range contains all the elements of the specified range.</p> * * <p>This method may fail if the ranges have two different comparators or element types.</p> * * @param otherRange the range to check, null returns false * @return true if this range contains the specified range * @throws RuntimeException if ranges cannot be compared */ public boolean containsRange(final Range<T> otherRange) { if (otherRange == null) { return false; } return contains(otherRange.minimum) && contains(otherRange.maximum); } /** * <p>Checks whether this range is completely after the specified range.</p> * * <p>This method may fail if the ranges have two different comparators or element types.</p> * * @param otherRange the range to check, null returns false * @return true if this range is completely after the specified 
range * @throws RuntimeException if ranges cannot be compared */ public boolean isAfterRange(final Range<T> otherRange) { if (otherRange == null) { return false; } return isAfter(otherRange.maximum); } /** * <p>Checks whether this range is overlapped by the specified range.</p> * * <p>Two ranges overlap if there is at least one element in common.</p> * * <p>This method may fail if the ranges have two different comparators or element types.</p> * * @param otherRange the range to test, null returns false * @return true if the specified range overlaps with this * range; otherwise, {@code false} * @throws RuntimeException if ranges cannot be compared */ public boolean isOverlappedBy(final Range<T> otherRange) { if (otherRange == null) { return false; } return otherRange.contains(minimum) || otherRange.contains(maximum) || contains(otherRange.minimum); } /** * <p>Checks whether this range is completely before the specified range.</p> * * <p>This method may fail if the ranges have two different comparators or element types.</p> * * @param otherRange the range to check, null returns false * @return true if this range is completely before the specified range * @throws RuntimeException if ranges cannot be compared */ public boolean isBeforeRange(final Range<T> otherRange) { if (otherRange == null) { return false; } return isBefore(otherRange.minimum); } /** * Calculate the intersection of {@code this} and an overlapping Range. 
* @param other overlapping Range * @return range representing the intersection of {@code this} and {@code other} ({@code this} if equal) * @throws IllegalArgumentException if {@code other} does not overlap {@code this} * @since 3.0.1 */ public Range<T> intersectionWith(final Range<T> other) { if (!this.isOverlappedBy(other)) { throw new IllegalArgumentException(String.format( "Cannot calculate intersection with non-overlapping range %s", other)); } if (this.equals(other)) { return this; } final T min = getComparator().compare(minimum, other.minimum) < 0 ? other.minimum : minimum; final T max = getComparator().compare(maximum, other.maximum) < 0 ? maximum : other.maximum; return between(min, max, getComparator()); } // Basics //-------------------------------------------------------------------- /** * <p>Compares this range to another object to test if they are equal.</p>. * * <p>To be equal, the minimum and maximum values must be equal, which * ignores any differences in the comparator.</p> * * @param obj the reference object with which to compare * @return true if this object is equal */ @Override public boolean equals(final Object obj) { if (obj == this) { return true; } else if (obj == null || obj.getClass() != getClass()) { return false; } else { @SuppressWarnings("unchecked") // OK because we checked the class above final Range<T> range = (Range<T>) obj; return minimum.equals(range.minimum) && maximum.equals(range.maximum); } } /** * <p>Gets a suitable hash code for the range.</p> * * @return a hash code value for this object */ @Override public int hashCode() { int result = hashCode; if (hashCode == 0) { result = 17; result = 37 * result + getClass().hashCode(); result = 37 * result + minimum.hashCode(); result = 37 * result + maximum.hashCode(); hashCode = result; } return result; } /** * <p>Gets the range as a {@code String}.</p> * * <p>The format of the String is '[<i>min</i>..<i>max</i>]'.</p> * * @return the {@code String} representation of this range */ 
@Override public String toString() { String result = toString; if (result == null) { final StringBuilder buf = new StringBuilder(32); buf.append('['); buf.append(minimum); buf.append(".."); buf.append(maximum); buf.append(']'); result = buf.toString(); toString = result; } return result; } /** * <p>Formats the receiver using the given format.</p> * * <p>This uses {@link java.util.Formattable} to perform the formatting. Three variables may * be used to embed the minimum, maximum and comparator. * Use {@code %1$s} for the minimum element, {@code %2$s} for the maximum element * and {@code %3$s} for the comparator. * The default format used by {@code toString()} is {@code [%1$s..%2$s]}.</p> * * @param format the format string, optionally containing {@code %1$s}, {@code %2$s} and {@code %3$s}, not null * @return the formatted string, not null */ public String toString(final String format) { return String.format(format, minimum, maximum, comparator); } //----------------------------------------------------------------------- @SuppressWarnings({"rawtypes", "unchecked"}) private enum ComparableComparator implements Comparator { INSTANCE; /** * Comparable based compare implementation. * * @param obj1 left hand side of comparison * @param obj2 right hand side of comparison * @return negative, 0, positive comparison value */ @Override public int compare(final Object obj1, final Object obj2) { return ((Comparable) obj1).compareTo(obj2); } } }
/* The contents of this file are subject to the license and copyright terms * detailed in the license directory at the root of the source tree (also * available online at http://fedora-commons.org/license/). */ package org.fcrepo.server.storage.lowlevel; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Enumeration; import java.util.Map; import java.util.NoSuchElementException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.fcrepo.server.errors.LowlevelStorageException; import org.fcrepo.server.errors.LowlevelStorageInconsistencyException; import org.fcrepo.server.errors.ObjectNotInLowlevelStorageException; import org.fcrepo.server.storage.ConnectionPool; import org.fcrepo.server.utilities.SQLUtility; /** * @author Bill Niebel */ public class DBPathRegistry extends PathRegistry { private static final Logger logger = LoggerFactory.getLogger(DBPathRegistry.class); private ConnectionPool connectionPool = null; private final boolean backslashIsEscape; public DBPathRegistry(Map<String, ?> configuration) { super(configuration); connectionPool = (ConnectionPool) configuration.get("connectionPool"); backslashIsEscape = Boolean .valueOf((String) configuration .get("backslashIsEscape")).booleanValue(); } @Override public String get(String pid) throws ObjectNotInLowlevelStorageException, LowlevelStorageInconsistencyException, LowlevelStorageException { String path = null; Connection connection = null; PreparedStatement statement = null; ResultSet rs = null; try { int paths = 0; connection = connectionPool.getReadOnlyConnection(); String query = "SELECT path FROM " + 
getRegistryName() + " WHERE token=?"; statement = connection.prepareStatement(query); statement.setString(1,pid); rs = statement.executeQuery(); for (; rs.next(); paths++) { path = rs.getString(1); } if (paths == 0) { throw new ObjectNotInLowlevelStorageException("no path in db registry for [" + pid + "]"); } if (paths > 1) { throw new LowlevelStorageInconsistencyException("[" + pid + "] in db registry -multiple- times"); } if (path == null || path.length() == 0) { throw new LowlevelStorageInconsistencyException("[" + pid + "] has -null- path in db registry"); } } catch (SQLException e1) { throw new LowlevelStorageException(true, "sql failure (get)", e1); } finally { try { if (rs != null) { rs.close(); } if (statement != null) { statement.close(); } if (connection != null) { connectionPool.free(connection); } } catch (Exception e2) { // purposely general to include uninstantiated statement, connection throw new LowlevelStorageException(true, "sql failure closing statement, connection, pool (get)", e2); } finally { rs = null; statement = null; } } return path; } private void ensureSingleUpdate(Statement statement) throws ObjectNotInLowlevelStorageException, LowlevelStorageInconsistencyException, LowlevelStorageException { try{ int updateCount = statement.getUpdateCount(); if (updateCount == 0) { throw new ObjectNotInLowlevelStorageException("-no- rows updated in db registry"); } if (updateCount > 1) { throw new LowlevelStorageInconsistencyException("-multiple- rows updated in db registry"); } } catch (SQLException e1) { throw new LowlevelStorageException(true, "sql failurex (exec)", e1); } } @Deprecated public void executeSql(String sql) throws ObjectNotInLowlevelStorageException, LowlevelStorageInconsistencyException, LowlevelStorageException { Connection connection = null; Statement statement = null; try { connection = connectionPool.getReadWriteConnection(); statement = connection.createStatement(); if (statement.execute(sql)) { throw new 
LowlevelStorageException(true, "sql returned query results for a nonquery"); } ensureSingleUpdate(statement); } catch (SQLException e1) { throw new LowlevelStorageException(true, "sql failurex (exec)", e1); } finally { try { if (statement != null) { statement.close(); } if (connection != null) { connectionPool.free(connection); } } catch (Exception e2) { // purposely general to include uninstantiated statement, connection throw new LowlevelStorageException(true, "sql failure closing statement, connection, pool (exec)", e2); } finally { statement = null; } } } private void executeUpdate(String sql, String pid) throws ObjectNotInLowlevelStorageException, LowlevelStorageInconsistencyException, LowlevelStorageException { Connection connection = null; PreparedStatement statement = null; try { connection = connectionPool.getReadWriteConnection(); statement = connection.prepareStatement(sql); if (pid != null){ statement.setString(1,pid); } if (statement.execute()) { throw new LowlevelStorageException(true, "sql returned query results for a nonquery"); } ensureSingleUpdate(statement); } catch (SQLException e1) { throw new LowlevelStorageException(true, "sql failurex (exec)", e1); } finally { try { if (statement != null) { statement.close(); } if (connection != null) { connectionPool.free(connection); } } catch (Exception e2) { // purposely general to include uninstantiated statement, connection throw new LowlevelStorageException(true, "sql failure closing statement, connection, pool (exec)", e2); } finally { statement = null; } } } @Override public void put(String pid, String path) throws ObjectNotInLowlevelStorageException, LowlevelStorageInconsistencyException, LowlevelStorageException { if (backslashIsEscape) { StringBuffer buffer = new StringBuffer(); String backslash = "\\"; //Java quotes will interpolate this as 1 backslash String escapedBackslash = "\\\\"; //Java quotes will interpolate these as 2 backslashes /* * Escape each backspace so that DB will correctly 
record a single * backspace, instead of incorrectly escaping the following * character. */ for (int i = 0; i < path.length(); i++) { String s = path.substring(i, i + 1); buffer.append(s.equals(backslash) ? escapedBackslash : s); } path = buffer.toString(); } Connection conn = null; try { conn = connectionPool.getReadWriteConnection(); SQLUtility.replaceInto(conn, getRegistryName(), new String[] { "token", "path"}, new String[] {pid, path}, "token"); } catch (SQLException e1) { throw new ObjectNotInLowlevelStorageException("put into db registry failed for [" + pid + "]", e1); } finally { if (conn != null) { connectionPool.free(conn); } } } @Override public void remove(String pid) throws ObjectNotInLowlevelStorageException, LowlevelStorageInconsistencyException, LowlevelStorageException { try { String query = "DELETE FROM " + getRegistryName() + " WHERE " + getRegistryName() + ".token=?"; executeUpdate(query, pid); } catch (ObjectNotInLowlevelStorageException e1) { throw new ObjectNotInLowlevelStorageException("[" + pid + "] not in db registry to delete", e1); } catch (LowlevelStorageInconsistencyException e2) { throw new LowlevelStorageInconsistencyException("[" + pid + "] deleted from db registry -multiple- times", e2); } } @Override public void rebuild() throws LowlevelStorageException { int report = FULL_REPORT; try { executeUpdate("DELETE FROM " + getRegistryName(), null); } catch (ObjectNotInLowlevelStorageException e1) { } catch (LowlevelStorageInconsistencyException e2) { } try { logger.info("begin rebuilding registry from files"); traverseFiles(storeBases, REBUILD, false, report); // continues, ignoring bad files logger.info("end rebuilding registry from files (ending normally)"); } catch (Exception e) { if (report != NO_REPORT) { logger.error("ending rebuild unsuccessfully", e); } throw new LowlevelStorageException(true, "ending rebuild unsuccessfully", e); //<<==== } } @Override public void auditFiles() throws LowlevelStorageException { logger.info("begin 
audit: files-against-registry"); traverseFiles(storeBases, AUDIT_FILES, false, FULL_REPORT); logger.info("end audit: files-against-registry (ending normally)"); } @Override protected Enumeration<String> keys() throws LowlevelStorageException, LowlevelStorageInconsistencyException { File tempFile = null; PrintWriter writer = null; ResultSet rs = null; Connection connection = null; Statement statement = null; try { tempFile = File.createTempFile("fedora-keys", ".tmp"); writer = new PrintWriter(new OutputStreamWriter( new FileOutputStream(tempFile))); connection = connectionPool.getReadOnlyConnection(); statement = connection.createStatement(); rs = statement.executeQuery("SELECT token FROM " + getRegistryName()); while (rs.next()) { String key = rs.getString(1); if (null == key || 0 == key.length()) { throw new LowlevelStorageInconsistencyException( "Null token found in " + getRegistryName()); } writer.println(key); } writer.close(); return new KeyEnumeration(tempFile); } catch (Exception e) { throw new LowlevelStorageException(true, "Unexpected error", e); } finally { try { if (rs != null) { rs.close(); } if (statement != null) { statement.close(); } if (connection != null) { connectionPool.free(connection); } } catch (Exception e) { throw new LowlevelStorageException(true, "Unexpected error", e); } finally { writer.close(); rs = null; statement = null; } } } /** * Iterates over each non-empty line in a temporary file. * When iteration is complete, or garbage collection occurs, the * file will be deleted. 
*/ private class KeyEnumeration implements Enumeration<String> { private final File file; private final BufferedReader reader; private boolean closed; private String nextKey; public KeyEnumeration(File file) throws FileNotFoundException { this.file = file; this.reader = new BufferedReader(new InputStreamReader(new FileInputStream(file))); setNextKey(); } private void setNextKey() { try { nextKey = reader.readLine(); if (nextKey == null) { close(); } else if (nextKey.length() == 0) { setNextKey(); } } catch (IOException e) { throw new Error(e); } } public boolean hasMoreElements() { return nextKey != null; } public String nextElement() { if (nextKey != null) { try { return nextKey; } finally { setNextKey(); } } else { throw new NoSuchElementException(); } } @Override protected void finalize() { if (!closed) { close(); } } private void close() { try { reader.close(); file.delete(); } catch (IOException e) { throw new Error(e); } finally { closed = true; } } } }
/* * Copyright (c) 2018. Open Text Corporation. All Rights Reserved. */ package com.emc.documentum.rest.client.sample.client.impl.jaxb; import java.io.OutputStream; import java.util.List; import javax.annotation.concurrent.NotThreadSafe; import org.springframework.http.HttpMethod; import org.springframework.http.MediaType; import org.springframework.http.converter.FormHttpMessageConverter; import org.springframework.http.converter.HttpMessageConverter; import org.springframework.http.converter.xml.Jaxb2RootElementHttpMessageConverter; import org.springframework.web.client.RestTemplate; import com.emc.documentum.rest.client.sample.client.DCTMRestClient; import com.emc.documentum.rest.client.sample.client.impl.AbstractRestTemplateClient; import com.emc.documentum.rest.client.sample.client.util.Collections; import com.emc.documentum.rest.client.sample.client.util.Headers; import com.emc.documentum.rest.client.sample.client.util.SupportedMediaTypes; import com.emc.documentum.rest.client.sample.client.util.UriHelper; import com.emc.documentum.rest.client.sample.model.AuditPolicy; import com.emc.documentum.rest.client.sample.model.AuditTrail; import com.emc.documentum.rest.client.sample.model.AvailableAuditEvents; import com.emc.documentum.rest.client.sample.model.Comment; import com.emc.documentum.rest.client.sample.model.Feed; import com.emc.documentum.rest.client.sample.model.FolderLink; import com.emc.documentum.rest.client.sample.model.HomeDocument; import com.emc.documentum.rest.client.sample.model.Lifecycle; import com.emc.documentum.rest.client.sample.model.LinkRelation; import com.emc.documentum.rest.client.sample.model.Linkable; import com.emc.documentum.rest.client.sample.model.ObjectAspects; import com.emc.documentum.rest.client.sample.model.ObjectLifecycle; import com.emc.documentum.rest.client.sample.model.Permission; import com.emc.documentum.rest.client.sample.model.PermissionSet; import com.emc.documentum.rest.client.sample.model.Preference; import 
com.emc.documentum.rest.client.sample.model.Repository; import com.emc.documentum.rest.client.sample.model.RestObject; import com.emc.documentum.rest.client.sample.model.RestType; import com.emc.documentum.rest.client.sample.model.SavedSearch; import com.emc.documentum.rest.client.sample.model.Search; import com.emc.documentum.rest.client.sample.model.SearchFeed; import com.emc.documentum.rest.client.sample.model.SearchTemplate; import com.emc.documentum.rest.client.sample.model.ValueAssistant; import com.emc.documentum.rest.client.sample.model.ValueAssistantRequest; import com.emc.documentum.rest.client.sample.model.VirtualDocumentNode; import com.emc.documentum.rest.client.sample.model.batch.Batch; import com.emc.documentum.rest.client.sample.model.batch.Capabilities; import com.emc.documentum.rest.client.sample.model.plain.PlainRestObject; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbAcl; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbAspectType; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbAuditEvent; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbAuditPolicy; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbAuditTrail; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbAvailableAuditEvents; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbBatch; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbBatchCapabilities; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbCabinet; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbComment; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbContent; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbDocument; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbFeed; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbFolder; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbFolderLink; import 
com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbFormat; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbGroup; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbHomeDocument; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbLifecycle; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbNetworkLocation; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbObjectAspects; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbObjectLifecycle; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbPermission; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbPermissionSet; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbPreference; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbProductInfo; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbRelation; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbRelationType; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbRepository; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbSavedSearch; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbSearchFeed; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbSearchTemplate; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbSubscribers; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbSysObject; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbType; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbUser; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbValueAssistance; import com.emc.documentum.rest.client.sample.model.xml.jaxb.JaxbValueAssistantRequest; import static com.emc.documentum.rest.client.sample.client.util.Headers.ACCEPT_ATOM_HEADERS; import static com.emc.documentum.rest.client.sample.client.util.SupportedMediaTypes.APPLICATION_VND_DCTM_XML_VALUE; import static 
com.emc.documentum.rest.client.sample.model.LinkRelation.ABOUT; import static com.emc.documentum.rest.client.sample.model.LinkRelation.ACLS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.ASPECT_TYPES; import static com.emc.documentum.rest.client.sample.model.LinkRelation.ASSIS_VALUES; import static com.emc.documentum.rest.client.sample.model.LinkRelation.ASSOCIATIONS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.AUDIT_POLICIES; import static com.emc.documentum.rest.client.sample.model.LinkRelation.AVAILABLE_AUDIT_EVENTS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.BATCH_CAPABILITIES; import static com.emc.documentum.rest.client.sample.model.LinkRelation.CABINETS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.CANCEL_CHECKOUT; import static com.emc.documentum.rest.client.sample.model.LinkRelation.CANCEL_DEMOTION; import static com.emc.documentum.rest.client.sample.model.LinkRelation.CANCEL_PROMOTION; import static com.emc.documentum.rest.client.sample.model.LinkRelation.CANCEL_RESUMPTION; import static com.emc.documentum.rest.client.sample.model.LinkRelation.CANCEL_SUSPENSION; import static com.emc.documentum.rest.client.sample.model.LinkRelation.CHECKIN_BRANCH_VERSION; import static com.emc.documentum.rest.client.sample.model.LinkRelation.CHECKIN_NEXT_MAJOR; import static com.emc.documentum.rest.client.sample.model.LinkRelation.CHECKIN_NEXT_MINOR; import static com.emc.documentum.rest.client.sample.model.LinkRelation.CHECKOUT; import static com.emc.documentum.rest.client.sample.model.LinkRelation.COMMENTS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.CONTENTS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.CURRENT_USER_PREFERENCES; import static com.emc.documentum.rest.client.sample.model.LinkRelation.DELETE; import static com.emc.documentum.rest.client.sample.model.LinkRelation.DEMATERIALIZE; import static 
com.emc.documentum.rest.client.sample.model.LinkRelation.DEMOTION; import static com.emc.documentum.rest.client.sample.model.LinkRelation.DOCUMENTS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.EDIT; import static com.emc.documentum.rest.client.sample.model.LinkRelation.FOLDERS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.FORMATS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.GROUPS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.LIFECYCLES; import static com.emc.documentum.rest.client.sample.model.LinkRelation.MATERIALIZE; import static com.emc.documentum.rest.client.sample.model.LinkRelation.NETWORK_LOCATIONS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.OBJECTS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.OBJECT_ASPECTS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.OBJECT_LIFECYCLE; import static com.emc.documentum.rest.client.sample.model.LinkRelation.PRIMARY_CONTENT; import static com.emc.documentum.rest.client.sample.model.LinkRelation.PROMOTION; import static com.emc.documentum.rest.client.sample.model.LinkRelation.RECENT_TRAILS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.REGISTERED_AUDIT_EVENTS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.RELATIONS; import static com.emc.documentum.rest.client.sample.model.LinkRelation.RELATION_TYPES; import static com.emc.documentum.rest.client.sample.model.LinkRelation.REPLIES; import static com.emc.documentum.rest.client.sample.model.LinkRelation.REPOSITORIES; import static com.emc.documentum.rest.client.sample.model.LinkRelation.RESUMPTION; import static com.emc.documentum.rest.client.sample.model.LinkRelation.SAVED_SEARCHES; import static com.emc.documentum.rest.client.sample.model.LinkRelation.SAVED_SEARCH_SAVED_RESULTS; import static 
com.emc.documentum.rest.client.sample.model.LinkRelation.SEARCH;
import static com.emc.documentum.rest.client.sample.model.LinkRelation.SEARCH_EXECUTION;
import static com.emc.documentum.rest.client.sample.model.LinkRelation.SEARCH_TEMPLATES;
import static com.emc.documentum.rest.client.sample.model.LinkRelation.SELF;
import static com.emc.documentum.rest.client.sample.model.LinkRelation.SHARED_PARENT;
import static com.emc.documentum.rest.client.sample.model.LinkRelation.SUBSCRIBE;
import static com.emc.documentum.rest.client.sample.model.LinkRelation.SUBSCRIPTIONS;
import static com.emc.documentum.rest.client.sample.model.LinkRelation.SUSPENSION;
import static com.emc.documentum.rest.client.sample.model.LinkRelation.TYPES;
import static com.emc.documentum.rest.client.sample.model.LinkRelation.UNSUBSCRIBE;
import static com.emc.documentum.rest.client.sample.model.LinkRelation.USERS;
import static com.emc.documentum.rest.client.sample.model.LinkRelation.VERSIONS;
import static com.emc.documentum.rest.client.sample.model.LinkRelation.VIRTUAL_DOCUMENT_NODES;
import static org.springframework.http.HttpMethod.PUT;
import static org.springframework.http.MediaType.APPLICATION_ATOM_XML_VALUE;

/**
 * {@link DCTMRestClient} implementation that talks to Documentum REST Services
 * using JAXB XML (de)serialization ({@code Jaxb*} model classes).
 *
 * <p>Most operations are thin delegations to the HTTP helpers inherited from
 * {@link AbstractRestTemplateClient} ({@code get}/{@code post}/{@code put}/
 * {@code delete}/{@code feed}), resolving target URIs through link relations
 * on previously fetched resources. Home document, product info, and the
 * repositories feed are cached in inherited fields after first retrieval.</p>
 *
 * <p>Not safe for concurrent use; see {@code @NotThreadSafe}.</p>
 */
@NotThreadSafe
public class DCTMJaxbClient extends AbstractRestTemplateClient implements DCTMRestClient, Cloneable {

    public DCTMJaxbClient(String contextRoot, String repositoryName, String username, String password,
            boolean useFormatExtension) {
        super(contextRoot, repositoryName, username, password, useFormatExtension);
    }

    public DCTMJaxbClient(String contextRoot, String repositoryName, String username, String password,
            boolean useFormatExtension, boolean ignoreSslWarning) {
        super(contextRoot, repositoryName, username, password, useFormatExtension, ignoreSslWarning);
    }

    /**
     * Creates a copy of this client with the same connection settings;
     * the copy is initialized by {@code AbstractRestTemplateClient.clone(...)}.
     */
    @Override
    public DCTMJaxbClient clone() {
        return clone(new DCTMJaxbClient(contextRoot, repositoryName, username, password, useFormatExtension,
                ignoreSslWarning));
    }

    // ----- entry points: home document, product info, repositories -----

    /** Fetches the service home document, caching it after the first call. */
    @Override
    public HomeDocument getHomeDocument() {
        if(homeDocument == null) {
            homeDocument = get(getHomeDocumentUri(), Headers.ACCEPT_XML_HOME_DOCUMENT, JaxbHomeDocument.class);
        }
        return homeDocument;
    }

    /** Fetches product info via the home document's ABOUT link, caching it after the first call. */
    @Override
    public RestObject getProductInfo() {
        if(productInfo == null) {
            productInfo = get(getHomeDocument().getHref(ABOUT), false, JaxbProductInfo.class);
        }
        return productInfo;
    }

    /** Fetches the repositories feed, caching it after the first call. */
    @Override
    public Feed<Repository> getRepositories() {
        if(repositories == null) {
            repositories = feed(getHomeDocument(), REPOSITORIES);
        }
        return repositories;
    }

    @Override
    public Repository getRepository() {
        return getRepository(JaxbRepository.class);
    }

    // ----- query and search -----

    @Override
    public Feed<RestObject> dql(String dql, String... params) {
        return feed(SELF, UriHelper.append(params, "dql", dql));
    }

    @Override
    public SearchFeed<RestObject> search(String search, String... params) {
        return get(getRepository().getHref(SEARCH), true, JaxbSearchFeed.class, UriHelper.append(params, "q", search));
    }

    @Override
    public SearchFeed<RestObject> search(Search search, String... params) {
        return post(getRepository().getHref(SEARCH), search,
                new Headers().accept(MediaType.APPLICATION_ATOM_XML_VALUE)
                        .contentType(SupportedMediaTypes.APPLICATION_VND_DCTM_XML_VALUE).toHttpHeaders(),
                JaxbSearchFeed.class, params);
    }

    // ----- cabinets, folders, and objects -----

    @Override
    public RestObject createCabinet(RestObject cabinetToCreate) {
        return post(getRepository().getHref(CABINETS), new JaxbCabinet(cabinetToCreate), JaxbCabinet.class);
    }

    @Override
    public Feed<RestObject> getCabinets(String... params) {
        return feed(CABINETS, params);
    }

    @Override
    public RestObject getCabinet(String cabinet, String... params) {
        return getCabinet(cabinet, JaxbCabinet.class, params);
    }

    @Override
    public Feed<RestObject> getFolders(Linkable parent, String... params) {
        return feed(parent, FOLDERS, params);
    }

    @Override
    public Feed<RestObject> getObjects(Linkable parent, String... params) {
        return feed(parent, OBJECTS, params);
    }

    @Override
    public Feed<RestObject> getDocuments(Linkable parent, String... params) {
        return feed(parent, DOCUMENTS, params);
    }

    @Override
    public RestObject createFolder(Linkable parent, RestObject newFolder, String... params) {
        return post(parent.getHref(FOLDERS), new JaxbFolder(newFolder), JaxbFolder.class, params);
    }

    @Override
    public RestObject getFolder(String folderUri, String... params) {
        return get(folderUri, false, JaxbFolder.class, params);
    }

    @Override
    public RestObject createObject(Linkable parent, LinkRelation rel, RestObject objectToCreate, Object content,
            String contentMediaType, String... params) {
        return post(parent.getHref(rel), new JaxbSysObject(objectToCreate), content, contentMediaType,
                JaxbSysObject.class, params);
    }

    @Override
    public RestObject createObject(Linkable parent, LinkRelation rel, RestObject objectToCreate,
            List<Object> contents, List<String> contentMediaTypes, String... params) {
        return post(parent.getHref(rel), new JaxbSysObject(objectToCreate), contents, contentMediaTypes,
                JaxbSysObject.class, params);
    }

    @Override
    public RestObject getObject(String objectUri, String... params) {
        return get(objectUri, false, JaxbSysObject.class, params);
    }

    @Override
    public RestObject createDocument(Linkable parent, RestObject objectToCreate, Object content,
            String contentMediaType, String... params) {
        return post(parent.getHref(DOCUMENTS), new JaxbDocument(objectToCreate), content, contentMediaType,
                JaxbDocument.class, params);
    }

    @Override
    public RestObject createDocument(Linkable parent, RestObject objectToCreate, List<Object> contents,
            List<String> contentMediaTypes, String... params) {
        return post(parent.getHref(DOCUMENTS), new JaxbDocument(objectToCreate), contents, contentMediaTypes,
                JaxbDocument.class, params);
    }

    @Override
    public RestObject getDocument(String documentUri, String... params) {
        return get(documentUri, false, JaxbDocument.class, params);
    }

    @Override
    public RestObject update(RestObject oldObject, RestObject newObject, String... params) {
        return update(oldObject, EDIT, newObject, HttpMethod.POST, params);
    }

    // ----- content -----

    @Override
    public RestObject createContent(RestObject object, Object content, String mediaType, String... params) {
        return post(object.getHref(CONTENTS), content, mediaType, JaxbContent.class, params);
    }

    @Override
    public RestObject getPrimaryContent(RestObject object, String... params) {
        return getContent(object.getHref(PRIMARY_CONTENT), params);
    }

    @Override
    public RestObject getContent(String contentUri, String... params) {
        return get(contentUri, false, JaxbContent.class, params);
    }

    @Override
    public Feed<RestObject> getContents(RestObject object, String... params) {
        return feed(object, CONTENTS, params);
    }

    // ----- versioning: checkout / checkin -----

    @Override
    public RestObject checkout(RestObject object, String... params) {
        return put(object.getHref(CHECKOUT), JaxbSysObject.class, params);
    }

    @Override
    public void cancelCheckout(RestObject object) {
        delete(object.getHref(CANCEL_CHECKOUT));
    }

    @Override
    public RestObject checkinNextMajor(RestObject oldObject, RestObject newObject, String... params) {
        return post(oldObject.getHref(CHECKIN_NEXT_MAJOR), new JaxbSysObject(newObject), JaxbSysObject.class, params);
    }

    @Override
    public RestObject checkinNextMajor(RestObject oldObject, RestObject newObject, Object content,
            String contentMediaType, String... params) {
        // newObject may be null for a content-only checkin
        return post(oldObject.getHref(CHECKIN_NEXT_MAJOR), newObject==null?null:new JaxbSysObject(newObject),
                content, contentMediaType, JaxbSysObject.class, params);
    }

    @Override
    public RestObject checkinNextMajor(RestObject oldObject, RestObject newObject, List<Object> contents,
            List<String> contentMediaTypes, String... params) {
        return post(oldObject.getHref(CHECKIN_NEXT_MAJOR), newObject==null?null:new JaxbSysObject(newObject),
                contents, contentMediaTypes, JaxbSysObject.class, params);
    }

    @Override
    public RestObject checkinNextMinor(RestObject oldObject, RestObject newObject, String... params) {
        return post(oldObject.getHref(CHECKIN_NEXT_MINOR), new JaxbSysObject(newObject), JaxbSysObject.class, params);
    }

    @Override
    public RestObject checkinNextMinor(RestObject oldObject, RestObject newObject, Object content,
            String contentMediaType, String... params) {
        return post(oldObject.getHref(CHECKIN_NEXT_MINOR), newObject==null?null:new JaxbSysObject(newObject),
                content, contentMediaType, JaxbSysObject.class, params);
    }

    @Override
    public RestObject checkinNextMinor(RestObject oldObject, RestObject newObject, List<Object> contents,
            List<String> contentMediaTypes, String... params) {
        return post(oldObject.getHref(CHECKIN_NEXT_MINOR), newObject==null?null:new JaxbSysObject(newObject),
                contents, contentMediaTypes, JaxbSysObject.class, params);
    }

    @Override
    public RestObject checkinBranch(RestObject oldObject, RestObject newObject, String... params) {
        return post(oldObject.getHref(CHECKIN_BRANCH_VERSION), new JaxbSysObject(newObject), JaxbSysObject.class,
                params);
    }

    @Override
    public RestObject checkinBranch(RestObject oldObject, RestObject newObject, Object content,
            String contentMediaType, String... params) {
        return post(oldObject.getHref(CHECKIN_BRANCH_VERSION), newObject==null?null:new JaxbSysObject(newObject),
                content, contentMediaType, JaxbSysObject.class, params);
    }

    @Override
    public RestObject checkinBranch(RestObject oldObject, RestObject newObject, List<Object> contents,
            List<String> contentMediaTypes, String... params) {
        return post(oldObject.getHref(CHECKIN_BRANCH_VERSION), newObject==null?null:new JaxbSysObject(newObject),
                contents, contentMediaTypes, JaxbSysObject.class, params);
    }

    @Override
    public Feed<RestObject> getVersions(RestObject object, String... params) {
        return feed(object, VERSIONS, params);
    }

    // ----- lightweight objects -----

    @Override
    public RestObject materialize(RestObject oldObject) {
        return put(oldObject.getHref(MATERIALIZE), JaxbSysObject.class);
    }

    @Override
    public void dematerialize(RestObject oldObject) {
        delete(oldObject.getHref(DEMATERIALIZE));
    }

    /**
     * Reparents a lightweight object by posting the new parent's SELF href to the
     * object's SHARED_PARENT link. Wraps any failure in IllegalArgumentException,
     * preserving the original cause.
     */
    @Override
    public RestObject reparent(RestObject oldObject, RestObject newParent) {
        try {
            RestObject newRestObject = newRestObject(oldObject, new PlainRestObject(newParent.getHref(SELF)));
            return post(oldObject.getHref(SHARED_PARENT), newRestObject, newRestObject.getClass());
        } catch (Exception e) {
            // preserve the cause instead of discarding it (was: new IllegalArgumentException(name) only)
            throw new IllegalArgumentException(getModelClass(oldObject).getName(), e);
        }
    }

    // ----- types and aspects -----

    @Override
    public RestType getType(String name, String... params) {
        return get(getRepository().getHref(TYPES)+"/"+name, false, JaxbType.class, params);
    }

    @Override
    public Feed<RestType> getTypes(String... params) {
        return feed(TYPES, params);
    }

    @Override
    public Feed<RestObject> getAspectTypes(String... params) {
        return feed(ASPECT_TYPES, params);
    }

    @Override
    public RestObject getAspectType(String aspectType, String... params) {
        return get(getRepository().getHref(ASPECT_TYPES)+"/"+aspectType, false, JaxbAspectType.class, params);
    }

    @Override
    public ValueAssistant getValueAssistant(RestType type, ValueAssistantRequest request, String... params) {
        return post(type.getHref(ASSIS_VALUES), new JaxbValueAssistantRequest(request), JaxbValueAssistance.class,
                params);
    }

    @Override
    public ObjectAspects attach(RestObject object, String... aspects) {
        return post(object.getHref(OBJECT_ASPECTS), new JaxbObjectAspects(aspects), JaxbObjectAspects.class);
    }

    @Override
    public void detach(ObjectAspects objectAspects, String aspect) {
        delete(objectAspects.getHref(DELETE, aspect));
    }

    @Override
    public ObjectAspects getObjectAspects(RestObject object, String... params) {
        return get(object.getHref(OBJECT_ASPECTS), JaxbObjectAspects.class, params);
    }

    // ----- users and groups -----

    @Override
    public Feed<RestObject> getUsers(String... params) {
        return getUsers(getRepository(), params);
    }

    @Override
    public Feed<RestObject> getUsers(Linkable parent, String... params) {
        return feed(parent, USERS, params);
    }

    @Override
    public Feed<RestObject> getGroups(String... params) {
        return getGroups(getRepository(), params);
    }

    @Override
    public Feed<RestObject> getGroups(Linkable parent, String... params) {
        return feed(parent, GROUPS, params);
    }

    @Override
    public RestObject getCurrentUser(String... params) {
        return get(getRepository().getHref(LinkRelation.CURRENT_USER), false, JaxbUser.class, params);
    }

    @Override
    public RestObject getDefaultFolder(String... params) {
        return get(getCurrentUser().getHref(LinkRelation.DEFAULT_FOLDER), false, JaxbFolder.class, params);
    }

    @Override
    public RestObject getUser(String userUri, String... params) {
        return get(userUri, false, JaxbUser.class, params);
    }

    @Override
    public RestObject getGroup(String groupUri, String... params) {
        return get(groupUri, false, JaxbGroup.class, params);
    }

    @Override
    public RestObject createUser(RestObject userToCreate) {
        return post(getRepository().getHref(USERS), new JaxbUser(userToCreate), JaxbUser.class);
    }

    @Override
    public RestObject createGroup(RestObject groupToCreate) {
        return post(getRepository().getHref(GROUPS), new JaxbGroup(groupToCreate), JaxbGroup.class);
    }

    @Override
    public void addUserToGroup(RestObject group, RestObject user) {
        post(group.getHref(USERS), new JaxbUser(user.getHref(SELF)), null);
    }

    @Override
    public void addGroupToGroup(RestObject group, RestObject subGroup) {
        post(group.getHref(GROUPS), new JaxbGroup(subGroup.getHref(SELF)), null);
    }

    // ----- relations -----

    @Override
    public Feed<RestObject> getRelationTypes(String... params) {
        return feed(RELATION_TYPES, params);
    }

    @Override
    public RestObject getRelationType(String uri, String... params) {
        return get(uri, false, JaxbRelationType.class, params);
    }

    @Override
    public Feed<RestObject> getRelations(String... params) {
        // BUGFIX: previously fed from RELATION_TYPES (copy-paste from getRelationTypes);
        // relations live under the RELATIONS link relation, as createRelation shows.
        return feed(RELATIONS, params);
    }

    @Override
    public RestObject getRelation(String uri, String... params) {
        return get(uri, false, JaxbRelation.class, params);
    }

    @Override
    public RestObject createRelation(RestObject object) {
        return post(getRepository().getHref(RELATIONS), new JaxbRelation(object), JaxbRelation.class);
    }

    // ----- formats and network locations -----

    @Override
    public Feed<RestObject> getFormats(String... params) {
        return feed(FORMATS, params);
    }

    @Override
    public RestObject getFormat(String uri, String... params) {
        return get(uri, false, JaxbFormat.class, params);
    }

    @Override
    public Feed<RestObject> getNetworkLocations(String... params) {
        return feed(NETWORK_LOCATIONS, params);
    }

    @Override
    public RestObject getNetworkLocation(String uri, String... params) {
        return get(uri, false, JaxbNetworkLocation.class, params);
    }

    // ----- folder links -----

    @Override
    public Feed<FolderLink> getFolderLinks(Linkable object, LinkRelation rel, String... params) {
        return feed(object, rel, params);
    }

    @Override
    public FolderLink getFolderLink(String uri, String... params) {
        return get(uri, false, JaxbFolderLink.class, params);
    }

    @Override
    public FolderLink move(FolderLink oldLink, FolderLink newLink, String... params) {
        return put(oldLink.getHref(SELF), new JaxbFolderLink(newLink), JaxbFolderLink.class, params);
    }

    @Override
    public FolderLink link(Linkable object, LinkRelation rel, FolderLink link) {
        return post(object.getHref(rel), new JaxbFolderLink(link), JaxbFolderLink.class);
    }

    // ----- ACLs -----

    @Override
    public Feed<RestObject> getAcls(String... params) {
        return feed(ACLS, params);
    }

    @Override
    public Feed<RestObject> getAclAssociations(Linkable acl, String... params) {
        return feed(acl, ASSOCIATIONS, params);
    }

    @Override
    public RestObject getAcl(String uri, String... params) {
        return get(uri, false, JaxbAcl.class, params);
    }

    @Override
    public RestObject createAcl(RestObject object) {
        return post(getRepository().getHref(ACLS), new JaxbAcl(object), JaxbAcl.class);
    }

    // ----- batches -----

    @Override
    public Capabilities getBatchCapabilities() {
        return get(getRepository().getHref(BATCH_CAPABILITIES), false, JaxbBatchCapabilities.class);
    }

    @Override
    public Batch createBatch(Batch batch) {
        return post(batch, JaxbBatch.class);
    }

    // ----- preferences -----

    @Override
    public Feed<Preference> getPreferences(String... params) {
        return feed(CURRENT_USER_PREFERENCES, params);
    }

    @Override
    public Preference getPreference(String uri, String... params) {
        return get(uri, false, JaxbPreference.class, params);
    }

    @Override
    public Preference createPreference(Preference preference) {
        return post(getRepository().getHref(CURRENT_USER_PREFERENCES), new JaxbPreference(preference),
                JaxbPreference.class);
    }

    @Override
    public Preference updatePreference(Preference oldPreference, Preference newPreference) {
        return post(oldPreference.self(), new JaxbPreference(newPreference), JaxbPreference.class);
    }

    // ----- permissions -----

    @Override
    public Permission getPermission(Linkable linkable, String... params) {
        return get(linkable.getHref(LinkRelation.PERMISSIONS), false, JaxbPermission.class, params);
    }

    @Override
    public PermissionSet getPermissionSet(Linkable linkable, String... params) {
        return get(linkable.getHref(LinkRelation.PERMISSION_SET), false, JaxbPermissionSet.class, params);
    }

    // ----- comments -----

    @Override
    public Feed<Comment> getComments(Linkable parent, String... params) {
        return feed(parent, COMMENTS, params);
    }

    @Override
    public Comment createComment(Linkable parent, Comment comment) {
        return post(parent.getHref(COMMENTS), new JaxbComment(comment), JaxbComment.class);
    }

    @Override
    public Comment getComment(String commentUri, String... params) {
        return get(commentUri, false, JaxbComment.class, params);
    }

    @Override
    public Feed<Comment> getReplies(Linkable parent, String... params) {
        return feed(parent, REPLIES, params);
    }

    @Override
    public Comment createReply(Linkable parent, Comment comment) {
        return post(parent.getHref(REPLIES), new JaxbComment(comment), JaxbComment.class);
    }

    // ----- virtual documents -----

    @Override
    public Feed<VirtualDocumentNode> getVirtualDocumentNodes(Linkable linkable, String... params) {
        return feed(linkable, VIRTUAL_DOCUMENT_NODES, params);
    }

    // ----- search templates and saved searches -----

    @Override
    public Feed<SearchTemplate> getSearchTemplates(String... params) {
        return feed(SEARCH_TEMPLATES, params);
    }

    @Override
    public SearchTemplate getSearchTemplate(String uri, String... params) {
        return get(uri, false, JaxbSearchTemplate.class, params);
    }

    @Override
    public SearchTemplate createSearchTemplate(SearchTemplate template) {
        return post(getRepository().getHref(SEARCH_TEMPLATES), new JaxbSearchTemplate(template),
                JaxbSearchTemplate.class);
    }

    @Override
    public SearchFeed<RestObject> executeSearchTemplate(SearchTemplate toBeExecuted, String... params) {
        return post(toBeExecuted.getHref(SEARCH_EXECUTION), toBeExecuted,
                new Headers().accept(MediaType.APPLICATION_ATOM_XML_VALUE)
                        .contentType(SupportedMediaTypes.APPLICATION_VND_DCTM_XML_VALUE).toHttpHeaders(),
                JaxbSearchFeed.class, params);
    }

    @Override
    public SearchFeed<RestObject> executeSavedSearch(SavedSearch toBeExecuted, String... params) {
        return get(toBeExecuted.getHref(SEARCH_EXECUTION), true, JaxbSearchFeed.class, params);
    }

    @Override
    public Feed<SavedSearch> getSavedSearches(String... params) {
        return feed(SAVED_SEARCHES, params);
    }

    @Override
    public SavedSearch getSavedSearch(String uri, String... params) {
        return get(uri, false, JaxbSavedSearch.class, params);
    }

    @Override
    public SavedSearch createSavedSearch(SavedSearch savedSearch) {
        return post(getRepository().getHref(SAVED_SEARCHES), new JaxbSavedSearch(savedSearch), JaxbSavedSearch.class);
    }

    @Override
    public SavedSearch updateSavedSearch(SavedSearch oldSavedSearch, SavedSearch newSavedSearch) {
        return post(oldSavedSearch.self(), new JaxbSavedSearch(newSavedSearch), JaxbSavedSearch.class);
    }

    @Override
    public SearchFeed<RestObject> enableSavedSearchResult(SavedSearch toBeExecuted, String... params) {
        return sendRequest(toBeExecuted.getHref(SAVED_SEARCH_SAVED_RESULTS), PUT, ACCEPT_ATOM_HEADERS, null,
                JaxbSearchFeed.class, params);
    }

    @Override
    public void disableSavedSearchResult(SavedSearch toBeExecuted) {
        delete(toBeExecuted.getHref(SAVED_SEARCH_SAVED_RESULTS));
    }

    @Override
    public SearchFeed<RestObject> getSavedSearchResult(SavedSearch toBeExecuted, String... params) {
        return get(toBeExecuted.getHref(SAVED_SEARCH_SAVED_RESULTS), true, JaxbSearchFeed.class, params);
    }

    // ----- infrastructure -----

    /**
     * Installs the JAXB-aware error handler and registers a JAXB part converter
     * on the multipart form converter so XML object parts can be marshalled.
     */
    @Override
    protected void initRestTemplate(RestTemplate restTemplate) {
        super.initRestTemplate(restTemplate);
        restTemplate.setErrorHandler(new DCTMJaxbErrorHandler(restTemplate.getMessageConverters()));
        for(HttpMessageConverter<?> c : restTemplate.getMessageConverters()) {
            if(c instanceof FormHttpMessageConverter) {
                ((FormHttpMessageConverter)c).addPartConverter(new Jaxb2RootElementHttpMessageConverter());
                break;
            }
        }
    }

    /** Fetches the feed at {@code rel} of {@code parent}, deserialized as {@link JaxbFeed}. */
    @SuppressWarnings("rawtypes")
    private Feed feed(Linkable parent, LinkRelation rel, String... params) {
        return feed(parent, rel, JaxbFeed.class, params);
    }

    /** Fetches the repository-level feed at {@code rel}, deserialized as {@link JaxbFeed}. */
    @SuppressWarnings("rawtypes")
    private Feed feed(LinkRelation rel, String... params) {
        return feed(rel, JaxbFeed.class, params);
    }

    /**
     * Marshals {@code object} as XML onto {@code os}. Runtime exceptions propagate
     * unchanged; checked exceptions are wrapped in IllegalArgumentException.
     */
    @Override
    public void serialize(Object object, OutputStream os) {
        try {
            DCTMJaxbContext.marshal(os, object);
        } catch (RuntimeException re) {
            throw re;
        } catch (Exception e) {
            throw new IllegalArgumentException(e);
        }
    }

    @Override
    public ClientType getClientType() {
        return ClientType.XML;
    }

    // ----- lifecycles -----

    @Override
    public Feed<Lifecycle> getLifecycles(String... params) {
        return feed(LIFECYCLES, params);
    }

    @Override
    public Lifecycle getLifecycle(String uri, String... params) {
        return get(uri, false, JaxbLifecycle.class, params);
    }

    @Override
    public ObjectLifecycle attach(RestObject object, ObjectLifecycle objectLifecycle) {
        return put(object.getHref(OBJECT_LIFECYCLE),
                objectLifecycle==null?null:new JaxbObjectLifecycle(objectLifecycle), JaxbObjectLifecycle.class);
    }

    @Override
    public void detach(ObjectLifecycle objectLifecycle) {
        delete(objectLifecycle.self());
    }

    @Override
    public ObjectLifecycle getObjectLifecycle(RestObject object, String... params) {
        return get(object.getHref(OBJECT_LIFECYCLE), JaxbObjectLifecycle.class, params);
    }

    @Override
    public ObjectLifecycle promote(ObjectLifecycle objectLifecycle, String... params) {
        return put(objectLifecycle.getHref(PROMOTION), JaxbObjectLifecycle.class, params);
    }

    @Override
    public ObjectLifecycle demote(ObjectLifecycle objectLifecycle, String... params) {
        return put(objectLifecycle.getHref(DEMOTION), JaxbObjectLifecycle.class, params);
    }

    @Override
    public ObjectLifecycle suspend(ObjectLifecycle objectLifecycle, String... params) {
        return put(objectLifecycle.getHref(SUSPENSION), JaxbObjectLifecycle.class, params);
    }

    @Override
    public ObjectLifecycle resume(ObjectLifecycle objectLifecycle, String... params) {
        return put(objectLifecycle.getHref(RESUMPTION), JaxbObjectLifecycle.class, params);
    }

    /** Cancels whichever scheduled lifecycle transitions expose a cancel link. */
    @Override
    public void cancel(ObjectLifecycle objectLifecycle) {
        if(objectLifecycle.hasHref(CANCEL_PROMOTION)) {
            delete(objectLifecycle.getHref(CANCEL_PROMOTION));
        }
        if(objectLifecycle.hasHref(CANCEL_DEMOTION)) {
            delete(objectLifecycle.getHref(CANCEL_DEMOTION));
        }
        if(objectLifecycle.hasHref(CANCEL_SUSPENSION)) {
            delete(objectLifecycle.getHref(CANCEL_SUSPENSION));
        }
        if(objectLifecycle.hasHref(CANCEL_RESUMPTION)) {
            delete(objectLifecycle.getHref(CANCEL_RESUMPTION));
        }
    }

    // ----- subscriptions -----

    /**
     * Subscribes to {@code object}, optionally on behalf of {@code subscribers}.
     * Re-fetches the object with check-subscription=true when the SUBSCRIBE link
     * is absent, then fails with IllegalArgumentException if the object is already
     * subscribed or not subscribable.
     */
    @Override
    public RestObject subscribe(RestObject object, String... subscribers) {
        if(!object.hasHref(SUBSCRIBE)) {
            object = get(object, "check-subscription", "true");
        }
        if(object.hasHref(SUBSCRIBE)) {
            return Collections.isEmpty(subscribers)
                    ? put(object.getHref(SUBSCRIBE), object.getClass())
                    : put(object.getHref(SUBSCRIBE), new JaxbSubscribers(subscribers), object.getClass());
        } else if(object.hasHref(UNSUBSCRIBE)) {
            throw new IllegalArgumentException("the object is already subscribed");
        } else {
            throw new IllegalArgumentException("the object is not subscribable");
        }
    }

    /**
     * Unsubscribes from {@code object}, re-fetching with check-subscription=true
     * when needed; fails if the object is not subscribed or not unsubscribable.
     */
    @Override
    public void unsubscribe(RestObject object) {
        if(!object.hasHref(UNSUBSCRIBE)) {
            object = get(object, "check-subscription", "true");
        }
        if(object.hasHref(UNSUBSCRIBE)) {
            delete(object.getHref(UNSUBSCRIBE));
        } else if(object.hasHref(SUBSCRIBE)) {
            throw new IllegalArgumentException("the object is not subscribed");
        } else {
            throw new IllegalArgumentException("the object is not unsubscribable");
        }
    }

    @Override
    public Feed<RestObject> getSubscriptions(String... params) {
        return feed(SUBSCRIPTIONS, params);
    }

    // ----- auditing -----

    @Override
    public Feed<AuditPolicy> getAuditPolicies(String... params) {
        return feed(AUDIT_POLICIES, params);
    }

    @Override
    public AuditPolicy createAuditPolicy(AuditPolicy auditPolicy) {
        return post(getRepository().getHref(AUDIT_POLICIES), new JaxbAuditPolicy(auditPolicy), JaxbAuditPolicy.class);
    }

    @Override
    public AuditPolicy getAuditPolicy(String uri, String... params) {
        return get(uri, JaxbAuditPolicy.class, params);
    }

    @Override
    public AuditPolicy updateAuditPolicy(AuditPolicy oldPolicy, AuditPolicy newPolicy) {
        return post(oldPolicy.getHref(EDIT), new JaxbAuditPolicy(newPolicy), JaxbAuditPolicy.class);
    }

    @Override
    public void deleteAuditPolicy(AuditPolicy auditPolicy) {
        delete(auditPolicy);
    }

    @Override
    public Feed<RestObject> getRecentAuditTrails(String... params) {
        return feed(getCurrentUser(), RECENT_TRAILS, params);
    }

    @Override
    public AuditTrail getAuditTrail(String auditTrailUri, String... params) {
        return get(auditTrailUri, JaxbAuditTrail.class, params);
    }

    /** Returns the available audit events, or null when the repository exposes no such link. */
    @Override
    public AvailableAuditEvents getAvailableAuditEvents(String... params) {
        return getRepository().hasHref(AVAILABLE_AUDIT_EVENTS)
                ? get(getRepository().getHref(AVAILABLE_AUDIT_EVENTS),
                        new Headers().accept(APPLICATION_VND_DCTM_XML_VALUE+","+APPLICATION_ATOM_XML_VALUE)
                                .toHttpHeaders(),
                        JaxbAvailableAuditEvents.class, params)
                : null;
    }

    @Override
    public Feed<RestObject> getRegisteredAuditEvents(String... params) {
        return feed(getRepository(), REGISTERED_AUDIT_EVENTS, params);
    }

    @Override
    public RestObject registerAuditEvent(RestObject auditEvent, String... params) {
        return post(getRepository().getHref(REGISTERED_AUDIT_EVENTS), new JaxbAuditEvent(auditEvent),
                JaxbAuditEvent.class, params);
    }

    @Override
    public RestObject getRegisteredAuditEvent(String uri, String... params) {
        return get(uri, JaxbAuditEvent.class, params);
    }

    @Override
    public void unregisterAuditEvent(RestObject auditEvent) {
        delete(auditEvent);
    }
}
package org.broadinstitute.hellbender.tools.walkers.vqsr;

import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.vcf.*;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextBuilder;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import org.broadinstitute.barclay.argparser.Argument;
import org.broadinstitute.barclay.argparser.Advanced;
import org.broadinstitute.barclay.argparser.CommandLineProgramProperties;
import org.broadinstitute.barclay.help.DocumentedFeature;
import org.broadinstitute.hellbender.cmdline.StandardArgumentDefinitions;
import picard.cmdline.programgroups.VariantFilteringProgramGroup;
import org.broadinstitute.hellbender.engine.FeatureContext;
import org.broadinstitute.hellbender.engine.FeatureInput;
import org.broadinstitute.hellbender.engine.ReadsContext;
import org.broadinstitute.hellbender.engine.ReferenceContext;
import org.broadinstitute.hellbender.engine.MultiVariantWalker;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.tools.walkers.annotator.AnnotationUtils;
import org.broadinstitute.hellbender.utils.variant.GATKVCFConstants;

import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Apply a score cutoff to filter variants based on a recalibration table
 *
 * <p>This tool performs the second pass in a two-stage process called Variant Quality Score Recalibration (VQSR).
 * Specifically, it applies filtering to the input variants based on the recalibration table produced in the first step
 * by VariantRecalibrator and a target sensitivity value, which the tool matches internally to a VQSLOD score cutoff
 * based on the model's estimated sensitivity to a set of true variants.</p>
 *
 * <p>The filter determination is not just a pass/fail process. The tool evaluates for each variant which "tranche",
 * or slice of the dataset, it falls into in terms of sensitivity to the truthset. Variants in tranches that fall below
 * the specified truth sensitivity filter level have their FILTER field annotated with the corresponding tranche level.
 * This results in a callset that is filtered to the desired level but retains the information necessary to increase
 * sensitivity if needed.</p>
 *
 * <p>To be clear, please note that by "filtered", we mean that variants failing the requested tranche cutoff are
 * <b>marked as filtered</b> in the output VCF; they are <b>not discarded</b> unless the option to do so is specified.</p>
 *
 * <h4>Summary of the VQSR procedure</h4>
 * <p>The purpose of variant recalibration is to assign a well-calibrated probability to each variant call in a call set.
 * These probabilities can then be used to filter the variants with a greater level of accuracy and flexibility than
 * can typically be achieved by traditional hard-filter (filtering on individual annotation value thresholds). The first
 * pass consists of building a model that describes how variant annotation values co-vary with the truthfulness of
 * variant calls in a training set, and then scoring all input variants according to the model. The second pass simply
 * consists of specifying a target sensitivity value (which corresponds to an empirical VQSLOD cutoff) and applying
 * filters to each variant call according to their ranking. The result is a VCF file in which variants have been
 * assigned a score and filter status.</p>
 *
 * <p>VQSR is probably the hardest part of the Best Practices to get right, so be sure to read the
 * <a href='https://software.broadinstitute.org/gatk/guide/article?id=39'>method documentation</a>,
 * <a href='https://software.broadinstitute.org/gatk/guide/article?id=1259'>parameter recommendations</a> and
 * <a href='https://software.broadinstitute.org/gatk/guide/article?id=2805'>tutorial</a> to really understand what these
 * tools do and how to use them for best results on your own data.</p>
 *
 * <h3>Inputs</h3>
 * <ul>
 * <li>The raw input variants to be filtered.</li>
 * <li>The recalibration table file that was generated by the VariantRecalibrator tool.</li>
 * <li>The tranches file that was generated by the VariantRecalibrator tool.</li>
 * </ul>
 *
 * <h3>Output</h3>
 * <ul>
 * <li>A recalibrated VCF file in which each variant of the requested type is annotated with its VQSLOD and marked as
 * filtered if the score is below the desired quality level.</li>
 * </ul>
 *
 * <h3>Usage examples</h3>
 *
 * <h4>Applying recalibration/filtering to SNPs</h4>
 * <pre>
 * gatk ApplyVQSR \
 *   -R Homo_sapiens_assembly38.fasta \
 *   -V input.vcf.gz \
 *   -O output.vcf.gz \
 *   --truth-sensitivity-filter-level 99.0 \
 *   --tranches-file output.tranches \
 *   --recal-file output.recal \
 *   -mode SNP
 * </pre>
 *
 * <h4>Allele-specific version of the SNP filtering (beta)</h4>
 * <pre>
 * gatk ApplyVQSR \
 *   -R Homo_sapiens_assembly38.fasta \
 *   -V input.vcf.gz \
 *   -O output.vcf.gz \
 *   -AS \
 *   --truth-sensitivity-filter-level 99.0 \
 *   --tranches-file output.AS.tranches \
 *   --recal-file output.AS.recal \
 *   -mode SNP
 * </pre>
 * <p>Note that the tranches and recalibration files must have been produced by an allele-specific run of
 * VariantRecalibrator. Also note that the AS_culprit, AS_FilterStatus, and AS_VQSLOD fields will have placeholder
 * values (NA or NaN) for alleles of a type that have not yet been processed by ApplyRecalibration. The spanning
 * deletion allele (*) will not be recalibrated because it represents missing data. Its VQSLOD will remain NaN, and its
 * culprit and FilterStatus will be NA.</p>
 * <p>Each allele will be annotated by its corresponding entry in the AS_FilterStatus INFO field annotation.
 * Allele-specific VQSLOD and culprit are also carried through from VariantRecalibrator, and stored in the AS_VQSLOD
 * and AS_culprit INFO fields, respectively. The site-level filter is set to the most lenient of any of
 * the allele filters. That is, if one allele passes, the whole site will be PASS. If no alleles pass, the site-level
 * filter will be set to the lowest sensitivity tranche among all the alleles.</p>
 *
 * <h3>Caveats</h3>
 *
 * <ul>
 * <li>The tranche values used in the example above are only meant to be a general example. You should determine the
 * level of sensitivity that is appropriate for your specific project. Remember that higher sensitivity (more power to
 * detect variants, yay!) comes at the cost of specificity (more false negatives, boo!). You have to choose at what
 * point you want to set the tradeoff.</li>
 * <li>In order to create the tranche reporting plots (which are only generated for SNPs, not indels!) the Rscript
 * executable needs to be in your environment PATH (this is the scripting version of R, not the interactive version).</li>
 * </ul>
 */
@CommandLineProgramProperties(
        summary = "Apply a score cutoff to filter variants based on a recalibration table",
        oneLineSummary = " Apply a score cutoff to filter variants based on a recalibration table",
        programGroup = VariantFilteringProgramGroup.class
)
@DocumentedFeature
public class ApplyVQSR extends MultiVariantWalker {

    protected static final String LOW_VQSLOD_FILTER_NAME = "LOW_VQSLOD";
    private final double DEFAULT_VQSLOD_CUTOFF = 0.0;

    // Set by checkForPreviousApplyRecalRun() when the input VCF header already carries
    // tranche FILTER lines from an earlier ApplyRecalibration pass of that mode.
    private boolean foundSNPTranches = false;
    private boolean foundINDELTranches = false;

    /////////////////////////////
    // Inputs
    /////////////////////////////
    @Argument(fullName="recal-file", doc="The input recal file used by ApplyRecalibration", optional=false)
    private FeatureInput<VariantContext> recal;

    @Argument(fullName="tranches-file", doc="The input tranches file describing where to cut the data", optional=true)
    private File TRANCHES_FILE;

    /////////////////////////////
    // Outputs
    /////////////////////////////
    @Argument(fullName= StandardArgumentDefinitions.OUTPUT_LONG_NAME,
            shortName=StandardArgumentDefinitions.OUTPUT_SHORT_NAME,
            doc="The output filtered and recalibrated VCF file in which each variant is annotated with its VQSLOD value", optional=false)
    private String output;

    /////////////////////////////
    // Command Line Arguments
    /////////////////////////////
    @Argument(fullName="truth-sensitivity-filter-level", shortName="ts-filter-level", doc="The truth sensitivity level at which to start filtering", optional=true)
    private Double TS_FILTER_LEVEL = null;

    /**
     * Filter the input file based on allele-specific recalibration data. See tool docs for site-level and allele-level filtering details.
     * Requires a .recal file produced using an allele-specific run of VariantRecalibrator.
     */
    @Argument(fullName="use-allele-specific-annotations", shortName="AS", doc="If specified, the tool will attempt to apply a filter to each allele based on the input tranches and allele-specific .recal file.", optional=true)
    private boolean useASannotations = false;

    @Advanced
    @Argument(fullName="lod-score-cutoff", doc="The VQSLOD score below which to start filtering", optional=true)
    protected Double VQSLOD_CUTOFF = null;

    /**
     * For this to work properly, the --ignore-filter argument should also be applied to the VariantRecalibration command.
     */
    @Argument(fullName="ignore-filter", doc="If specified, the recalibration will be applied to variants marked as filtered by the specified filter name in the input VCF file", optional=true)
    private List<String> IGNORE_INPUT_FILTERS = new ArrayList<>();

    @Argument(fullName="ignore-all-filters", doc="If specified, the variant recalibrator will ignore all input filters. Useful to rerun the VQSR from a filtered output file.", optional=true)
    private boolean IGNORE_ALL_FILTERS = false;

    @Argument(fullName="exclude-filtered", doc="Don't output filtered loci after applying the recalibration", optional=true)
    private boolean EXCLUDE_FILTERED = false;

    @Argument(fullName = "mode", shortName = "mode", doc = "Recalibration mode to employ: 1.) SNP for recalibrating only SNPs (emitting indels untouched in the output VCF); 2.) INDEL for indels; and 3.) BOTH for recalibrating both SNPs and indels simultaneously.", optional=true)
    private VariantRecalibratorArgumentCollection.Mode MODE = VariantRecalibratorArgumentCollection.Mode.SNP;

    /////////////////////////////
    // Private Member Variables
    /////////////////////////////
    private VariantContextWriter vcfWriter;
    final private List<TruthSensitivityTranche> tranches = new ArrayList<>();
    final private Set<String> ignoreInputFilterSet = new TreeSet<>();
    final static private String listPrintSeparator = ",";
    final static private String trancheFilterString = "VQSRTranche";
    final static private String arrayParseRegex = "[\\[\\]\\s]";
    final static private String emptyStringValue = "NA";
    final static private String emptyFloatValue = "NaN";

    // Tranche filter names look like VQSRTranche[SNP|INDEL]<lower>to<upper>.
    // Compiled once because it is matched per allele per variant.
    // NOTE: the reluctant \S+? is deliberate. A greedy \S+ backtracks only far enough
    // to leave ONE digit before the decimal point, so "VQSRTrancheSNP99.00to99.90"
    // would parse a lower limit of 9.00 (and "...90.00to99.00" would parse 0.00),
    // corrupting the leniency comparison in generateFilterStringFromAlleles().
    final static private Pattern trancheFilterPattern =
            Pattern.compile(trancheFilterString + "\\S+?(\\d+\\.\\d+)to(\\d+\\.\\d+)");

    //---------------------------------------------------------------------------------------------------------------
    //
    // onTraversalStart
    //
    //---------------------------------------------------------------------------------------------------------------

    /**
     * Read the tranches file (when filtering by truth sensitivity), validate the mutually-exclusive
     * cutoff arguments, build the output VCF header (including one FILTER line per tranche), and
     * open the output writer.
     */
    @Override
    public void onTraversalStart() {
        if( TS_FILTER_LEVEL != null ) {
            try {
                for (final TruthSensitivityTranche t : TruthSensitivityTranche.readTranches(TRANCHES_FILE)) {
                    if (t.targetTruthSensitivity >= TS_FILTER_LEVEL) {
                        tranches.add(t);
                    }
                    // plain concatenation: wrapping this in String.format() would throw if the
                    // tranche's string representation ever contained a '%'
                    logger.info("Read tranche " + t);
                }
            } catch( IOException e ) {
                throw new UserException.CouldNotReadInputFile(TRANCHES_FILE, e);
            }
            Collections.reverse(tranches); // this algorithm wants the tranches ordered from best (lowest truth sensitivity) to worst (highest truth sensitivity)
        }

        if( IGNORE_INPUT_FILTERS != null ) {
            ignoreInputFilterSet.addAll( IGNORE_INPUT_FILTERS );
        }

        // setup the header fields
        VCFHeader inputHeader = getHeaderForVariants();
        final Set<VCFHeaderLine> inputHeaders = inputHeader.getMetaDataInSortedOrder();

        final Set<VCFHeaderLine> hInfo = new HashSet<>(inputHeaders);
        VariantRecalibrationUtils.addVQSRStandardHeaderLines(hInfo);
        if (useASannotations) {
            VariantRecalibrationUtils.addAlleleSpecificVQSRHeaderLines(hInfo);
        }
        checkForPreviousApplyRecalRun(Collections.unmodifiableSet(inputHeaders));

        final TreeSet<String> samples = new TreeSet<>();
        samples.addAll(inputHeader.getGenotypeSamples());

        //generate headers from tranches file
        //TODO: throw away old tranche headers if we're ignoring filters
        //TODO: TS_FILTER_LEVEL and VQSLOD_CUTOFF should use mutex argument declaration
        if( TS_FILTER_LEVEL != null ) {
            // if the user specifies both ts_filter_level and lodCutoff then throw a user error
            if( VQSLOD_CUTOFF != null ) {
                throw new UserException("Arguments --truth-sensitivity-filter-level and --lod-score-cutoff are mutually exclusive. Please only specify one option.");
            }

            if( tranches.size() >= 2 ) {
                // interior tranches get a bounded VQSLOD interval in their description
                for( int i = 0; i < tranches.size() - 1; i++ ) {
                    final TruthSensitivityTranche t = tranches.get(i);
                    hInfo.add(new VCFFilterHeaderLine(t.name,
                            "Truth sensitivity tranche level for " + t.model.toString() + " model at VQS Lod: " + t.minVQSLod + " <= x < " + tranches.get(i + 1).minVQSLod));
                }
            }
            if( tranches.size() >= 1 ) {
                // the worst tranche is open-ended below its minimum VQSLOD
                hInfo.add(new VCFFilterHeaderLine(tranches.get(0).name + "+",
                        "Truth sensitivity tranche level for " + tranches.get(0).model.toString() + " model at VQS Lod < " + tranches.get(0).minVQSLod));
            } else {
                throw new UserException("No tranches were found in the file or were above the truth sensitivity filter level " + TS_FILTER_LEVEL);
            }

            logger.info("Keeping all variants in tranche " + tranches.get(tranches.size() - 1));
        } else {
            if( VQSLOD_CUTOFF == null ) {
                VQSLOD_CUTOFF = DEFAULT_VQSLOD_CUTOFF;
            }
            hInfo.add(new VCFFilterHeaderLine(LOW_VQSLOD_FILTER_NAME, "VQSLOD < " + VQSLOD_CUTOFF));
            logger.info("Keeping all variants with VQSLOD >= " + VQSLOD_CUTOFF);
        }

        hInfo.addAll(getDefaultToolVCFHeaderLines());
        final VCFHeader vcfHeader = new VCFHeader(hInfo, samples);
        vcfWriter = createVCFWriter(new File(output));
        vcfWriter.writeHeader(vcfHeader);
    }

    /**
     * Validate that the text after the tranche-name prefix encodes a "[lower]to[upper]" interval
     * with two parseable numeric endpoints.
     * @param sensitivityLimits the portion of a filter name following VQSRTranche[SNP|INDEL]
     * @return true if the interval is well formed; false if it does not split into two parts
     * @throws UserException if an endpoint is present but is not a parseable number
     */
    private boolean trancheIntervalIsValid(final String sensitivityLimits) {
        final String[] vals = sensitivityLimits.split("to");
        if(vals.length != 2)
            return false;
        try {
            // only validating that both endpoints parse; the values themselves are not needed here
            Double.parseDouble(vals[0]);
            Double.parseDouble(vals[1].replace("+","")); //why does our last tranche end with 100+? Is there anything greater than 100 percent? Really???
        }
        catch(NumberFormatException e) {
            throw new UserException("Poorly formatted tranche filter name does not contain two sensitivity interval end points.");
        }
        return true;
    }

    /**
     * Check the filter declarations in the input VCF header to see if any ApplyRecalibration mode has been run
     * Here we assume that the tranches are named with a specific format: VQSRTranche[SNP|INDEL][lowerLimit]to[upperLimit]
     * @param inputHeaders the header lines of the input VCF
     */
    private void checkForPreviousApplyRecalRun(final Set<VCFHeaderLine> inputHeaders) {
        // derive offsets from the prefix constants instead of hard-coding 11/14/16
        final int prefixLength = trancheFilterString.length();                      // "VQSRTranche"
        final int snpPrefixLength = prefixLength + "SNP".length();                  // "VQSRTrancheSNP"
        final int indelPrefixLength = prefixLength + "INDEL".length();              // "VQSRTrancheINDEL"
        for(final VCFHeaderLine header : inputHeaders) {
            if(header instanceof VCFFilterHeaderLine) {
                final String filterName = ((VCFFilterHeaderLine)header).getID();
                if(filterName.length() <= prefixLength || !filterName.substring(0, prefixLength).equalsIgnoreCase(trancheFilterString)) {
                    continue;
                }
                if(filterName.charAt(prefixLength) == 'S') {
                    //for SNP tranches, get sensitivity limit
                    final String sensitivityLimits = filterName.substring(snpPrefixLength);
                    if(trancheIntervalIsValid(sensitivityLimits))
                        foundSNPTranches = true;
                }
                else if(filterName.charAt(prefixLength) == 'I') {
                    //for INDEL tranches, get sensitivity limit
                    final String sensitivityLimits = filterName.substring(indelPrefixLength);
                    if(trancheIntervalIsValid(sensitivityLimits))
                        foundINDELTranches = true;
                }
            }
        }
    }

    //---------------------------------------------------------------------------------------------------------------
    //
    // apply
    //
    //---------------------------------------------------------------------------------------------------------------

    /**
     * Annotate and (re-)filter one variant. Variants outside the current recalibration mode, or
     * filtered by filters we were not told to ignore, are emitted untouched.
     */
    @Override
    public void apply(final VariantContext vc, final ReadsContext readsContext, final ReferenceContext ref, final FeatureContext featureContext) {
        final List<VariantContext> recals = featureContext.getValues(recal, vc.getStart());
        final boolean evaluateThisVariant = useASannotations || VariantDataManager.checkVariationClass( vc, MODE );

        //vc.isNotFiltered is true for PASS; vc.filtersHaveBeenApplied covers PASS and filters
        final boolean variantIsNotFiltered = IGNORE_ALL_FILTERS || vc.isNotFiltered() ||
                (!ignoreInputFilterSet.isEmpty() && ignoreInputFilterSet.containsAll(vc.getFilters()));

        if( evaluateThisVariant && variantIsNotFiltered) {
            String filterString;
            final VariantContextBuilder builder = new VariantContextBuilder(vc);
            if (!useASannotations) {
                filterString = doSiteSpecificFiltering(vc, recals, builder);
            }
            else {  //allele-specific mode
                filterString = doAlleleSpecificFiltering(vc, recals, builder);
            }

            //for both non-AS and AS modes:
            if( filterString.equals(VCFConstants.PASSES_FILTERS_v4) ) {
                builder.passFilters();
            }
            else if(filterString.equals(VCFConstants.UNFILTERED)) {
                builder.unfiltered();
            }
            else {
                builder.filters(filterString);
            }

            final VariantContext outputVC = builder.make();
            if( !EXCLUDE_FILTERED || outputVC.isNotFiltered() ) {
                vcfWriter.add( outputVC );
            }
        }
        else { // valid VC but not compatible with this mode, so just emit the variant untouched
            vcfWriter.add( vc );
        }
    }

    /**
     * Extract the lower truth-sensitivity bound from a tranche filter name of the form
     * VQSRTranche[SNP|INDEL][lower]to[upper].
     * @param trancheFilter the FILTER field value to parse
     * @return the lower sensitivity limit, or -1 if the name does not match the expected format
     */
    public double parseFilterLowerLimit(final String trancheFilter) {
        final Matcher m = trancheFilterPattern.matcher(trancheFilter);
        return m.find() ? Double.parseDouble(m.group(1)) : -1;
    }

    /**
     * Generate the VCF filter string for this record based on the ApplyRecalibration modes run so far
     * @param vc the input VariantContext (with at least one ApplyRecalibration mode already run)
     * @param bestLod best LOD from the alleles we've seen in this recalibration mode
     * @return the String to use as the VCF filter field
     */
    protected String generateFilterStringFromAlleles(final VariantContext vc, final double bestLod) {
        String filterString = ".";

        final boolean bothModesWereRun = (MODE == VariantRecalibratorArgumentCollection.Mode.SNP && foundINDELTranches) || (MODE == VariantRecalibratorArgumentCollection.Mode.INDEL && foundSNPTranches);
        final boolean onlyOneModeNeeded = !vc.isMixed() && VariantDataManager.checkVariationClass( vc, MODE );

        //if both SNP and INDEL modes have not yet been run (and need to be), leave this variant as unfiltered and add the filters for the alleles in this mode to the INFO field
        if (!bothModesWereRun && !onlyOneModeNeeded) {
            return VCFConstants.UNFILTERED;
        }

        //if both SNP and INDEL modes have been run or the site is not mixed, generate a filter string for this site based on both models
        //pull out the allele filter status from the info field (there may be more than one entry in the list if there were multiple snp/indel alleles assessed in the other mode)
        final String prevFilterStatus = vc.getAttributeAsString(GATKVCFConstants.AS_FILTER_STATUS_KEY, null);

        //if this site hasn't had a filter applied yet
        if (prevFilterStatus != null && !prevFilterStatus.equals(VCFConstants.UNFILTERED)) {
            final String[] prevAllelesFilterStatusList = prevFilterStatus.split(listPrintSeparator);
            //start with the current best allele filter as the most lenient filter across all modes and all alleles
            String mostLenientFilterName = generateFilterString(bestLod);
            //if the current mode's best allele passes the tranche filter, then let the whole site pass
            if (mostLenientFilterName.equals(VCFConstants.PASSES_FILTERS_v4)) {
                filterString = mostLenientFilterName;
            }
            //if the current mode's best allele does not pass the tranche filter, compare the most lenient filter of this mode with those from the previous mode
            else {
                double mostLenientSensitivityLowerLimit = parseFilterLowerLimit(mostLenientFilterName);
                for (int i = 0; i < prevAllelesFilterStatusList.length; i++) {
                    final String alleleFilterString = prevAllelesFilterStatusList[i].replaceAll(arrayParseRegex, "").trim();
                    //if any allele from the previous mode passed the tranche filter, then let the whole site pass
                    if (alleleFilterString.equals(VCFConstants.PASSES_FILTERS_v4)) {  //this allele is PASS
                        mostLenientFilterName = alleleFilterString;
                        break;
                    }
                    //if there's no PASS, then we need to parse the filters to find out how lenient they are
                    else {
                        final double alleleLowerLimit = parseFilterLowerLimit(alleleFilterString);
                        if (alleleLowerLimit == -1)
                            continue;
                        if (alleleLowerLimit < mostLenientSensitivityLowerLimit) {
                            mostLenientSensitivityLowerLimit = alleleLowerLimit;
                            mostLenientFilterName = alleleFilterString;
                        }
                    }
                }
                filterString = mostLenientFilterName;
            }
        }
        //if both modes have been run, but the previous mode didn't apply a filter, use the current mode's best allele VQSLOD filter (shouldn't get run, but just in case)
        else {
            filterString = generateFilterString(bestLod);
        }

        return filterString;
    }

    /**
     * Generate the VCF filter string for this record based on the provided lod score
     * @param lod non-null double
     * @return the String to use as the VCF filter field
     */
    protected String generateFilterString( final double lod ) {
        String filterString = null;
        if( TS_FILTER_LEVEL != null ) {
            // walk from the best (last) tranche down; the first tranche whose floor the lod clears wins
            for( int i = tranches.size() - 1; i >= 0; i-- ) {
                final TruthSensitivityTranche tranche = tranches.get(i);
                if( lod >= tranche.minVQSLod ) {
                    if( i == tranches.size() - 1 ) {
                        filterString = VCFConstants.PASSES_FILTERS_v4;
                    } else {
                        filterString = tranche.name;
                    }
                    break;
                }
            }
            if( filterString == null ) {
                filterString = tranches.get(0).name + "+";
            }
        } else {
            filterString = ( lod < VQSLOD_CUTOFF ? LOW_VQSLOD_FILTER_NAME : VCFConstants.PASSES_FILTERS_v4 );
        }
        return filterString;
    }

    /**
     * Find the recalibration record matching {@code target} (and, in allele-specific mode,
     * the given alt allele) among the records overlapping this position.
     * @return the matching record, or null if none matches
     */
    private VariantContext getMatchingRecalVC(final VariantContext target, final List<VariantContext> recalVCs, final Allele allele) {
        for( final VariantContext recalVC : recalVCs ) {
            if ( target.getEnd() == recalVC.getEnd() ) {
                if (!useASannotations)
                    return recalVC;
                else if (allele.equals(recalVC.getAlternateAllele(0)))
                    return recalVC;
            }
        }
        return null;
    }

    /**
     * Carry forward (or placeholder-fill) per-allele VQSR annotations for an allele that is not
     * recalibrated by the current mode.
     * @param altIndex current alt allele
     * @param prevCulpritList culprits from previous ApplyRecalibration run
     * @param prevLodList lods from previous ApplyRecalibration run
     * @param prevASfiltersList AS_filters from previous ApplyRecalibration run
     * @param culpritString output list of per-allele culprits (appended to)
     * @param lodString output list of per-allele lods (appended to)
     * @param AS_filterString output list of per-allele filter statuses (appended to)
     */
    private void updateAnnotationsWithoutRecalibrating(final int altIndex, final String[] prevCulpritList, final String[] prevLodList, final String[] prevASfiltersList,
                                                       final List<String> culpritString, final List<String> lodString, final List<String> AS_filterString) {
        if (foundINDELTranches || foundSNPTranches) {
            if (altIndex < prevCulpritList.length) {
                culpritString.add(prevCulpritList[altIndex].replaceAll(arrayParseRegex, "").trim());
                lodString.add(prevLodList[altIndex].replaceAll(arrayParseRegex, "").trim());
                AS_filterString.add(prevASfiltersList[altIndex].replaceAll(arrayParseRegex, "").trim());
            }
        } else { //if the other allele type hasn't been processed yet, make sure there are enough entries
            culpritString.add(emptyStringValue);
            lodString.add(emptyFloatValue);
            AS_filterString.add(emptyStringValue);
        }
    }

    /**
     * Calculate the allele-specific filter status of vc
     * @param vc the variant to filter
     * @param recals recalibration records overlapping vc's position
     * @param builder is modified by adding attributes
     * @return a String with the filter status for this site
     */
    private String doAlleleSpecificFiltering(final VariantContext vc, final List<VariantContext> recals, final VariantContextBuilder builder) {
        double bestLod = VariantRecalibratorEngine.MIN_ACCEPTABLE_LOD_SCORE;
        final List<String> culpritStrings = new ArrayList<>();
        final List<String> lodStrings = new ArrayList<>();
        final List<String> AS_filterStrings = new ArrayList<>();

        String[] prevCulpritList = null;
        String[] prevLodList = null;
        String[] prevASfiltersList = null;

        //get VQSR annotations from previous run of ApplyRecalibration, if applicable
        if(foundINDELTranches || foundSNPTranches) {
            final String prevCulprits = vc.getAttributeAsString(GATKVCFConstants.AS_CULPRIT_KEY,"");
            prevCulpritList = prevCulprits.isEmpty()? new String[0] : prevCulprits.split(listPrintSeparator);
            final String prevLodString = vc.getAttributeAsString(GATKVCFConstants.AS_VQS_LOD_KEY,"");
            prevLodList = prevLodString.isEmpty()? new String[0] : prevLodString.split(listPrintSeparator);
            final String prevASfilters = vc.getAttributeAsString(GATKVCFConstants.AS_FILTER_STATUS_KEY,"");
            prevASfiltersList = prevASfilters.isEmpty()? new String[0] : prevASfilters.split(listPrintSeparator);
        }

        //for each allele in the current VariantContext
        for (int altIndex = 0; altIndex < vc.getNAlleles()-1; altIndex++) {
            final Allele allele = vc.getAlternateAllele(altIndex);

            //if the current allele is not part of this recalibration mode, add its annotations to the list and go to the next allele
            if (!VariantDataManager.checkVariationClass(vc, allele, MODE)) {
                updateAnnotationsWithoutRecalibrating(altIndex, prevCulpritList, prevLodList, prevASfiltersList, culpritStrings, lodStrings, AS_filterStrings);
                continue;
            }

            //if the current allele does need to have recalibration applied...
            //initialize allele-specific VQSR annotation data with values for spanning deletion
            String alleleLodString = emptyFloatValue;
            String alleleFilterString = emptyStringValue;
            String alleleCulpritString = emptyStringValue;

            //if it's not a spanning deletion, replace those allele strings with the real values
            if (!GATKVCFConstants.isSpanningDeletion(allele)) {
                final VariantContext recalDatum = getMatchingRecalVC(vc, recals, allele);
                if (recalDatum == null) {
                    throw new UserException("Encountered input allele which isn't found in the input recal file. Please make sure VariantRecalibrator and ApplyRecalibration were run on the same set of input variants with flag -AS. First seen at: " + vc);
                }

                //compare VQSLODs for all alleles in the current mode for filtering later
                final double lod = recalDatum.getAttributeAsDouble(GATKVCFConstants.VQS_LOD_KEY, VariantRecalibratorEngine.MIN_ACCEPTABLE_LOD_SCORE);
                if (lod > bestLod)
                    bestLod = lod;

                alleleLodString = String.format("%.4f", lod);
                alleleFilterString = generateFilterString(lod);
                alleleCulpritString = recalDatum.getAttributeAsString(GATKVCFConstants.CULPRIT_KEY, ".");

                // recalDatum is guaranteed non-null here (we threw above otherwise)
                if (recalDatum.hasAttribute(GATKVCFConstants.POSITIVE_LABEL_KEY))
                    builder.attribute(GATKVCFConstants.POSITIVE_LABEL_KEY, true);
                if (recalDatum.hasAttribute(GATKVCFConstants.NEGATIVE_LABEL_KEY))
                    builder.attribute(GATKVCFConstants.NEGATIVE_LABEL_KEY, true);
            }

            //append per-allele VQSR annotations
            lodStrings.add(alleleLodString);
            AS_filterStrings.add(alleleFilterString);
            culpritStrings.add(alleleCulpritString);
        }

        // Annotate the new record with its VQSLOD, AS_FilterStatus, and the worst performing annotation
        if(!AS_filterStrings.isEmpty() )
            builder.attribute(GATKVCFConstants.AS_FILTER_STATUS_KEY, AnnotationUtils.encodeStringList(AS_filterStrings));
        if(!lodStrings.isEmpty())
            builder.attribute(GATKVCFConstants.AS_VQS_LOD_KEY, AnnotationUtils.encodeStringList(lodStrings));
        if(!culpritStrings.isEmpty())
            builder.attribute(GATKVCFConstants.AS_CULPRIT_KEY, AnnotationUtils.encodeStringList(culpritStrings));

        return generateFilterStringFromAlleles(vc, bestLod);
    }

    /**
     * Calculate the filter status for a given VariantContext using the combined data from all alleles at a site
     * @param vc the variant to filter
     * @param recals recalibration records overlapping vc's position
     * @param builder is modified by adding attributes
     * @return a String with the filter status for this site
     */
    private String doSiteSpecificFiltering(final VariantContext vc, final List<VariantContext> recals, final VariantContextBuilder builder) {
        final VariantContext recalDatum = getMatchingRecalVC(vc, recals, null);
        if( recalDatum == null ) {
            throw new UserException("Encountered input variant which isn't found in the input recal file. Please make sure VariantRecalibrator and ApplyRecalibration were run on the same set of input variants. First seen at: " + vc );
        }

        final String lodString = recalDatum.getAttributeAsString(GATKVCFConstants.VQS_LOD_KEY, null);
        if( lodString == null ) {
            throw new UserException("Encountered a malformed record in the input recal file. There is no lod for the record at: " + vc );
        }
        final double lod;
        try {
            lod = Double.parseDouble(lodString); // parseDouble: no pointless boxing
        } catch (NumberFormatException e) {
            throw new UserException("Encountered a malformed record in the input recal file. The lod is unreadable for the record at: " + vc );
        }

        builder.attribute(GATKVCFConstants.VQS_LOD_KEY, lod);
        builder.attribute(GATKVCFConstants.CULPRIT_KEY, recalDatum.getAttribute(GATKVCFConstants.CULPRIT_KEY));
        // recalDatum is guaranteed non-null here (we threw above otherwise)
        if (recalDatum.hasAttribute(GATKVCFConstants.POSITIVE_LABEL_KEY))
            builder.attribute(GATKVCFConstants.POSITIVE_LABEL_KEY, true);
        if (recalDatum.hasAttribute(GATKVCFConstants.NEGATIVE_LABEL_KEY))
            builder.attribute(GATKVCFConstants.NEGATIVE_LABEL_KEY, true);

        return generateFilterString(lod);
    }

    /**
     * Flush and close the output VCF writer.
     */
    @Override
    public void closeTool() {
        if (vcfWriter != null) {
            vcfWriter.close();
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs.s3a; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.model.GetObjectRequest; import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3ObjectInputStream; import com.amazonaws.services.s3.model.SSECustomerKey; import com.google.common.base.Preconditions; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.CanSetReadahead; import org.apache.hadoop.fs.FSExceptionMessages; import org.apache.hadoop.fs.FSInputStream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.EOFException; import java.io.IOException; import java.net.SocketTimeoutException; import static org.apache.commons.lang3.StringUtils.isNotEmpty; /** * The input stream for an S3A object. * * As this stream seeks withing an object, it may close then re-open the stream. * When this happens, any updated stream data may be retrieved, and, given * the consistency model of Amazon S3, outdated data may in fact be picked up. 
* * As a result, the outcome of reading from a stream of an object which is * actively manipulated during the read process is "undefined". * * The class is marked as private as code should not be creating instances * themselves. Any extra feature (e.g instrumentation) should be considered * unstable. * * Because it prints some of the state of the instrumentation, * the output of {@link #toString()} must also be considered unstable. */ @InterfaceAudience.Private @InterfaceStability.Evolving public class S3AInputStream extends FSInputStream implements CanSetReadahead { /** * This is the public position; the one set in {@link #seek(long)} * and returned in {@link #getPos()}. */ private long pos; /** * Closed bit. Volatile so reads are non-blocking. * Updates must be in a synchronized block to guarantee an atomic check and * set */ private volatile boolean closed; private S3ObjectInputStream wrappedStream; private final S3AReadOpContext context; private final AmazonS3 client; private final String bucket; private final String key; private final String pathStr; private final long contentLength; private final String uri; private static final Logger LOG = LoggerFactory.getLogger(S3AInputStream.class); private final S3AInstrumentation.InputStreamStatistics streamStatistics; private S3AEncryptionMethods serverSideEncryptionAlgorithm; private String serverSideEncryptionKey; private S3AInputPolicy inputPolicy; private long readahead = Constants.DEFAULT_READAHEAD_RANGE; /** * This is the actual position within the object, used by * lazy seek to decide whether to seek on the next read or not. */ private long nextReadPos; /** * The end of the content range of the last request. * This is an absolute value of the range, not a length field. */ private long contentRangeFinish; /** * The start of the content range of the last request. */ private long contentRangeStart; /** * Create the stream. 
* This does not attempt to open it; that is only done on the first * actual read() operation. * @param ctx operation context * @param s3Attributes object attributes from a HEAD request * @param contentLength length of content * @param client S3 client to use * @param readahead readahead bytes * @param inputPolicy IO policy */ public S3AInputStream(S3AReadOpContext ctx, S3ObjectAttributes s3Attributes, long contentLength, AmazonS3 client, long readahead, S3AInputPolicy inputPolicy) { Preconditions.checkArgument(isNotEmpty(s3Attributes.getBucket()), "No Bucket"); Preconditions.checkArgument(isNotEmpty(s3Attributes.getKey()), "No Key"); Preconditions.checkArgument(contentLength >= 0, "Negative content length"); this.context = ctx; this.bucket = s3Attributes.getBucket(); this.key = s3Attributes.getKey(); this.pathStr = ctx.dstFileStatus.getPath().toString(); this.contentLength = contentLength; this.client = client; this.uri = "s3a://" + this.bucket + "/" + this.key; this.streamStatistics = ctx.instrumentation.newInputStreamStatistics(); this.serverSideEncryptionAlgorithm = s3Attributes.getServerSideEncryptionAlgorithm(); this.serverSideEncryptionKey = s3Attributes.getServerSideEncryptionKey(); setInputPolicy(inputPolicy); setReadahead(readahead); } /** * Set/update the input policy of the stream. * This updates the stream statistics. * @param inputPolicy new input policy. */ private void setInputPolicy(S3AInputPolicy inputPolicy) { this.inputPolicy = inputPolicy; streamStatistics.inputPolicySet(inputPolicy.ordinal()); } /** * Opens up the stream at specified target position and for given length. 
* * @param reason reason for reopen * @param targetPos target position * @param length length requested * @throws IOException on any failure to open the object */ @Retries.OnceTranslated private synchronized void reopen(String reason, long targetPos, long length, boolean forceAbort) throws IOException { if (wrappedStream != null) { closeStream("reopen(" + reason + ")", contentRangeFinish, forceAbort); } contentRangeFinish = calculateRequestLimit(inputPolicy, targetPos, length, contentLength, readahead); LOG.debug("reopen({}) for {} range[{}-{}], length={}," + " streamPosition={}, nextReadPosition={}, policy={}", uri, reason, targetPos, contentRangeFinish, length, pos, nextReadPos, inputPolicy); long opencount = streamStatistics.streamOpened(); GetObjectRequest request = new GetObjectRequest(bucket, key) .withRange(targetPos, contentRangeFinish - 1); if (S3AEncryptionMethods.SSE_C.equals(serverSideEncryptionAlgorithm) && StringUtils.isNotBlank(serverSideEncryptionKey)){ request.setSSECustomerKey(new SSECustomerKey(serverSideEncryptionKey)); } String text = String.format("Failed to %s %s at %d", (opencount == 0 ? "open" : "re-open"), uri, targetPos); S3Object object = context.getReadInvoker().once(text, uri, () -> client.getObject(request)); wrappedStream = object.getObjectContent(); contentRangeStart = targetPos; if (wrappedStream == null) { throw new IOException("Null IO stream from reopen of (" + reason + ") " + uri); } this.pos = targetPos; } @Override public synchronized long getPos() throws IOException { return (nextReadPos < 0) ? 0 : nextReadPos; } @Override public synchronized void seek(long targetPos) throws IOException { checkNotClosed(); // Do not allow negative seek if (targetPos < 0) { throw new EOFException(FSExceptionMessages.NEGATIVE_SEEK + " " + targetPos); } if (this.contentLength <= 0) { return; } // Lazy seek nextReadPos = targetPos; } /** * Seek without raising any exception. 
This is for use in * {@code finally} clauses * @param positiveTargetPos a target position which must be positive. */ private void seekQuietly(long positiveTargetPos) { try { seek(positiveTargetPos); } catch (IOException ioe) { LOG.debug("Ignoring IOE on seek of {} to {}", uri, positiveTargetPos, ioe); } } /** * Adjust the stream to a specific position. * * @param targetPos target seek position * @param length length of content that needs to be read from targetPos * @throws IOException */ @Retries.OnceTranslated private void seekInStream(long targetPos, long length) throws IOException { checkNotClosed(); if (wrappedStream == null) { return; } // compute how much more to skip long diff = targetPos - pos; if (diff > 0) { // forward seek -this is where data can be skipped int available = wrappedStream.available(); // always seek at least as far as what is available long forwardSeekRange = Math.max(readahead, available); // work out how much is actually left in the stream // then choose whichever comes first: the range or the EOF long remainingInCurrentRequest = remainingInCurrentRequest(); long forwardSeekLimit = Math.min(remainingInCurrentRequest, forwardSeekRange); boolean skipForward = remainingInCurrentRequest > 0 && diff <= forwardSeekLimit; if (skipForward) { // the forward seek range is within the limits LOG.debug("Forward seek on {}, of {} bytes", uri, diff); streamStatistics.seekForwards(diff); long skipped = wrappedStream.skip(diff); if (skipped > 0) { pos += skipped; // as these bytes have been read, they are included in the counter incrementBytesRead(diff); } if (pos == targetPos) { // all is well return; } else { // log a warning; continue to attempt to re-open LOG.warn("Failed to seek on {} to {}. 
Current position {}", uri, targetPos, pos); } } } else if (diff < 0) { // backwards seek streamStatistics.seekBackwards(diff); // if the stream is in "Normal" mode, switch to random IO at this // point, as it is indicative of columnar format IO if (inputPolicy.equals(S3AInputPolicy.Normal)) { LOG.info("Switching to Random IO seek policy"); setInputPolicy(S3AInputPolicy.Random); } } else { // targetPos == pos if (remainingInCurrentRequest() > 0) { // if there is data left in the stream, keep going return; } } // if the code reaches here, the stream needs to be reopened. // close the stream; if read the object will be opened at the new pos closeStream("seekInStream()", this.contentRangeFinish, false); pos = targetPos; } @Override public boolean seekToNewSource(long targetPos) throws IOException { return false; } /** * Perform lazy seek and adjust stream to correct position for reading. * * @param targetPos position from where data should be read * @param len length of the content that needs to be read */ @Retries.RetryTranslated private void lazySeek(long targetPos, long len) throws IOException { // With S3Guard, the metadatastore gave us metadata for the file in // open(), so we use a slightly different retry policy. Invoker invoker = context.getReadInvoker(); invoker.retry("lazySeek", pathStr, true, () -> { //For lazy seek seekInStream(targetPos, len); //re-open at specific location if needed if (wrappedStream == null) { reopen("read from new offset", targetPos, len, false); } }); } /** * Increment the bytes read counter if there is a stats instance * and the number of bytes read is more than zero. * @param bytesRead number of bytes read */ private void incrementBytesRead(long bytesRead) { streamStatistics.bytesRead(bytesRead); if (context.stats != null && bytesRead > 0) { context.stats.incrementBytesRead(bytesRead); } } @Override @Retries.RetryTranslated // Some retries only happen w/ S3Guard, as intended. 
public synchronized int read() throws IOException { checkNotClosed(); if (this.contentLength == 0 || (nextReadPos >= contentLength)) { return -1; } try { lazySeek(nextReadPos, 1); } catch (EOFException e) { return -1; } // With S3Guard, the metadatastore gave us metadata for the file in // open(), so we use a slightly different retry policy. // read() may not be likely to fail, but reopen() does a GET which // certainly could. Invoker invoker = context.getReadInvoker(); int byteRead = invoker.retry("read", pathStr, true, () -> { int b; try { b = wrappedStream.read(); } catch (EOFException e) { return -1; } catch (SocketTimeoutException e) { onReadFailure(e, 1, true); b = wrappedStream.read(); } catch (IOException e) { onReadFailure(e, 1, false); b = wrappedStream.read(); } return b; }); if (byteRead >= 0) { pos++; nextReadPos++; } if (byteRead >= 0) { incrementBytesRead(1); } return byteRead; } /** * Handle an IOE on a read by attempting to re-open the stream. * The filesystem's readException count will be incremented. * @param ioe exception caught. * @param length length of data being attempted to read * @throws IOException any exception thrown on the re-open attempt. */ @Retries.OnceTranslated private void onReadFailure(IOException ioe, int length, boolean forceAbort) throws IOException { LOG.info("Got exception while trying to read from stream {}" + " trying to recover: " + ioe, uri); streamStatistics.readException(); reopen("failure recovery", pos, length, forceAbort); } /** * {@inheritDoc} * * This updates the statistics on read operations started and whether * or not the read operation "completed", that is: returned the exact * number of bytes requested. * @throws IOException if there are other problems */ @Override @Retries.RetryTranslated // Some retries only happen w/ S3Guard, as intended. 
public synchronized int read(byte[] buf, int off, int len) throws IOException { checkNotClosed(); validatePositionedReadArgs(nextReadPos, buf, off, len); if (len == 0) { return 0; } if (this.contentLength == 0 || (nextReadPos >= contentLength)) { return -1; } try { lazySeek(nextReadPos, len); } catch (EOFException e) { // the end of the file has moved return -1; } // With S3Guard, the metadatastore gave us metadata for the file in // open(), so we use a slightly different retry policy. // read() may not be likely to fail, but reopen() does a GET which // certainly could. Invoker invoker = context.getReadInvoker(); streamStatistics.readOperationStarted(nextReadPos, len); int bytesRead = invoker.retry("read", pathStr, true, () -> { int bytes; try { bytes = wrappedStream.read(buf, off, len); } catch (EOFException e) { // the base implementation swallows EOFs. return -1; } catch (SocketTimeoutException e) { onReadFailure(e, len, true); bytes = wrappedStream.read(buf, off, len); } catch (IOException e) { onReadFailure(e, len, false); bytes= wrappedStream.read(buf, off, len); } return bytes; }); if (bytesRead > 0) { pos += bytesRead; nextReadPos += bytesRead; } incrementBytesRead(bytesRead); streamStatistics.readOperationCompleted(len, bytesRead); return bytesRead; } /** * Verify that the input stream is open. Non blocking; this gives * the last state of the volatile {@link #closed} field. * @throws IOException if the connection is closed. */ private void checkNotClosed() throws IOException { if (closed) { throw new IOException(uri + ": " + FSExceptionMessages.STREAM_IS_CLOSED); } } /** * Close the stream. * This triggers publishing of the stream statistics back to the filesystem * statistics. * This operation is synchronized, so that only one thread can attempt to * close the connection; all later/blocked calls are no-ops. 
* @throws IOException on any problem */ @Override public synchronized void close() throws IOException { if (!closed) { closed = true; try { // close or abort the stream closeStream("close() operation", this.contentRangeFinish, false); LOG.debug("Statistics of stream {}\n{}", key, streamStatistics); // this is actually a no-op super.close(); } finally { // merge the statistics back into the FS statistics. streamStatistics.close(); } } } /** * Close a stream: decide whether to abort or close, based on * the length of the stream and the current position. * If a close() is attempted and fails, the operation escalates to * an abort. * * This does not set the {@link #closed} flag. * @param reason reason for stream being closed; used in messages * @param length length of the stream. * @param forceAbort force an abort; used if explicitly requested. */ @Retries.OnceRaw private void closeStream(String reason, long length, boolean forceAbort) { if (wrappedStream != null) { // if the amount of data remaining in the current request is greater // than the readahead value: abort. long remaining = remainingInCurrentRequest(); LOG.debug("Closing stream {}: {}", reason, forceAbort ? "abort" : "soft"); boolean shouldAbort = forceAbort || remaining > readahead; if (!shouldAbort) { try { // clean close. This will read to the end of the stream, // so, while cleaner, can be pathological on a multi-GB object // explicitly drain the stream long drained = 0; while (wrappedStream.read() >= 0) { drained++; } LOG.debug("Drained stream of {} bytes", drained); // now close it wrappedStream.close(); // this MUST come after the close, so that if the IO operations fail // and an abort is triggered, the initial attempt's statistics // aren't collected. 
streamStatistics.streamClose(false, drained); } catch (IOException e) { // exception escalates to an abort LOG.debug("When closing {} stream for {}", uri, reason, e); shouldAbort = true; } } if (shouldAbort) { // Abort, rather than just close, the underlying stream. Otherwise, the // remaining object payload is read from S3 while closing the stream. LOG.debug("Aborting stream"); wrappedStream.abort(); streamStatistics.streamClose(true, remaining); } LOG.debug("Stream {} {}: {}; remaining={} streamPos={}," + " nextReadPos={}," + " request range {}-{} length={}", uri, (shouldAbort ? "aborted" : "closed"), reason, remaining, pos, nextReadPos, contentRangeStart, contentRangeFinish, length); wrappedStream = null; } } /** * Forcibly reset the stream, by aborting the connection. The next * {@code read()} operation will trigger the opening of a new HTTPS * connection. * * This is potentially very inefficient, and should only be invoked * in extreme circumstances. It logs at info for this reason. * @return true if the connection was actually reset. * @throws IOException if invoked on a closed stream. */ @InterfaceStability.Unstable public synchronized boolean resetConnection() throws IOException { checkNotClosed(); boolean connectionOpen = wrappedStream != null; if (connectionOpen) { LOG.info("Forced reset of connection to {}", uri); closeStream("reset()", contentRangeFinish, true); } return connectionOpen; } @Override public synchronized int available() throws IOException { checkNotClosed(); long remaining = remainingInFile(); if (remaining > Integer.MAX_VALUE) { return Integer.MAX_VALUE; } return (int)remaining; } /** * Bytes left in stream. * @return how many bytes are left to read */ @InterfaceAudience.Private @InterfaceStability.Unstable public synchronized long remainingInFile() { return this.contentLength - this.pos; } /** * Bytes left in the current request. * Only valid if there is an active request. * @return how many bytes are left to read in the current GET. 
*/ @InterfaceAudience.Private @InterfaceStability.Unstable public synchronized long remainingInCurrentRequest() { return this.contentRangeFinish - this.pos; } @InterfaceAudience.Private @InterfaceStability.Unstable public synchronized long getContentRangeFinish() { return contentRangeFinish; } @InterfaceAudience.Private @InterfaceStability.Unstable public synchronized long getContentRangeStart() { return contentRangeStart; } @Override public boolean markSupported() { return false; } /** * String value includes statistics as well as stream state. * <b>Important: there are no guarantees as to the stability * of this value.</b> * @return a string value for printing in logs/diagnostics */ @Override @InterfaceStability.Unstable public String toString() { String s = streamStatistics.toString(); synchronized (this) { final StringBuilder sb = new StringBuilder( "S3AInputStream{"); sb.append(uri); sb.append(" wrappedStream=") .append(wrappedStream != null ? "open" : "closed"); sb.append(" read policy=").append(inputPolicy); sb.append(" pos=").append(pos); sb.append(" nextReadPos=").append(nextReadPos); sb.append(" contentLength=").append(contentLength); sb.append(" contentRangeStart=").append(contentRangeStart); sb.append(" contentRangeFinish=").append(contentRangeFinish); sb.append(" remainingInCurrentRequest=") .append(remainingInCurrentRequest()); sb.append('\n').append(s); sb.append('}'); return sb.toString(); } } /** * Subclass {@code readFully()} operation which only seeks at the start * of the series of operations; seeking back at the end. * * This is significantly higher performance if multiple read attempts are * needed to fetch the data, as it does not break the HTTP connection. * * To maintain thread safety requirements, this operation is synchronized * for the duration of the sequence. * {@inheritDoc} * */ @Override @Retries.RetryTranslated // Some retries only happen w/ S3Guard, as intended. 
public void readFully(long position, byte[] buffer, int offset, int length) throws IOException { checkNotClosed(); validatePositionedReadArgs(position, buffer, offset, length); streamStatistics.readFullyOperationStarted(position, length); if (length == 0) { return; } int nread = 0; synchronized (this) { long oldPos = getPos(); try { seek(position); while (nread < length) { int nbytes = read(buffer, offset + nread, length - nread); if (nbytes < 0) { throw new EOFException(FSExceptionMessages.EOF_IN_READ_FULLY); } nread += nbytes; } } finally { seekQuietly(oldPos); } } } /** * Access the input stream statistics. * This is for internal testing and may be removed without warning. * @return the statistics for this input stream */ @InterfaceAudience.Private @InterfaceStability.Unstable public S3AInstrumentation.InputStreamStatistics getS3AStreamStatistics() { return streamStatistics; } @Override public synchronized void setReadahead(Long readahead) { if (readahead == null) { this.readahead = Constants.DEFAULT_READAHEAD_RANGE; } else { Preconditions.checkArgument(readahead >= 0, "Negative readahead value"); this.readahead = readahead; } } /** * Get the current readahead value. * @return a non-negative readahead value */ public synchronized long getReadahead() { return readahead; } /** * Calculate the limit for a get request, based on input policy * and state of object. * @param inputPolicy input policy * @param targetPos position of the read * @param length length of bytes requested; if less than zero "unknown" * @param contentLength total length of file * @param readahead current readahead value * @return the absolute value of the limit of the request. */ static long calculateRequestLimit( S3AInputPolicy inputPolicy, long targetPos, long length, long contentLength, long readahead) { long rangeLimit; switch (inputPolicy) { case Random: // positioned. // read either this block, or the here + readahead value. rangeLimit = (length < 0) ? 
contentLength : targetPos + Math.max(readahead, length); break; case Sequential: // sequential: plan for reading the entire object. rangeLimit = contentLength; break; case Normal: // normal is considered sequential until a backwards seek switches // it to 'Random' default: rangeLimit = contentLength; } // cannot read past the end of the object rangeLimit = Math.min(contentLength, rangeLimit); return rangeLimit; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.commons.math3.ode.nonstiff;

import org.apache.commons.math3.exception.DimensionMismatchException;
import org.apache.commons.math3.exception.MaxCountExceededException;
import org.apache.commons.math3.exception.NoBracketingException;
import org.apache.commons.math3.exception.NumberIsTooSmallException;
import org.apache.commons.math3.ode.ExpandableStatefulODE;
import org.apache.commons.math3.util.FastMath;

/**
 * This class implements the common part of all embedded Runge-Kutta
 * integrators for Ordinary Differential Equations.
 *
 * <p>These methods are embedded explicit Runge-Kutta methods with two
 * sets of coefficients allowing to estimate the error, their Butcher
 * arrays are as follows :
 * <pre>
 *    0  |
 *   c2  | a21
 *   c3  | a31  a32
 *   ... |        ...
 *   cs  | as1  as2   ...  ass-1
 *       |--------------------------
 *       |  b1   b2   ...   bs-1  bs
 *       |  b'1  b'2  ...   b's-1 b's
 * </pre>
 * </p>
 *
 * <p>In fact, we rather use the array defined by ej = bj - b'j to
 * compute directly the error rather than computing two estimates and
 * then comparing them.</p>
 *
 * <p>Some methods are qualified as <i>fsal</i> (first same as last)
 * methods. This means the last evaluation of the derivatives in one
 * step is the same as the first in the next step. Then, this
 * evaluation can be reused from one step to the next one and the cost
 * of such a method is really s-1 evaluations despite the method still
 * has s stages. This behaviour is true only for successful steps, if
 * the step is rejected after the error estimation phase, no
 * evaluation is saved. For an <i>fsal</i> method, we have cs = 1 and
 * asi = bi for all i.</p>
 *
 * @since 1.2
 */

public abstract class EmbeddedRungeKuttaIntegrator
  extends AdaptiveStepsizeIntegrator {

    /** Indicator for <i>fsal</i> methods. */
    private final boolean fsal;

    /** Time steps from Butcher array (without the first zero). */
    private final double[] c;

    /** Internal weights from Butcher array (without the first empty row). */
    private final double[][] a;

    /** External weights for the high order method from Butcher array. */
    private final double[] b;

    /** Prototype of the step interpolator. */
    private final RungeKuttaStepInterpolator prototype;

    /** Stepsize control exponent. */
    private final double exp;

    /** Safety factor for stepsize control. */
    private double safety;

    /** Minimal reduction factor for stepsize control. */
    private double minReduction;

    /** Maximal growth factor for stepsize control. */
    private double maxGrowth;

    /** Build a Runge-Kutta integrator with the given Butcher array.
     * @param name name of the method
     * @param fsal indicate that the method is an <i>fsal</i>
     * @param c time steps from Butcher array (without the first zero)
     * @param a internal weights from Butcher array (without the first empty row)
     * @param b propagation weights for the high order method from Butcher array
     * @param prototype prototype of the step interpolator to use
     * @param minStep minimal step (sign is irrelevant, regardless of
     * integration direction, forward or backward), the last step can
     * be smaller than this
     * @param maxStep maximal step (sign is irrelevant, regardless of
     * integration direction, forward or backward), the last step can
     * be smaller than this
     * @param scalAbsoluteTolerance allowed absolute error
     * @param scalRelativeTolerance allowed relative error
     */
    protected EmbeddedRungeKuttaIntegrator(final String name, final boolean fsal,
                                           final double[] c, final double[][] a, final double[] b,
                                           final RungeKuttaStepInterpolator prototype,
                                           final double minStep, final double maxStep,
                                           final double scalAbsoluteTolerance,
                                           final double scalRelativeTolerance) {

      super(name, minStep, maxStep, scalAbsoluteTolerance, scalRelativeTolerance);

      this.fsal      = fsal;
      this.c         = c;
      this.a         = a;
      this.b         = b;
      this.prototype = prototype;

      // stepsize control exponent: error^exp scales the next step,
      // derived from the order of the method
      exp = -1.0 / getOrder();

      // set the default values of the algorithm control parameters
      setSafety(0.9);
      setMinReduction(0.2);
      setMaxGrowth(10.0);

    }

    /** Build a Runge-Kutta integrator with the given Butcher array.
     * @param name name of the method
     * @param fsal indicate that the method is an <i>fsal</i>
     * @param c time steps from Butcher array (without the first zero)
     * @param a internal weights from Butcher array (without the first empty row)
     * @param b propagation weights for the high order method from Butcher array
     * @param prototype prototype of the step interpolator to use
     * @param minStep minimal step (must be positive even for backward
     * integration), the last step can be smaller than this
     * @param maxStep maximal step (must be positive even for backward
     * integration)
     * @param vecAbsoluteTolerance allowed absolute error
     * @param vecRelativeTolerance allowed relative error
     */
    protected EmbeddedRungeKuttaIntegrator(final String name, final boolean fsal,
                                           final double[] c, final double[][] a, final double[] b,
                                           final RungeKuttaStepInterpolator prototype,
                                           final double   minStep, final double maxStep,
                                           final double[] vecAbsoluteTolerance,
                                           final double[] vecRelativeTolerance) {

      super(name, minStep, maxStep, vecAbsoluteTolerance, vecRelativeTolerance);

      this.fsal      = fsal;
      this.c         = c;
      this.a         = a;
      this.b         = b;
      this.prototype = prototype;

      // stepsize control exponent: error^exp scales the next step,
      // derived from the order of the method
      exp = -1.0 / getOrder();

      // set the default values of the algorithm control parameters
      setSafety(0.9);
      setMinReduction(0.2);
      setMaxGrowth(10.0);

    }

    /** Get the order of the method.
     * @return order of the method
     */
    public abstract int getOrder();

    /** Get the safety factor for stepsize control.
     * @return safety factor
     */
    public double getSafety() {
      return safety;
    }

    /** Set the safety factor for stepsize control.
     * @param safety safety factor
     */
    public void setSafety(final double safety) {
      this.safety = safety;
    }

    /** {@inheritDoc} */
    @Override
    public void integrate(final ExpandableStatefulODE equations, final double t)
        throws NumberIsTooSmallException, DimensionMismatchException,
               MaxCountExceededException, NoBracketingException {

      sanityChecks(equations, t);
      setEquations(equations);
      final boolean forward = t > equations.getTime();

      // create some internal working arrays
      final double[] y0  = equations.getCompleteState();
      final double[] y = y0.clone();
      // stages = number of derivative evaluations per step;
      // c omits the leading zero node, hence the +1
      final int stages = c.length + 1;
      final double[][] yDotK = new double[stages][y.length];
      final double[] yTmp    = y0.clone();
      final double[] yDotTmp = new double[y.length];

      // set up an interpolator sharing the integrator arrays
      final RungeKuttaStepInterpolator interpolator = (RungeKuttaStepInterpolator) prototype.copy();
      interpolator.reinitialize(this, yTmp, yDotK, forward,
                                equations.getPrimaryMapper(), equations.getSecondaryMappers());
      interpolator.storeTime(equations.getTime());

      // set up integration control objects
      stepStart         = equations.getTime();
      double  hNew      = 0;
      boolean firstTime = true;
      initIntegration(equations.getTime(), y0, t);

      // main integration loop
      isLastStep = false;
      do {

        interpolator.shift();

        // iterate over step size, ensuring local normalized error is smaller than 1
        double error = 10;
        while (error >= 1.0) {

          if (firstTime || !fsal) {
            // first stage (for fsal methods after a successful step,
            // yDotK[0] is reused from the previous step instead)
            computeDerivatives(stepStart, y, yDotK[0]);
          }

          if (firstTime) {
            // build the error scale vector from the configured tolerances
            // (either scalar or per-component) to initialize the step size
            final double[] scale = new double[mainSetDimension];
            if (vecAbsoluteTolerance == null) {
                for (int i = 0; i < scale.length; ++i) {
                  scale[i] = scalAbsoluteTolerance + scalRelativeTolerance * FastMath.abs(y[i]);
                }
            } else {
                for (int i = 0; i < scale.length; ++i) {
                  scale[i] = vecAbsoluteTolerance[i] + vecRelativeTolerance[i] * FastMath.abs(y[i]);
                }
            }
            hNew = initializeStep(forward, getOrder(), scale,
                                  stepStart, y, yDotK[0], yTmp, yDotK[1]);
            firstTime = false;
          }

          stepSize = hNew;
          // clip the step so it does not overshoot the integration end time
          if (forward) {
              if (stepStart + stepSize >= t) {
                  stepSize = t - stepStart;
              }
          } else {
              if (stepStart + stepSize <= t) {
                  stepSize = t - stepStart;
              }
          }

          // next stages
          for (int k = 1; k < stages; ++k) {

            for (int j = 0; j < y0.length; ++j) {
              double sum = a[k-1][0] * yDotK[0][j];
              for (int l = 1; l < k; ++l) {
                sum += a[k-1][l] * yDotK[l][j];
              }
              yTmp[j] = y[j] + stepSize * sum;
            }

            computeDerivatives(stepStart + c[k-1] * stepSize, yTmp, yDotK[k]);

          }

          // estimate the state at the end of the step
          for (int j = 0; j < y0.length; ++j) {
            double sum    = b[0] * yDotK[0][j];
            for (int l = 1; l < stages; ++l) {
              sum    += b[l] * yDotK[l][j];
            }
            yTmp[j] = y[j] + stepSize * sum;
          }

          // estimate the error at the end of the step
          error = estimateError(yDotK, y, yTmp, stepSize);
          if (error >= 1.0) {
            // reject the step and attempt to reduce error by stepsize control
            final double factor =
                FastMath.min(maxGrowth,
                             FastMath.max(minReduction, safety * FastMath.pow(error, exp)));
            hNew = filterStep(stepSize * factor, forward, false);
          }

        }

        // local error is small enough: accept the step, trigger events and step handlers
        interpolator.storeTime(stepStart + stepSize);
        System.arraycopy(yTmp, 0, y, 0, y0.length);
        System.arraycopy(yDotK[stages - 1], 0, yDotTmp, 0, y0.length);
        stepStart = acceptStep(interpolator, y, yDotTmp, t);
        System.arraycopy(y, 0, yTmp, 0, y.length);

        if (!isLastStep) {

            // prepare next step
            interpolator.storeTime(stepStart);

            if (fsal) {
                // save the last evaluation for the next step
                System.arraycopy(yDotTmp, 0, yDotK[0], 0, y0.length);
            }

            // stepsize control for next step
            final double factor =
                FastMath.min(maxGrowth, FastMath.max(minReduction, safety * FastMath.pow(error, exp)));
            final double  scaledH    = stepSize * factor;
            final double  nextT      = stepStart + scaledH;
            final boolean nextIsLast = forward ? (nextT >= t) : (nextT <= t);
            hNew = filterStep(scaledH, forward, nextIsLast);

            // re-check against the end time after filtering, and snap the
            // final step exactly onto t when it would cross it
            final double  filteredNextT      = stepStart + hNew;
            final boolean filteredNextIsLast = forward ? (filteredNextT >= t) : (filteredNextT <= t);
            if (filteredNextIsLast) {
                hNew = t - stepStart;
            }

        }

      } while (!isLastStep);

      // dispatch results
      equations.setTime(stepStart);
      equations.setCompleteState(y);

      resetInternalState();

    }

    /** Get the minimal reduction factor for stepsize control.
     * @return minimal reduction factor
     */
    public double getMinReduction() {
      return minReduction;
    }

    /** Set the minimal reduction factor for stepsize control.
     * @param minReduction minimal reduction factor
     */
    public void setMinReduction(final double minReduction) {
      this.minReduction = minReduction;
    }

    /** Get the maximal growth factor for stepsize control.
     * @return maximal growth factor
     */
    public double getMaxGrowth() {
      return maxGrowth;
    }

    /** Set the maximal growth factor for stepsize control.
     * @param maxGrowth maximal growth factor
     */
    public void setMaxGrowth(final double maxGrowth) {
      this.maxGrowth = maxGrowth;
    }

    /** Compute the error ratio.
     * @param yDotK derivatives computed during the first stages
     * @param y0 estimate of the step at the start of the step
     * @param y1 estimate of the step at the end of the step
     * @param h current step
     * @return error ratio, greater than 1 if step should be rejected
     */
    protected abstract double estimateError(double[][] yDotK,
                                            double[] y0, double[] y1,
                                            double h);

}
/**
 * Copyright 2005-2014 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl2.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.rice.krad.datadictionary.validation.processor;

import java.math.BigDecimal;
import java.util.Date;

import org.kuali.rice.core.api.data.DataType;
import org.kuali.rice.core.api.util.RiceKeyConstants;
import org.kuali.rice.krad.datadictionary.exception.AttributeValidationException;
import org.kuali.rice.krad.datadictionary.validation.AttributeValueReader;
import org.kuali.rice.krad.datadictionary.validation.ValidationUtils;
import org.kuali.rice.krad.datadictionary.validation.ValidationUtils.Result;
import org.kuali.rice.krad.datadictionary.validation.capability.RangeConstrainable;
import org.kuali.rice.krad.datadictionary.validation.constraint.Constraint;
import org.kuali.rice.krad.datadictionary.validation.constraint.RangeConstraint;
import org.kuali.rice.krad.datadictionary.validation.result.ConstraintValidationResult;
import org.kuali.rice.krad.datadictionary.validation.result.DictionaryValidationResult;
import org.kuali.rice.krad.datadictionary.validation.result.ProcessorResult;

/**
 * RangeConstraintProcessor enforces range constraints - that is, constraints that keep a number or
 * a date within a specific range.
 *
 * <p>An attribute that is {@link RangeConstrainable} will expose a minimum and maximum value, and
 * these will be validated against the passed value in the code below.</p>
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 */
public class RangeConstraintProcessor extends MandatoryElementConstraintProcessor<RangeConstraint> {

    private static final String CONSTRAINT_NAME = "range constraint";
    private static final String MIN_EXCLUSIVE_KEY = "validation.minExclusive";
    private static final String MAX_INCLUSIVE_KEY = "validation.maxInclusive";
    private static final String RANGE_KEY = "validation.range";

    /**
     * @see org.kuali.rice.krad.datadictionary.validation.processor.ConstraintProcessor#process(org.kuali.rice.krad.datadictionary.validation.result.DictionaryValidationResult,
     *      Object, org.kuali.rice.krad.datadictionary.validation.constraint.Constraint,
     *      org.kuali.rice.krad.datadictionary.validation.AttributeValueReader)
     */
    @Override
    public ProcessorResult process(DictionaryValidationResult result, Object value, RangeConstraint constraint,
            AttributeValueReader attributeValueReader) throws AttributeValidationException {
        // Any given range-constrained definition expresses only a single min/max pair, so there is
        // exactly one constraint to impose.
        ConstraintValidationResult singleResult =
                processSingleRangeConstraint(result, value, constraint, attributeValueReader);
        return new ProcessorResult(singleResult);
    }

    @Override
    public String getName() {
        return CONSTRAINT_NAME;
    }

    /**
     * @see org.kuali.rice.krad.datadictionary.validation.processor.ConstraintProcessor#getConstraintType()
     */
    @Override
    public Class<? extends Constraint> getConstraintType() {
        return RangeConstraint.class;
    }

    /**
     * Validates the value provided using {@code RangeConstraint}.
     *
     * @param result - a holder for any already run validation results
     * @param value - the value to validate
     * @param constraint - the range constraint to use
     * @param attributeValueReader - provides access to the attribute being validated
     * @return the passed in result, updated with the results of the processing
     * @throws AttributeValidationException if validation fails
     */
    protected ConstraintValidationResult processSingleRangeConstraint(DictionaryValidationResult result, Object value,
            RangeConstraint constraint, AttributeValueReader attributeValueReader) throws AttributeValidationException {
        // Range constraints cannot be processed on null/empty values, and a constraint that
        // declares neither bound imposes nothing - skip in both cases.
        boolean noBoundsDeclared = constraint.getExclusiveMin() == null && constraint.getInclusiveMax() == null;
        if (ValidationUtils.isNullOrEmpty(value) || noBoundsDeclared) {
            return result.addSkipped(attributeValueReader, CONSTRAINT_NAME);
        }

        // Sometimes the raw value is a string that actually represents a typed value (e.g. a
        // date), so convert it to the declared data type when one is available.
        DataType dataType = constraint.getDataType();
        Object typedValue = value;
        if (dataType != null) {
            typedValue = ValidationUtils.convertToDataType(value, dataType, dateTimeService);
        } else if (value instanceof String) {
            // No declared type: assume the string holds a number of type double.
            try {
                typedValue = Double.valueOf((String) value);
            } catch (NumberFormatException ignored) {
                // Not numeric - typedValue keeps the original string and falls through to skip.
            }
        }

        // TODO: decide if there is any reason why the following would be insufficient - i.e. if
        // something numeric could still be cast to String at this point
        if (typedValue instanceof Date) {
            return validateRange(result, (Date) typedValue, constraint, attributeValueReader);
        }
        if (typedValue instanceof Number) {
            return validateRange(result, (Number) typedValue, constraint, attributeValueReader);
        }
        return result.addSkipped(attributeValueReader, CONSTRAINT_NAME);
    }

    /**
     * Validates the date value using the range constraint provided.
     *
     * @param result - a holder for any already run validation results
     * @param value - the value to validate
     * @param constraint - the range constraint to use
     * @param attributeValueReader - provides access to the attribute being validated
     * @return the passed in result, updated with the results of the processing
     * @throws IllegalArgumentException
     */
    protected ConstraintValidationResult validateRange(DictionaryValidationResult result, Date value,
            RangeConstraint constraint, AttributeValueReader attributeValueReader) throws IllegalArgumentException {
        Date date = null;
        if (value != null) {
            date = ValidationUtils.getDate(value, dateTimeService);
        }

        String inclusiveMaxText = constraint.getInclusiveMax();
        String exclusiveMinText = constraint.getExclusiveMin();

        Date inclusiveMax =
                inclusiveMaxText == null ? null : ValidationUtils.getDate(inclusiveMaxText, dateTimeService);
        Date exclusiveMin =
                exclusiveMinText == null ? null : ValidationUtils.getDate(exclusiveMinText, dateTimeService);

        return isInRange(result, date, inclusiveMax, inclusiveMaxText, exclusiveMin, exclusiveMinText,
                attributeValueReader);
    }

    /**
     * Validates the number value using the range constraint provided.
     *
     * @param result - a holder for any already run validation results
     * @param value - the value to validate
     * @param constraint - the range constraint to use
     * @param attributeValueReader - provides access to the attribute being validated
     * @return the passed in result, updated with the results of the processing
     * @throws IllegalArgumentException
     */
    protected ConstraintValidationResult validateRange(DictionaryValidationResult result, Number value,
            RangeConstraint constraint, AttributeValueReader attributeValueReader) throws IllegalArgumentException {
        // TODO: JLR - need a code review of the conversions below to make sure this is the best way
        // to ensure accuracy across all numerics
        // Note: new BigDecimal(value.toString()) throws NumberFormatException for 'NaN' or
        // infinity... probably shouldn't be a NFE but something more intelligible at a higher level.
        BigDecimal number = value == null ? null : new BigDecimal(value.toString());

        String inclusiveMaxText = constraint.getInclusiveMax();
        String exclusiveMinText = constraint.getExclusiveMin();

        BigDecimal inclusiveMax = inclusiveMaxText == null ? null : new BigDecimal(inclusiveMaxText);
        BigDecimal exclusiveMin = exclusiveMinText == null ? null : new BigDecimal(exclusiveMinText);

        return isInRange(result, number, inclusiveMax, inclusiveMaxText, exclusiveMin, exclusiveMinText,
                attributeValueReader);
    }

    /**
     * Checks whether the value provided is in the range specified by inclusiveMax and exclusiveMin.
     *
     * @param result a holder for any already run validation results
     * @param value the value to check
     * @param inclusiveMax the maximum value of the attribute
     * @param inclusiveMaxText the string representation of inclusiveMax
     * @param exclusiveMin the minimum value of the attribute
     * @param exclusiveMinText the string representation of exclusiveMin
     * @param attributeValueReader provides access to the attribute being validated
     * @return the passed in result, updated with the results of the range check
     */
    private <T> ConstraintValidationResult isInRange(DictionaryValidationResult result, T value,
            Comparable<T> inclusiveMax, String inclusiveMaxText, Comparable<T> exclusiveMin, String exclusiveMinText,
            AttributeValueReader attributeValueReader) {
        // The maximum is inclusive: the value may be less than or equal to it. The minimum is
        // exclusive: the value must be strictly greater than it.
        Result maxCheck = ValidationUtils.isLessThanOrEqual(value, inclusiveMax);
        Result minCheck = ValidationUtils.isGreaterThan(value, exclusiveMin);

        boolean maxViolated = maxCheck == Result.INVALID;
        boolean minViolated = minCheck == Result.INVALID;

        // An undefined end of the range is not an error - only an actual INVALID comparison is.
        if (!maxViolated && !minViolated) {
            // If both ends are undefined there was no real constraint to check.
            if (maxCheck == Result.UNDEFINED && minCheck == Result.UNDEFINED) {
                return result.addNoConstraint(attributeValueReader, CONSTRAINT_NAME);
            }
            return result.addSuccess(attributeValueReader, CONSTRAINT_NAME);
        }

        // Both comparisons happened and at least one failed: show the expected range on both sides.
        if (maxCheck != Result.UNDEFINED && minCheck != Result.UNDEFINED) {
            return result.addError(RANGE_KEY, attributeValueReader, CONSTRAINT_NAME,
                    RiceKeyConstants.ERROR_OUT_OF_RANGE, exclusiveMinText, inclusiveMaxText);
        }
        // Only the max comparison failed: tell the end user what the max can be.
        if (maxViolated) {
            return result.addError(MAX_INCLUSIVE_KEY, attributeValueReader, CONSTRAINT_NAME,
                    RiceKeyConstants.ERROR_INCLUSIVE_MAX, inclusiveMaxText);
        }
        // Otherwise only the min comparison failed: tell them what the min can be.
        return result.addError(MIN_EXCLUSIVE_KEY, attributeValueReader, CONSTRAINT_NAME,
                RiceKeyConstants.ERROR_EXCLUSIVE_MIN, exclusiveMinText);
    }
}
/*
 The MIT License (MIT)

 Copyright (c) 2013 Jan Schulte

 Permission is hereby granted, free of charge, to any person obtaining a copy of
 this software and associated documentation files (the "Software"), to deal in
 the Software without restriction, including without limitation the rights to
 use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
 the Software, and to permit persons to whom the Software is furnished to do so,
 subject to the following conditions:

 The above copyright notice and this permission notice shall be included in all
 copies or substantial portions of the Software.

 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
 FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
 COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
 IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
package de.hsbremen.powerwall.kinect.sandbox;

import com.jme3.app.SimpleApplication;
import com.jme3.asset.AssetManager;
import com.jme3.bullet.BulletAppState;
import com.jme3.input.InputManager;
import com.jme3.input.KeyInput;
import com.jme3.input.controls.ActionListener;
import com.jme3.input.controls.KeyTrigger;
import com.jme3.light.AmbientLight;
import com.jme3.light.DirectionalLight;
import com.jme3.material.Material;
import com.jme3.material.RenderState.BlendMode;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.post.FilterPostProcessor;
import com.jme3.post.filters.BloomFilter;
import com.jme3.renderer.ViewPort;
import com.jme3.renderer.queue.RenderQueue.Bucket;
import com.jme3.scene.Node;
import com.jme3.shadow.DirectionalLightShadowFilter;
import com.jme3.shadow.DirectionalLightShadowRenderer;
import com.jme3.shadow.EdgeFilteringMode;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture.WrapMode;
import com.jme3.util.SkyFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

/**
 * Scene class which creates the actual Sandbox with all its objects in it.
 *
 * <p>It owns the physics state, the ground plane, the falling boxes, the sphere
 * "controller" object driven by the Kinect, and the per-eye render processors
 * (shadows and bloom) for either a single or a stereo camera.</p>
 *
 * @author Jan Schulte
 */
public class Scene implements ActionListener {

    // constants
    public static final float GROUND_SIZE = 40.0f;
    public static final float BOX_SIZE_MIN = 0.4f;
    public static final float BOX_SIZE_MAX = 1.0f;
    public static final float BOX_MASS = 4.0f;
    public static final int BOX_COUNT = 20;
    public static final float BOX_SPAWN_HEIGHT = 8.0f;

    public static final float CONTROLLER_SIZE = 1.4f;
    public static final float CONTROLLER_MASS = 0.0f;
    public static final float CONTROLLER_MIN_HEIGHT = 1.6f;
    public static final float CONTROLLER_MAX_HEIGHT = 100.0f;

    public static final boolean STEREO_CAMERA = true;
    public static final boolean MOUSE_INPUT = false;

    // global objects
    private SimpleApplication mApplication = null;
    private BulletAppState mBulletAppState = null;
    private KinectController mKinectController = null;
    private Camera mCamera = null;

    // Scene objects
    private DirectionalLight mSunLight = null;
    // Only the movable boxes are tracked here (ground and controller are excluded in addToScene).
    private List<SceneObject> mObjects = new ArrayList<SceneObject>();

    // helper objects
    private Random mRandom = new Random();

    /**
     * Constructor. Attaches the jBullet physics state to the application and builds the scene.
     *
     * @param _application the running jME3 application this scene belongs to
     */
    public Scene(SimpleApplication _application) {
        mApplication = _application;

        // create jBullet Physics
        mBulletAppState = new BulletAppState();
        mBulletAppState.setThreadingType(BulletAppState.ThreadingType.PARALLEL);
        mApplication.getStateManager().attach(mBulletAppState);

        // create Scene
        create();
    }

    /** Builds all scene content in dependency order (controller must exist before setupScene). */
    private void create() {
        createGround();
        createBoxes();
        createController();
        setupScene();
    }

    /**
     * Respawns every tracked box at a random position above the ground and reactivates its
     * physics body so it falls again.
     */
    public void reset() {
        int minGroundSize = -(int) (GROUND_SIZE / 2.0f);
        int maxGroundSize = (-1) * minGroundSize;

        for (SceneObject object : mObjects) {
            // spawn box at random location
            int x = randInt(minGroundSize, maxGroundSize);
            int z = randInt(minGroundSize, maxGroundSize);
            object.setPosition(new Vector3f(x, BOX_SPAWN_HEIGHT, z));
            object.getController().activate();
        }
    }

    /**
     * Per-frame update hook; forwards the tick to camera and Kinect controller when present.
     *
     * @param _delta time since the last frame, in seconds
     */
    public void update(float _delta) {
        if (mCamera != null) {
            mCamera.update(_delta);
        }
        if (mKinectController != null) {
            mKinectController.update(_delta);
        }
    }

    /**
     * Sets up input, sky, lights and the per-viewport shadow/bloom processors. In stereo mode each
     * eye gets its own renderer/filter instances because a processor cannot be shared between
     * viewports.
     */
    private void setupScene() {
        // get global objects
        AssetManager assetManager = mApplication.getAssetManager();
        Node rootNode = mApplication.getRootNode();
        InputManager inputManager = mApplication.getInputManager();

        // init input
        inputManager.addMapping("reset", new KeyTrigger(KeyInput.KEY_R));
        inputManager.addListener(this, "reset");

        // add skysphere
        Texture west = assetManager.loadTexture("Textures/skybox/lefttron.jpg");
        Texture east = assetManager.loadTexture("Textures/skybox/righttron.jpg");
        Texture north = assetManager.loadTexture("Textures/skybox/fronttron.jpg");
        Texture south = assetManager.loadTexture("Textures/skybox/backtron.jpg");
        Texture up = assetManager.loadTexture("Textures/skybox/uptron.jpg");
        Texture down = assetManager.loadTexture("Textures/skybox/downtron.jpg");
        rootNode.attachChild(SkyFactory.createSky(assetManager, west, east, north, south, up, down));

        // create ambient light
        AmbientLight al = new AmbientLight();
        al.setColor(ColorRGBA.White.mult(0.3f));
        rootNode.addLight(al);

        // Directional Light
        mSunLight = new DirectionalLight();
        mSunLight.setColor(ColorRGBA.White);
        mSunLight.setDirection(new Vector3f(0.0f, -1.0f, -1.0f).normalizeLocal());
        mApplication.getRootNode().addLight(mSunLight);

        // setup shadow renderer(s) - one per viewport
        final int SHADOWMAP_SIZE = 1024;
        DirectionalLightShadowRenderer dlsr1 = new DirectionalLightShadowRenderer(assetManager, SHADOWMAP_SIZE, 3);
        dlsr1.setLight(mSunLight);
        dlsr1.setLambda(0.55f);
        dlsr1.setShadowIntensity(0.6f);
        dlsr1.setEdgeFilteringMode(EdgeFilteringMode.Bilinear);

        if (mCamera.isSingleView()) {
            mCamera.addProcessor(dlsr1);
        } else {
            mCamera.addProcessorRight(dlsr1);

            // second renderer for the left eye
            DirectionalLightShadowRenderer dlsr2 =
                    new DirectionalLightShadowRenderer(assetManager, SHADOWMAP_SIZE, 3);
            dlsr2.setLight(mSunLight);
            dlsr2.setLambda(0.55f);
            dlsr2.setShadowIntensity(0.6f);
            dlsr2.setEdgeFilteringMode(EdgeFilteringMode.Bilinear);
            mCamera.addProcessorLeft(dlsr2);
        }

        // post-processing: shadow filter + glow, again one chain per viewport
        FilterPostProcessor fpp1 = new FilterPostProcessor(assetManager);

        DirectionalLightShadowFilter dlsf1 = new DirectionalLightShadowFilter(assetManager, SHADOWMAP_SIZE, 3);
        dlsf1.setLight(mSunLight);
        dlsf1.setEnabled(true);
        dlsf1.setLambda(0.55f);
        dlsf1.setShadowIntensity(0.6f);
        dlsf1.setEdgeFilteringMode(EdgeFilteringMode.Bilinear);
        fpp1.addFilter(dlsf1);

        // glow filter
        BloomFilter bloom1 = new BloomFilter(BloomFilter.GlowMode.Objects);
        bloom1.setBloomIntensity(4.0f);
        bloom1.setBlurScale(1.0f);
        fpp1.addFilter(bloom1);

        if (mCamera.isSingleView()) {
            mCamera.addProcessor(fpp1);
        } else {
            mCamera.addProcessorRight(fpp1);

            FilterPostProcessor fpp2 = new FilterPostProcessor(assetManager);

            DirectionalLightShadowFilter dlsf2 = new DirectionalLightShadowFilter(assetManager, SHADOWMAP_SIZE, 3);
            dlsf2.setLight(mSunLight);
            dlsf2.setEnabled(true);
            dlsf2.setLambda(0.55f);
            dlsf2.setShadowIntensity(0.6f);
            dlsf2.setEdgeFilteringMode(EdgeFilteringMode.Bilinear);
            // BUGFIX: was fpp1.addFilter(dlsf2) - the left eye's shadow filter must go into the
            // left eye's processor chain, otherwise the right eye got two shadow filters and the
            // left eye none.
            fpp2.addFilter(dlsf2);

            // glow filter
            BloomFilter bloom2 = new BloomFilter(BloomFilter.GlowMode.Objects);
            bloom2.setBloomIntensity(4.0f);
            bloom2.setBlurScale(1.0f);
            fpp2.addFilter(bloom2);

            mCamera.addProcessorLeft(fpp2);
        }
    }

    /** Creates the static (mass 0) textured ground plane and adds it to scene and physics space. */
    private void createGround() {
        // get global objects
        AssetManager assetManager = mApplication.getAssetManager();

        // create ground material
        Material material = new Material(assetManager, "Common/MatDefs/Light/Lighting.j3md");
        Texture diffuseTexture = assetManager.loadTexture("Textures/skybox/downtron.jpg");
        diffuseTexture.setWrap(WrapMode.Repeat);
        material.setTexture("DiffuseMap", diffuseTexture);
        material.setBoolean("UseMaterialColors", true);
        material.setColor("Diffuse", ColorRGBA.White);
        material.setColor("Specular", ColorRGBA.White);
        material.setFloat("Shininess", 64f); // [0,128]

        BoxObject ground = new BoxObject("Ground", new Vector3f(0.0f, 0.0f, 0.0f),
                new Vector3f(GROUND_SIZE, 0.1f, GROUND_SIZE), material, 0.0f, new Vector2f(4.0f, 4.0f));
        addToScene(ground);
    }

    /** Spawns BOX_COUNT randomly sized/placed/textured crates above the ground. */
    private void createBoxes() {
        // get global objects
        AssetManager assetManager = mApplication.getAssetManager();

        Texture diffuseTexture_01 = assetManager.loadTexture("Textures/crate_02.png");
        Texture normalTexture_01 = assetManager.loadTexture("Textures/crate_02_n.png");
        Texture diffuseTexture_02 = assetManager.loadTexture("Textures/crate_03.png");
        Texture normalTexture_02 = assetManager.loadTexture("Textures/crate_03_n.png");
        diffuseTexture_01.setWrap(WrapMode.Repeat);
        normalTexture_01.setWrap(WrapMode.Repeat);
        diffuseTexture_02.setWrap(WrapMode.Repeat);
        normalTexture_02.setWrap(WrapMode.Repeat);

        int minGroundSize = -(int) (GROUND_SIZE / 2.0f);
        int maxGroundSize = (-1) * minGroundSize;

        for (int i = 0; i < BOX_COUNT; ++i) {
            // pick one of the two crate texture sets at random
            int textureId = randInt(0, 1);

            Material material = new Material(assetManager, "Common/MatDefs/Light/Lighting.j3md");
            if (textureId == 0) {
                material.setTexture("DiffuseMap", diffuseTexture_01);
                material.setTexture("NormalMap", normalTexture_01);
            } else {
                material.setTexture("DiffuseMap", diffuseTexture_02);
                material.setTexture("NormalMap", normalTexture_02);
            }
            material.setBoolean("UseMaterialColors", true);
            material.setColor("Diffuse", ColorRGBA.White);
            material.setColor("Specular", ColorRGBA.White);
            material.setFloat("Shininess", 128f); // [0,128]

            // spawn box at random location; mass scales with size
            int x = randInt(minGroundSize, maxGroundSize);
            int z = randInt(minGroundSize, maxGroundSize);
            float size = randFloat(BOX_SIZE_MIN, BOX_SIZE_MAX);

            BoxObject box = new BoxObject("box_" + i, new Vector3f(x, BOX_SPAWN_HEIGHT, z),
                    new Vector3f(size, size, size), material, BOX_MASS * size, null);
            addToScene(box);
        }
    }

    /**
     * Creates the translucent glowing sphere the user moves, the camera that follows it, and the
     * Kinect controller driving it.
     */
    private void createController() {
        // get global objects
        AssetManager assetManager = mApplication.getAssetManager();

        // create sphere based controller
        Material material = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        material.setColor("Color", new ColorRGBA(0.2f, 0.2f, 1.0f, 0.7f));
        material.setColor("GlowColor", ColorRGBA.Blue);
        material.getAdditionalRenderState().setBlendMode(BlendMode.Alpha);

        SphereObject controller = new SphereObject("Controller", new Vector3f(0.0f, 1.0f, 6.0f),
                CONTROLLER_SIZE, material, CONTROLLER_MASS);
        controller.setQueueBucket(Bucket.Transparent);
        addToScene(controller);
        mBulletAppState.getPhysicsSpace().add(controller.getGhostController());

        // create camera
        if (STEREO_CAMERA) {
            mCamera = new StereoCamera(mApplication, controller);
        } else {
            mCamera = new ChaseCamera(mApplication, controller);
        }

        // create kinect controller
        mKinectController = new KinectController(mApplication, mBulletAppState, controller, mCamera);
        mBulletAppState.getPhysicsSpace().addCollisionListener(mKinectController);
    }

    /**
     * Attaches an object to the scene graph and physics space. Only "ordinary" objects (not the
     * ground and not the controller) are tracked in mObjects for reset().
     *
     * @param _object the scene object to add
     */
    private void addToScene(SceneObject _object) {
        mApplication.getRootNode().attachChild(_object);
        mBulletAppState.getPhysicsSpace().add(_object.getController());
        if (!_object.getName().equals("Controller") && !_object.getName().equals("Ground")) {
            mObjects.add(_object);
        }
    }

    /**
     * Returns a uniformly distributed random int in the inclusive range [_min, _max].
     */
    public int randInt(int _min, int _max) {
        return mRandom.nextInt((_max - _min) + 1) + _min;
    }

    /**
     * Returns a uniformly distributed random float in [_min, _max).
     *
     * <p>BUGFIX: the previous version copied the integer formula's "+ 1" into the float span
     * ({@code _min + Math.random() * ((_max - _min) + 1.0f)}), which produced values up to
     * {@code _max + 1} - e.g. box sizes well above BOX_SIZE_MAX.</p>
     */
    public float randFloat(float _min, float _max) {
        return _min + mRandom.nextFloat() * (_max - _min);
    }

    /** Input callback: the R key respawns all boxes. */
    @Override
    public void onAction(String _name, boolean _isPressed, float _tpf) {
        if (_name.equals("reset") && _isPressed) {
            reset();
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * IteratorTypeDefIntegerTest_vj.java
 *
 * Created on April 12, 2005, 3:52 PM
 */
package org.apache.geode.cache.query.functional;

import static org.junit.Assert.fail;

import java.util.ArrayList;
import java.util.HashSet;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.geode.cache.query.CacheUtils;
import org.apache.geode.cache.query.Query;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.cache.query.data.Student;
import org.apache.geode.test.junit.categories.OQLQueryTest;

/**
 * Tests OQL iterator type definitions ({@code TYPE x} and {@code (collection<x>) $1} casts) for
 * each default/primitive element type, plus static and non-static inner-class type definitions.
 */
@Category({OQLQueryTest.class})
public class IteratorTypeDefaultTypesJUnitTest {

  @Before
  public void setUp() throws java.lang.Exception {
    CacheUtils.startCache();
  }

  @After
  public void tearDown() throws java.lang.Exception {
    CacheUtils.closeCache();
  }

  /**
   * Executes each query with the given bind parameters and fails the test if the result set is
   * empty (or, when {@code expectExactlyOne} is set, does not contain exactly one row).
   *
   * <p>BUGFIX: the original per-test copies of this loop called {@code fail(q.getQueryString())}
   * in the catch block, which throws NullPointerException (masking the real failure) whenever
   * {@code newQuery} itself threw and {@code q} was still null. The raw query string is used
   * instead.</p>
   *
   * @param queries the OQL query strings to run
   * @param params bind parameters passed to each execution
   * @param testName name of the calling test, used in failure messages
   * @param expectExactlyOne true to require exactly one result row, false to require at least one
   */
  private void executeQueries(String[] queries, Object[] params, String testName,
      boolean expectExactlyOne) {
    for (int i = 0; i < queries.length; i++) {
      String queryString = queries[i];
      try {
        Query q = CacheUtils.getQueryService().newQuery(queryString);
        SelectResults rs = (SelectResults) q.execute(params);
        if (expectExactlyOne) {
          if (rs.size() != 1) {
            fail(testName + ": Query fetched results with size =" + rs.size()
                + " FOr Query number = " + (i + 1));
          }
        } else if (rs.size() < 1) {
          fail(testName + ": Query fetched zero results ");
        }
      } catch (Exception e) {
        e.printStackTrace();
        fail(queryString);
      }
    }
  }

  @Test
  public void testIteratorDefIntegerArray() throws Exception {
    Integer[] a = new Integer[2];
    for (int j = 0; j < 2; j++) {
      a[j] = Integer.valueOf(j);
    }
    Object[] params = new Object[] {a};
    String[] queries = {"Select distinct intValue from $1 TYPE int",
        "Select distinct intValue from (array<int>) $1 "};
    executeQueries(queries, params, "testIteratorDefIntegerArray", false);
  }

  @Test
  public void testIteratorDefIntegerArrayList() throws Exception {
    ArrayList Arlist = new ArrayList();
    Arlist.add(Integer.valueOf(11));
    Arlist.add(Integer.valueOf(12));
    Object[] params = new Object[] {Arlist};
    String[] queries = {"Select distinct intValue from $1 TYPE int",
        "Select distinct intValue from (list<int>) $1"};
    executeQueries(queries, params, "testIteratorDefIntegerArrayList", false);
  }

  @Test
  public void testIteratorDefString() throws Exception {
    HashSet C1 = new HashSet();
    C1.add("AA");
    C1.add("BB");
    Object[] params = new Object[] {C1};
    String[] queries = {"SELECT DISTINCT intern from (set<string>) $1",
        "SELECT DISTINCT intern from $1 TYPE string"};
    executeQueries(queries, params, "testIteratorDefString", false);
  }

  @Test
  public void testIteratorDefBoolean() throws Exception {
    HashSet C1 = new HashSet();
    C1.add(Boolean.TRUE);
    C1.add(Boolean.FALSE);
    Object[] params = new Object[] {C1};
    String[] queries = {"SELECT DISTINCT booleanValue from (set<boolean>) $1",
        "SELECT DISTINCT booleanValue from $1 TYPE boolean"};
    executeQueries(queries, params, "testIteratorDefBoolean", false);
  }

  @Test
  public void testIteratorDefByte() throws Exception {
    HashSet C1 = new HashSet();
    C1.add(Byte.valueOf((byte) 1));
    C1.add(Byte.valueOf((byte) 2));
    Object[] params = new Object[] {C1};
    String[] queries = {"SELECT DISTINCT byteValue from (set<byte>) $1",
        "SELECT DISTINCT byteValue from $1 TYPE byte"};
    executeQueries(queries, params, "testIteratorDefByte", false);
  }

  @Test
  public void testIteratorDefShort() throws Exception {
    HashSet C1 = new HashSet();
    C1.add(Short.valueOf((short) 11));
    C1.add(Short.valueOf((short) 22));
    Object[] params = new Object[] {C1};
    String[] queries = {"SELECT DISTINCT shortValue from (set<short>) $1",
        "SELECT DISTINCT shortValue from $1 TYPE short"};
    executeQueries(queries, params, "testIteratorDefShort", false);
  }

  @Test
  public void testIteratorDefLong() throws Exception {
    HashSet C1 = new HashSet();
    C1.add(Long.valueOf(111));
    C1.add(Long.valueOf(222));
    Object[] params = new Object[] {C1};
    String[] queries = {"SELECT DISTINCT longValue from (set<long>) $1",
        "SELECT DISTINCT longValue from $1 TYPE long"};
    executeQueries(queries, params, "testIteratorDefLong", false);
  }

  @Test
  public void testIteratorDefDouble() throws Exception {
    HashSet C1 = new HashSet();
    C1.add(Double.valueOf(1.11));
    C1.add(Double.valueOf(2.22));
    Object[] params = new Object[] {C1};
    String[] queries = {"SELECT DISTINCT doubleValue from (set<double>) $1",
        "SELECT DISTINCT doubleValue from $1 TYPE double"};
    executeQueries(queries, params, "testIteratorDefDouble", false);
  }

  @Test
  public void testIteratorDefFloat() throws Exception {
    HashSet C1 = new HashSet();
    C1.add(Float.valueOf(1));
    C1.add(Float.valueOf(2));
    Object[] params = new Object[] {C1};
    String[] queries = {"SELECT DISTINCT floatValue from (set<float>) $1",
        "SELECT DISTINCT floatValue from $1 TYPE float"};
    executeQueries(queries, params, "testIteratorDefFloat", false);
  }

  @Test
  public void testIteratorDefChar() throws Exception {
    HashSet C1 = new HashSet();
    C1.add(Character.valueOf('a'));
    C1.add(Character.valueOf('z'));
    Object[] params = new Object[] {C1};
    String[] queries = {"SELECT DISTINCT charValue from (set<char>) $1",
        "SELECT DISTINCT charValue from $1 TYPE char"};
    executeQueries(queries, params, "testIteratorDefChar", false);
  }

  @Test
  public void testNonStaticInnerClassTypeDef() {
    Student.initializeCounter();
    ArrayList Arlist = new ArrayList();
    Arlist.add(new Student("asif"));
    Arlist.add(new Student("ketan"));
    Object[] params = new Object[] {Arlist};
    String[] queries = {
        "IMPORT org.apache.geode.cache.\"query\".data.Student;"
            + "IMPORT org.apache.geode.cache.\"query\".data.Student$Subject;"
            + "Select distinct * from $1 as it1 , it1.subjects x type Student$Subject where x.subject='Hindi' ",
        "IMPORT org.apache.geode.cache.\"query\".data.Student;"
            + "IMPORT org.apache.geode.cache.\"query\".data.Student$Subject;"
            + "Select distinct * from $1 as it1 , it1.subjects type Student$Subject where subject='Hindi' ",
        "IMPORT org.apache.geode.cache.\"query\".data.Student;"
            + "IMPORT org.apache.geode.cache.\"query\".data.Student$Subject;"
            + "Select distinct * from $1 as it1 , (list<Student$Subject>) it1.subjects where subject='Hindi' "};
    executeQueries(queries, params, "testNonStaticInnerClassTypeDef", true);
  }

  @Test
  public void testStaticInnerClassTypeDef() {
    Student.initializeCounter();
    ArrayList Arlist = new ArrayList();
    Arlist.add(new Student("asif"));
    Arlist.add(new Student("ketan"));
    Object[] params = new Object[] {Arlist};
    String[] queries = {
        "IMPORT org.apache.geode.cache.\"query\".data.Student;"
            + "IMPORT org.apache.geode.cache.\"query\".data.Student$Teacher;"
            + "Select distinct * from $1 as it1 , it1.teachers x type Student$Teacher where x.teacher='Y' ",
        "IMPORT org.apache.geode.cache.\"query\".data.Student;"
            + "IMPORT org.apache.geode.cache.\"query\".data.Student$Teacher;"
            + "Select distinct * from $1 as it1 , it1.teachers type Student$Teacher where teacher='Y' ",
        "IMPORT org.apache.geode.cache.\"query\".data.Student;"
            + "IMPORT org.apache.geode.cache.\"query\".data.Student$Teacher;"
            + "Select distinct * from $1 as it1 , (list<Student$Teacher>) it1.teachers where teacher='Y' "};
    executeQueries(queries, params, "testStaticInnerClassTypeDef", true);
  }
}
package ifsp.repository;

import java.io.Serializable;
import java.lang.reflect.ParameterizedType;
import java.util.List;

import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.EntityTransaction;
import javax.persistence.Persistence;

/**
 * AbstractRepository abstract class, to use when you can't inject the EntityManager
 * - Lack of a truly Java EE Container (@PersistentContext)
 * - You can't use CDI (@ViewScoped)
 *
 * If you need a more complex EntityManager scope management, use a truly
 * Java EE Container (jBoss, TomEE, Glassfish) or Dependency Injection.
 *
 * Usage:
 * public class UserRepository extends AbstractRepository&lt;Long, User&gt; implements Serializable
 * {
 *     //empty
 * }
 *
 * ...
 * UserRepository rep = new UserRepository();
 * rep.save(user);
 * ...
 *
 * Don't forget to implement Serializable in every entity and child DAO,
 * Hibernate will be happy =)
 *
 * @author Vitor Freitas - github(vFreitas)
 * @param <K> Type of the entity ID (Key).
 * @param <E> Type of the Entity.
 */
abstract class AbstractRepository<K, E> implements Repositoriable<K, E>, Serializable
{
    /* Name of the persistence unit to use */
    private static final String UNIT_NAME = "site";

    /* Factory to create entity managers */
    private static final EntityManagerFactory factory =
        Persistence.createEntityManagerFactory(UNIT_NAME);

    /* Operation-scoped EntityManager; recreated after each close */
    private EntityManager em;

    /* The entity class type */
    protected Class<E> entityClass;

    /**
     * Constructor: it gets the second (entity) parameterized type of the
     * concrete subclass declaration and stores it in {@link #entityClass}.
     */
    @SuppressWarnings("unchecked")
    public AbstractRepository()
    {
        ParameterizedType genericSuperClass =
            (ParameterizedType) getClass().getGenericSuperclass();
        this.entityClass = (Class<E>) genericSuperClass.getActualTypeArguments()[1];
    }

    /**
     * It can be used within children DAOs in the same package
     * for specific queries.
     * @return An open instance of the EntityManager
     */
    protected EntityManager getEntityManager()
    {
        // Single combined check (replaces the original duplicated
        // null / !isOpen branches): recreate whenever unusable.
        if (em == null || !em.isOpen())
            em = factory.createEntityManager();
        return em;
    }

    /**
     * Finalizes the connection with the database.
     * Null/closed guard added: the original called em.close() blindly and
     * threw a NullPointerException if no manager had ever been created.
     */
    protected void closeEntityManager()
    {
        if (em != null && em.isOpen())
            em.close();
    }

    /**
     * @return The entity class type instance variable
     */
    protected Class<E> getEntityClassType()
    {
        return this.entityClass;
    }

    /**
     * Persists an entity into the database.
     * Begins a transaction, persists, commits and finally closes the
     * EntityManager; rolls back on any error.
     * NOTE(review): exceptions are deliberately swallowed (printed to stderr)
     * to keep the original best-effort contract — callers get no failure signal.
     * @param entity E object to persist in the database
     */
    @Override
    public void save(E entity)
    {
        EntityTransaction trx = getEntityManager().getTransaction();
        try
        {
            trx.begin();
            getEntityManager().persist(entity);
            trx.commit();
        }
        catch (Exception e)
        {
            // getTransaction() never returns null, so only isActive() matters
            if (trx.isActive())
                trx.rollback();
            e.printStackTrace();
        }
        finally
        {
            closeEntityManager();
        }
    }

    /**
     * Merges (updates) an entity in the database.
     * (Original Javadoc wrongly said "Remove an entity".)
     * Begins a transaction, merges, commits and finally closes the
     * EntityManager; rolls back on any error.
     * @param entity E object to merge in the database
     */
    @Override
    public void merge(E entity)
    {
        EntityTransaction trx = getEntityManager().getTransaction();
        try
        {
            trx.begin();
            getEntityManager().merge(entity);
            trx.commit();
        }
        catch (Exception e)
        {
            if (trx.isActive())
                trx.rollback();
            e.printStackTrace();
        }
        finally
        {
            closeEntityManager();
        }
    }

    /**
     * Removes an entity from the database.
     * Begins a transaction, removes, commits and finally closes the
     * EntityManager; rolls back on any error.
     * @param entity The E entity to remove from the database
     */
    @Override
    public void remove(E entity)
    {
        EntityTransaction trx = getEntityManager().getTransaction();
        try
        {
            trx.begin();
            getEntityManager().remove(entity);
            trx.commit();
        }
        catch (Exception e)
        {
            if (trx.isActive())
                trx.rollback();
            e.printStackTrace();
        }
        finally
        {
            closeEntityManager();
        }
    }

    /**
     * Gets an entity by its ID.
     * @param id ID of the E entity
     * @return an E type object, or null when not found or on error
     */
    @Override
    public E getById(K id)
    {
        E result = null;
        try
        {
            // find() is already typed by getEntityClassType(); no cast needed
            result = getEntityManager().find(getEntityClassType(), id);
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        finally
        {
            closeEntityManager();
        }
        return result;
    }

    /**
     * Gets a list of all E objects.
     * @return A list of E objects, or null on error
     */
    @Override
    @SuppressWarnings("unchecked")
    public List<E> getAll()
    {
        List<E> resultList = null;
        try
        {
            resultList = (List<E>) getEntityManager()
                .createQuery("SELECT e FROM " + getEntityClassType().getSimpleName() + " e")
                .getResultList();
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        finally
        {
            closeEntityManager();
        }
        return resultList;
    }

    /**
     * Gets a list of E objects with the results of the given named query.
     * @param namedQuery Name of the named query
     * @return A list of E objects, or null on error
     */
    @Override
    @SuppressWarnings("unchecked")
    public List<E> getAllNamedQuery(String namedQuery)
    {
        List<E> resultList = null;
        try
        {
            resultList = (List<E>) getEntityManager().createNamedQuery(namedQuery)
                .getResultList();
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        finally
        {
            closeEntityManager();
        }
        return resultList;
    }

    /**
     * Gets an E object with the single result of the given named query
     * with one parameter.
     * @param namedQuery Name of the named query
     * @param parameter Name of the parameter set on the named query
     * @param value String value of the parameter
     * @return An E object, or null on error / no result
     */
    @Override
    @SuppressWarnings("unchecked")
    public E getUniqueByRestriction(String namedQuery, String parameter, String value)
    {
        E result = null;
        try
        {
            result = (E) getEntityManager().createNamedQuery(namedQuery)
                .setParameter(parameter, value)
                .getSingleResult();
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        finally
        {
            closeEntityManager();
        }
        return result;
    }

    /**
     * Gets an E object with the single result of the given named query
     * with one parameter.
     * @param namedQuery Name of the named query
     * @param parameter Name of the parameter set on the named query
     * @param value Object value of the parameter
     * @return An E object, or null on error / no result
     */
    @Override
    @SuppressWarnings("unchecked")
    public E getUniqueByRestriction(String namedQuery, String parameter, Object value)
    {
        E result = null;
        try
        {
            result = (E) getEntityManager().createNamedQuery(namedQuery)
                .setParameter(parameter, value)
                .getSingleResult();
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        finally
        {
            closeEntityManager();
        }
        return result;
    }

    /**
     * Gets a list of E objects with the results of the given named query
     * with one parameter.
     * @param namedQuery Name of the named query
     * @param parameter Name of the parameter set on the named query
     * @param value Object value of the parameter
     * @return A list of E objects, or null on error
     */
    @Override
    @SuppressWarnings("unchecked")
    public List<E> getByRestriction(String namedQuery, String parameter, Object value)
    {
        List<E> resultList = null;
        try
        {
            resultList = (List<E>) getEntityManager().createNamedQuery(namedQuery)
                .setParameter(parameter, value)
                .getResultList();
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        finally
        {
            closeEntityManager();
        }
        return resultList;
    }

    /**
     * Gets a list of E objects with the results of the given named query
     * with one parameter.
     * @param namedQuery Name of the named query
     * @param parameter Name of the parameter set on the named query
     * @param value String value of the parameter
     * @return A list of E objects, or null on error
     */
    @Override
    @SuppressWarnings("unchecked")
    public List<E> getByRestriction(String namedQuery, String parameter, String value)
    {
        List<E> resultList = null;
        try
        {
            resultList = (List<E>) getEntityManager().createNamedQuery(namedQuery)
                .setParameter(parameter, value)
                .getResultList();
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        finally
        {
            closeEntityManager();
        }
        return resultList;
    }
}
/*
 * NameTableCellRenderer.java: a TableCellRenderer for NameTreeNode
 *
 * Copyright (c) 2015, 2016 Nozomi `James' Ytow
 * All rights reserved.
 */

/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.nomencurator.gui.swing.table;

import java.awt.Color;
import java.awt.Component;

import javax.swing.JTable;
import javax.swing.table.DefaultTableCellRenderer;

import org.nomencurator.model.NameUsage;
import org.nomencurator.model.Rank;
import org.nomencurator.model.gbif.NubNameUsage;
import org.nomencurator.model.util.NameUsageAttribute;

import org.nomencurator.gui.swing.DefaultColors;

import org.nomencurator.gui.swing.tree.NameTreeNode;

/**
 * {@code NameTableCellRenderer} is a {@code TableCellRenderer} to render a {@code NameTree}
 *
 * @version 20 Aug. 2016
 * @author Nozomi `James' Ytow
 */
public class NameTableCellRenderer
    extends DefaultTableCellRenderer
{
    private static final long serialVersionUID = -7370663090747254139L;

    /**
     * Attribute of {@code NameUsage} to be rendered.
     */
    protected NameUsageAttribute attribute;

    /**
     * Foreground color of disabled components.
     */
    protected Color disabledForeground;

    /**
     * Foreground color of enabled components.
     */
    protected Color enabledForeground;

    /**
     * Constructs a cell renderer for given {@code attribute} of {@code NameUsage}.
     *
     * @param attribute to be rendered.
     */
    public NameTableCellRenderer(NameUsageAttribute attribute)
    {
	super();
	this.attribute = attribute;
	setDefaultTextColor();
    }

    /**
     * Sets default foreground color of enabled and disabled elements.
     */
    protected void setDefaultTextColor()
    {
	disabledForeground = DefaultColors.getDisabledForeground(DefaultColors.ComponentName.LABEL);
	enabledForeground = DefaultColors.getForeground(DefaultColors.ComponentName.LABEL);
    }

    @Override
    public void updateUI()
    {
	super.updateUI();
	// re-resolve look-and-feel dependent colors after a L&F change
	setDefaultTextColor();
    }

    /**
     * Returns the cell rendering {@code Component} with foreground color depending on
     * synonym status of the {@code NameUsage} to be rendered.
     *
     * @param table  the {@code JTable}
     * @param value the value to assign to the cell at {@code [row, column]}, {@code NameUsage} expected
     * @param isSelected true if the cell is selected
     * @param hasFocus true if the cell has focus
     * @param row the row of the cell to render
     * @param column the column of the cell to render
     * @return the table cell renderer
     */
    @Override
    public Component getTableCellRendererComponent(JTable table,
						   Object value,
						   boolean isSelected,
						   boolean hasFocus,
						   int row,
						   int column)
    {
	NameUsage<?> nameUsage = null;
	boolean isSynonym = false;
	if(value != null) {
	    // the cell value may be either a NameUsage itself or a
	    // NameTreeNode wrapping one as its user object
	    if(value instanceof NameUsage) {
		nameUsage = (NameUsage<?>)value;
	    }
	    else if(value instanceof NameTreeNode) {
		nameUsage = (NameUsage<?>)((NameTreeNode)value).getUserObject();
	    }
	}
	if(nameUsage != null) {
	    isSynonym = nameUsage.isSynonym();
	    value = getValueAt(nameUsage, attribute);
	}

	Component component =
	    super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);

	// synonyms are rendered with the disabled foreground color
	if (nameUsage != null) {
	    if (isSynonym) {
		component.setForeground(disabledForeground);
	    }
	    else {
		component.setForeground(enabledForeground);
	    }
	}

	return component;
    }

    /**
     * Returns the display value of {@code attribute} extracted from {@code nameUsage},
     * converted to its {@code String} representation, or null when unavailable.
     *
     * @param nameUsage the {@code NameUsage} to extract the value from
     * @param attribute the attribute to extract
     * @return the value to display, or null
     */
    public static Object getValueAt(NameUsage<?> nameUsage, NameUsageAttribute attribute)
    {
	Object value = null;
	if (nameUsage != null) {
	    switch (attribute) {
	    case RANK:
		value = nameUsage.getRank();
		break;
	    case NAME:
		value = nameUsage.getLiteral();
		break;
	    case AUTHORITY:
		value = nameUsage.getAuthority();
		// fall back to the view name of the sensu usage
		// when no authority is recorded
		if(value == null) {
		    nameUsage = nameUsage.getSensu();
		    if(nameUsage != null)
			value = nameUsage.getViewName();
		}
		break;
	    case YEAR:
		value = nameUsage.getAuthorityYear();
		break;
	    case SENSU:
		value = nameUsage.getViewName();
		break;
	    case DATASET:
		if (nameUsage instanceof NubNameUsage) {
		    value = ((NubNameUsage)nameUsage).getDatasetTitle();
		}
		break;
	    case DESCENDANTS_COUNT:
		value = nameUsage.getDescendantCount();
		break;
	    default:
		// nothing to do
		break;
	    }
	    if (value != null)
		value = value.toString();
	}
	return value;
    }

    /** Cached attribute values to translate a column index into an attribute. */
    private static NameUsageAttribute[] attributes = NameUsageAttribute.values();

    /**
     * Returns the display value of the attribute at {@code attribute} index.
     *
     * @param nameUsage the {@code NameUsage} to extract the value from
     * @param attribute ordinal index of the attribute to extract
     * @return the value to display, or null
     */
    public static Object getValueAt(NameUsage<?> nameUsage, int attribute)
    {
	return getValueAt(nameUsage, attributes[attribute]);
    }
}
/** * Copyright (c) 2011, University of Konstanz, Distributed Systems Group * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the University of Konstanz nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.treetank.data.delegates; import static com.google.common.base.Objects.toStringHelper; import static org.treetank.data.IConstants.NULL_NODE; import java.io.DataOutput; import java.io.IOException; import org.treetank.api.IData; import org.treetank.data.interfaces.ITreeStructData; import org.treetank.exception.TTIOException; import com.google.common.hash.Funnel; import com.google.common.hash.PrimitiveSink; /** * Delegate method for all nodes building up the structure. That means that all * nodes representing trees in Treetank are represented by an instance of the * interface {@link ITreeStructData} namely containing the position of all related * siblings, the first-child and all nodes defined by the {@link NodeDelegate} as well. * * @author Sebastian Graf, University of Konstanz * */ public class StructNodeDelegate implements ITreeStructData { /** * Enum for StructValueFunnel. * * @author Sebastian Graf, University of Konstanz * */ enum StructNodeDelegateFunnel implements Funnel<org.treetank.api.IData> { INSTANCE; public void funnel(org.treetank.api.IData data, PrimitiveSink into) { final ITreeStructData from = (ITreeStructData)data; into.putLong(from.getFirstChildKey()); into.putLong(from.getRightSiblingKey()); into.putLong(from.getLeftSiblingKey()); into.putLong(from.getChildCount()); } } /** Pointer to the first child of the current node. */ private long mFirstChild; /** Pointer to the right sibling of the current node. */ private long mRightSibling; /** Pointer to the left sibling of the current node. */ private long mLeftSibling; /** Pointer to the number of children. */ private long mChildCount; /** Delegate for common node information. */ private final NodeDelegate mDelegate; /** * Constructor. 
* * @param pDel * to be set * @param pFirstChild * to be set * @param pRightSib * to be set * @param pLeftSib * to be set * @param pChildCount * to be set */ public StructNodeDelegate(final NodeDelegate pDel, final long pFirstChild, final long pRightSib, final long pLeftSib, final long pChildCount) { mDelegate = pDel; mFirstChild = pFirstChild; mRightSibling = pRightSib; mLeftSibling = pLeftSib; mChildCount = pChildCount; } /** * {@inheritDoc} */ @Override public int getKind() { return mDelegate.getKind(); } /** * {@inheritDoc} */ @Override public boolean hasFirstChild() { return mFirstChild != NULL_NODE; } /** * {@inheritDoc} */ @Override public boolean hasLeftSibling() { return mLeftSibling != NULL_NODE; } /** * {@inheritDoc} */ @Override public boolean hasRightSibling() { return mRightSibling != NULL_NODE; } /** * {@inheritDoc} */ @Override public long getChildCount() { return mChildCount; } /** * {@inheritDoc} */ @Override public long getFirstChildKey() { return mFirstChild; } /** * {@inheritDoc} */ @Override public long getLeftSiblingKey() { return mLeftSibling; } /** * {@inheritDoc} */ @Override public long getRightSiblingKey() { return mRightSibling; } /** * {@inheritDoc} */ @Override public void setRightSiblingKey(final long pKey) { mRightSibling = pKey; } /** * {@inheritDoc} */ @Override public void setLeftSiblingKey(final long pKey) { mLeftSibling = pKey; } /** * {@inheritDoc} */ @Override public void setFirstChildKey(final long pKey) { mFirstChild = pKey; } /** * {@inheritDoc} */ @Override public void decrementChildCount() { mChildCount--; } /** * {@inheritDoc} */ @Override public void incrementChildCount() { mChildCount++; } /** * Delegate method for getNodeKey. * * @return the key of the node * @see org.treetank.data.delegates.NodeDelegate#getDataKey() */ public long getDataKey() { return mDelegate.getDataKey(); } /** * Delegate method for getParentKey. 
* * @return the key of the parent * @see org.treetank.data.delegates.NodeDelegate#getParentKey() */ public long getParentKey() { return mDelegate.getParentKey(); } /** * Delegate method for setParentKey. * * @param pParentKey * @see org.treetank.data.delegates.NodeDelegate#setParentKey(long) */ public void setParentKey(long pParentKey) { mDelegate.setParentKey(pParentKey); } /** * Delegate method for getHash. * * @return the hash * @see org.treetank.data.delegates.NodeDelegate#getHash() */ public long getHash() { return mDelegate.getHash(); } /** * Delegate method for setHash. * * @param pHash * @see org.treetank.data.delegates.NodeDelegate#setHash(long) */ public void setHash(long pHash) { mDelegate.setHash(pHash); } /** * Delegate method for getTypeKey. * * @return the type of the node * @see org.treetank.data.delegates.NodeDelegate#getTypeKey() */ public int getTypeKey() { return mDelegate.getTypeKey(); } /** * Delegate method for setTypeKey. * * @param pTypeKey * @see org.treetank.data.delegates.NodeDelegate#setTypeKey(int) */ public void setTypeKey(int pTypeKey) { mDelegate.setTypeKey(pTypeKey); } /** * Delegate method for hasParent. 
* * @return if the node has a parent * @see org.treetank.data.delegates.NodeDelegate#hasParent() */ public boolean hasParent() { return mDelegate.hasParent(); } /** * {@inheritDoc} */ @Override public String toString() { return toStringHelper(this).add("mFirstChild", mFirstChild).add("mRightSibling", mRightSibling).add( "mLeftSibling", mLeftSibling).add("mChildCount", mChildCount).add("mDelegate", mDelegate) .toString(); } /** * Serializing to given dataput * * @param pOutput * to serialize to * @throws TTIOException */ public void serialize(final DataOutput pOutput) throws TTIOException { try { pOutput.writeLong(getFirstChildKey()); pOutput.writeLong(getRightSiblingKey()); pOutput.writeLong(getLeftSiblingKey()); pOutput.writeLong(getChildCount()); } catch (final IOException exc) { throw new TTIOException(exc); } } @Override public Funnel<IData> getFunnel() { return StructNodeDelegateFunnel.INSTANCE; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + (int)(mChildCount ^ (mChildCount >>> 32)); result = prime * result + ((mDelegate == null) ? 0 : mDelegate.hashCode()); result = prime * result + (int)(mFirstChild ^ (mFirstChild >>> 32)); result = prime * result + (int)(mLeftSibling ^ (mLeftSibling >>> 32)); result = prime * result + (int)(mRightSibling ^ (mRightSibling >>> 32)); return result; } @Override public boolean equals(Object obj) { return this.hashCode() == obj.hashCode(); } }
// Copyright 2006 Konrad Twardowski // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.makagiga.todo; import static org.makagiga.commons.UI.i18n; import java.awt.Color; import java.awt.Component; import java.io.File; import java.io.InputStream; import java.io.OutputStream; import java.text.ParseException; import java.util.LinkedList; import java.util.List; import java.util.Map; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import org.makagiga.commons.BooleanProperty; import org.makagiga.commons.ColorProperty; import org.makagiga.commons.FS; import org.makagiga.commons.MCalendar; import org.makagiga.commons.MDate; import org.makagiga.commons.MLogger; import org.makagiga.commons.TK; import org.makagiga.commons.UI; import org.makagiga.commons.html.HTMLBuilder; import org.makagiga.commons.swing.AbstractListTableModel; import org.makagiga.commons.swing.MMessage; import org.makagiga.commons.swing.MStatusBar; import org.makagiga.commons.xml.SimpleXMLReader; import org.makagiga.commons.xml.XMLBuilder; /** * @since 2.0 */ public class TaskModel extends AbstractListTableModel<Task> { // private private boolean canAddToRecentlyCompleted = true; // package boolean repeatRulesEnabled; private static final LinkedList<Task> recentlyCompleted = new LinkedList<>(); // public public TaskModel() { super(Column.getColumnInfo()); } /** * @since 4.12 */ public static List<Task> getRecentlyCompleted() { return recentlyCompleted; } public void loadFromXML(final File 
file, final boolean newFile) throws Exception { try (FS.BufferedFileInput input = new FS.BufferedFileInput(file)) { loadFromXML(input, newFile); } } public void loadFromXML(final InputStream input, final boolean newFile) throws Exception { // don't parse empty file if (newFile) { setEventsEnabled(true); return; } try { setEventsEnabled(false); clear(); SimpleXMLReader reader = new SimpleXMLReader() { private Task task; @Override protected void onEnd(final String name) { if (name.equals("item")) TaskModel.this.addRow(task); } @Override protected void onStart(final String _name) { switch (_name) { case "item": task = new Task(); task.setComplete(getIntegerAttribute("complete", 0)); String p = getStringAttribute("priority"); task.setPriority((p == null) ? Priority.DEFAULT : Priority.of(p)); break; case "category": task.setCategory(getValue("")); break; case "completedatetime": task.setCompleteDateTime(Task.parseDateTime(getStringAttribute("value"))); break; case "datetime": task.setDateTime(Task.parseDateTime(getStringAttribute("value"))); break; case "startdatetime": task.setStartDateTime(Task.parseDateTime(getStringAttribute("value"))); break; case "summary": task.setSummary(getValue("")); break; case "property": { String name = getStringAttribute("name"); String type = getStringAttribute("type"); String value = getStringAttribute("value"); if (TK.isEmpty(name) || TK.isEmpty(type) || (value == null)) { MLogger.error("todo", "property: Missing \"name\", \"type\" or \"value\" attribute"); } else { Object o = null; switch (type) { case "color": try { o = ColorProperty.parseColor(value); } catch (ParseException exception) { MLogger.error("todo", "property: Invalid color value: %s=%s", name, value); } break; case "boolean": try { o = BooleanProperty.parseBoolean(value); } catch (ParseException exception) { MLogger.error("todo", "property: Invalid boolean value: %s=%s", name, value); } break; case "integer": try { o = Integer.valueOf(value); } catch (NumberFormatException 
exception) { MLogger.error("todo", "property: Invalid integer value: %s=%s", name, value); } break; case "string": o = value; break; default: MLogger.error("todo", "property: Unknown \"%s\" property type: %s", name, type); } if (o != null) task.setProperty(name, o); } } break; } } }; reader.read(input); } finally { UI.invokeLater(new Runnable() { @Override public void run() { fireTableDataChanged(); } } ); setEventsEnabled(true); } } public void saveToXML(final File file) throws Exception { try (FS.BufferedFileOutput output = new FS.BufferedFileOutput(file)) { saveToXML(output); } } public void saveToXML(final OutputStream output) throws Exception { XMLBuilder builder = new XMLBuilder(); builder.beginTag( "todo", "version", 4 ); for (Task i : this) writeItemAsXML(builder, i); builder.endTag("todo"); builder.write(output, false); } // AbstractTableModel @Override @SuppressFBWarnings("URV_INHERITED_METHOD_WITH_RELATED_TYPES") public Object getValueAt(final int row, final int column) { if (isEmpty() || (row < 0) || (row > getRowCount() - 1)) return null; Task task = getRowAt(row); if (column == Column.DONE.ordinal()) return task.isDone(); if (column == Column.SUMMARY.ordinal()) return task.getSummary(); if (column == Column.PRIORITY.ordinal()) return task.getPriority(); if (column == Column.COMPLETE.ordinal()) return task.getComplete(); if (column == Column.DATE_TIME.ordinal()) return task.getDateTime(); if (column == Column.START_DATE_TIME.ordinal()) return task.getStartDateTime(); if (column == Column.COMPLETE_DATE_TIME.ordinal()) return task.getCompleteDateTime(); if (column == Column.CATEGORY.ordinal()) return task.getCategory(); if (column == Column.CIRCLE.ordinal()) return task; if (column == Column.DURATION.ordinal()) return task.getDuration(); return null; } /** * @since 4.4 */ public void setValueAt(final Object value, final int row, final Column column) { setValueAt(value, row, column.ordinal()); } @Override public void setValueAt(final Object value, final 
int row, final int column) { Task task = getRowAt(row); Task before = createCopyForUndo(task); if (column == Column.DONE.ordinal()) { task.setComplete(Boolean.TRUE.equals(value) ? 100 : 0); updateCompleteInfo(task); } else if (column == Column.SUMMARY.ordinal()) { task.setSummary((String)value); } else if (column == Column.PRIORITY.ordinal()) { task.setPriority((Priority)value); } else if (column == Column.COMPLETE.ordinal()) { task.setComplete((Integer)value); updateCompleteInfo(task); } else if (column == Column.DATE_TIME.ordinal()) { task.setDateTime((MDate)value); } else if (column == Column.START_DATE_TIME.ordinal()) { task.setStartDateTime((MDate)value); } else if (column == Column.COMPLETE_DATE_TIME.ordinal()) { task.setCompleteDateTime((MDate)value); } else if (column == Column.CATEGORY.ordinal()) { task.setCategory((String)value); } else if (column == Column.CIRCLE.ordinal()) { // unused } else if (column == Column.DURATION.ordinal()) { task.setDuration((int)value); } else return; if (!task.equals(before)) updateRowsAndUndo(before, task, row); } /** * @since 5.0 */ public void setCanAddToRecentlyCompleted(final boolean value) { canAddToRecentlyCompleted = value; } // protected @Override protected Task createCopyForUndo(final Task original) { return original.copy(); } // private private void updateCompleteInfo(final Task task) { // 1. set "completed date/time" if (task.getComplete() > 0) task.setCompleteDateTime(MDate.now()); else task.setCompleteDateTime(MDate.invalid()); // 2. 
add to recently completed if (task.isDone() && canAddToRecentlyCompleted) { synchronized (recentlyCompleted) { if (recentlyCompleted.size() == 10) { recentlyCompleted.removeLast(); // HACK: trigger size change detection in SummaryData recentlyCompleted.removeLast(); } recentlyCompleted.addFirst(task.copy()); } } // apply repeat rule if (task.isDone() && repeatRulesEnabled) { RepeatRule repeatRule = new RepeatRule(task); if (repeatRule.isValid()) { if (task.getDateTime().isValid()) { MCalendar taskCalendar = task.getDateTime().toCalendar(); MCalendar preview = taskCalendar.copy(); repeatRule.apply(preview); if (preview.equals(taskCalendar)) return; int options = MDate.FANCY_FORMAT_ABSOLUTE; if ( (repeatRule.getType() == RepeatRule.Type.HOURLY) || (repeatRule.getType() == RepeatRule.Type.MINUTELY) ) options |= MDate.FANCY_FORMAT_APPEND_TIME; String currentDate = task.getDateTime().fancyFormat(options); String setDate = preview.toDate().fancyFormat(options); if (!repeatRule.confirm) { if (repeatRule.apply(task)) task.setComplete(0); MStatusBar.info(currentDate + " -> " + setDate); return; } Component focus = UI.getFocusOwner(); if (new MMessage.Builder() .title(i18n("Repeat Rule")) .icon(RepeatRule.REPEAT_RULE_ACTION_INFO.getIcon()) .ok(RepeatRule.APPLY_REPEAT_RULE_ACTION_INFO) .cancel(RepeatRule.DO_NOT_REPEAT_ACTION_INFO) .text(i18n("Apply repeat rule and set task as incomplete?")) .list( new HTMLBuilder.TableColumn(), new HTMLBuilder.TableColumn(), i18n("Repeat Rule:"), repeatRule.toDisplayString(), i18n("Current Date/Time:"), currentDate, i18n("Set Next Date/Time:"), setDate ) .exec() ) { if (repeatRule.apply(task)) task.setComplete(0); } // restore old focus if (focus != null) { TaskTable table = UI.getAncestorOfClass(TaskTable.class, focus); if (table != null) table.requestFocusInWindow(); } } else { MStatusBar.error(i18n("Cannot apply repeat rule to task without date/time")); } } } } private void writeItemAsXML(final XMLBuilder builder, final Task task) { 
builder.beginTag( "item", "priority", task.getPriority().ordinal(), "complete", task.getComplete() ); // TODO: filter out illegal XML characters before write <http://www.w3.org/TR/REC-xml/#charsets> // TEST: echo -e "\b"|<copy> // API: TK.escapeXML with flags builder.doubleTag( "summary", builder.escape(task.getSummary()) ); builder.doubleTag( "category", builder.escape(task.getCategory()) ); writeItemDate(builder, task.getDateTime(), "datetime"); writeItemDate(builder, task.getStartDateTime(), "startdatetime"); writeItemDate(builder, task.getCompleteDateTime(), "completedatetime"); for (Map.Entry<String, Object> i : task.getProperties().entrySet()) { Object value = i.getValue(); if (value == null) continue; // for String name = i.getKey(); String type; String valueString; if (value instanceof Boolean) { type = "boolean"; valueString = value.toString(); } else if (value instanceof Color) { type = "color"; valueString = ColorProperty.toString((Color)value); } else if (value instanceof Integer) { type = "integer"; valueString = value.toString(); } else if (value instanceof String) { type = "string"; valueString = value.toString(); } else { MLogger.error("todo", "property: Unknown \"%s\" property type: %s", name, value.getClass()); continue; // for } builder.singleTag( "property", "name", name, "type", type, "value", valueString ); } builder.endTag("item"); } private void writeItemDate(final XMLBuilder builder, final MDate value, final String attribute) { builder.singleTag( attribute, "value", value.isValid() ? value.formatRFC3339() : 0 ); } // package void updateRowsAndUndo(final Task before, final Task after, final int row) { fireTableRowsUpdated(row, row); fireUndoableEditHappened(new ChangeUndo(before, createCopyForUndo(after), row)); } }
/**
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.aurora.scheduler.mesos;

import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.inject.Inject;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.protobuf.ByteString;

import org.apache.aurora.Protobufs;
import org.apache.aurora.codec.ThriftBinaryCodec;
import org.apache.aurora.common.quantity.Amount;
import org.apache.aurora.common.quantity.Data;
import org.apache.aurora.scheduler.ResourceSlot;
import org.apache.aurora.scheduler.TierManager;
import org.apache.aurora.scheduler.base.CommandUtil;
import org.apache.aurora.scheduler.base.JobKeys;
import org.apache.aurora.scheduler.base.SchedulerException;
import org.apache.aurora.scheduler.base.Tasks;
import org.apache.aurora.scheduler.storage.entities.IAssignedTask;
import org.apache.aurora.scheduler.storage.entities.IDockerContainer;
import org.apache.aurora.scheduler.storage.entities.IDockerParameter;
import org.apache.aurora.scheduler.storage.entities.IJobKey;
import org.apache.aurora.scheduler.storage.entities.ITaskConfig;
import org.apache.mesos.Protos;
import org.apache.mesos.Protos.CommandInfo;
import org.apache.mesos.Protos.ContainerInfo;
import org.apache.mesos.Protos.ExecutorID;
import org.apache.mesos.Protos.ExecutorInfo;
import org.apache.mesos.Protos.Resource;
import org.apache.mesos.Protos.SlaveID;
import org.apache.mesos.Protos.TaskID;
import org.apache.mesos.Protos.TaskInfo;
import org.apache.mesos.Protos.Volume;

import static java.util.Objects.requireNonNull;

/**
 * A factory to create mesos task objects.
 */
public interface MesosTaskFactory {

  /**
   * Creates a mesos task object.
   *
   * @param task Assigned task to translate into a task object.
   * @param slaveId Id of the slave the task is being assigned to.
   * @return A new task.
   * @throws SchedulerException If the task could not be encoded.
   */
  TaskInfo createFrom(IAssignedTask task, SlaveID slaveId) throws SchedulerException;

  // TODO(wfarner): Move this class to its own file to reduce visibility to package private.
  class MesosTaskFactoryImpl implements MesosTaskFactory {
    private static final Logger LOG = Logger.getLogger(MesosTaskFactoryImpl.class.getName());

    // Prefix used to derive an ExecutorID from a task id.
    private static final String EXECUTOR_PREFIX = "thermos-";

    /**
     * Name to associate with task executors.
     */
    @VisibleForTesting
    static final String EXECUTOR_NAME = "aurora.task";

    private final ExecutorSettings executorSettings;
    private final TierManager tierManager;

    @Inject
    MesosTaskFactoryImpl(ExecutorSettings executorSettings, TierManager tierManager) {
      this.executorSettings = requireNonNull(executorSettings);
      this.tierManager = requireNonNull(tierManager);
    }

    /**
     * Derives the executor id for a task: the task id prefixed with "thermos-".
     */
    @VisibleForTesting
    static ExecutorID getExecutorId(String taskId) {
      return ExecutorID.newBuilder().setValue(EXECUTOR_PREFIX + taskId).build();
    }

    /**
     * Formats a job key as "role.environment.name" for use as a source name.
     */
    private static String getJobSourceName(IJobKey jobkey) {
      return String.format("%s.%s.%s", jobkey.getRole(), jobkey.getEnvironment(), jobkey.getName());
    }

    private static String getJobSourceName(ITaskConfig task) {
      return getJobSourceName(task.getJob());
    }

    /**
     * Formats a task instance as "role.environment.name.instanceId".
     */
    @VisibleForTesting
    static String getInstanceSourceName(ITaskConfig task, int instanceId) {
      return String.format("%s.%s", getJobSourceName(task), instanceId);
    }

    /**
     * Resources to 'allocate' to the executor in the ExecutorInfo.  We do this since mesos
     * disallows an executor with zero resources, but the tasks end up in the same container
     * anyway.
     */
    @VisibleForTesting
    static final ResourceSlot RESOURCES_EPSILON = new ResourceSlot(
        0.01,
        Amount.of(32L, Data.MB),
        Amount.of(1L, Data.MB),
        0);

    @Override
    public TaskInfo createFrom(IAssignedTask task, SlaveID slaveId) throws SchedulerException {
      requireNonNull(task);
      requireNonNull(slaveId);

      byte[] taskInBytes;
      try {
        taskInBytes = ThriftBinaryCodec.encode(task.newBuilder());
      } catch (ThriftBinaryCodec.CodingException e) {
        LOG.log(Level.SEVERE, "Unable to serialize task.", e);
        throw new SchedulerException("Internal error.", e);
      }

      ITaskConfig config = task.getTask();
      // Executor overhead is added on top of the task's requested resources, then the
      // epsilon reserved for the ExecutorInfo is carved back out of the task's share.
      ResourceSlot resourceSlot =
          ResourceSlot.from(config)
              .withOverhead(executorSettings)
              .subtract(RESOURCES_EPSILON);

      // TODO(wfarner): Re-evaluate if/why we need to continue handling unset assignedPorts field.
      List<Resource> resources = resourceSlot.toResourceList(
          task.isSetAssignedPorts()
              ? ImmutableSet.copyOf(task.getAssignedPorts().values())
              : ImmutableSet.of(),
          tierManager.getTier(task.getTask()));

      if (LOG.isLoggable(Level.FINE)) {
        LOG.fine("Setting task resources to "
            + Iterables.transform(resources, Protobufs::toString));
      }

      TaskInfo.Builder taskBuilder = TaskInfo.newBuilder()
          .setName(JobKeys.canonicalString(Tasks.getJob(task)))
          .setTaskId(TaskID.newBuilder().setValue(task.getTaskId()))
          .setSlaveId(slaveId)
          .addAllResources(resources)
          .setData(ByteString.copyFrom(taskInBytes));

      if (config.getContainer().isSetMesos()) {
        configureTaskForNoContainer(task, config, taskBuilder);
      } else if (config.getContainer().isSetDocker()) {
        configureTaskForDockerContainer(task, config, taskBuilder);
      } else {
        throw new SchedulerException("Task had no supported container set.");
      }

      return taskBuilder.build();
    }

    /**
     * Configures the task to run the executor directly in the agent sandbox (no container).
     */
    private void configureTaskForNoContainer(
        IAssignedTask task,
        ITaskConfig config,
        TaskInfo.Builder taskBuilder) {

      CommandInfo commandInfo = CommandUtil.create(
          executorSettings.getExecutorPath(),
          executorSettings.getExecutorResources(),
          "./",
          executorSettings.getExecutorFlags()).build();

      ExecutorInfo.Builder executorBuilder = configureTaskForExecutor(task, config, commandInfo);
      taskBuilder.setExecutor(executorBuilder.build());
    }

    /**
     * Configures the task to run inside a Docker container, translating the task's docker
     * image and parameters into a mesos ContainerInfo.
     */
    private void configureTaskForDockerContainer(
        IAssignedTask task,
        ITaskConfig taskConfig,
        TaskInfo.Builder taskBuilder) {

      IDockerContainer config = taskConfig.getContainer().getDocker();
      Iterable<Protos.Parameter> parameters = Iterables.transform(
          config.getParameters(),
          item -> Protos.Parameter.newBuilder()
              .setKey(item.getName())
              .setValue(item.getValue())
              .build());

      ContainerInfo.DockerInfo.Builder dockerBuilder = ContainerInfo.DockerInfo.newBuilder()
          .setImage(config.getImage())
          .addAllParameters(parameters);
      ContainerInfo.Builder containerBuilder = ContainerInfo.newBuilder()
          .setType(ContainerInfo.Type.DOCKER)
          .setDocker(dockerBuilder.build());

      configureContainerVolumes(containerBuilder);

      // TODO(SteveNiemitz): Allow users to specify an executor per container type.
      CommandInfo.Builder commandInfoBuilder = CommandUtil.create(
          executorSettings.getExecutorPath(),
          executorSettings.getExecutorResources(),
          "$MESOS_SANDBOX/",
          executorSettings.getExecutorFlags());

      ExecutorInfo.Builder execBuilder =
          configureTaskForExecutor(task, taskConfig, commandInfoBuilder.build())
              .setContainer(containerBuilder.build());
      taskBuilder.setExecutor(execBuilder.build());
    }

    /**
     * Builds the ExecutorInfo common to all container types: command, id, name, source
     * and the epsilon resources reserved for the executor itself.
     */
    private ExecutorInfo.Builder configureTaskForExecutor(
        IAssignedTask task,
        ITaskConfig config,
        CommandInfo commandInfo) {

      return ExecutorInfo.newBuilder()
          .setCommand(commandInfo)
          .setExecutorId(getExecutorId(task.getTaskId()))
          .setName(EXECUTOR_NAME)
          .setSource(getInstanceSourceName(config, task.getInstanceId()))
          .addAllResources(RESOURCES_EPSILON.toResourceList(tierManager.getTier(config)));
    }

    /**
     * Mounts the thermos observer root plus any globally-configured container mounts
     * into the container.
     */
    private void configureContainerVolumes(ContainerInfo.Builder containerBuilder) {
      containerBuilder.addVolumes(
          Volume.newBuilder()
              .setContainerPath(executorSettings.getThermosObserverRoot())
              .setHostPath(executorSettings.getThermosObserverRoot())
              .setMode(Volume.Mode.RW)
              .build());

      for (org.apache.aurora.gen.Volume v : executorSettings.getGlobalContainerMounts()) {
        // This has already been validated to be correct in ExecutorSettings().
        containerBuilder.addVolumes(
            Volume.newBuilder()
                .setHostPath(v.getHostPath())
                .setContainerPath(v.getContainerPath())
                .setMode(Volume.Mode.valueOf(v.getMode().getValue()))
                .build());
      }
    }
  }
}
package hudson.plugins.promoted_builds;

import antlr.ANTLRException;
import hudson.BulkChange;
import hudson.Extension;
import hudson.Util;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Action;
import hudson.model.AutoCompletionCandidates;
import hudson.model.ParameterValue;
import hudson.model.Cause;
import hudson.model.Cause.UserCause;
import hudson.model.DependencyGraph;
import hudson.model.Describable;
import hudson.model.Descriptor;
import hudson.model.Descriptor.FormException;
import hudson.model.Failure;
import hudson.model.FreeStyleProject;
import hudson.model.Hudson;
import hudson.model.ItemGroup;
import hudson.model.JDK;
import hudson.model.Job;
import hudson.model.Label;
import hudson.model.ParametersAction;
import hudson.model.PermalinkProjectAction.Permalink;
import hudson.model.Queue.Item;
import hudson.model.Run;
import hudson.model.Saveable;
import hudson.model.labels.LabelAtom;
import hudson.model.labels.LabelExpression;
import hudson.plugins.promoted_builds.conditions.ManualCondition.ManualApproval;
import hudson.security.ACL;
import hudson.tasks.BuildStep;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.Builder;
import hudson.tasks.Publisher;
import hudson.util.DescribableList;
import hudson.util.FormValidation;
import jenkins.model.Jenkins;
import jenkins.util.TimeDuration;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.HttpResponses;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;

import javax.servlet.ServletException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.logging.Logger;
import java.util.regex.Pattern;

/**
 * A dummy {@link AbstractProject} to carry out promotion operations.
 *
 * @author Kohsuke Kawaguchi
 */
public final class PromotionProcess extends AbstractProject<PromotionProcess,Promotion> implements Saveable, Describable<PromotionProcess> {
    /**
     * {@link PromotionCondition}s. All have to be met for a build to be promoted.
     */
    public final DescribableList<PromotionCondition,PromotionConditionDescriptor> conditions =
            new DescribableList<PromotionCondition, PromotionConditionDescriptor>(this);

    /**
     * The icon that represents this promotion process. This is the name of
     * the GIF icon that can be found in ${rootURL}/plugin/promoted-builds/icons/16x16/
     * and ${rootURL}/plugin/promoted-builds/icons/32x32/, e.g. <code>"star-gold"</code>.
     */
    public String icon;

    /**
     * The label that promotion process can be run on.
     */
    public String assignedLabel;

    // Build steps executed when a promotion of this process runs.
    private List<BuildStep> buildSteps = new ArrayList<BuildStep>();

    /*package*/ PromotionProcess(JobPropertyImpl property, String name) {
        super(property, name);
    }

    /*package*/ PromotionProcess(ItemGroup parent, String name) {
        super(parent, name);
    }

    /**
     * Creates unconnected {@link PromotionProcess} instance from the JSON configuration.
     * This is mostly only useful for capturing its configuration in XML format.
     */
    public static PromotionProcess fromJson(StaplerRequest req, JSONObject o) throws FormException, IOException {
        String name = o.getString("name");
        try {
            Hudson.checkGoodName(name);
        } catch (Failure f) {
            throw new Descriptor.FormException(f.getMessage(), name);
        }
        PromotionProcess p = new PromotionProcess(null,name);
        BulkChange bc = new BulkChange(p); // prevent it from trying to save to disk while we do this
        try {
            p.configure(req, o); // apply configuration
        } finally {
            bc.abort();
        }
        return p;
    }

    // Widens visibility of the rename hook so the owning property can invoke it.
    @Override
    public void doSetName(String name) {
        super.doSetName(name);
    }

    // Applies a submitted form (conditions, build steps, icon, label) and persists it.
    /*package*/ void configure(StaplerRequest req, JSONObject c) throws Descriptor.FormException, IOException {
        // apply configuration
        conditions.rebuild(req,c.optJSONObject("conditions"), PromotionCondition.all());

        buildSteps = (List)Descriptor.newInstancesFromHeteroList(
                req, c, "buildStep", (List) PromotionProcess.getAll());
        icon = c.getString("icon");
        if (c.optBoolean("hasAssignedLabel")) {
            assignedLabel = Util.fixEmptyAndTrim(c.optString("assignedLabelString"));
        } else {
            assignedLabel = null;
        }
        save();
    }

    /**
     * Returns the root project value.
     *
     * @return the root project value.
     */
    @Override
    public AbstractProject getRootProject() {
        return getParent().getOwner().getRootProject();
    }

    @Override
    public JobPropertyImpl getParent() {
        return (JobPropertyImpl)super.getParent();
    }

    /**
     * Gets the owner {@link AbstractProject} that configured {@link JobPropertyImpl} as
     * a job property.
     */
    public AbstractProject<?,?> getOwner() {
        return getParent().getOwner();
    }

    // Promotion processes carry no ACL of their own; delegate to the owning project.
    @Override
    public ACL getACL() {
        return getOwner().getACL();
    }

    /**
     * Get the promotion condition by referencing it fully qualified class name
     */
    public PromotionCondition getPromotionCondition(String promotionClassName) {
        for (PromotionCondition condition : conditions) {
            if (condition.getClass().getName().equals(promotionClassName)) {
                return condition;
            }
        }

        return null;
    }

    public DescribableList<Publisher, Descriptor<Publisher>> getPublishersList() {
        // TODO: extract from the buildsSteps field? Or should I separate builders and publishers?
        return new DescribableList<Publisher,Descriptor<Publisher>>(this);
    }

    // Promotions of this process materialize as Promotion runs.
    protected Class<Promotion> getBuildClass() {
        return Promotion.class;
    }

    public List<BuildStep> getBuildSteps() {
        return buildSteps;
    }

    /**
     * Gets the textual representation of the assigned label as it was entered by the user.
     */
    @Override
    public String getAssignedLabelString() {
        if (assignedLabel == null) return null;
        try {
            LabelExpression.parseExpression(assignedLabel);
            return assignedLabel;
        } catch (ANTLRException e) {
            // must be old label or host name that includes whitespace or other unsafe chars
            return LabelAtom.escape(assignedLabel);
        }
    }

    @Override
    public Label getAssignedLabel() {
        // Really would like to run on the exact node that the promoted build ran on,
        // not just the same label.. but at least this works if job is tied to one node:
        if (assignedLabel == null) return getOwner().getAssignedLabel();

        return Hudson.getInstance().getLabel(assignedLabel);
    }

    @Override
    public JDK getJDK() {
        return getOwner().getJDK();
    }

    /**
     * Gets the customWorkspace of the owner project.
     *
     * Support for FreeStyleProject only.
     * @return customWorkspace
     */
    public String getCustomWorkspace() {
        AbstractProject<?, ?> p = getOwner();
        if (p instanceof FreeStyleProject)
            return ((FreeStyleProject) p).getCustomWorkspace();
        return null;
    }

    /**
     * Get the icon name, without the extension. It will always return a non null
     * and non empty string, as <code>"star-gold"</code> is used for compatibility
     * for older promotions configurations.
     *
     * @return the icon name
     */
    public String getIcon() {
        return getIcon(icon);
    }

    /**
     * Handle compatibility with pre-1.8 configs.
     *
     * @param sIcon
     *      the name of the icon used by this promotion; if null or empty,
     *      we return the gold icon for compatibility with previous releases
     * @return the icon file name for this promotion
     */
    private static String getIcon(String sIcon) {
        if ((sIcon == null) || sIcon.equals(""))
            return "star-gold";
        else
            return sIcon;
    }

    /**
     * Get the badges of conditions that were passed for this promotion for the build
     */
    public List<PromotionBadge> getMetQualifications(AbstractBuild<?,?> build) {
        List<PromotionBadge> badges = new ArrayList<PromotionBadge>();

        for (PromotionCondition cond : conditions) {
            PromotionBadge b = cond.isMet(this, build);

            if (b != null)
                badges.add(b);
        }
        return badges;
    }

    /**
     * Get the conditions that have not been met for this promotion for the build
     */
    public List<PromotionCondition> getUnmetConditions(AbstractBuild<?,?> build) {
        List<PromotionCondition> unmetConditions = new ArrayList<PromotionCondition>();

        for (PromotionCondition cond : conditions) {
            if (cond.isMet(this, build) == null)
                unmetConditions.add(cond);
        }
        return unmetConditions;
    }

    /**
     * Checks if all the conditions to promote a build is met.
     *
     * @return
     *      null if promotion conditions are not met.
     *      otherwise returns a list of badges that record how the promotion happened.
     */
    public Status isMet(AbstractBuild<?,?> build) {
        List<PromotionBadge> badges = new ArrayList<PromotionBadge>();
        for (PromotionCondition cond : conditions) {
            PromotionBadge b = cond.isMet(this, build);
            if(b==null)
                return null;
            badges.add(b);
        }
        return new Status(this,badges);
    }

    /**
     * @deprecated
     *      Use {@link #considerPromotion2(AbstractBuild)}
     */
    public boolean considerPromotion(AbstractBuild<?,?> build) throws IOException {
        return considerPromotion2(build)!=null;
    }

    /**
     * Checks if the build is promotable, and if so, promote it.
     *
     * @return
     *      null if the build was not promoted, otherwise Future that kicks in when the build is completed.
     * @throws IOException
     */
    public Future<Promotion> considerPromotion2(AbstractBuild<?, ?> build) throws IOException {
        LOGGER.fine("Considering the promotion of "+build+" via "+getName()+" without parmeters");
        // If the build has manual approvals, use the parameters from it
        List<ParameterValue> params = new ArrayList<ParameterValue>();
        List<ManualApproval> approvals = build.getActions(ManualApproval.class);
        for (ManualApproval approval : approvals) {
            if (approval.name.equals(getName())) {
                LOGGER.fine("Getting parameters from existing manual promotion");
                params = approval.badge.getParameterValues();
                LOGGER.finer("Using paramters: "+params.toString());
            }
        }

        return considerPromotion2(build, params);
    }

    public Future<Promotion> considerPromotion2(AbstractBuild<?,?> build, List<ParameterValue> params) throws IOException {
        if (!isActive())
            return null;    // not active

        PromotedBuildAction a = build.getAction(PromotedBuildAction.class);

        // if it's already promoted, no need to do anything.
        if(a!=null && a.contains(this))
            return null;

        LOGGER.fine("Considering the promotion of "+build+" via "+getName()+" with parameters");
        Status qualification = isMet(build);
        if(qualification==null)
            return null; // not this time

        LOGGER.fine("Promotion condition of "+build+" is met: "+qualification);
        Future<Promotion> f = promote2(build, new UserCause(), qualification, params); // TODO: define promotion cause
        if (f==null)
            LOGGER.warning(build+" qualifies for a promotion but the queueing failed.");
        return f;
    }

    public void promote(AbstractBuild<?,?> build, Cause cause, PromotionBadge... badges) throws IOException {
        promote2(build,cause,new Status(this,Arrays.asList(badges)));
    }

    /**
     * @deprecated
     *      Use {@link #promote2(AbstractBuild, Cause, Status)}
     */
    public void promote(AbstractBuild<?,?> build, Cause cause, Status qualification) throws IOException {
        promote2(build,cause,qualification);
    }

    /**
     * Promote the given build by using the given qualification.
     *
     * @param cause
     *      Why the build is promoted?
     * @return
     *      Future to track the completion of the promotion.
     */
    public Future<Promotion> promote2(AbstractBuild<?,?> build, Cause cause, Status qualification) throws IOException {
        return promote2(build, cause, qualification, null);
    }

    public Future<Promotion> promote2(AbstractBuild<?,?> build, Cause cause, Status qualification, List<ParameterValue> params) throws IOException {
        PromotedBuildAction a = build.getAction(PromotedBuildAction.class);
        // build is qualified for a promotion.
        if(a!=null) {
            a.add(qualification);
        } else {
            build.addAction(new PromotedBuildAction(build,qualification));
            build.save();
        }

        // schedule promotion activity.
        return scheduleBuild2(build,cause, params);
    }

    /**
     * @deprecated
     *      You need to be using {@link #scheduleBuild(AbstractBuild)}
     */
    public boolean scheduleBuild() {
        return super.scheduleBuild();
    }

    public boolean scheduleBuild(AbstractBuild<?,?> build) {
        return scheduleBuild(build,new UserCause());
    }

    /**
     * @deprecated
     *      Use {@link #scheduleBuild2(AbstractBuild, Cause)}
     */
    public boolean scheduleBuild(AbstractBuild<?,?> build, Cause cause) {
        return scheduleBuild2(build,cause)!=null;
    }

    // Queues a Promotion run for the given target build, carrying its parameters along.
    public Future<Promotion> scheduleBuild2(AbstractBuild<?,?> build, Cause cause, List<ParameterValue> params) {
        assert build.getProject()==getOwner();

        List<Action> actions = new ArrayList<Action>();
        Promotion.buildParametersAction(actions, build, params);
        actions.add(new PromotionTargetAction(build)); // remember what build we are promoting

        return super.scheduleBuild2(0, cause, actions.toArray(new Action[actions.size()]));
    }

    // Direct builds of a promotion process make no sense; reject HTTP build requests.
    @Override
    public void doBuild(StaplerRequest req, StaplerResponse rsp, @QueryParameter TimeDuration delay) throws IOException, ServletException {
        throw HttpResponses.error(404, "Promotion processes may not be built directly");
    }

    public Future<Promotion> scheduleBuild2(AbstractBuild<?,?> build, Cause cause) {
        return scheduleBuild2(build, cause, null);
    }

    // True if a promotion of this process targeting the given build is already queued.
    public boolean isInQueue(AbstractBuild<?,?> build) {
        for (Item item : Hudson.getInstance().getQueue().getItems(this))
            if (item.getAction(PromotionTargetAction.class).resolve(this)==build)
                return true;
        return false;
    }

    //
    // these are dummy implementations to implement abstract methods.
    // need to think about what the implications are.
    //
    public boolean isFingerprintConfigured() {
        throw new UnsupportedOperationException();
    }

    protected void buildDependencyGraph(DependencyGraph graph) {
        throw new UnsupportedOperationException();
    }

    // All build-step descriptors (builders and publishers) applicable to promotion processes.
    public static List<Descriptor<? extends BuildStep>> getAll() {
        List<Descriptor<? extends BuildStep>> list = new ArrayList<Descriptor<? extends BuildStep>>();
        addTo(Builder.all(), list);
        addTo(Publisher.all(), list);
        return list;
    }

    // Copies descriptors applicable to PromotionProcess from source into list.
    private static void addTo(List<? extends Descriptor<? extends BuildStep>> source, List<Descriptor<? extends BuildStep>> list) {
        for (Descriptor<? extends BuildStep> d : source) {
            if (d instanceof BuildStepDescriptor) {
                BuildStepDescriptor bsd = (BuildStepDescriptor) d;
                if(bsd.isApplicable(PromotionProcess.class))
                    list.add(d);
            }
        }
    }

    // Exposes this promotion as a permalink resolving to the latest build promoted by it.
    public Permalink asPermalink() {
        return new Permalink() {
            @Override
            public String getDisplayName() {
                return Messages.PromotionProcess_PermalinkDisplayName(PromotionProcess.this.getDisplayName());
            }

            @Override
            public String getId() {
                return PromotionProcess.this.getName();
            }

            @Override
            public Run<?, ?> resolve(Job<?, ?> job) {
                String id = getId();
                for( Run<?,?> build : job.getBuilds() ) {
                    PromotedBuildAction a = build.getAction(PromotedBuildAction.class);
                    if(a!=null && a.contains(id))
                        return build;
                }
                return null;
            }
        };
    }

    public DescriptorImpl getDescriptor() {
        return (DescriptorImpl)Jenkins.getInstance().getDescriptorOrDie(getClass());
    }

    @Override
    public String getShortUrl() {
        // Must be overridden since JobPropertyImpl.getUrlChildPrefix is "" not "process" as you might expect (also see e50f0f5 in 1.519)
        return "process/" + Util.rawEncode(getName()) + '/';
    }

    public boolean isActive() {
        return !isDisabled();
    }

    @Extension
    public static class DescriptorImpl extends Descriptor<PromotionProcess> {
        @Override
        public String getDisplayName() {
            return "Promotion Process";
        }

        // Validates a label expression typed into the config form.
        public FormValidation doCheckLabelString(@QueryParameter String value) {
            if (Util.fixEmpty(value)==null)
                return FormValidation.ok(); // nothing typed yet
            try {
                Label.parseExpression(value);
            } catch (ANTLRException e) {
                return FormValidation.error(e,
                        Messages.JobPropertyImpl_LabelString_InvalidBooleanExpression(e.getMessage()));
            }
            // TODO: if there's an atom in the expression that is empty, report it
            if (Hudson.getInstance().getLabel(value).isEmpty())
                return FormValidation.warning(Messages.JobPropertyImpl_LabelString_NoMatch());
            return FormValidation.ok();
        }

        // Suggests label names matching the last term the user typed.
        public AutoCompletionCandidates doAutoCompleteAssignedLabelString(@QueryParameter String value) {
            AutoCompletionCandidates c = new AutoCompletionCandidates();
            Set<Label> labels = Hudson.getInstance().getLabels();
            List<String> queries = new AutoCompleteSeeder(value).getSeeds();

            for (String term : queries) {
                for (Label l : labels) {
                    if (l.getName().startsWith(term)) {
                        c.add(l.getName());
                    }
                }
            }
            return c;
        }

        /**
         * Utility class for taking the current input value and computing a list
         * of potential terms to match against the list of defined labels.
         */
        static class AutoCompleteSeeder {
            private String source;

            AutoCompleteSeeder(String source) {
                this.source = source;
            }

            // Extracts the term(s) to match: empty string means "suggest everything".
            List<String> getSeeds() {
                ArrayList<String> terms = new ArrayList();
                boolean trailingQuote = source.endsWith("\"");
                boolean leadingQuote = source.startsWith("\"");
                boolean trailingSpace = source.endsWith(" ");

                if (trailingQuote || (trailingSpace && !leadingQuote)) {
                    terms.add("");
                } else {
                    if (leadingQuote) {
                        int quote = source.lastIndexOf('"');
                        if (quote == 0) {
                            terms.add(source.substring(1));
                        } else {
                            terms.add("");
                        }
                    } else {
                        int space = source.lastIndexOf(' ');
                        if (space > -1) {
                            terms.add(source.substring(space + 1));
                        } else {
                            terms.add(source);
                        }
                    }
                }

                return terms;
            }
        }

        // exposed for Jelly
        public List<PromotionConditionDescriptor> getApplicableConditions(AbstractProject<?,?> p) {
            return p==null ? PromotionCondition.all() : PromotionCondition.getApplicableTriggers(p);
        }

        public List<PromotionConditionDescriptor> getApplicableConditions(Object context) {
            return PromotionCondition.all();
        }

        // exposed for Jelly
        public List<Descriptor<? extends BuildStep>> getApplicableBuildSteps() {
            return PromotionProcess.getAll();
        }

        // exposed for Jelly
        public final Class<PromotionProcess> promotionProcessType = PromotionProcess.class;

        // Validates the promotion process name typed into the config form.
        public FormValidation doCheckName(@QueryParameter String name) {
            name = Util.fixEmptyAndTrim(name);
            if (name == null) {
                return FormValidation.error(Messages.JobPropertyImpl_ValidateRequired());
            }

            try {
                Hudson.checkGoodName(name);
            } catch (Failure f) {
                return FormValidation.error(f.getMessage());
            }

            return FormValidation.ok();
        }
    }

    private static final Logger LOGGER = Logger.getLogger(PromotionProcess.class.getName());

    // Convenience overload: promote using the parameters recorded in a manual approval.
    public Future<Promotion> considerPromotion2(AbstractBuild<?, ?> build, ManualApproval approval) throws IOException {
        return considerPromotion2(build, approval.badge.getParameterValues());
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.open.OpenIndexResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.rest.RestStatus;

import java.io.IOException;
import java.util.Locale;

import static org.hamcrest.Matchers.equalTo;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;

import java.util.Map;

import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;

/**
 * Integration tests for the indices administration methods of the high-level REST client
 * (create / delete / open index), exercising both the synchronous and asynchronous variants
 * via {@code execute(...)}.
 */
public class IndicesClientIT extends ESRestHighLevelClientTestCase {

    @SuppressWarnings("unchecked")
    public void testCreateIndex() throws IOException {
        {
            // Create a plain index with no additional metadata.
            String indexName = "plain_index";
            assertFalse(indexExists(indexName));

            CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
            CreateIndexResponse createIndexResponse = execute(createIndexRequest,
                    highLevelClient().indices()::createIndex, highLevelClient().indices()::createIndexAsync);
            assertTrue(createIndexResponse.isAcknowledged());

            assertTrue(indexExists(indexName));
        }
        {
            // Create an index with mappings, aliases and settings, then verify all three
            // show up in the GET-index metadata.
            String indexName = "rich_index";
            assertFalse(indexExists(indexName));

            CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);

            // Alias with a term filter and explicit index routing.
            Alias alias = new Alias("alias_name");
            alias.filter("{\"term\":{\"year\":2016}}");
            alias.routing("1");
            createIndexRequest.alias(alias);

            Settings.Builder settings = Settings.builder();
            settings.put(SETTING_NUMBER_OF_REPLICAS, 2);
            createIndexRequest.settings(settings);

            // Single text field mapping under type "type_name".
            XContentBuilder mappingBuilder = JsonXContent.contentBuilder();
            mappingBuilder.startObject().startObject("properties").startObject("field");
            mappingBuilder.field("type", "text");
            mappingBuilder.endObject().endObject().endObject();
            createIndexRequest.mapping("type_name", mappingBuilder);

            CreateIndexResponse createIndexResponse = execute(createIndexRequest,
                    highLevelClient().indices()::createIndex, highLevelClient().indices()::createIndexAsync);
            assertTrue(createIndexResponse.isAcknowledged());

            Map<String, Object> indexMetaData = getIndexMetadata(indexName);

            Map<String, Object> settingsData = (Map) indexMetaData.get("settings");
            Map<String, Object> indexSettings = (Map) settingsData.get("index");
            assertEquals("2", indexSettings.get("number_of_replicas"));

            Map<String, Object> aliasesData = (Map) indexMetaData.get("aliases");
            Map<String, Object> aliasData = (Map) aliasesData.get("alias_name");
            assertEquals("1", aliasData.get("index_routing"));
            Map<String, Object> filter = (Map) aliasData.get("filter");
            Map<String, Object> term = (Map) filter.get("term");
            assertEquals(2016, term.get("year"));

            Map<String, Object> mappingsData = (Map) indexMetaData.get("mappings");
            Map<String, Object> typeData = (Map) mappingsData.get("type_name");
            Map<String, Object> properties = (Map) typeData.get("properties");
            Map<String, Object> field = (Map) properties.get("field");
            assertEquals("text", field.get("type"));
        }
    }

    public void testDeleteIndex() throws IOException {
        {
            // Deleting an existing index succeeds and the index is gone afterwards.
            String indexName = "test_index";
            createIndex(indexName);

            DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indexName);
            DeleteIndexResponse deleteIndexResponse = execute(deleteIndexRequest,
                    highLevelClient().indices()::deleteIndex, highLevelClient().indices()::deleteIndexAsync);
            assertTrue(deleteIndexResponse.isAcknowledged());

            assertFalse(indexExists(indexName));
        }
        {
            // Deleting a non-existent index surfaces as a 404.
            String nonExistentIndex = "non_existent_index";
            assertFalse(indexExists(nonExistentIndex));

            DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(nonExistentIndex);
            ElasticsearchException exception = expectThrows(ElasticsearchException.class,
                    () -> execute(deleteIndexRequest, highLevelClient().indices()::deleteIndex,
                            highLevelClient().indices()::deleteIndexAsync));
            assertEquals(RestStatus.NOT_FOUND, exception.status());
        }
    }

    public void testOpenExistingIndex() throws IOException {
        String[] indices = randomIndices(1, 5);
        for (String index : indices) {
            createIndex(index);
            closeIndex(index);
            // Searching a closed index must fail with 400 before we reopen it.
            ResponseException exception = expectThrows(ResponseException.class,
                    () -> client().performRequest("GET", index + "/_search"));
            assertThat(exception.getResponse().getStatusLine().getStatusCode(),
                    equalTo(RestStatus.BAD_REQUEST.getStatus()));
            assertThat(exception.getMessage().contains(index), equalTo(true));
        }

        OpenIndexRequest openIndexRequest = new OpenIndexRequest(indices);
        OpenIndexResponse openIndexResponse = execute(openIndexRequest, highLevelClient().indices()::openIndex,
                highLevelClient().indices()::openIndexAsync);
        assertTrue(openIndexResponse.isAcknowledged());

        // After opening, every index is searchable again.
        for (String index : indices) {
            Response response = client().performRequest("GET", index + "/_search");
            assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
        }
    }

    public void testOpenNonExistentIndex() throws IOException {
        String[] nonExistentIndices = randomIndices(1, 5);
        for (String nonExistentIndex : nonExistentIndices) {
            assertFalse(indexExists(nonExistentIndex));
        }

        // Default indices options: opening a missing index is a 404.
        OpenIndexRequest openIndexRequest = new OpenIndexRequest(nonExistentIndices);
        ElasticsearchException exception = expectThrows(ElasticsearchException.class,
                () -> execute(openIndexRequest, highLevelClient().indices()::openIndex,
                        highLevelClient().indices()::openIndexAsync));
        assertEquals(RestStatus.NOT_FOUND, exception.status());

        // Lenient options: missing indices are ignored and the request is acknowledged.
        OpenIndexRequest lenientOpenIndexRequest = new OpenIndexRequest(nonExistentIndices);
        lenientOpenIndexRequest.indicesOptions(IndicesOptions.lenientExpandOpen());
        OpenIndexResponse lenientOpenIndexResponse = execute(lenientOpenIndexRequest,
                highLevelClient().indices()::openIndex, highLevelClient().indices()::openIndexAsync);
        assertThat(lenientOpenIndexResponse.isAcknowledged(), equalTo(true));

        // Strict options: missing indices are an error again.
        OpenIndexRequest strictOpenIndexRequest = new OpenIndexRequest(nonExistentIndices);
        strictOpenIndexRequest.indicesOptions(IndicesOptions.strictExpandOpen());
        // BUGFIX: previously this lambda executed openIndexRequest (the default-options
        // request) instead of strictOpenIndexRequest, so the strict path was never tested.
        ElasticsearchException strictException = expectThrows(ElasticsearchException.class,
                () -> execute(strictOpenIndexRequest, highLevelClient().indices()::openIndex,
                        highLevelClient().indices()::openIndexAsync));
        assertEquals(RestStatus.NOT_FOUND, strictException.status());
    }

    /** Generates between {@code minIndicesNum} and {@code maxIndicesNum} random lowercase index names. */
    private static String[] randomIndices(int minIndicesNum, int maxIndicesNum) {
        int numIndices = randomIntBetween(minIndicesNum, maxIndicesNum);
        String[] indices = new String[numIndices];
        for (int i = 0; i < numIndices; i++) {
            indices[i] = "index-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT);
        }
        return indices;
    }

    /** Creates {@code index} via the low-level client and asserts the call succeeded. */
    private static void createIndex(String index) throws IOException {
        Response response = client().performRequest("PUT", index);
        assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
    }

    /** Returns true when a HEAD request for {@code index} yields 200. */
    private static boolean indexExists(String index) throws IOException {
        Response response = client().performRequest("HEAD", index);
        return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode();
    }

    /** Closes {@code index} via the low-level client and asserts the call succeeded. */
    private static void closeIndex(String index) throws IOException {
        Response response = client().performRequest("POST", index + "/_close");
        assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
    }

    /** Fetches the GET-index response for {@code index} and returns its per-index metadata map. */
    @SuppressWarnings("unchecked")
    private Map<String, Object> getIndexMetadata(String index) throws IOException {
        Response response = client().performRequest("GET", index);

        XContentType entityContentType = XContentType.fromMediaTypeOrFormat(
                response.getEntity().getContentType().getValue());
        Map<String, Object> responseEntity = XContentHelper.convertToMap(entityContentType.xContent(),
                response.getEntity().getContent(), false);

        Map<String, Object> indexMetaData = (Map) responseEntity.get(index);
        assertNotNull(indexMetaData);

        return indexMetaData;
    }
}
// Copyright 2012 Google Inc. All Rights Reserved.

package com.google.typography.font.tools.fontinfo;

import com.google.typography.font.sfntly.Font;

import com.beust.jcommander.JCommander;
import com.beust.jcommander.ParameterException;

import java.io.IOException;

/**
 * This is the main class for the command-line version of the font info tool
 *
 * @author Han-Wen Yeh
 *
 */
public class FontInfoMain {
  private static final String PROGRAM_NAME = "java -jar fontinfo.jar";

  /**
   * Parses the command line, loads the font file, and prints the requested
   * report sections for every font in the file. Each section is delegated to
   * a dedicated helper so the dispatch logic stays readable.
   */
  public static void main(String[] args) {
    CommandOptions options = new CommandOptions();
    JCommander commander = null;
    try {
      commander = new JCommander(options, args);
    } catch (ParameterException e) {
      // Invalid arguments: report the problem, then force help output below.
      System.out.println(e.getMessage());
      commander = new JCommander(options, "--help");
    }

    // Display help
    if (options.help) {
      commander.setProgramName(PROGRAM_NAME);
      commander.usage();
      return;
    }

    // No font loaded
    if (options.files.size() != 1) {
      System.out.println(
          "Please specify a single font. Try '" + PROGRAM_NAME + " --help' for more information.");
      return;
    }

    // Default option: with no section flags given, show general information.
    if (!(options.metrics || options.general || options.cmap || options.chars || options.blocks
        || options.scripts || options.glyphs || options.all)) {
      options.general = true;
    }

    // Obtain file name
    String fileName = options.files.get(0);

    // Load font
    Font[] fonts = null;
    try {
      fonts = FontUtils.getFonts(fileName);
    } catch (IOException e) {
      System.out.println("Unable to load font " + fileName);
      return;
    }

    for (int i = 0; i < fonts.length; i++) {
      Font font = fonts[i];

      if (fonts.length > 1 && !options.csv) {
        System.out.println("==== Information for font index " + i + " ====\n");
      }

      // Section order matters for the report layout; keep it stable.
      if (options.general || options.all) {
        printGeneral(font, options, fileName, i);
      }
      if (options.metrics || options.all) {
        printMetrics(font, options, fileName, i);
      }
      if (options.metrics || options.glyphs || options.all) {
        printGlyphMetrics(font, options, fileName, i);
      }
      if (options.cmap || options.all) {
        printCmaps(font, options, fileName, i);
      }
      if (options.blocks || options.all) {
        printBlocks(font, options, fileName, i);
      }
      if (options.scripts || options.all) {
        printScripts(font, options, fileName, i);
      }
      if (options.chars || options.all) {
        printChars(font, options, fileName, i);
      }
      if (options.glyphs || options.all) {
        printGlyphs(font, options, fileName, i);
      }
    }
  }

  /** Prints the sfnt version, font table list and name table entries. */
  private static void printGeneral(Font font, CommandOptions options, String fileName, int i) {
    if (options.csv) {
      System.out.println(String.format("sfnt version: %s", FontInfo.sfntVersion(font)));
      System.out.println();
      System.out.println("Font Tables");
      System.out.println(
          prependDataAndBuildCsv(FontInfo.listTables(font).csvStringArray(), fileName, i));
      System.out.println();
      System.out.println("Name Table Entries:");
      System.out.println(
          prependDataAndBuildCsv(FontInfo.listNameEntries(font).csvStringArray(), fileName, i));
      System.out.println();
    } else {
      System.out.println(String.format("sfnt version: %s", FontInfo.sfntVersion(font)));
      System.out.println();
      System.out.println("Font Tables:");
      FontInfo.listTables(font).prettyPrint();
      System.out.println();
      System.out.println("Name Table Entries:");
      FontInfo.listNameEntries(font).prettyPrint();
      System.out.println();
    }
  }

  /** Prints font-wide metrics. */
  private static void printMetrics(Font font, CommandOptions options, String fileName, int i) {
    if (options.csv) {
      System.out.println("Font Metrics:");
      System.out.println(
          prependDataAndBuildCsv(FontInfo.listFontMetrics(font).csvStringArray(), fileName, i));
      System.out.println();
    } else {
      System.out.println("Font Metrics:");
      FontInfo.listFontMetrics(font).prettyPrint();
      System.out.println();
    }
  }

  /** Prints per-glyph dimension bounds. */
  private static void printGlyphMetrics(Font font, CommandOptions options, String fileName, int i) {
    if (options.csv) {
      System.out.println("Glyph Metrics:");
      System.out.println(prependDataAndBuildCsv(
          FontInfo.listGlyphDimensionBounds(font).csvStringArray(), fileName, i));
      System.out.println();
    } else {
      System.out.println("Glyph Metrics:");
      FontInfo.listGlyphDimensionBounds(font).prettyPrint();
      System.out.println();
    }
  }

  /** Prints the cmap subtables present in the font. */
  private static void printCmaps(Font font, CommandOptions options, String fileName, int i) {
    if (options.csv) {
      System.out.println("Cmaps in the font:");
      System.out.println(
          prependDataAndBuildCsv(FontInfo.listCmaps(font).csvStringArray(), fileName, i));
      System.out.println();
    } else {
      System.out.println("Cmaps in the font:");
      FontInfo.listCmaps(font).prettyPrint();
      System.out.println();
    }
  }

  /** Prints Unicode block coverage. */
  private static void printBlocks(Font font, CommandOptions options, String fileName, int i) {
    if (options.csv) {
      System.out.println("Unicode block coverage:");
      System.out.println(prependDataAndBuildCsv(
          FontInfo.listCharBlockCoverage(font).csvStringArray(), fileName, i));
      System.out.println();
    } else {
      System.out.println("Unicode block coverage:");
      FontInfo.listCharBlockCoverage(font).prettyPrint();
      System.out.println();
    }
  }

  /** Prints Unicode script coverage, optionally with uncovered code points. */
  private static void printScripts(Font font, CommandOptions options, String fileName, int i) {
    if (options.csv) {
      System.out.println("Unicode script coverage:");
      System.out.println(prependDataAndBuildCsv(
          FontInfo.listScriptCoverage(font).csvStringArray(), fileName, i));
      System.out.println();
      if (options.detailed) {
        System.out.println("Uncovered code points in partially-covered scripts:");
        System.out.println(prependDataAndBuildCsv(
            FontInfo.listCharsNeededToCoverScript(font).csvStringArray(), fileName, i));
        System.out.println();
      }
    } else {
      System.out.println("Unicode script coverage:");
      FontInfo.listScriptCoverage(font).prettyPrint();
      System.out.println();
      if (options.detailed) {
        System.out.println("Uncovered code points in partially-covered scripts:");
        FontInfo.listCharsNeededToCoverScript(font).prettyPrint();
        System.out.println();
      }
    }
  }

  /** Prints the characters that map to valid glyphs (pretty output also shows a total). */
  private static void printChars(Font font, CommandOptions options, String fileName, int i) {
    if (options.csv) {
      System.out.println("Characters with valid glyphs:");
      System.out.println(
          prependDataAndBuildCsv(FontInfo.listChars(font).csvStringArray(), fileName, i));
      System.out.println();
    } else {
      System.out.println("Characters with valid glyphs:");
      FontInfo.listChars(font).prettyPrint();
      System.out.println();
      System.out.println(String.format(
          "Total number of characters with valid glyphs: %d", FontInfo.numChars(font)));
      System.out.println();
    }
  }

  /** Prints hinting size, unmapped-glyph stats and subglyph usage. */
  private static void printGlyphs(Font font, CommandOptions options, String fileName, int i) {
    DataDisplayTable unmappedGlyphs = FontInfo.listUnmappedGlyphs(font);
    if (options.csv) {
      System.out.println(String.format("Total hinting size: %s", FontInfo.hintingSize(font)));
      System.out.println(String.format(
          "Number of unmapped glyphs: %d / %d", unmappedGlyphs.getNumRows(),
          FontInfo.numGlyphs(font)));
      System.out.println();
      if (options.detailed) {
        System.out.println("Unmapped glyphs:");
        System.out.println(
            prependDataAndBuildCsv(unmappedGlyphs.csvStringArray(), fileName, i));
        System.out.println();
      }
      System.out.println("Subglyphs used by characters in the font:");
      System.out.println(prependDataAndBuildCsv(
          FontInfo.listSubglyphFrequency(font).csvStringArray(), fileName, i));
      System.out.println();
    } else {
      System.out.println(String.format("Total hinting size: %s", FontInfo.hintingSize(font)));
      System.out.println(String.format(
          "Number of unmapped glyphs: %d / %d", unmappedGlyphs.getNumRows(),
          FontInfo.numGlyphs(font)));
      System.out.println();
      if (options.detailed) {
        System.out.println("Unmapped glyphs:");
        unmappedGlyphs.prettyPrint();
        System.out.println();
      }
      System.out.println("Subglyphs used by characters in the font:");
      FontInfo.listSubglyphFrequency(font).prettyPrint();
      System.out.println();
    }
  }

  /**
   * Prepends "Font,font index" columns to a CSV table so rows from multiple
   * fonts/files can be distinguished when concatenated.
   *
   * @param arr CSV rows; arr[0] is the header row (assumed non-empty — TODO confirm)
   * @param fontName the font file name to prepend to each data row
   * @param fontIndex the index of the font within the file
   * @return the full CSV table as a single newline-terminated string
   */
  private static String prependDataAndBuildCsv(String[] arr, String fontName, int fontIndex) {
    StringBuilder output = new StringBuilder("Font,font index,").append(arr[0]).append('\n');
    for (int i = 1; i < arr.length; i++) {
      String row = arr[i];
      output.append(fontName)
          .append(',')
          .append("font index ")
          .append(fontIndex)
          .append(',')
          .append(row)
          .append('\n');
    }
    return output.toString();
  }
}
/*******************************************************************************
 * Manchester Centre for Integrative Systems Biology
 * University of Manchester
 * Manchester M1 7ND
 * United Kingdom
 *
 * Copyright (C) 2007 University of Manchester
 *
 * This program is released under the Academic Free License ("AFL") v3.0.
 * (http://www.opensource.org/licenses/academic.php)
 *******************************************************************************/
package org.mcisb.ui.util.data;

import java.awt.*;
import java.beans.*;
import java.util.*;
import javax.swing.*;
import org.mcisb.ui.util.*;
import org.mcisb.util.*;
import org.mcisb.util.data.*;

/**
 * Panel that hosts one or more DataDisplayPanels in a tabbed layout, wires each
 * to a ListManager for spectra selection, and optionally overlays a
 * ManipulatorPanel for mouse-driven manipulation.
 *
 * @author Neil Swainston
 */
public class DataDisplayManagerPanel extends JPanel implements PropertyChangeListener, Disposable
{
	/**
	 * 
	 */
	private static final long serialVersionUID = 1L;

	/**
	 * Title label shown above the tabbed display.
	 */
	protected final JLabel label = new JLabel();

	/**
	 * 
	 */
	protected final DataDisplayPanel[] dataDisplayPanels;

	/**
	 * Whether the x-range is reset whenever new spectra are displayed.
	 */
	private final boolean resetXRange;

	/**
	 * One ListManager per display panel; fires OBJECT events on selection.
	 */
	private final ListManager[] listManagers;

	/**
	 * Layered panes used only for manipulatable panels (display + overlay).
	 */
	private final ResizableJLayeredPane[] layeredPanes;

	/**
	 * Mouse-listening overlays; null for non-manipulatable panels.
	 */
	private final ManipulatorPanel[] manipulatorPanels;

	/**
	 * Convenience constructor for a single display panel.
	 *
	 * @param dataDisplayPanel
	 * @param data
	 * @param manipulatable
	 * @param resetXRange
	 */
	public DataDisplayManagerPanel( final DataDisplayPanel dataDisplayPanel, final java.util.List<Spectra> data, final boolean manipulatable, final boolean resetXRange )
	{
		this( new DataDisplayPanel[] { dataDisplayPanel }, Arrays.asList( data ), new boolean[] { manipulatable }, resetXRange );
	}

	/**
	 * 
	 * @param dataDisplayPanels
	 * @param data one spectra list per display panel, index-aligned
	 * @param manipulatables index-aligned flags enabling mouse manipulation
	 * @param resetXRange
	 */
	public DataDisplayManagerPanel( final DataDisplayPanel[] dataDisplayPanels, final java.util.List<java.util.List<Spectra>> data, final boolean[] manipulatables, final boolean resetXRange )
	{
		super( new BorderLayout() );
		this.dataDisplayPanels = Arrays.copyOf( dataDisplayPanels, dataDisplayPanels.length );
		this.resetXRange = resetXRange;

		listManagers = new ListManager[ dataDisplayPanels.length ];
		layeredPanes = new ResizableJLayeredPane[ dataDisplayPanels.length ];
		manipulatorPanels = new ManipulatorPanel[ dataDisplayPanels.length ];

		setBackground( Color.WHITE );
		label.setHorizontalAlignment( SwingConstants.CENTER );
		add( label, BorderLayout.NORTH );

		final JTabbedPane tabbedPane = new JTabbedPane();

		for( int i = 0; i < dataDisplayPanels.length; i++ )
		{
			if( manipulatables[ i ] )
			{
				// Overlay a ManipulatorPanel on top of the display panel; it listens
				// for both clicks and drags (see dispose() for the matching removals).
				manipulatorPanels[ i ] = new ManipulatorPanel( dataDisplayPanels[ i ] );
				dataDisplayPanels[ i ].addMouseListener( manipulatorPanels[ i ] );
				dataDisplayPanels[ i ].addMouseMotionListener( manipulatorPanels[ i ] );

				layeredPanes[ i ] = new ResizableJLayeredPane();
				layeredPanes[ i ].add( dataDisplayPanels[ i ], JLayeredPane.DEFAULT_LAYER );
				layeredPanes[ i ].add( manipulatorPanels[ i ], JLayeredPane.DRAG_LAYER );
				tabbedPane.addTab( dataDisplayPanels[ i ].getName(), null, layeredPanes[ i ], dataDisplayPanels[ i ].getToolTipText() );
			}
			else
			{
				tabbedPane.addTab( dataDisplayPanels[ i ].getName(), null, dataDisplayPanels[ i ], dataDisplayPanels[ i ].getToolTipText() );
			}

			listManagers[ i ] = new ListManager( data.get( i ) );
			listManagers[ i ].addPropertyChangeListener( this );
		}

		final JComponent component = new ListManagerPanel( listManagers );
		component.setOpaque( false );
		add( component, BorderLayout.SOUTH );

		for( int i = 0; i < listManagers.length; i++ )
		{
			listManagers[ i ].init();
		}

		// With a single panel, skip the tab chrome and embed the panel directly.
		final int FIRST = 0;
		add( ( dataDisplayPanels.length > 1 ) ? tabbedPane : tabbedPane.getComponent( FIRST ), BorderLayout.CENTER );
	}

	/**
	 * 
	 * @param title
	 */
	public void setTitle( final String title )
	{
		label.setText( title );
		repaint();
	}

	/**
	 * 
	 * @param index
	 * @return DataDisplayPanel
	 */
	public DataDisplayPanel getDataDisplayPanel( int index )
	{
		return dataDisplayPanels[ index ];
	}

	/*
	 * (non-Javadoc)
	 * 
	 * @see java.beans.PropertyChangeListener#propertyChange(java.beans.
	 * PropertyChangeEvent)
	 */
	@Override
	public void propertyChange( final PropertyChangeEvent e )
	{
		final String propertyName = e.getPropertyName();

		if( propertyName.equals( ListManager.OBJECT ) )
		{
			final Object data = e.getNewValue();

			if( data instanceof Spectra )
			{
				try
				{
					final Spectra newSpectra = (Spectra)data;
					final Collection<String> labels = new LinkedHashSet<>();

					for( Iterator<Spectrum> iterator = newSpectra.iterator(); iterator.hasNext(); )
					{
						labels.add( iterator.next().getLabel() );
					}

					// Strip the surrounding "[" and "]" from Arrays.toString output.
					final int BRACKET_LENGTH = 1;
					final String title = Arrays.toString( labels.toArray() );
					setTitle( title.substring( BRACKET_LENGTH, title.length() - BRACKET_LENGTH ) );

					// Route the new spectra to the panel paired with the firing ListManager.
					dataDisplayPanels[ Arrays.asList( listManagers ).indexOf( e.getSource() ) ].setSpectra( newSpectra );

					if( resetXRange )
					{
						dataDisplayPanels[ Arrays.asList( listManagers ).indexOf( e.getSource() ) ].reset();
					}
				}
				catch( Exception ex )
				{
					final JDialog errorDialog = new ExceptionComponentFactory().getExceptionDialog( null, ExceptionUtils.toString( ex ), ex );
					ComponentUtils.setLocationCentral( errorDialog );
					errorDialog.setVisible( true );
				}
			}
		}
	}

	/*
	 * 
	 * (non-Javadoc)
	 * 
	 * @see org.mcisb.util.Disposable#dispose()
	 */
	@Override
	public void dispose()
	{
		for( int i = 0; i < dataDisplayPanels.length; i++ )
		{
			if( listManagers[ i ] != null )
			{
				listManagers[ i ].removePropertyChangeListener( this );
			}

			if( layeredPanes[ i ] != null )
			{
				layeredPanes[ i ].dispose();
			}

			if( manipulatorPanels[ i ] != null )
			{
				// BUGFIX: the constructor registers the manipulator as BOTH a mouse
				// listener and a mouse-motion listener, but only the former was
				// removed here, leaking the motion-listener reference after disposal.
				dataDisplayPanels[ i ].removeMouseListener( manipulatorPanels[ i ] );
				dataDisplayPanels[ i ].removeMouseMotionListener( manipulatorPanels[ i ] );
			}

			dataDisplayPanels[ i ].dispose();
		}
	}
}
package gov.va.hmp.healthtime; import gov.va.hmp.healthtime.*; import org.joda.time.*; import org.junit.Assert; import org.junit.Test; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class PointInTimeTest { @Test public void testClone() { PointInTime t = new PointInTime(1975, 7, 23, 3, 52); PointInTime clonedT = t.clone(); assertEquals(t, clonedT); Assert.assertNotSame(t, clonedT); } @Test public void testCreateWithYear() { PointInTime t = new PointInTime(1975); Assert.assertEquals(Precision.YEAR, t.getPrecision()); Assert.assertFalse(t.isMonthSet()); Assert.assertFalse(t.isDateSet()); Assert.assertFalse(t.isHourSet()); Assert.assertFalse(t.isMinuteSet()); Assert.assertFalse(t.isSecondSet()); Assert.assertFalse(t.isMillisecondSet()); Assert.assertEquals(1975, t.getYear()); Assert.assertEquals("1975", t.toString()); try { t.getMonth(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } try { t.getDate(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } try { t.getHour(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } try { t.getMinute(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } try { t.getSecond(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } try { t.getMillisecond(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch 
(ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } } @Test public void testCreateWithYearAndMonth() { PointInTime t = new PointInTime(1975, 7); Assert.assertEquals(Precision.MONTH, t.getPrecision()); Assert.assertTrue(t.isMonthSet()); Assert.assertFalse(t.isDateSet()); Assert.assertFalse(t.isHourSet()); Assert.assertFalse(t.isMinuteSet()); Assert.assertFalse(t.isSecondSet()); Assert.assertFalse(t.isMillisecondSet()); Assert.assertEquals(1975, t.getYear()); Assert.assertEquals(7, t.getMonth()); Assert.assertEquals("197507", t.toString()); try { t.getDate(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } try { t.getHour(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } try { t.getMinute(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } try { t.getSecond(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } try { t.getMillisecond(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } } @Test public void testCreateWithYearMonthAndDate() { PointInTime t = new PointInTime(1975, 7, 23); Assert.assertEquals(Precision.DATE, t.getPrecision()); Assert.assertTrue(t.isMonthSet()); Assert.assertTrue(t.isDateSet()); Assert.assertFalse(t.isHourSet()); Assert.assertFalse(t.isMinuteSet()); Assert.assertFalse(t.isSecondSet()); Assert.assertFalse(t.isMillisecondSet()); Assert.assertEquals(1975, t.getYear()); Assert.assertEquals(7, t.getMonth()); Assert.assertEquals(23, t.getDate()); Assert.assertEquals("19750723", 
t.toString()); try { t.getHour(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } try { t.getMinute(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } try { t.getSecond(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } try { t.getMillisecond(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } } @Test public void testCreateWithYearMonthDateAndHour() { PointInTime t = new PointInTime(1975, 7, 23, 15); Assert.assertEquals(Precision.HOUR, t.getPrecision()); Assert.assertTrue(t.isMonthSet()); Assert.assertTrue(t.isDateSet()); Assert.assertTrue(t.isHourSet()); Assert.assertFalse(t.isMinuteSet()); Assert.assertFalse(t.isSecondSet()); Assert.assertFalse(t.isMillisecondSet()); Assert.assertEquals(1975, t.getYear()); Assert.assertEquals(7, t.getMonth()); Assert.assertEquals(23, t.getDate()); Assert.assertEquals(15, t.getHour()); Assert.assertEquals("1975072315", t.toString()); try { t.getMinute(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } try { t.getSecond(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } try { t.getMillisecond(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } } @Test public void testCreateWithYearMonthDateHourAndMinute() { PointInTime t = new PointInTime(1975, 7, 23, 15, 23); 
Assert.assertEquals(Precision.MINUTE, t.getPrecision()); Assert.assertTrue(t.isMonthSet()); Assert.assertTrue(t.isDateSet()); Assert.assertTrue(t.isHourSet()); Assert.assertTrue(t.isMinuteSet()); Assert.assertFalse(t.isSecondSet()); Assert.assertFalse(t.isMillisecondSet()); Assert.assertEquals(1975, t.getYear()); Assert.assertEquals(7, t.getMonth()); Assert.assertEquals(23, t.getDate()); Assert.assertEquals(15, t.getHour()); Assert.assertEquals(23, t.getMinute()); Assert.assertEquals("197507231523", t.toString()); try { t.getSecond(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } try { t.getMillisecond(); Assert.fail("expected " + ImprecisePointInTimeException.class); } catch (ImprecisePointInTimeException ex) { Assert.assertTrue(t == ex.getPointInTime()); } } @Test public void testCreateWithYearMonthDateHourMinuteAndSecond() { PointInTime t = new PointInTime(1975, 7, 23, 15, 23, 42); Assert.assertEquals(Precision.SECOND, t.getPrecision()); Assert.assertTrue(t.isMonthSet()); Assert.assertTrue(t.isDateSet()); Assert.assertTrue(t.isHourSet()); Assert.assertTrue(t.isMinuteSet()); Assert.assertTrue(t.isSecondSet()); Assert.assertFalse(t.isMillisecondSet()); Assert.assertEquals(1975, t.getYear()); Assert.assertEquals(7, t.getMonth()); Assert.assertEquals(23, t.getDate()); Assert.assertEquals(15, t.getHour()); Assert.assertEquals(23, t.getMinute()); Assert.assertEquals(42, t.getSecond()); Assert.assertEquals("19750723152342", t.toString()); } @Test public void testCreateWithYearMonthDateHourMinuteSecondAndMillisecond() { PointInTime t = new PointInTime(1975, 7, 23, 15, 23, 42, 398); Assert.assertEquals(Precision.MILLISECOND, t.getPrecision()); Assert.assertTrue(t.isMonthSet()); Assert.assertTrue(t.isDateSet()); Assert.assertTrue(t.isHourSet()); Assert.assertTrue(t.isMinuteSet()); Assert.assertTrue(t.isSecondSet()); Assert.assertTrue(t.isMillisecondSet()); 
Assert.assertEquals(1975, t.getYear()); Assert.assertEquals(7, t.getMonth()); Assert.assertEquals(23, t.getDate()); Assert.assertEquals(15, t.getHour()); Assert.assertEquals(23, t.getMinute()); Assert.assertEquals(42, t.getSecond()); Assert.assertEquals(398, t.getMillisecond()); Assert.assertEquals("19750723152342.398", t.toString()); } @Test public void testCreateFromHL7String() throws Exception { PointInTime t = new PointInTime("19750723152342.398"); Assert.assertEquals(Precision.MILLISECOND, t.getPrecision()); Assert.assertTrue(t.isMonthSet()); Assert.assertTrue(t.isDateSet()); Assert.assertTrue(t.isHourSet()); Assert.assertTrue(t.isMinuteSet()); Assert.assertTrue(t.isSecondSet()); Assert.assertTrue(t.isMillisecondSet()); Assert.assertEquals(1975, t.getYear()); Assert.assertEquals(7, t.getMonth()); Assert.assertEquals(23, t.getDate()); Assert.assertEquals(15, t.getHour()); Assert.assertEquals(23, t.getMinute()); Assert.assertEquals(42, t.getSecond()); Assert.assertEquals(398, t.getMillisecond()); Assert.assertEquals("19750723152342.398", t.toString()); } @Test public void testEquality() { PointInTime t1 = new PointInTime(1975); PointInTime t2 = new PointInTime(1975); Assert.assertEquals(t1.getPrecision(), t2.getPrecision()); assertEquals(t1, t2); t1 = new PointInTime(1975, 7); t2 = new PointInTime(1975, 7); assertEquals(t1, t2); t1 = new PointInTime(1975, 7, 23); t2 = new PointInTime(1975, 7, 23); assertEquals(t1, t2); t1 = new PointInTime(1975, 7, 23, 10); t2 = new PointInTime(1975, 7, 23, 10); assertEquals(t1, t2); t1 = new PointInTime(1975, 7, 23, 10, 54); t2 = new PointInTime(1975, 7, 23, 10, 54); assertEquals(t1, t2); t1 = new PointInTime(1975, 7, 23, 10, 54, 41); t2 = new PointInTime(1975, 7, 23, 10, 54, 41); assertEquals(t1, t2); } @Test public void testInequalityOfValue() { PointInTime t1 = new PointInTime(1975); PointInTime t2 = new PointInTime(1984); assertNotEquals(t1, t2); t1 = new PointInTime(1975, 7); t2 = new PointInTime(1975, 3); 
// NOTE(review): continuation of an equality test begun above this chunk —
// each pair below differs in exactly one component, so equals() must be false.
assertNotEquals(t1, t2);
t1 = new PointInTime(1975, 7, 23);
t2 = new PointInTime(1975, 7, 11);
assertNotEquals(t1, t2);
t1 = new PointInTime(1975, 7, 23, 10);
t2 = new PointInTime(1975, 7, 23, 6);
assertNotEquals(t1, t2);
t1 = new PointInTime(1975, 7, 23, 10, 54);
t2 = new PointInTime(1975, 7, 23, 10, 31);
assertNotEquals(t1, t2);
t1 = new PointInTime(1975, 7, 23, 10, 54, 17);
t2 = new PointInTime(1975, 7, 23, 10, 54, 47);
assertNotEquals(t1, t2);
}

/** Points sharing a common prefix but carrying different precisions are never equal. */
@Test
public void testInequalityOfDifferentPrecision() {
  PointInTime[] times = new PointInTime[6];
  times[0] = new PointInTime(1975);
  times[1] = new PointInTime(1975, 7);
  times[2] = new PointInTime(1975, 7, 23);
  times[3] = new PointInTime(1975, 7, 23, 10);
  times[4] = new PointInTime(1975, 7, 23, 10, 54);
  times[5] = new PointInTime(1975, 7, 23, 10, 54, 41);
  // every distinct pair (i != j) must be unequal, in both directions
  for (int i = 0; i < times.length; i++) {
    for (int j = 0; j < times.length; j++) {
      if (i == j) continue;
      assertNotEquals(times[i], times[j]);
    }
  }
}

/** compareTo ordering for points whose precision matches. */
@Test
public void testComparisonOfSamePrecision() {
  PointInTime t1 = new PointInTime(1975, 7, 23);
  PointInTime t2 = new PointInTime(1984, 3, 11);
  Assert.assertTrue(t1.compareTo(t2) < 0);
  Assert.assertTrue(t2.compareTo(t1) > 0);
  t1 = new PointInTime(1975, 7, 23, 10, 30);
  t2 = new PointInTime(1984, 7, 23, 10, 30);
  Assert.assertTrue(t1.compareTo(t2) < 0);
  Assert.assertTrue(t2.compareTo(t1) > 0);
  t1 = new PointInTime(1984, 7, 22, 10);
  t2 = new PointInTime(1984, 7, 23, 10);
  Assert.assertTrue(t1.compareTo(t2) < 0);
  Assert.assertTrue(t2.compareTo(t1) > 0);
  // identical points compare as equal in both directions
  t1 = new PointInTime(1984, 7, 23, 10);
  t2 = new PointInTime(1984, 7, 23, 10);
  Assert.assertTrue(t1.compareTo(t2) == 0);
  Assert.assertTrue(t2.compareTo(t1) == 0);
}

/** compareTo must still order points even when their precisions differ. */
@Test
public void testComparisonOfDifferentPrecision() {
  PointInTime t1 = new PointInTime(1975, 7, 23);
  PointInTime t2 = new PointInTime(1984, 3, 11, 10);
  Assert.assertTrue(t1.compareTo(t2) < 0);
  Assert.assertTrue(t2.compareTo(t1) > 0);
  t1 = new PointInTime(2004, 12, 16, 18, 0);
  t2 = new PointInTime(2004, 12, 16, 18, 0, 1);
  // the precision enum itself is also ordered, coarser before finer
  Assert.assertTrue(t1.getPrecision().compareTo(t2.getPrecision()) < 0);
  Assert.assertTrue(t2.getPrecision().compareTo(t1.getPrecision()) > 0);
  Assert.assertTrue(t1.compareTo(t2) < 0);
  Assert.assertTrue(t2.compareTo(t1) > 0);
  t1 = new PointInTime(2004, 12, 15);
  t2 = new PointInTime(2004, 12, 16, 18, 0, 1);
  Assert.assertTrue(t1.compareTo(t2) < 0);
  Assert.assertTrue(t2.compareTo(t1) > 0);
  t1 = new PointInTime(2004, 12, 17);
  t2 = new PointInTime(2004, 12, 16, 18, 0);
  Assert.assertTrue(t1.compareTo(t2) > 0);
  Assert.assertTrue(t2.compareTo(t1) < 0);
}

/** Any point compares greater than null. */
@Test
public void testComparisonWithNull() {
  PointInTime t1 = new PointInTime(1975, 7, 23);
  Assert.assertTrue(t1.compareTo(null) > 0);
}

/** Adding a day-based Period rolls the month correctly. */
@Test
public void testAddPeriod() {
  PointInTime t = new PointInTime(2003, 9, 9);
  t = t.add(Period.days(60));
  Assert.assertEquals(2003, t.getYear());
  Assert.assertEquals(11, t.getMonth());
  Assert.assertEquals(8, t.getDate());
}

@Test
public void testAddYears() {
  PointInTime t = new PointInTime(2003, 9, 9);
  t = t.add(Period.years(2));
  Assert.assertEquals(2005, t.getYear());
  Assert.assertEquals(9, t.getMonth());
  Assert.assertEquals(9, t.getDate());
}

@Test
public void testAddMonths() {
  PointInTime t = new PointInTime(2003, 9, 9);
  t = t.add(Period.months(18));
  Assert.assertEquals(2005, t.getYear());
  Assert.assertEquals(3, t.getMonth());
  Assert.assertEquals(9, t.getDate());
}

/** Week arithmetic via addWeeks and via Weeks; precision is preserved. */
@Test
public void testAddWeeks() {
  PointInTime t = new PointInTime(2003, 9, 9);
  t = t.addWeeks(2);
  Assert.assertEquals(2003, t.getYear());
  Assert.assertEquals(9, t.getMonth());
  Assert.assertEquals(23, t.getDate());
  assertThat(t.getPrecision(), is(Precision.DATE));
  t = new PointInTime(2003, 9, 9);
  t = t.add(Weeks.weeks(2));
  Assert.assertEquals(2003, t.getYear());
  Assert.assertEquals(9, t.getMonth());
  Assert.assertEquals(23, t.getDate());
  assertThat(t.getPrecision(), is(Precision.DATE));
  // on a MONTH-precision point the week addition keeps MONTH precision
  t = new PointInTime(2003, 9);
  t = t.addWeeks(2);
  Assert.assertEquals(2003, t.getYear());
  Assert.assertEquals(9, t.getMonth());
  assertThat(t.getPrecision(), is(Precision.MONTH));
}

@Test
public void testAddDays() {
  PointInTime t = new PointInTime(2003, 9, 9);
  t = t.add(Period.days(60));
  Assert.assertEquals(2003, t.getYear());
  Assert.assertEquals(11, t.getMonth());
  Assert.assertEquals(8, t.getDate());
}

/** Hour addition rolls past midnight into the next date. */
@Test
public void testAddHours() {
  PointInTime t = new PointInTime(2003, 9, 9, 18, 25);
  t = t.addHours(7);
  Assert.assertEquals(2003, t.getYear());
  Assert.assertEquals(9, t.getMonth());
  Assert.assertEquals(10, t.getDate());
  Assert.assertEquals(1, t.getHour());
  Assert.assertEquals(25, t.getMinute());
}

@Test
public void testAddMinutes() {
  PointInTime t = new PointInTime(2003, 9, 9, 18, 25);
  t = t.addMinutes(82);
  Assert.assertEquals(2003, t.getYear());
  Assert.assertEquals(9, t.getMonth());
  Assert.assertEquals(9, t.getDate());
  Assert.assertEquals(19, t.getHour());
  Assert.assertEquals(47, t.getMinute());
}

@Test
public void testAddSeconds() {
  PointInTime t = new PointInTime(2003, 9, 9, 18, 25, 56);
  t = t.addSeconds(93);
  Assert.assertEquals(2003, t.getYear());
  Assert.assertEquals(9, t.getMonth());
  Assert.assertEquals(9, t.getDate());
  Assert.assertEquals(18, t.getHour());
  Assert.assertEquals(27, t.getMinute());
  Assert.assertEquals(29, t.getSecond());
}

@Test
public void testAddMilliseconds() {
  PointInTime t = new PointInTime(2003, 9, 9, 18, 25, 56, 672);
  t = t.addMilliseconds(567);
  Assert.assertEquals(2003, t.getYear());
  Assert.assertEquals(9, t.getMonth());
  Assert.assertEquals(9, t.getDate());
  Assert.assertEquals(18, t.getHour());
  Assert.assertEquals(25, t.getMinute());
  Assert.assertEquals(57, t.getSecond());
  Assert.assertEquals(239, t.getMillisecond());
}

@Test
public void testSubtractPeriod() {
  PointInTime t = new PointInTime(2003, 9, 25);
  t = t.subtract(Period.days(30));
  Assert.assertEquals(2003, t.getYear());
  Assert.assertEquals(8, t.getMonth());
  Assert.assertEquals(26, t.getDate());
  // precision doesn't change
  t = t.subtract(new Period(0, 0, 0, 10, 23, 50, 40, 0));
  Assert.assertEquals(2003, t.getYear());
  Assert.assertEquals(8, t.getMonth());
  Assert.assertEquals(16, t.getDate());
}

@Test
public void testSubtractWeeks() {
  PointInTime t = new PointInTime(2003, 9, 25);
  t = t.subtractWeeks(2);
  Assert.assertEquals(2003, t.getYear());
  Assert.assertEquals(9, t.getMonth());
  Assert.assertEquals(11, t.getDate());
  assertThat(t.getPrecision(), is(Precision.DATE));
  t = new PointInTime(2005, 7, 25);
  t = t.subtract(Weeks.weeks(2));
  Assert.assertEquals(2005, t.getYear());
  Assert.assertEquals(7, t.getMonth());
  Assert.assertEquals(11, t.getDate());
  assertThat(t.getPrecision(), is(Precision.DATE));
}

/** Subtracting two points of equal precision yields a signed duration. */
@Test
public void testSubtractPointInTimeWithSamePrecision() {
  PointInTime t1 = new PointInTime(2003, 9, 25);
  PointInTime t2 = new PointInTime(2003, 8, 26);
  ReadableDuration d = t1.subtract(t2);
  Period p = d.toPeriod();
  Assert.assertEquals(720, p.getHours());
  Assert.assertEquals(0, p.getMinutes());
  Assert.assertEquals(0, p.getSeconds());
  Assert.assertEquals(0, p.getMillis());
  Assert.assertEquals(30, Days.standardDaysIn(p).getDays());
  // earlier minus later gives a negative duration
  t1 = new PointInTime(2003, 9, 25, 10, 35, 12);
  t2 = new PointInTime(2003, 9, 25, 18, 12, 10);
  d = t1.subtract(t2);
  p = d.toPeriod();
  Assert.assertEquals(0, p.getDays());
  Assert.assertEquals(-7, p.getHours());
  Assert.assertEquals(-36, p.getMinutes());
  Assert.assertEquals(-58, p.getSeconds());
  Assert.assertEquals(0, p.getMillis());
}

/** Mixed-precision subtraction is undefined and must throw. */
@Test
public void testSubtractPointInTimeWithDifferentPrecision() {
  PointInTime t1 = new PointInTime(2003, 9, 25);
  PointInTime t2 = new PointInTime(2003, 8, 26, 8, 30);
  try {
    t1.subtract(t2);
    Assert.fail("expected " + ImprecisePointInTimeException.class);
  } catch (ImprecisePointInTimeException ex) {
  }
}

/** promote() widens an imprecise point to a [low, high) millisecond interval. */
@Test
public void testPromoteYearPrecision() {
  PointInTime t = new PointInTime(1975);
  IntervalOfTime i = t.promote();
  Assert.assertTrue(i.isLowClosed());
  Assert.assertFalse(i.isHighClosed());
  Assert.assertEquals(Precision.MILLISECOND, i.getLow().getPrecision());
  IntervalOfTimeTest.assertEquals(1975, 1, 1, 0, 0, 0, 0, i.getLow());
  Assert.assertEquals(Precision.MILLISECOND, i.getHigh().getPrecision());
  IntervalOfTimeTest.assertEquals(1976, 1, 1, 0, 0, 0, 0, i.getHigh());
}

@Test
public void testPromoteMonthPrecision() {
  PointInTime t = new PointInTime(1975, 7);
  IntervalOfTime i = t.promote();
  Assert.assertTrue(i.isLowClosed());
  Assert.assertFalse(i.isHighClosed());
  Assert.assertEquals(Precision.MILLISECOND, i.getLow().getPrecision());
  IntervalOfTimeTest.assertEquals(1975, 7, 1, 0, 0, 0, 0, i.getLow());
  Assert.assertEquals(Precision.MILLISECOND, i.getHigh().getPrecision());
  IntervalOfTimeTest.assertEquals(1975, 8, 1, 0, 0, 0, 0, i.getHigh());
}

@Test
public void testPromoteDatePrecision() {
  PointInTime t = new PointInTime(1975, 7, 23);
  IntervalOfTime i = t.promote();
  Assert.assertTrue(i.isLowClosed());
  Assert.assertFalse(i.isHighClosed());
  Assert.assertEquals(Precision.MILLISECOND, i.getLow().getPrecision());
  IntervalOfTimeTest.assertEquals(1975, 7, 23, 0, 0, 0, 0, i.getLow());
  Assert.assertEquals(Precision.MILLISECOND, i.getHigh().getPrecision());
  IntervalOfTimeTest.assertEquals(1975, 7, 24, 0, 0, 0, 0, i.getHigh());
}

@Test
public void testPromoteHourPrecision() {
  PointInTime t = new PointInTime(1975, 7, 23, 10);
  IntervalOfTime i = t.promote();
  Assert.assertTrue(i.isLowClosed());
  Assert.assertFalse(i.isHighClosed());
  Assert.assertEquals(Precision.MILLISECOND, i.getLow().getPrecision());
  IntervalOfTimeTest.assertEquals(1975, 7, 23, 10, 0, 0, 0, i.getLow());
  Assert.assertEquals(Precision.MILLISECOND, i.getHigh().getPrecision());
  IntervalOfTimeTest.assertEquals(1975, 7, 23, 11, 0, 0, 0, i.getHigh());
}

@Test
public void testPromoteMinutePrecision() {
  PointInTime t = new PointInTime(1975, 7, 23, 10, 42);
  IntervalOfTime i = t.promote();
  Assert.assertTrue(i.isLowClosed());
  Assert.assertFalse(i.isHighClosed());
  Assert.assertEquals(Precision.MILLISECOND, i.getLow().getPrecision());
  IntervalOfTimeTest.assertEquals(1975, 7, 23, 10, 42, 0, 0, i.getLow());
  Assert.assertEquals(Precision.MILLISECOND, i.getHigh().getPrecision());
  IntervalOfTimeTest.assertEquals(1975, 7, 23, 10, 43, 0, 0, i.getHigh());
}

@Test
public void testPromoteSecondPrecision() {
  PointInTime t = new PointInTime(1975, 7, 23, 10, 42, 15);
  IntervalOfTime i = t.promote();
  Assert.assertTrue(i.isLowClosed());
  Assert.assertFalse(i.isHighClosed());
  Assert.assertEquals(Precision.MILLISECOND, i.getLow().getPrecision());
  IntervalOfTimeTest.assertEquals(1975, 7, 23, 10, 42, 15, 0, i.getLow());
  Assert.assertEquals(Precision.MILLISECOND, i.getHigh().getPrecision());
  IntervalOfTimeTest.assertEquals(1975, 7, 23, 10, 42, 16, 0, i.getHigh());
}

/** A millisecond-precision point promotes to a one-millisecond interval. */
@Test
public void testPromoteMillisecondPrecision() {
  PointInTime t = new PointInTime(1975, 7, 23, 10, 42, 15, 532);
  IntervalOfTime i = t.promote();
  Assert.assertTrue(i.isLowClosed());
  Assert.assertFalse(i.isHighClosed());
  Assert.assertEquals(Precision.MILLISECOND, i.getLow().getPrecision());
  IntervalOfTimeTest.assertEquals(1975, 7, 23, 10, 42, 15, 532, i.getLow());
  Assert.assertEquals(Precision.MILLISECOND, i.getHigh().getPrecision());
  IntervalOfTimeTest.assertEquals(1975, 7, 23, 10, 42, 15, 533, i.getHigh());
}

/** before() — strict ordering; a point is never before itself. */
@Test
public void testBefore() {
  PointInTime t1 = new PointInTime(1975, 7, 24);
  PointInTime t2 = new PointInTime(1975, 7, 23);
  Assert.assertTrue(t2.before(t1));
  Assert.assertFalse(t2.before(t2));
  t2 = new PointInTime(1975, 7, 24, 10, 0, 0, 0);
  Assert.assertFalse(t2.before(t1));
  t2 = new PointInTime(1975, 7, 22, 23, 59, 59, 999);
  Assert.assertTrue(t2.before(t1));
  t1 = new PointInTime(2004, 12, 15, 11, 39, 05);
  t2 = new PointInTime(2004, 12, 15, 16, 37, 25);
  Assert.assertTrue(t1.before(t2));
  Assert.assertTrue(t1.compareTo(t2) < 0);
}

/** after() — strict ordering; a point is never after itself. */
@Test
public void testAfter() {
  PointInTime t1 = new PointInTime(1975, 7, 23);
  PointInTime t2 = new PointInTime(1975, 7, 24);
  Assert.assertTrue(t2.after(t1));
  Assert.assertFalse(t2.after(t2));
  t2 = new PointInTime(1975, 7, 23, 23, 59, 59, 999);
  Assert.assertFalse(t2.after(t1));
  t2 = new PointInTime(1975, 7, 24, 0, 0, 0, 0);
  Assert.assertTrue(t2.after(t1));
}

/** Midnight of a date is 00:00 of the FOLLOWING day (here Mar 31 -> Apr 1). */
@Test
public void testMidnight() {
  PointInTime midnight = new PointInTime(1984, 3, 31).toPointInTimeAtMidnight();
  Assert.assertEquals(1984, midnight.getYear());
  Assert.assertEquals(4, midnight.getMonth());
  Assert.assertEquals(1, midnight.getDate());
  Assert.assertEquals(0, midnight.getHour());
  Assert.assertEquals(0, midnight.getMinute());
  Assert.assertEquals(0, midnight.getSecond());
  Assert.assertEquals(0, midnight.getMillisecond());
}

/** Noon stays on the same date at 12:00:00.000. */
@Test
public void testNoon() {
  PointInTime noon = new PointInTime(1984, 3, 31).toPointInTimeAtNoon();
  Assert.assertEquals(1984, noon.getYear());
  Assert.assertEquals(3, noon.getMonth());
  Assert.assertEquals(31, noon.getDate());
  Assert.assertEquals(12, noon.getHour());
  Assert.assertEquals(0, noon.getMinute());
  Assert.assertEquals(0, noon.getSecond());
  Assert.assertEquals(0, noon.getMillisecond());
}

/** now() delegates to the injected NowStrategy and keeps millisecond precision. */
@Test
public void testNow() {
  NowStrategy f = mock(NowStrategy.class);
  PointInTime.setNowStrategy(f);
  LocalDateTime now = new LocalDateTime();
  when(f.now()).thenReturn(PointInTime.fromLocalDateTime(now));
  PointInTime n = PointInTime.now();
  verify(f).now();
  Assert.assertEquals(Precision.MILLISECOND, n.getPrecision());
  Assert.assertEquals(now.getYear(), n.getYear());
  Assert.assertEquals(now.getMonthOfYear(), n.getMonth());
  Assert.assertEquals(now.getDayOfMonth(), n.getDate());
  Assert.assertEquals(now.getHourOfDay(), n.getHour());
  Assert.assertEquals(now.getMinuteOfHour(), n.getMinute());
  Assert.assertEquals(now.getSecondOfMinute(), n.getSecond());
  Assert.assertEquals(now.getMillisOfSecond(), n.getMillisecond());
  // restore the default strategy so later tests are unaffected
  PointInTime.setNowStrategy(null);
}

/** today() also uses the NowStrategy but truncates to DATE precision. */
@Test
public void testToday() {
  NowStrategy f = mock(NowStrategy.class);
  PointInTime.setNowStrategy(f);
  LocalDateTime now = new LocalDateTime();
  when(f.now()).thenReturn(PointInTime.fromLocalDateTime(now));
  PointInTime t = PointInTime.today();
  verify(f).now();
  Assert.assertEquals(Precision.DATE, t.getPrecision());
  Assert.assertEquals(now.getYear(), t.getYear());
  Assert.assertEquals(now.getMonthOfYear(), t.getMonth());
  Assert.assertEquals(now.getDayOfMonth(), t.getDate());
  // restore the default strategy so later tests are unaffected
  PointInTime.setNowStrategy(null);
}

/** Conversion to LocalDateTime requires full millisecond precision. */
@Test
public void testToLocalDateTime() {
  PointInTime t = new PointInTime(1975, 7, 23);
  try {
    // result intentionally unused — the call itself must throw
    LocalDateTime t1 = t.toLocalDateTime();
    Assert.fail("expected " + ImprecisePointInTimeException.class.getName());
  } catch (ImprecisePointInTimeException e) {
    // NOOP
  }
  t = new PointInTime(1975, 7, 23, 10, 55, 34, 123);
  LocalDateTime t1 = t.toLocalDateTime();
  Assert.assertEquals(1975, t1.getYear());
  Assert.assertEquals(7, t1.getMonthOfYear());
  Assert.assertEquals(23, t1.getDayOfMonth());
  Assert.assertEquals(10, t1.getHourOfDay());
  Assert.assertEquals(55, t1.getMinuteOfHour());
  Assert.assertEquals(34, t1.getSecondOfMinute());
  Assert.assertEquals(123, t1.getMillisOfSecond());
}

/** Asserts symmetric inequality of the two points. */
public static void assertNotEquals(PointInTime t1, PointInTime t2) {
  Assert.assertFalse(t1.equals(t2));
  Assert.assertFalse(t2.equals(t1));
}

/** Asserts symmetric equality of the two points. */
public static void assertEquals(PointInTime t1, PointInTime t2) {
  Assert.assertTrue(t1.equals(t2));
  Assert.assertTrue(t2.equals(t1));
}
}
/*
 * Copyright 2017 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.j2cl.integration.lambdas;

import static com.google.j2cl.integration.testing.Asserts.assertEquals;
import static com.google.j2cl.integration.testing.Asserts.assertThrowsClassCastException;
import static com.google.j2cl.integration.testing.Asserts.assertTrue;

import java.io.Serializable;

/**
 * Integration test for lambda transpilation: captures, instanceof, varargs,
 * intersection casts, specialization and arbitrary nesting. The exact
 * syntactic constructs used here are themselves the behavior under test,
 * so they are preserved verbatim.
 */
public class Main {
  public static void main(String[] args) {
    Captures captures = new Captures();
    captures.testLambdaNoCapture();
    captures.testInstanceofLambda();
    captures.testLambdaCaptureField();
    captures.testLambdaCaptureLocal();
    captures.testLambdaCaptureFieldAndLocal();
    testSpecialLambdas();
    testSpecializedLambda();
    testVarargsLambdas();
    testVarKeywordInLambda();
    testSerializableLambda();
    testNestedLambdas();
    testArbitraryNesting();
    testIntersectionTypeLambdas();
  }

  /** Functional interface used throughout the capture tests. */
  private interface IntToIntFunction {
    int apply(int i);
  }

  private static class Captures {
    private int field = 100;

    /** Applies {@code f} to {@code n} and adds this instance's field. */
    private int test(IntToIntFunction f, int n) {
      return this.field + f.apply(n);
    }

    private void testLambdaNoCapture() {
      int result = test(i -> i + 1, 10);
      assertTrue(result == 111);
      result =
          test(
              i -> {
                return i + 2;
              },
              10);
      assertTrue(result == 112);
    }

    private void testInstanceofLambda() {
      IntToIntFunction f = i -> i + 1;
      assertTrue(f instanceof IntToIntFunction);
    }

    private void testLambdaCaptureField() {
      // lambda captures the enclosing instance's field
      int result = test(i -> field + i + 1, 10);
      assertTrue(result == 211);
      // local class with its own shadowing field, read via qualified this
      class Local {
        int field = 10;

        class Inner {
          int getOuterField() {
            return Local.this.field;
          }
        }
      }
      assertEquals(10, new Local().new Inner().getOuterField());
    }

    private void testLambdaCaptureLocal() {
      int x = 1;
      int result = test(i -> x + i + 1, 10);
      assertTrue(result == 112);
    }

    private void testLambdaCaptureFieldAndLocal() {
      int x = 1;
      int result =
          test(
              i -> {
                int y = 1;
                return x + y + this.field + i + 1;
              },
              10);
      assertTrue(result == 213);
    }
  }

  /** Functional interface whose abstract method overrides Object.equals. */
  interface Equals<T> {
    @Override
    boolean equals(Object object);

    default T get() {
      return null;
    }
  }

  interface SubEquals extends Equals<String> {
    @Override
    String get();
  }

  @SuppressWarnings({"SelfEquals", "EqualsIncompatibleType"})
  private static void testSpecialLambdas() {
    // the lambda implements get(); equals still has Object identity semantics
    SubEquals getHello = () -> "Hello";
    assertTrue(getHello.equals(getHello));
    assertTrue(!getHello.equals("Hello"));
    assertTrue("Hello".equals(getHello.get()));
  }

  @SuppressWarnings({"rawtypes", "unchecked"})
  private static void testSpecializedLambda() {
    // calling a specialized lambda through a raw type must still type-check
    // its argument at runtime (CCE expected)
    Consumer<String> stringConsumer = s -> s.substring(1);
    Consumer rawConsumer = stringConsumer;
    assertThrowsClassCastException(() -> rawConsumer.accept(new Object()), String.class);
    VarargsIntFunction<String> firstA = ns -> ns[0].indexOf("a");
    VarargsIntFunction rawVarargsFunction = firstA;
    assertThrowsClassCastException(
        () -> rawVarargsFunction.apply(new Object[] {"bbabb", "aabb"}), String[].class);
  }

  interface Consumer<T> {
    void accept(T t);
  }

  interface VarargsIntFunction<T> {
    int apply(T... t);
  }

  private static void testVarargsLambdas() {
    // the varargs array is passed through, not copied
    VarargsFunction<String> changeFirstElement =
        ss -> {
          ss[0] = ss[0] + " world";
          return ss;
        };
    String[] params = new String[] {"hello"};
    assertEquals(params, changeFirstElement.apply(params));
    assertEquals("hello world", params[0]);
  }

  interface VarargsFunction<T> {
    T[] apply(T... t);
  }

  private static void testVarKeywordInLambda() {
    IntToIntFunction f = (var i) -> i + 1;
    assertEquals(3, f.apply(2));
  }

  private static void testSerializableLambda() {
    Object lambda = (Consumer<Object> & Serializable) o -> {};
    assertTrue(lambda instanceof Serializable);
  }

  private static void testArbitraryNesting() {
    class A {
      public void a() {
        int[] x = new int[] {42};
        class B {
          public int b() {
            IntToIntFunction i =
                new IntToIntFunction() {
                  @Override
                  public int apply(int a) {
                    IntToIntFunction ii =
                        n -> {
                          return new IntToIntFunction() {
                            @Override
                            public int apply(int b) {
                              IntToIntFunction iii = m -> x[0] = x[0] + a + b + n + m;
                              return iii.apply(100);
                            }
                          }.apply(200);
                        };
                    return ii.apply(300);
                  }
                };
            return i.apply(400);
          }
        }
        // 42 + 400 + 200 + 300 + 100 = 1042
        int result = new B().b();
        assertTrue(result == 1042);
        assertTrue(x[0] == 1042);
      }
    }
    // FIX: class A was declared but never instantiated or invoked, so the
    // assertions inside a() were dead code and this test verified nothing.
    new A().a();
  }

  private static void testNestedLambdas() {
    int a = 10;
    IntToIntFunction i =
        m -> {
          int b = 20;
          IntToIntFunction ii = n -> a + b + m + n;
          return ii.apply(100);
        };
    assertTrue((i.apply(200) == 330));
  }

  private interface IdentityWithDefault<T> {
    T identityaccept(T t);

    default IdentityWithDefault<T> self() {
      return this;
    }
  }

  private interface InterfaceWithDefaultMethod {
    static final String MY_TEXT = "from Non Functional";

    default String defaultMethod() {
      return MY_TEXT;
    }
  }

  private static void testIntersectionTypeLambdas() {
    // an intersection cast produces an object implementing both interfaces,
    // inheriting default methods from each
    Object obj = (IdentityWithDefault<String> & InterfaceWithDefaultMethod) o -> o;
    assertTrue(obj instanceof IdentityWithDefault);
    assertTrue(obj instanceof InterfaceWithDefaultMethod);
    assertEquals(obj, ((IdentityWithDefault<String>) obj).self());
    assertEquals(
        InterfaceWithDefaultMethod.MY_TEXT, ((InterfaceWithDefaultMethod) obj).defaultMethod());
  }
}
package fr.minibilles.basics.ui.action;

import fr.minibilles.basics.Basics;
import java.util.HashMap;
import java.util.Map;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.KeyEvent;

/**
 * Immutable combination of SWT-style key and modifier codes with a human
 * readable description. The constants mirror the {@link SWT} key-code and
 * modifier-mask values; platform-dependent ones ({@code MOD1}..{@code MOD4})
 * are resolved in the static initializer.
 */
public class KeyCode {

	/** Readable names for well-known key codes; populated in the static block. */
	private static final Map<Integer, String> keyDescription = new HashMap<Integer, String>();

	/**
	 * Returns a printable description for the given key: a registered name
	 * when one exists, otherwise the key's character form in single quotes.
	 *
	 * @param key a key code or unicode character.
	 * @return a non-null description, never cached for unknown keys.
	 */
	public static final String getKeyDescription(int key) {
		String description = keyDescription.get(key);
		if ( description == null ) {
			char[] chars = null;
			try {
				chars = Character.toChars(key);
			} catch (IllegalArgumentException e) {
				// FIX: Character.toChars(int) only throws IllegalArgumentException
				// (invalid code point); the previous catch(Throwable) also
				// swallowed Errors such as OutOfMemoryError.
				chars = new char[] { '?', '?', '?' };
			}
			description = "'" + String.valueOf(chars) + "'";
		}
		return description;
	}

	/**
	 * ASCII character convenience constant for the backspace character
	 * (value is the <code>char</code> '\b').
	 */
	public static final char BS = '\b';

	/**
	 * ASCII character convenience constant for the carriage return character
	 * (value is the <code>char</code> '\r').
	 */
	public static final char CR = '\r';

	/**
	 * ASCII character convenience constant for the delete character
	 * (value is the <code>char</code> with value 127).
	 */
	public static final char DEL = 0x7F;

	/**
	 * ASCII character convenience constant for the escape character
	 * (value is the <code>char</code> with value 27).
	 */
	public static final char ESC = 0x1B;

	/**
	 * ASCII character convenience constant for the tab character
	 * (value is the <code>char</code> '\t').
	 *
	 * @since 2.1
	 */
	public static final char TAB = '\t';

	/**
	 * ASCII character convenience constant for the space character
	 * (value is the <code>char</code> ' ').
	 *
	 * @since 3.7
	 */
	public static final char SPACE = ' ';

	/**
	 * Keyboard and/or mouse event mask indicating that the ALT key
	 * was pushed on the keyboard when the event was generated
	 * (value is 1&lt;&lt;16).
	 */
	public static final int ALT = 1 << 16;

	/**
	 * Keyboard and/or mouse event mask indicating that the SHIFT key
	 * was pushed on the keyboard when the event was generated
	 * (value is 1&lt;&lt;17).
	 */
	public static final int SHIFT = 1 << 17;

	/**
	 * Keyboard and/or mouse event mask indicating that the CTRL key
	 * was pushed on the keyboard when the event was generated
	 * (value is 1&lt;&lt;18).
	 */
	public static final int CTRL = 1 << 18;

	/**
	 * Keyboard and/or mouse event mask indicating that the CTRL key
	 * was pushed on the keyboard when the event was generated. This
	 * is a synonym for CTRL (value is 1&lt;&lt;18).
	 */
	public static final int CONTROL = CTRL;

	/**
	 * Keyboard and/or mouse event mask indicating that the COMMAND key
	 * was pushed on the keyboard when the event was generated
	 * (value is 1&lt;&lt;22).
	 *
	 * @since 2.1
	 */
	public static final int COMMAND = 1 << 22;

	/**
	 * Primary keyboard modifier for the platform (COMMAND on macOS,
	 * CONTROL elsewhere). Assigned in the static initializer.
	 *
	 * @since 2.1
	 */
	public static final int MOD1;

	/**
	 * Secondary keyboard modifier for the platform (SHIFT on all
	 * platforms). Assigned in the static initializer.
	 *
	 * @since 2.1
	 */
	public static final int MOD2;

	/**
	 * Tertiary keyboard modifier for the platform (ALT on all
	 * platforms). Assigned in the static initializer.
	 *
	 * @since 2.1
	 */
	public static final int MOD3;

	/**
	 * Quaternary keyboard modifier for the platform (CONTROL on macOS,
	 * unused elsewhere). Assigned in the static initializer.
	 *
	 * @since 2.1
	 */
	public static final int MOD4;

	/**
	 * Accelerator constant used to differentiate a key code from a
	 * unicode character.
	 *
	 * If this bit is set, then the key stroke portion of an accelerator
	 * represents a key code. If this bit is not set, then the key stroke
	 * portion of an accelerator is a unicode character.
	 *
	 * The following expression is false:
	 * <code>((SWT.MOD1 | SWT.MOD2 | 'T') &amp; SWT.KEYCODE_BIT) != 0</code>.
	 *
	 * The following expression is true:
	 * <code>((SWT.MOD3 | SWT.F2) &amp; SWT.KEYCODE_BIT) != 0</code>.
	 *
	 * (value is (1&lt;&lt;24))
	 *
	 * @since 2.1
	 */
	public static final int KEYCODE_BIT = (1 << 24);

	/**
	 * Accelerator constant used to extract the key stroke portion of
	 * an accelerator. The key stroke may be a key code or a unicode
	 * value; if it is a key code <code>KEYCODE_BIT</code> will be set.
	 *
	 * @since 2.1
	 */
	public static final int KEY_MASK = KEYCODE_BIT + 0xFFFF;

	/** UP ARROW key (value is (1&lt;&lt;24)+1). */
	public static final int ARROW_UP = KEYCODE_BIT + 1;

	/** DOWN ARROW key (value is (1&lt;&lt;24)+2). */
	public static final int ARROW_DOWN = KEYCODE_BIT + 2;

	/** LEFT ARROW key (value is (1&lt;&lt;24)+3). */
	public static final int ARROW_LEFT = KEYCODE_BIT + 3;

	/** RIGHT ARROW key (value is (1&lt;&lt;24)+4). */
	public static final int ARROW_RIGHT = KEYCODE_BIT + 4;

	/** PAGE UP key (value is (1&lt;&lt;24)+5). */
	public static final int PAGE_UP = KEYCODE_BIT + 5;

	/** PAGE DOWN key (value is (1&lt;&lt;24)+6). */
	public static final int PAGE_DOWN = KEYCODE_BIT + 6;

	/** HOME key (value is (1&lt;&lt;24)+7). */
	public static final int HOME = KEYCODE_BIT + 7;

	/** END key (value is (1&lt;&lt;24)+8). */
	public static final int END = KEYCODE_BIT + 8;

	/** INSERT key (value is (1&lt;&lt;24)+9). */
	public static final int INSERT = KEYCODE_BIT + 9;

	/** F1 key (value is (1&lt;&lt;24)+10). */
	public static final int F1 = KEYCODE_BIT + 10;

	/** F2 key (value is (1&lt;&lt;24)+11). */
	public static final int F2 = KEYCODE_BIT + 11;

	/** F3 key (value is (1&lt;&lt;24)+12). */
	public static final int F3 = KEYCODE_BIT + 12;

	/** F4 key (value is (1&lt;&lt;24)+13). */
	public static final int F4 = KEYCODE_BIT + 13;

	/** F5 key (value is (1&lt;&lt;24)+14). */
	public static final int F5 = KEYCODE_BIT + 14;

	/** F6 key (value is (1&lt;&lt;24)+15). */
	public static final int F6 = KEYCODE_BIT + 15;

	/** F7 key (value is (1&lt;&lt;24)+16). */
	public static final int F7 = KEYCODE_BIT + 16;

	/** F8 key (value is (1&lt;&lt;24)+17). */
	public static final int F8 = KEYCODE_BIT + 17;

	/** F9 key (value is (1&lt;&lt;24)+18). */
	public static final int F9 = KEYCODE_BIT + 18;

	/** F10 key (value is (1&lt;&lt;24)+19). */
	public static final int F10 = KEYCODE_BIT + 19;

	/** F11 key (value is (1&lt;&lt;24)+20). */
	public static final int F11 = KEYCODE_BIT + 20;

	/** F12 key (value is (1&lt;&lt;24)+21). */
	public static final int F12 = KEYCODE_BIT + 21;

	/** F13 key (value is (1&lt;&lt;24)+22). @since 3.0 */
	public static final int F13 = KEYCODE_BIT + 22;

	/** F14 key (value is (1&lt;&lt;24)+23). @since 3.0 */
	public static final int F14 = KEYCODE_BIT + 23;

	/** F15 key (value is (1&lt;&lt;24)+24). @since 3.0 */
	public static final int F15 = KEYCODE_BIT + 24;

	/** F16 key (value is (1&lt;&lt;24)+25 — previous Javadoc wrongly said 1&lt;&lt;25). @since 3.6 */
	public static final int F16 = KEYCODE_BIT + 25;

	/** F17 key (value is (1&lt;&lt;24)+26 — previous Javadoc wrongly said 1&lt;&lt;26). @since 3.6 */
	public static final int F17 = KEYCODE_BIT + 26;

	/** F18 key (value is (1&lt;&lt;24)+27 — previous Javadoc wrongly said 1&lt;&lt;27). @since 3.6 */
	public static final int F18 = KEYCODE_BIT + 27;

	/** F19 key (value is (1&lt;&lt;24)+28 — previous Javadoc wrongly said 1&lt;&lt;28). @since 3.6 */
	public static final int F19 = KEYCODE_BIT + 28;

	/** F20 key (value is (1&lt;&lt;24)+29 — previous Javadoc wrongly said 1&lt;&lt;29). @since 3.6 */
	public static final int F20 = KEYCODE_BIT + 29;

	/**
	 * Keyboard event constant representing the help key
	 * (value is (1&lt;&lt;24)+81).
	 *
	 * NOTE: The HELP key maps to the key labeled "help", not "F1". If your
	 * keyboard does not have a HELP key, you will never see this key press.
	 * To listen for help on a control, use SWT.Help.
	 *
	 * @since 3.0
	 * @see SWT#Help
	 */
	public static final int HELP = KEYCODE_BIT + 81;

	/** Pause key (value is (1&lt;&lt;24)+85). @since 3.0 */
	public static final int PAUSE = KEYCODE_BIT + 85;

	/** Break key (value is (1&lt;&lt;24)+86). @since 3.0 */
	public static final int BREAK = KEYCODE_BIT + 86;

	/** Print screen key (value is (1&lt;&lt;24)+87). @since 3.0 */
	public static final int PRINT_SCREEN = KEYCODE_BIT + 87;

	/**
	 * Keyboard and/or mouse event mask indicating all possible keyboard
	 * modifiers. Use this in place of referencing each individual mask so
	 * that code keeps working as new modifier masks are added:
	 * <code>(stateMask &amp; SWT.MODIFIER_MASK) != 0</code>.
	 *
	 * @since 2.1
	 */
	public static final int MODIFIER_MASK;

	static {
		/*
		 * These values represent bit masks that may need to expand in the
		 * future. Therefore they are not initialized in the declaration to
		 * stop the compiler from inlining.
		 */
		MODIFIER_MASK = ALT | SHIFT | CTRL | COMMAND;

		/*
		 * These values can be different on different platforms. Therefore
		 * they are not initialized in the declaration to stop the compiler
		 * from inlining.
		 */
		if (Basics.isMac()) {
			MOD1 = COMMAND;
			MOD2 = SHIFT;
			MOD3 = ALT;
			MOD4 = CONTROL;
		} else {
			MOD1 = CONTROL;
			MOD2 = SHIFT;
			MOD3 = ALT;
			MOD4 = 0;
		}

		// complete map
		keyDescription.put((int) BS, "Backspace");
		keyDescription.put((int) CR, "Return");
		keyDescription.put((int) DEL, "Del");
		keyDescription.put((int) ESC, "Escape");
		keyDescription.put((int) TAB, "Tab");
		keyDescription.put((int) SPACE, "Space");
		keyDescription.put(ALT, "Alt");
		keyDescription.put(SHIFT, "Shift");
		keyDescription.put(CTRL, "Ctrl");
		keyDescription.put(COMMAND, "Command");
		keyDescription.put(ARROW_LEFT, "\u2190");
		keyDescription.put(ARROW_UP, "\u2191");
		keyDescription.put(ARROW_RIGHT, "\u2192");
		keyDescription.put(ARROW_DOWN, "\u2193");
		keyDescription.put(PAGE_UP, "\u21DE");
		keyDescription.put(PAGE_DOWN, "\u21DF");
		keyDescription.put(HOME, "Home");
		keyDescription.put(END, "End");
		keyDescription.put(INSERT, "Insert");
		keyDescription.put(F1, "F1");
		keyDescription.put(F2, "F2");
		keyDescription.put(F3, "F3");
		keyDescription.put(F4, "F4");
		keyDescription.put(F5, "F5");
		keyDescription.put(F6, "F6");
		keyDescription.put(F7, "F7");
		keyDescription.put(F8, "F8");
		keyDescription.put(F9, "F9");
		keyDescription.put(F10, "F10");
		keyDescription.put(F11, "F11");
		keyDescription.put(F12, "F12");
		keyDescription.put(F13, "F13");
		keyDescription.put(F14, "F14");
		keyDescription.put(F15, "F15");
		keyDescription.put(F16, "F16");
		keyDescription.put(F17, "F17");
		keyDescription.put(F18, "F18");
		keyDescription.put(F19, "F19");
		keyDescription.put(F20, "F20");
		keyDescription.put(HELP, "Help");
		keyDescription.put(PAUSE, "Pause");
		keyDescription.put(BREAK, "Break");
		keyDescription.put(PRINT_SCREEN, "Print Screen");
	}

	/** The combined key/modifier bits for this key code. */
	private final int keycode;

	/** Pre-computed "+"-separated human readable form, e.g. "Ctrl+'s'". */
	private final String description;

	/**
	 * Combines the given keys (modifiers plus at most one key-stroke) into
	 * a single key code.
	 *
	 * @param keys the key and modifier values to combine; must not be empty.
	 * @throws IllegalArgumentException if no key is given, or if more than
	 *         one non-modifier key (i.e. a key whose bits intersect
	 *         {@link #KEY_MASK}) is given.
	 */
	public KeyCode(int ... keys) {
		if ( keys.length == 0 ) throw new IllegalArgumentException("Can't create key code with no key.");
		int keycode = 0;
		boolean containsCharacter = false;
		StringBuilder description = new StringBuilder();
		for ( int key : keys ) {
			if ( ( key & KEY_MASK ) != 0 ) {
				// a key-stroke (character or KEYCODE_BIT code); only one allowed
				if ( containsCharacter ) {
					throw new IllegalArgumentException("Can't create key code with more than one letter.");
				} else {
					containsCharacter = true;
				}
			}
			keycode |= key;
			if ( description.length() > 0 ) description.append("+");
			description.append(getKeyDescription(key));
		}
		this.keycode = keycode;
		this.description = description.toString();
	}

	/** Returns the combined key/modifier bits. */
	public int getKeycode() {
		return keycode;
	}

	/** Returns the human readable form of this key code. */
	public String getDescription() {
		return description;
	}

	/**
	 * Tests whether this key code's key-stroke portion is exactly the
	 * given character.
	 */
	public boolean contains(char character) {
		char thisCharacter = (char) (keycode & KEY_MASK);
		return thisCharacter == character;
	}

	/**
	 * Tests whether this key code shares any bit with the given code
	 * (typically a modifier mask).
	 */
	public boolean contains(int code) {
		return (keycode & code) != 0;
	}

	@Override
	public String toString() {
		return description;
	}

	/**
	 * Builds a {@link KeyCode} from an SWT key event, combining its state
	 * mask with its key code unless they are redundant.
	 */
	public static KeyCode fromEvent(KeyEvent event) {
		int[] keys = null;
		if ( event.stateMask == 0 || event.stateMask == event.keyCode ) {
			keys = new int[] { event.keyCode };
		} else {
			keys = new int[] { event.stateMask, event.keyCode };
		}
		return new KeyCode(keys);
	}
}
/* * The MIT License * * Copyright 2017 Kohwalter. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package br.uff.ic.SimilarityCollapse.graphgenerator;

import br.uff.ic.utility.Utils;
import br.uff.ic.utility.graph.Edge;
import br.uff.ic.utility.graph.Vertex;
import edu.uci.ics.jung.graph.DirectedGraph;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Runs the clustering experiment: generates oracle graphs, derives noisy
 * graphs from them, clusters each noisy graph with four algorithms
 * (Similarity TF / FT / TT and DBSCAN) and writes precision / recall /
 * f-measure / cluster-count / time statistics to text files, including
 * R-formatted vectors for plotting.
 *
 * @author Kohwalter
 */
public class ClusteringEvaluator {

    // Probability that a given vertex receives noise when generating the noise graph.
    double noiseProbability = 1.0F;

    // Per-iteration metric accumulators; p = precision, r = recall,
    // f = f-measure, c = number of clusters found, t = elapsed time (ms).
    ArrayList<Double> p_similarity = new ArrayList<>();
    ArrayList<Double> r_similarity = new ArrayList<>();
    ArrayList<Double> f_similarity = new ArrayList<>();
    ArrayList<Double> c_similarity = new ArrayList<>();
    ArrayList<Double> t_similarity = new ArrayList<>();

    ArrayList<Double> p_similarityFT = new ArrayList<>();
    ArrayList<Double> r_similarityFT = new ArrayList<>();
    ArrayList<Double> f_similarityFT = new ArrayList<>();
    ArrayList<Double> c_similarityFT = new ArrayList<>();
    ArrayList<Double> t_similarityFT = new ArrayList<>();

    ArrayList<Double> p_similarityTT = new ArrayList<>();
    ArrayList<Double> r_similarityTT = new ArrayList<>();
    ArrayList<Double> f_similarityTT = new ArrayList<>();
    ArrayList<Double> c_similarityTT = new ArrayList<>();
    ArrayList<Double> t_similarityTT = new ArrayList<>();

    ArrayList<Double> p_dbscan = new ArrayList<>();
    ArrayList<Double> r_dbscan = new ArrayList<>();
    ArrayList<Double> f_dbscan = new ArrayList<>();
    ArrayList<Double> c_dbscan = new ArrayList<>(); // number of clusters
    ArrayList<Double> t_dbscan = new ArrayList<>();

    // F-measure values accumulated over ALL iterations (never cleared between
    // iterations), exported once at the end of the trial.
    ArrayList<Double> f_all_similarity = new ArrayList<>();
    ArrayList<Double> f_all_dbscan = new ArrayList<>();
    ArrayList<Double> f_all_ft = new ArrayList<>();
    ArrayList<Double> f_all_tt = new ArrayList<>();

    private boolean isMonotonic = false;
    OracleGraph oracleGraph;

    // Per-iteration "win" counters (how often each algorithm had the best
    // f-measure); reset after each iteration in collapse().
    int similarity = 0;
    int similarityFT = 0;
    int similarityTT = 0;
    int dbscan = 0;

    BufferedWriter bw;      // human-readable evaluation file for the current iteration
    BufferedWriter bwR;     // R-data file for the current iteration
    BufferedWriter bwR_all; // R-data file with the all-iteration f-measure vectors

    ClusteringEvaluator(boolean b, OracleGraph oracle) {
        isMonotonic = b;
        oracleGraph = oracle;
    }

    public void setMonotonic(boolean t) {
        isMonotonic = t;
    }

    /**
     * Method to compare the clusters found with the ones from the Oracle and
     * adds the results (precision, recall, f-measure) in their respective lists.
     *
     * <p>A found cluster counts as a hit (intersection) only when it contains
     * exactly one oracle vertex.
     *
     * @param oracle is the oracle that contains the "correct" answers
     * @param collapseGroups the clusters found by an algorithm, each a map keyed by vertex id
     * @param p is a variable that will store the precision for this trial
     * @param r is a variable that will store the recall for this trial
     * @param f is a variable that will store the f-measure for this trial
     * @param c is a variable that will store the number of clusters found for this trial
     * @throws IOException
     */
    public void comparePRF(DirectedGraph<Object, Edge> oracle,
            ArrayList<ConcurrentHashMap<String, Object>> collapseGroups,
            ArrayList<Double> p, ArrayList<Double> r,
            ArrayList<Double> f, ArrayList<Double> c) throws IOException {
        double relevantDocuments = oracle.getVertexCount();
        double retrievedDocuments;
        double intersection = 0;
        double precision = 0;
        double recall = 0;
        double fmeasure = 0;
        retrievedDocuments = collapseGroups.size();
        if (retrievedDocuments != 0) {
            for (ConcurrentHashMap<String, Object> subGraph : collapseGroups) {
                int collapsedOraclesSameGroup = 0;
                if (subGraph.size() > 0) {
                    for (Object v : oracle.getVertices()) {
                        String id = ((Vertex) v).getID();
                        if (subGraph.containsKey(id)) {
                            collapsedOraclesSameGroup++;
                        }
                    }
                }
                // Exactly one oracle vertex in this cluster -> correct retrieval.
                if (collapsedOraclesSameGroup == 1) {
                    intersection++;
                }
            }
            precision = intersection / retrievedDocuments;
            recall = intersection / relevantDocuments;
            if ((precision != 0) || (recall != 0)) {
                fmeasure = 2 * (precision * recall) / (precision + recall);
            } else {
                fmeasure = 0;
            }
        } else {
            // No clusters retrieved: log the (all-zero) metrics for diagnosis.
            System.out.println("intersection: " + intersection);
            System.out.println("retrievedDocuments: " + retrievedDocuments);
            System.out.println("precision: " + precision);
            System.out.println("recall: " + recall);
            System.out.println("fmeasure: " + fmeasure);
        }
        p.add(precision);
        r.add(recall);
        f.add(fmeasure);
        c.add(retrievedDocuments);
    }

    /**
     * Main method used in the experiment.
     *
     * @param NUMBER_OF_ORACLE_GRAPHS defines how many oracles we will generate in each iteration
     * @param NUMBER_OF_NOISE_GRAPHS defines the number of generated noise graph for each oracle graph
     * @param INITIAL_NOISE_GRAPH_SIZE defines the initial number of noise vertices in the first iteration
     * @param NOISE_INCREASE_NUMBER defines the growth factor (multiplicative) of noise graphs in each iteration
     * @param NUMBER_ITERATIONS defines the number of iterations
     * @param fileName defines the file name for this trial
     * @param typeGraph defines the type of graph that we will experiment with
     * @param epsMod is a configuration value for dbscan algorithms
     * @param TF_size is a configuration value for one of the similarity algorithms
     * @param TF_increase is a configuration value for one of the similarity algorithms
     * @param TF_qnt is a configuration value for one of the similarity algorithms
     * @param TT_size is a configuration value for one of the similarity algorithms
     * @param TT_increase is a configuration value for one of the similarity algorithms
     * @param TT_qnt is a configuration value for one of the similarity algorithms
     * @param FT_size is a configuration value for one of the similarity algorithms
     * @param FT_increase is a configuration value for one of the similarity algorithms
     * @param FT_qnt is a configuration value for one of the similarity algorithms
     * @throws IOException
     * @throws InterruptedException
     */
    public void collapse(int NUMBER_OF_ORACLE_GRAPHS, int NUMBER_OF_NOISE_GRAPHS,
            double INITIAL_NOISE_GRAPH_SIZE, double NOISE_INCREASE_NUMBER,
            int NUMBER_ITERATIONS, String fileName, String typeGraph, double epsMod,
            int TF_size, int TF_increase, double TF_qnt,
            int TT_size, int TT_increase, double TT_qnt,
            int FT_size, int FT_increase, double FT_qnt)
            throws IOException, InterruptedException {
        int i;
        int j;
        int w;
        int total_similarity = 0;
        int total_dbscan = 0;
        int total_tt = 0;
        int total_ft = 0;
        double noiseFactor = INITIAL_NOISE_GRAPH_SIZE;
        File fileR_all = new File("R_Data_All_" + fileName + ".txt");
        bwR_all = new BufferedWriter(new FileWriter(fileR_all.getAbsoluteFile()));
        for (w = 0; w < NUMBER_ITERATIONS; w++) {
            System.out.println("Iteration NUMBER #" + w);
            createIterationFile(fileName, w, epsMod, TF_size, TF_increase, TF_qnt,
                    TT_size, TT_increase, TT_qnt, FT_size, FT_increase, FT_qnt);
            bw.write("==============================================================");
            bw.newLine();
            bw.write("ITERATION NUMBER #" + w);
            bw.newLine();
            DirectedGraph<Object, Edge> oracle;
            oracle = oracleGraph.createOracleGraph(typeGraph);
            bw.write("Oracle size: " + oracle.getVertexCount());
            bw.newLine();
            bw.write("NoiseGraph size: " + oracle.getVertexCount() * noiseFactor);
            bw.newLine();
            for (j = 1; j <= NUMBER_OF_ORACLE_GRAPHS; j++) {
                oracle = oracleGraph.createOracleGraph(typeGraph);
                for (i = 0; i < NUMBER_OF_NOISE_GRAPHS; i++) {
                    NoiseGraph instance = new NoiseGraph(oracle, oracleGraph.attribute, isMonotonic, "" + j + i);
                    DirectedGraph<Object, Edge> noiseGraph = instance.generateNoiseGraph(noiseFactor, noiseProbability);
                    ArrayList<ConcurrentHashMap<String, Object>> clusters1 = new ArrayList<>();
                    ArrayList<ConcurrentHashMap<String, Object>> clusters2 = new ArrayList<>();
                    ArrayList<ConcurrentHashMap<String, Object>> clusters3 = new ArrayList<>();
                    ArrayList<ConcurrentHashMap<String, Object>> clusters4 = new ArrayList<>();
                    // Run all four clustering algorithms in parallel on the same noise graph.
                    Thread t1 = new Thread(new SimilarityThread(clusters1, oracleGraph, noiseGraph,
                            true, false, t_similarity, TF_size, TF_increase, TF_qnt));
                    Thread t2 = new Thread(new SimilarityThread(clusters2, oracleGraph, noiseGraph,
                            false, true, t_similarityFT, FT_size, FT_increase, FT_qnt));
                    Thread t3 = new Thread(new SimilarityThread(clusters3, oracleGraph, noiseGraph,
                            true, true, t_similarityTT, TT_size, TT_increase, TT_qnt));
                    Thread t4 = new Thread(new DbscanThread(clusters4, oracleGraph, noiseGraph,
                            epsMod, t_dbscan));
                    t1.start();
                    t2.start();
                    t3.start();
                    t4.start();
                    t1.join();
                    t2.join();
                    t3.join();
                    t4.join();
                    comparePRF(oracle, clusters1, p_similarity, r_similarity, f_similarity, c_similarity);
                    comparePRF(oracle, clusters2, p_similarityFT, r_similarityFT, f_similarityFT, c_similarityFT);
                    comparePRF(oracle, clusters3, p_similarityTT, r_similarityTT, f_similarityTT, c_similarityTT);
                    comparePRF(oracle, clusters4, p_dbscan, r_dbscan, f_dbscan, c_dbscan);
                }
            }
            printResults(bw, bwR, w);
            bw.newLine();
            bw.write("Similarity wins: " + similarity);
            bw.newLine();
            bw.write("dbscan wins: " + dbscan);
            bw.newLine();
            bw.write("Similarity TF wins: " + similarityFT);
            bw.newLine();
            bw.write("Similarity TT wins: " + similarityTT);
            bw.newLine();
            total_similarity += similarity;
            total_dbscan += dbscan;
            total_ft += similarityFT;
            total_tt += similarityTT;
            // Reset per-iteration win counters before the next noise level.
            similarity = 0;
            dbscan = 0;
            similarityTT = 0;
            similarityFT = 0;
            noiseFactor *= NOISE_INCREASE_NUMBER;
            bw.close();
            bwR.close();
        }
        createFinalResultsFile(fileName, total_similarity, total_dbscan, total_ft, total_tt,
                epsMod, TF_size, TF_increase, TF_qnt, TT_size, TT_increase, TT_qnt,
                FT_size, FT_increase, FT_qnt);
    }

    /**
     * Method to export the results in a txt file during each iteration.
     *
     * @param name
     * @param iteration
     * @param epsMod
     * @param TF_size
     * @param TF_increase
     * @param TF_qnt
     * @param TT_size
     * @param TT_increase
     * @param TT_qnt
     * @param FT_size
     * @param FT_increase
     * @param FT_qnt
     * @throws IOException
     */
    public void createIterationFile(String name, int iteration, double epsMod,
            int TF_size, int TF_increase, double TF_qnt,
            int TT_size, int TT_increase, double TT_qnt,
            int FT_size, int FT_increase, double FT_qnt) throws IOException {
        File file = new File("Evaluation_" + name + iteration + ".txt");
        File fileR = new File("R_Data_" + name + iteration + ".txt");
        // if file doesnt exists, then create it
        if (!file.exists()) {
            file.createNewFile();
        }
        if (!fileR.exists()) {
            fileR.createNewFile();
        }
        bw = new BufferedWriter(new FileWriter(file.getAbsoluteFile()));
        bwR = new BufferedWriter(new FileWriter(fileR.getAbsoluteFile()));
        bw.write("DBSCAN EPS: " + epsMod);
        bw.newLine();
        bw.write("(TF) Similarity small cluster definition: " + TF_size);
        bw.newLine();
        bw.write("(TF) Similarity threshold small cluster (multiplicates the normal threshold by): " + TF_increase);
        bw.newLine();
        bw.write("(TF) Similarity Threshold (in STD): " + TF_qnt);
        bw.newLine();
        bw.write("(TT) Similarity small cluster definition: " + TT_size);
        bw.newLine();
        bw.write("(TT) Similarity threshold small cluster (multiplicates the normal threshold by): " + TT_increase);
        bw.newLine();
        bw.write("(TT) Similarity Threshold (in STD): " + TT_qnt);
        bw.newLine();
        bw.write("(FT) Similarity Threshold (in STD): " + FT_qnt);
        bw.newLine();
    }

    /**
     * Method to export the final results from the trial.
     *
     * @param fileName
     * @param total_similarity
     * @param total_dbscan
     * @param total_ft
     * @param total_tt
     * @param epsMod
     * @param TF_size
     * @param TF_increase
     * @param TF_qnt
     * @param TT_size
     * @param TT_increase
     * @param TT_qnt
     * @param FT_size
     * @param FT_increase
     * @param FT_qnt
     * @throws IOException
     */
    public void createFinalResultsFile(String fileName, int total_similarity,
            int total_dbscan, int total_ft, int total_tt, double epsMod,
            int TF_size, int TF_increase, double TF_qnt,
            int TT_size, int TT_increase, double TT_qnt,
            int FT_size, int FT_increase, double FT_qnt) throws IOException {
        BufferedWriter results;
        File file = new File("Evaluation_" + fileName + "_Results.txt");
        // if file doesnt exists, then create it
        if (!file.exists()) {
            file.createNewFile();
        }
        results = new BufferedWriter(new FileWriter(file.getAbsoluteFile()));
        System.out.println("===============================");
        System.out.println("Final Result");
        System.out.println("Similarity: " + total_similarity);
        System.out.println("dbscan: " + total_dbscan);
        results.write("DBSCAN EPS: " + epsMod);
        results.newLine();
        // FIX: these three "small cluster definition" lines previously wrote the
        // *_increase parameters (copy-paste slip); they now report *_size, matching
        // createIterationFile().
        results.write("(TF) Similarity small cluster definition: " + TF_size);
        results.newLine();
        results.write("(TF) Similarity threshold small cluster (multiplicates the normal threshold by): " + TF_increase);
        results.newLine();
        results.write("(TF) Similarity Threshold (in STD): " + TF_qnt);
        results.newLine();
        results.write("(TT) Similarity small cluster definition: " + TT_size);
        results.newLine();
        results.write("(TT) Similarity threshold small cluster (multiplicates the normal threshold by): " + TT_increase);
        results.newLine();
        results.write("(TT) Similarity Threshold (in STD): " + TT_qnt);
        results.newLine();
        results.write("(FT) Similarity small cluster definition: " + FT_size);
        results.newLine();
        results.write("(FT) Similarity threshold small cluster (multiplicates the normal threshold by): " + FT_increase);
        results.newLine();
        results.write("(FT) Similarity Threshold (in STD): " + FT_qnt);
        results.newLine();
        results.write("===============================");
        results.newLine();
        results.write("Final Result");
        results.newLine();
        results.write("Similarity (tf): " + total_similarity);
        results.newLine();
        results.write("dbscan (ff): " + total_dbscan);
        results.newLine();
        results.write("Similarity (ft): " + total_ft);
        results.newLine();
        results.write("Similarity (tt): " + total_tt);
        results.newLine();
        results.close();
        bwR_all.write(printValues(f_all_similarity, "ve"));
        bwR_all.newLine();
        bwR_all.write(printValues(f_all_dbscan, "dbscan"));
        bwR_all.newLine();
        bwR_all.write(printValues(f_all_ft, "ic"));
        bwR_all.newLine();
        bwR_all.write(printValues(f_all_tt, "icve"));
        bwR_all.newLine();
        bwR_all.close();
    }

    /**
     * Method to print the results of each algorithm.
     *
     * @param bw
     * @param bwR
     * @param iteration
     * @throws IOException
     */
    private void printResults(BufferedWriter bw, BufferedWriter bwR, int iteration) throws IOException {
        bw.write("=========================");
        bw.newLine();
        bw.write("Similarity Collapse (TF)");
        bw.newLine();
        printPrf(p_similarity, r_similarity, f_similarity, c_similarity, t_similarity, bw, bwR, "s", iteration);
        bw.write("=========================");
        bw.newLine();
        bw.write("DBSCAN (FF)");
        bw.newLine();
        printPrf(p_dbscan, r_dbscan, f_dbscan, c_dbscan, t_dbscan, bw, bwR, "d", iteration);
        bw.write("=========================");
        bw.newLine();
        bw.write("Similarity Collapse (FT)");
        bw.newLine();
        printPrf(p_similarityFT, r_similarityFT, f_similarityFT, c_similarityFT, t_similarityFT, bw, bwR, "ft", iteration);
        bw.write("=========================");
        bw.newLine();
        bw.write("Similarity Collapse (TT)");
        bw.newLine();
        printPrf(p_similarityTT, r_similarityTT, f_similarityTT, c_similarityTT, t_similarityTT, bw, bwR, "tt", iteration);
        checkWinner();
        // Accumulate this iteration's f-measures into the all-iteration vectors
        // before clearing the per-iteration lists.
        f_all_similarity.addAll(f_similarity);
        f_all_dbscan.addAll(f_dbscan);
        f_all_ft.addAll(f_similarityFT);
        f_all_tt.addAll(f_similarityTT);
        clearLists(p_similarity, r_similarity, f_similarity, c_similarity, t_similarity);
        clearLists(p_similarityFT, r_similarityFT, f_similarityFT, c_similarityFT, t_similarityFT);
        clearLists(p_similarityTT, r_similarityTT, f_similarityTT, c_similarityTT, t_similarityTT);
        clearLists(p_dbscan, r_dbscan, f_dbscan, c_dbscan, t_dbscan);
    }

    /**
     * Method that summarizes the results of each algorithm.
     *
     * @param p
     * @param r
     * @param f
     * @param c
     * @param t
     * @param bw
     * @param bwR
     * @param name
     * @param iteration
     * @throws IOException
     */
    private void printPrf(ArrayList<Double> p, ArrayList<Double> r, ArrayList<Double> f,
            ArrayList<Double> c, ArrayList<Double> t,
            BufferedWriter bw, BufferedWriter bwR, String name, int iteration) throws IOException {
        String precision;
        String recall;
        String fmeasure;
        String clusters;
        String time;
        String efficiency;
        precision = "Precision> Mean: " + Utils.mean(Utils.listToDoubleArray(p))
                + " / STD:" + Utils.stdev(Utils.listToDoubleArray(p))
                + " / Min: " + Utils.minimumValue(Utils.listToDoubleArray(p));
        recall = "Recall> Mean: " + Utils.mean(Utils.listToDoubleArray(r))
                + " / STD:" + Utils.stdev(Utils.listToDoubleArray(r))
                + " / Min: " + Utils.minimumValue(Utils.listToDoubleArray(r));
        fmeasure = "F-Measure> Mean: " + Utils.mean(Utils.listToDoubleArray(f))
                + " / STD:" + Utils.stdev(Utils.listToDoubleArray(f))
                + " / Min: " + Utils.minimumValue(Utils.listToDoubleArray(f));
        clusters = "#Clusters> Mean: " + Utils.mean(Utils.listToDoubleArray(c))
                + " / STD:" + Utils.stdev(Utils.listToDoubleArray(c))
                + " / Min: " + Utils.minimumValue(Utils.listToDoubleArray(c))
                + " / Max: " + Utils.maximumValue(Utils.listToDoubleArray(c));
        time = "Time (milliseconds)> Mean: " + Utils.mean(Utils.listToDoubleArray(t))
                + " / STD:" + Utils.stdev(Utils.listToDoubleArray(t))
                + " / Min: " + Utils.minimumValue(Utils.listToDoubleArray(t))
                + " / Max: " + Utils.maximumValue(Utils.listToDoubleArray(t));
        // Efficiency = mean precision per minute of mean runtime.
        efficiency = "Efficiency (precision/time)>: "
                + Utils.mean(Utils.listToDoubleArray(p)) / (Utils.mean(Utils.listToDoubleArray(t)) / 60000);
        bw.write(precision);
        bw.newLine();
        bw.write(recall);
        bw.newLine();
        bw.write(fmeasure);
        bw.newLine();
        bw.write(clusters);
        bw.newLine();
        bw.write(time);
        bw.newLine();
        bw.write(efficiency);
        bw.newLine();
        bwR.write(printValues(p, "p" + name + iteration));
        bwR.newLine();
        bwR.write(printValues(r, "r" + name + iteration));
        bwR.newLine();
        bwR.write(printValues(f, "f" + name + iteration));
        bwR.newLine();
        bwR.write(printValues(t, "t" + name + iteration));
        bwR.newLine();
        bwR.newLine();
    }

    /**
     * Method that converts the values into an R vector literal, e.g.
     * {@code type <- c(0.1,0.2)}. NaN entries are exported as 0.
     *
     * @param v the values to export
     * @param type the R variable name
     * @return the R assignment expression
     */
    private String printValues(ArrayList<Double> v, String type) {
        // NOTE(review): DecimalFormat and Double.valueOf are locale-sensitive;
        // in locales using ',' as decimal separator this round-trip could fail —
        // preserved as-is, confirm the deployment locale.
        DecimalFormat df = new DecimalFormat("#.###");
        StringBuilder values = new StringBuilder(type).append(" <- c(");
        for (int idx = 0; idx < v.size(); idx++) {
            if (idx > 0) {
                values.append(",");
            }
            Double e = v.get(idx);
            if (e.isNaN()) {
                values.append(0);
                System.out.println("NaN" + " " + type);
            } else {
                values.append(Double.valueOf(df.format(e)));
            }
        }
        // FIX: the previous implementation unconditionally stripped the last
        // character, producing malformed output ("type <- c)") for empty lists.
        values.append(")");
        return values.toString();
    }

    /**
     * Method to clean the arraylists.
     *
     * @param p
     * @param r
     * @param f
     * @param c
     * @param t
     */
    public void clearLists(ArrayList<Double> p, ArrayList<Double> r,
            ArrayList<Double> f, ArrayList<Double> c, ArrayList<Double> t) {
        p.clear();
        r.clear();
        f.clear();
        c.clear();
        t.clear();
    }

    /**
     * Method to clean the array lists.
     *
     * @param p
     * @param r
     * @param f
     * @param c
     */
    public void clearLists(ArrayList<Double> p, ArrayList<Double> r,
            ArrayList<Double> f, ArrayList<Double> c) {
        p.clear();
        r.clear();
        f.clear();
        c.clear();
    }

    /**
     * Method to count the number of times each algorithm "won".
     */
    private void checkWinner() {
        countWinnings(f_similarity, f_dbscan, f_similarityFT, f_similarityTT);
    }

    /**
     * Counts, position by position, how often {@code first} is at least as
     * large as {@code second}.
     *
     * @param first first list of values
     * @param second second list of values (assumed same length)
     * @return the number of positions where first >= second
     */
    public int isWinner(ArrayList<Double> first, ArrayList<Double> second) {
        int win = 0;
        for (int i = 0; i < first.size(); i++) {
            if (first.get(i) >= second.get(i)) {
                win += 1;
            }
        }
        return win;
    }

    /**
     * Method to count the number of times each algorithm "won". Ties are
     * counted for every tied algorithm.
     *
     * @param sim
     * @param db
     * @param ft
     * @param tt
     */
    public void countWinnings(ArrayList<Double> sim, ArrayList<Double> db,
            ArrayList<Double> ft, ArrayList<Double> tt) {
        for (int i = 0; i < sim.size(); i++) {
            if ((sim.get(i) >= db.get(i)) && (sim.get(i) >= ft.get(i)) && (sim.get(i) >= tt.get(i))) {
                similarity += 1;
            }
            if ((db.get(i) >= sim.get(i)) && (db.get(i) >= ft.get(i)) && (db.get(i) >= tt.get(i))) {
                dbscan += 1;
            }
            if ((ft.get(i) >= db.get(i)) && (ft.get(i) >= sim.get(i)) && (ft.get(i) >= tt.get(i))) {
                similarityFT += 1;
            }
            if ((tt.get(i) >= db.get(i)) && (tt.get(i) >= ft.get(i)) && (tt.get(i) >= sim.get(i))) {
                similarityTT += 1;
            }
        }
    }
}
/* * Copyright (C) 1996-2010 Power System Engineering Research Center * Copyright (C) 2010-2011 Richard Lincoln * * Licensed under the Apache License, Version 2.0 (the "License") * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.cornell.pserc.jpower; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.PrintWriter; import java.util.Map; import cern.colt.matrix.tdcomplex.DComplexFactory1D; import cern.colt.matrix.tdcomplex.DComplexMatrix1D; import cern.colt.matrix.tdouble.DoubleFactory1D; import cern.colt.matrix.tdouble.DoubleMatrix1D; import cern.colt.matrix.tint.IntFactory1D; import cern.colt.matrix.tint.IntMatrix1D; import cern.colt.matrix.tint.algo.IntSorting; import static edu.emory.mathcs.utils.Utils.ifunc; import static edu.emory.mathcs.utils.Utils.dfunc; import static edu.emory.mathcs.utils.Utils.cfunc; import static edu.emory.mathcs.utils.Utils.irange; import static edu.emory.mathcs.utils.Utils.nonzero; import static edu.emory.mathcs.utils.Utils.polar; import static edu.emory.mathcs.utils.Utils.intm; import static edu.emory.mathcs.utils.Utils.diff; import static edu.emory.mathcs.utils.Utils.complex; import static edu.emory.mathcs.utils.Utils.icat; import static edu.emory.mathcs.utils.Utils.any; import edu.cornell.pserc.jpower.jpc.Branch; import edu.cornell.pserc.jpower.jpc.Bus; import edu.cornell.pserc.jpower.jpc.Gen; import edu.cornell.pserc.jpower.jpc.JPC; import static edu.cornell.pserc.jpower.Djp_jpoption.jpoption; /** * Prints 
power flow results. * * @author Ray Zimmerman * @author Richard Lincoln */ @SuppressWarnings("static-access") public class Djp_printpf { public static void printpf(JPC results) { printpf(results, System.out); } public static void printpf(JPC results, String fname) { printpf(results, fname, jpoption()); } public static void printpf(JPC results, String fname, Map<String, Double> jpopt) { FileOutputStream output; try { output = new FileOutputStream(fname); printpf(results, output, jpoption()); output.close(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } public static void printpf(JPC results, OutputStream output) { printpf(results, output, jpoption()); } public static void printpf(JPC results, OutputStream output, Map<String, Double> jpopt) { PrintWriter pw; int i, k, nb, nl, ng, nout, mini, maxi, a, nxfmr; int OUT_ALL, OUT_ALL_LIM, OUT_V_LIM, OUT_LINE_LIM, OUT_PG_LIM, OUT_QG_LIM; int[] ties, xfmr, nzld, s_areas, nzsh, allg, ong, onld, out, ib, ig, igon, ildon, inzld, inzsh, ibrch, in_tie, out_tie, g, vg; boolean success, isOPF, isDC, anyP, anyQ, anyP_ld, anyQ_ld, anyF; boolean OUT_ANY, OUT_SYS_SUM, OUT_AREA_SUM, OUT_BUS, OUT_BRANCH, OUT_GEN, OUT_RAW; double baseMVA, et, ptol; double[] min, max; String str; Double f; Bus bus; Gen gen; Branch branch; IntMatrix1D i2e, e2i, tiesm, ld, sorted_areas, s_areasm, shunt, isload, notload, gs, bs, a_gbus, a_bus, hasload, hasshunt, a_fbus, a_tbus, _g, _vg, rated, Uf, Ut; DoubleMatrix1D fchg, tchg, Pinj, Qinj, Ptie, Qtie, Qlim, genlamP = null, genlamQ = null, Ff, Ft, F_tol; DComplexMatrix1D tap, V, loss, z, br_b, cfchg, ctchg, Sf, St; pw = new PrintWriter(output); if (jpopt.get("OUT_ALL").equals(0) || jpopt.get("OUT_RAW").equals(0)) return; baseMVA = results.baseMVA; bus = results.bus.copy(); gen = results.gen.copy(); branch = results.branch.copy(); success = results.success; et = results.et; f = results.f; isOPF = (f != null); /* FALSE -> only simple PF data, TRUE -> 
OPF data */ /* options */ isDC = jpopt.get("PF_DC") == 1; // use DC formulation? OUT_ALL = jpopt.get("OUT_ALL").intValue(); OUT_ANY = OUT_ALL == 1; // set to true if any pretty output is to be generated OUT_SYS_SUM = OUT_ALL == 1 || (OUT_ALL == -1 && jpopt.get("OUT_SYS_SUM") == 1); OUT_AREA_SUM = OUT_ALL == 1 || (OUT_ALL == -1 && jpopt.get("OUT_AREA_SUM") == 1); OUT_BUS = OUT_ALL == 1 || (OUT_ALL == -1 && jpopt.get("OUT_BUS") == 1); OUT_BRANCH = OUT_ALL == 1 || (OUT_ALL == -1 && jpopt.get("OUT_BRANCH") == 1); OUT_GEN = OUT_ALL == 1 || (OUT_ALL == -1 && jpopt.get("OUT_GEN") == 1); OUT_ANY = OUT_ANY || (OUT_ALL == -1 && (OUT_SYS_SUM || OUT_AREA_SUM || OUT_BUS || OUT_BRANCH || OUT_GEN)); if (OUT_ALL == -1) { OUT_ALL_LIM = jpopt.get("OUT_ALL_LIM").intValue(); } else if (OUT_ALL == 1) { OUT_ALL_LIM = 2; } else { OUT_ALL_LIM = 0; } OUT_ANY = OUT_ANY || OUT_ALL_LIM >= 1; if (OUT_ALL_LIM == -1) { OUT_V_LIM = jpopt.get("OUT_V_LIM").intValue(); OUT_LINE_LIM = jpopt.get("OUT_LINE_LIM").intValue(); OUT_PG_LIM = jpopt.get("OUT_PG_LIM").intValue(); OUT_QG_LIM = jpopt.get("OUT_QG_LIM").intValue(); } else { OUT_V_LIM = OUT_ALL_LIM; OUT_LINE_LIM = OUT_ALL_LIM; OUT_PG_LIM = OUT_ALL_LIM; OUT_QG_LIM = OUT_ALL_LIM; } OUT_ANY = OUT_ANY || (OUT_ALL_LIM == -1 && (OUT_V_LIM > 0 || OUT_LINE_LIM > 0 || OUT_PG_LIM > 0 || OUT_QG_LIM > 0)); OUT_RAW = jpopt.get("OUT_RAW") == 1; ptol = 1e-6; // tolerance for displaying shadow prices /* internal bus number */ i2e = bus.bus_i.copy(); e2i = IntFactory1D.sparse.make(i2e.aggregate(ifunc.max, ifunc.identity) + 1); e2i.viewSelection(i2e.toArray()).assign( irange(bus.size()) ); /* sizes */ nb = bus.size(); // number of buses nl = branch.size(); // number of branches ng = gen.size(); // number of generators /* zero out some data to make printout consistent for DC case */ if (isDC) { bus.Qd.assign(0); bus.Bs.assign(0); gen.Qg.assign(0); gen.Qmax.assign(0); gen.Qmin.assign(0); branch.br_r.assign(0); branch.br_b.assign(0); } /* parameters */ tiesm = 
bus.bus_area.viewSelection(e2i.viewSelection(branch.f_bus.toArray()).toArray()); tiesm.assign(bus.bus_area.viewSelection(e2i.viewSelection(branch.t_bus.toArray()).toArray()), ifunc.equals); tiesm.assign(ifunc.equals(0)); ties = tiesm.toArray(); // area inter-ties tap = DComplexFactory1D.dense.make(nl).assign(1, 0); // default tap ratio = 1 for lines xfmr = nonzero(branch.tap); // indices of transformers tap.viewSelection(xfmr).assignReal(branch.tap.viewSelection(xfmr)); // include transformer tap ratios tap.assign(polar(tap.getRealPart(), branch.shift, false)); // add phase shifters ld = intm(bus.Pd); ld.assign(intm(bus.Qd), ifunc.or); nzld = nonzero(ld); sorted_areas = IntSorting.quickSort.sort(bus.bus_area); s_areasm = sorted_areas.viewSelection(nonzero(diff(sorted_areas))).copy(); s_areas = s_areasm.toArray(); // area numbers shunt = intm(bus.Gs); shunt.assign(intm(bus.Bs), ifunc.or); nzsh = nonzero(shunt); isload = Djp_isload.isload(gen); notload = isload.copy(); notload.assign(ifunc.equals(0)); allg = nonzero(notload); gs = gen.gen_status.copy(); gs.assign(notload, ifunc.and); ong = nonzero(gs); gs = gen.gen_status.copy(); gs.assign(isload, ifunc.and); onld = nonzero(gs); V = polar(bus.Vm, bus.Va, false); bs = branch.br_status.copy(); out = nonzero( bs.assign(ifunc.equals(0)) ); // out-of-service branches nout = out.length; loss = DComplexFactory1D.dense.make(nl);; if (!isDC) { z = complex(branch.br_r, branch.br_x); loss.assign(V.viewSelection(e2i.viewSelection(branch.f_bus.toArray()).toArray())); loss.assign(tap, cfunc.div); loss.assign(V.viewSelection(e2i.viewSelection(branch.t_bus.toArray()).toArray()), cfunc.minus); loss.assign(cfunc.abs).assign(cfunc.square).assign(z, cfunc.div); loss.assign(cfunc.mult(baseMVA)); } br_b = DComplexFactory1D.dense.make(nl); br_b.assignReal(branch.br_b).assign(cfunc.mult(baseMVA)).assign(cfunc.div(2)); cfchg = DComplexFactory1D.dense.make(nl); 
cfchg.assign(V.viewSelection(e2i.viewSelection(branch.f_bus.toArray()).toArray())); cfchg.assign(tap, cfunc.div).assign(cfunc.abs).assign(cfunc.square).assign(br_b, cfunc.mult); fchg = cfchg.getRealPart(); ctchg = DComplexFactory1D.dense.make(nl); ctchg.assign(V.viewSelection(e2i.viewSelection(branch.t_bus.toArray()).toArray())); ctchg.assign(cfunc.abs).assign(cfunc.square).assign(br_b, cfunc.mult); tchg = ctchg.getRealPart(); loss.viewSelection(out).assign( DComplexFactory1D.dense.make(nout) ); fchg.viewSelection(out).assign( DoubleFactory1D.dense.make(nout) ); tchg.viewSelection(out).assign( DoubleFactory1D.dense.make(nout) ); /* ----- print the stuff ----- */ if (OUT_ANY) { /* convergence & elapsed time */ if (success) { pw.printf("\nConverged in %.2f seconds", et); } else { pw.printf("\nDid not converge (%.2f seconds)\n", et); } /* objective function value */ if (isOPF) pw.printf("\nObjective Function Value = %.2f $/hr", f); } if (OUT_SYS_SUM) { pw.printf("\n================================================================================"); pw.printf("\n| System Summary |"); pw.printf("\n================================================================================"); pw.printf("\n\nHow many? How much? 
P (MW) Q (MVAr)"); pw.printf("\n--------------------- ------------------- ------------- -----------------"); pw.printf("\nBuses %6d Total Gen Capacity %7.1f %7.1f to %.1f", nb, gen.Pmax.viewSelection(allg).zSum(), gen.Qmin.viewSelection(allg).zSum(), gen.Qmax.viewSelection(allg).zSum()); pw.printf("\nGenerators %5d On-line Capacity %7.1f %7.1f to %.1f", allg.length, gen.Pmax.viewSelection(ong).zSum(), gen.Qmin.viewSelection(ong).zSum(), gen.Qmax.viewSelection(ong).zSum()); pw.printf("\nCommitted Gens %5d Generation (actual) %7.1f %7.1f", ong.length, gen.Pg.viewSelection(ong).zSum(), gen.Qg.viewSelection(ong).zSum()); pw.printf("\nLoads %5d Load %7.1f %7.1f", nzld.length+onld.length, bus.Pd.viewSelection(nzld).zSum()-gen.Pg.viewSelection(onld).zSum(), bus.Qd.viewSelection(nzld).zSum()-gen.Qg.viewSelection(onld).zSum()); pw.printf("\n Fixed %5d Fixed %7.1f %7.1f", nzld.length, bus.Pd.viewSelection(nzld).zSum(), bus.Qd.viewSelection(nzld).zSum()); pw.printf("\n Dispatchable %5d Dispatchable %7.1f of %-7.1f%7.1f", onld.length, -gen.Pg.viewSelection(onld).zSum(), -gen.Pmin.viewSelection(onld).zSum(), -gen.Pg.viewSelection(onld).zSum()); Pinj = DoubleFactory1D.dense.make(bus.Vm.viewSelection(nzsh).toArray()); Pinj.assign(dfunc.square).assign(bus.Gs.viewSelection(nzsh), dfunc.mult); Qinj = DoubleFactory1D.dense.make(bus.Vm.viewSelection(nzsh).toArray()); Qinj.assign(dfunc.square).assign(bus.Bs.viewSelection(nzsh), dfunc.mult); pw.printf("\nShunts %5d Shunt (inj) %7.1f %7.1f", nzsh.length, -Pinj.zSum(), Qinj.zSum()); pw.printf("\nBranches %5d Losses (I^2 * Z) %8.2f %8.2f", nl, loss.getRealPart().zSum(), loss.getImaginaryPart().zSum()); pw.printf("\nTransformers %5d Branch Charging (inj) - %7.1f", xfmr.length, fchg.zSum() + tchg.zSum() ); Ptie = DoubleFactory1D.dense.make(branch.Pf.viewSelection(ties).toArray()); Ptie.assign(branch.Pt.viewSelection(ties), dfunc.minus).assign(dfunc.abs); Qtie = DoubleFactory1D.dense.make(branch.Qf.viewSelection(ties).toArray()); 
Qtie.assign(branch.Qt.viewSelection(ties), dfunc.minus).assign(dfunc.abs); pw.printf("\nInter-ties %5d Total Inter-tie Flow %7.1f %7.1f", ties.length, Ptie.zSum() / 2, Qtie.zSum() / 2); pw.printf("\nAreas %5d", s_areas.length); pw.printf("\n"); pw.printf("\n Minimum Maximum"); pw.printf("\n ------------------------- --------------------------------"); min = bus.Vm.getMinLocation(); max = bus.Vm.getMaxLocation(); mini = new Double(min[1]).intValue(); maxi = new Double(max[1]).intValue(); pw.printf("\nVoltage Magnitude %7.3f p.u. @ bus %-4d %7.3f p.u. @ bus %-4d", min[0], bus.bus_i.get(mini), max[0], bus.bus_i.get(maxi)); min = bus.Va.getMinLocation(); max = bus.Va.getMaxLocation(); mini = new Double(min[1]).intValue(); maxi = new Double(max[1]).intValue(); pw.printf("\nVoltage Angle %8.2f deg @ bus %-4d %8.2f deg @ bus %-4d", min[0], bus.bus_i.get(mini), max[0], bus.bus_i.get(maxi)); if (!isDC) { min = loss.getRealPart().getMinLocation(); max = loss.getRealPart().getMaxLocation(); mini = new Double(min[1]).intValue(); maxi = new Double(max[1]).intValue(); pw.printf("\nP Losses (I^2*R) - %8.2f MW @ line %d-%d", max[0], branch.f_bus.get(maxi), branch.t_bus.get(maxi)); min = loss.getImaginaryPart().getMinLocation(); max = loss.getImaginaryPart().getMaxLocation(); mini = new Double(min[1]).intValue(); maxi = new Double(max[1]).intValue(); pw.printf("\nQ Losses (I^2*X) - %8.2f MVAr @ line %d-%d", max[0], branch.f_bus.get(maxi), branch.t_bus.get(maxi)); } if (isOPF) { min = bus.lam_P.getMinLocation(); max = bus.lam_P.getMaxLocation(); mini = new Double(min[1]).intValue(); maxi = new Double(max[1]).intValue(); pw.printf("\nLambda P %8.2f $/MWh @ bus %-4d %8.2f $/MWh @ bus %-4d", min[0], bus.bus_i.get(mini), max[0], bus.bus_i.get(maxi)); min = bus.lam_Q.getMinLocation(); max = bus.lam_Q.getMaxLocation(); mini = new Double(min[1]).intValue(); maxi = new Double(max[1]).intValue(); pw.printf("\nLambda Q %8.2f $/MWh @ bus %-4d %8.2f $/MWh @ bus %-4d", min[0], 
bus.bus_i.get(mini), max[0], bus.bus_i.get(maxi)); } pw.printf("\n"); } if (OUT_AREA_SUM) { pw.printf("\n================================================================================"); pw.printf("\n| Area Summary |"); pw.printf("\n================================================================================"); pw.printf("\nArea # of # of Gens # of Loads # of # of # of # of"); pw.printf("\n Num Buses Total Online Total Fixed Disp Shunt Brchs Xfmrs Ties"); pw.printf("\n---- ----- ----- ------ ----- ----- ----- ----- ----- ----- -----"); for (i = 0; i < s_areas.length; i++) { a = s_areas[i]; ib = nonzero(bus.bus_area.copy().assign(ifunc.equals(a))); a_gbus = bus.bus_area.viewSelection( e2i.viewSelection(gen.gen_bus.toArray()).toArray() ).copy().assign(ifunc.equals(a)); ig = nonzero(a_gbus.copy().assign(notload, ifunc.and)); // ig = find(bus(e2i(gen(:, GEN_BUS)), BUS_AREA) == a & ~isload(gen)); igon = nonzero(a_gbus.copy().assign(gen.gen_status, ifunc.and).assign(notload, ifunc.and)); // igon = find(bus(e2i(gen(:, GEN_BUS)), BUS_AREA) == a & gen(:, GEN_STATUS) > 0 & ~isload(gen)); ildon = nonzero(a_gbus.copy().assign(gen.gen_status, ifunc.and).assign(isload, ifunc.and)); // ildon = find(bus(e2i(gen(:, GEN_BUS)), BUS_AREA) == a & gen(:, GEN_STATUS) > 0 & isload(gen)); a_bus = bus.bus_area.copy().assign(ifunc.equals(a)); hasload = intm( bus.Pd.copy().assign(dfunc.equals(0)) ).assign(ifunc.equals(0)); hasload.assign(intm( bus.Qd.copy().assign(dfunc.equals(0)) ).assign(ifunc.equals(0)), ifunc.or); inzld = nonzero(a_bus.copy().assign(hasload, ifunc.and)); // inzld = find(bus(:, BUS_AREA) == a & (bus(:, PD) | bus(:, QD))); hasshunt = intm( bus.Gs.copy().assign(dfunc.equals(0)) ).assign(ifunc.equals(0)); hasshunt.assign(intm( bus.Bs.copy().assign(dfunc.equals(0)) ).assign(ifunc.equals(0)), ifunc.or); inzsh = nonzero(a_bus.copy().assign(hasshunt, ifunc.and)); // inzsh = find(bus(:, BUS_AREA) == a & (bus(:, GS) | bus(:, BS))); a_fbus = bus.bus_area.viewSelection( 
e2i.viewSelection(branch.f_bus.toArray()).toArray() ).copy().assign(ifunc.equals(a)); a_tbus = bus.bus_area.viewSelection( e2i.viewSelection(branch.t_bus.toArray()).toArray() ).copy().assign(ifunc.equals(a)); ibrch = nonzero(a_fbus.copy().assign(a_tbus, ifunc.and)); // ibrch = find(bus(e2i(branch(:, F_BUS)), BUS_AREA) == a & bus(e2i(branch(:, T_BUS)), BUS_AREA) == a); in_tie = nonzero( a_fbus.copy().assign(a_tbus.copy().assign(ifunc.equals(0)), ifunc.and) ); // in_tie = find(bus(e2i(branch(:, F_BUS)), BUS_AREA) == a & bus(e2i(branch(:, T_BUS)), BUS_AREA) ~= a); out_tie = nonzero( a_fbus.copy().assign(ifunc.equals(0)).assign(a_tbus, ifunc.and) ); // out_tie = find(bus(e2i(branch(:, F_BUS)), BUS_AREA) ~= a & bus(e2i(branch(:, T_BUS)), BUS_AREA) == a); if (xfmr.length == 0) { nxfmr = 0; } else { a_fbus = bus.bus_area.viewSelection( e2i.viewSelection(branch.f_bus.viewSelection(xfmr).toArray()).toArray() ).copy().assign(ifunc.equals(a)); a_tbus = bus.bus_area.viewSelection( e2i.viewSelection(branch.t_bus.viewSelection(xfmr).toArray()).toArray() ).copy().assign(ifunc.equals(a)); nxfmr = nonzero(a_fbus.copy().assign(a_tbus, ifunc.and)).length; } pw.printf("\n%3d %6d %5d %5d %5d %5d %5d %5d %5d %5d %5d", a, ib.length, ig.length, igon.length, inzld.length + ildon.length, inzld.length, ildon.length, inzsh.length, ibrch.length, nxfmr, in_tie.length + out_tie.length); } pw.printf("\n---- ----- ----- ------ ----- ----- ----- ----- ----- ----- -----"); pw.printf("\nTot: %6d %5d %5d %5d %5d %5d %5d %5d %5d %5d", nb, allg.length, ong.length, nzld.length + onld.length, nzld.length, onld.length, nzsh.length, nl, xfmr.length, ties.length); pw.printf("\n"); pw.printf("\nArea Total Gen Capacity On-line Gen Capacity Generation"); pw.printf("\n Num MW MVAr MW MVAr MW MVAr"); pw.printf("\n---- ------ ------------------ ------ ------------------ ------ ------"); for (i = 0; i < s_areas.length; i++) { a = s_areas[i]; a_gbus = bus.bus_area.viewSelection( 
e2i.viewSelection(gen.gen_bus.toArray()).toArray() ).copy().assign(ifunc.equals(a)); ig = nonzero(a_gbus.copy().assign(notload, ifunc.and)); igon = nonzero(a_gbus.copy().assign(gen.gen_status, ifunc.and).assign(notload, ifunc.and)); pw.printf("\n%3d %7.1f %7.1f to %-7.1f %7.1f %7.1f to %-7.1f %7.1f %7.1f", a, gen.Pmax.viewSelection(ig).zSum(), gen.Qmin.viewSelection(ig).zSum(), gen.Qmax.viewSelection(ig).zSum(), gen.Pmax.viewSelection(igon).zSum(), gen.Qmin.viewSelection(igon).zSum(), gen.Qmax.viewSelection(igon).zSum(), gen.Pg.viewSelection(igon).zSum(), gen.Qg.viewSelection(igon).zSum() ); } pw.printf("\n---- ------ ------------------ ------ ------------------ ------ ------"); pw.printf("\nTot: %7.1f %7.1f to %-7.1f %7.1f %7.1f to %-7.1f %7.1f %7.1f", gen.Pmax.viewSelection(allg).zSum(), gen.Qmin.viewSelection(allg).zSum(), gen.Qmax.viewSelection(allg).zSum(), gen.Pmax.viewSelection(ong).zSum(), gen.Qmin.viewSelection(ong).zSum(), gen.Qmax.viewSelection(ong).zSum(), gen.Pg.viewSelection(ong).zSum(), gen.Qg.viewSelection(ong).zSum() ); pw.printf("\n"); pw.printf("\nArea Disp Load Cap Disp Load Fixed Load Total Load"); pw.printf("\n Num MW MVAr MW MVAr MW MVAr MW MVAr"); pw.printf("\n---- ------ ------ ------ ------ ------ ------ ------ ------"); Qlim = gen.Qmin.copy().assign(dfunc.equals(0)).assign(gen.Qmax, dfunc.mult); Qlim.assign(gen.Qmax.copy().assign(dfunc.equals(0)).assign(gen.Qmin, dfunc.mult), dfunc.plus); // Qlim = (gen(:, QMIN) == 0) .* gen(:, QMAX) + (gen(:, QMAX) == 0) .* gen(:, QMIN); for (i = 0; i < s_areas.length; i++) { a = s_areas[i]; a_gbus = bus.bus_area.viewSelection( e2i.viewSelection(gen.gen_bus.toArray()).toArray() ).copy().assign(ifunc.equals(a)); ildon = nonzero(a_gbus.copy().assign(gen.gen_status, ifunc.and).assign(isload, ifunc.and)); a_bus = bus.bus_area.copy().assign(ifunc.equals(a)); hasload = intm( bus.Pd.copy().assign(dfunc.equals(0)) ).assign(ifunc.equals(0)); hasload.assign(intm( bus.Qd.copy().assign(dfunc.equals(0)) 
).assign(ifunc.equals(0)), ifunc.or); inzld = nonzero(a_bus.copy().assign(hasload, ifunc.and)); // ildon = find(bus(e2i(gen(:, GEN_BUS)), BUS_AREA) == a & gen(:, GEN_STATUS) > 0 & isload(gen)); // inzld = find(bus(:, BUS_AREA) == a & (bus(:, PD) | bus(:, QD))); pw.printf("\n%3d %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f", a, -gen.Pmin.viewSelection(ildon).zSum(), -Qlim.viewSelection(ildon).zSum(), -gen.Pg.viewSelection(ildon).zSum(), -gen.Qg.viewSelection(ildon).zSum(), bus.Pd.viewSelection(inzld).zSum(), bus.Qd.viewSelection(inzld).zSum(), -gen.Pg.viewSelection(ildon).zSum() + bus.Pd.viewSelection(inzld).zSum(), -gen.Qg.viewSelection(ildon).zSum() + bus.Qd.viewSelection(inzld).zSum() ); } pw.printf("\n---- ------ ------ ------ ------ ------ ------ ------ ------"); pw.printf("\nTot: %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f", -gen.Pmin.viewSelection(onld).zSum(), -Qlim.viewSelection(onld).zSum(), -gen.Pg.viewSelection(onld).zSum(), -gen.Qg.viewSelection(onld).zSum(), bus.Pd.viewSelection(nzld).zSum(), bus.Qd.viewSelection(nzld).zSum(), -gen.Pg.viewSelection(onld).zSum() + bus.Pd.viewSelection(nzld).zSum(), -gen.Qg.viewSelection(onld).zSum() + bus.Qd.viewSelection(nzld).zSum() ); pw.printf("\n"); pw.printf("\nArea Shunt Inj Branch Series Losses Net Export"); pw.printf("\n Num MW MVAr Charging MW MVAr MW MVAr"); pw.printf("\n---- ------ ------ -------- ------ ------ ------ ------"); for (i = 0; i < s_areas.length; i++) { a = s_areas[i]; a_bus = bus.bus_area.copy().assign(ifunc.equals(a)); hasshunt = intm( bus.Gs.copy().assign(dfunc.equals(0)) ).assign(ifunc.equals(0)); hasshunt.assign(intm( bus.Bs.copy().assign(dfunc.equals(0)) ).assign(ifunc.equals(0)), ifunc.or); inzsh = nonzero(a_bus.copy().assign(hasshunt, ifunc.and)); a_fbus = bus.bus_area.viewSelection( e2i.viewSelection(branch.f_bus.toArray()).toArray() ).copy().assign(ifunc.equals(a)); a_tbus = bus.bus_area.viewSelection( e2i.viewSelection(branch.t_bus.toArray()).toArray() 
).copy().assign(ifunc.equals(a)); ibrch = nonzero( a_fbus.copy().assign(a_tbus, ifunc.and).assign(branch.br_status, ifunc.and) ); in_tie = nonzero( a_fbus.copy().assign(a_tbus.copy().assign(ifunc.equals(0)), ifunc.and).assign(branch.br_status, ifunc.and) ); out_tie = nonzero( a_fbus.copy().assign(ifunc.equals(0)).assign(a_tbus, ifunc.and).assign(branch.br_status, ifunc.and) ); pw.printf("\n%3d %7.1f %7.1f %7.1f %7.2f %7.2f %7.1f %7.1f", a, -bus.Vm.viewSelection(inzsh).copy().assign(dfunc.square).assign(bus.Gs.viewSelection(inzsh), dfunc.mult).zSum(), bus.Vm.viewSelection(inzsh).copy().assign(dfunc.square).assign(bus.Bs.viewSelection(inzsh), dfunc.mult).zSum(), fchg.viewSelection(ibrch).zSum() + tchg.viewSelection(ibrch).zSum() + fchg.viewSelection(out_tie).zSum() + tchg.viewSelection(in_tie).zSum(), loss.viewSelection(ibrch).getRealPart().zSum() + loss.viewSelection(icat(in_tie, out_tie)).getRealPart().zSum() / 2, loss.viewSelection(ibrch).getImaginaryPart().zSum() + loss.viewSelection(icat(in_tie, out_tie)).getImaginaryPart().zSum() / 2, branch.Pt.viewSelection(in_tie).zSum()+branch.Pf.viewSelection(out_tie).zSum() - loss.viewSelection(icat(in_tie, out_tie)).getRealPart().zSum() / 2, branch.Qt.viewSelection(in_tie).zSum()+branch.Qf.viewSelection(out_tie).zSum() - loss.viewSelection(icat(in_tie, out_tie)).getImaginaryPart().zSum() / 2 ); } pw.printf("\n---- ------ ------ -------- ------ ------ ------ ------"); pw.printf("\nTot: %7.1f %7.1f %7.1f %7.2f %7.2f - -", -bus.Vm.viewSelection(nzsh).assign(dfunc.square).assign(bus.Gs.viewSelection(nzsh), dfunc.mult).zSum(), bus.Vm.viewSelection(nzsh).assign(dfunc.square).assign(bus.Bs.viewSelection(nzsh), dfunc.mult).zSum(), fchg.zSum() + tchg.zSum(), loss.getRealPart().zSum(), loss.getImaginaryPart().zSum() ); pw.printf("\n"); } /* generator data */ if (OUT_GEN) { if (isOPF) { genlamP = bus.lam_P.viewSelection((e2i.viewSelection(gen.gen_bus.toArray())).toArray()); genlamQ = 
bus.lam_Q.viewSelection((e2i.viewSelection(gen.gen_bus.toArray())).toArray()); } pw.printf("\n================================================================================"); pw.printf("\n| Generator Data |"); pw.printf("\n================================================================================"); pw.printf("\n Gen Bus Status Pg Qg "); if (isOPF) { pw.printf(" Lambda ($/MVA-hr)"); } pw.printf("\n # # (MW) (MVAr) "); if (isOPF) { pw.printf(" P Q "); } pw.printf("\n---- ----- ------ -------- --------"); if (isOPF) { pw.printf(" -------- --------"); } for (k = 0; k < ong.length; k++) { i = ong[k]; pw.printf("\n%3d %6d %2d ", i, gen.gen_bus.get(i), gen.gen_status.get(i)); if (gen.gen_status.get(i) > 0 && (gen.Pg.get(i) > 0 || gen.Qg.get(i) > 0)) { pw.printf("%10.2f%10.2f", gen.Pg.get(i), gen.Qg.get(i)); } else { pw.printf(" - - "); } if (isOPF) { pw.printf("%10.2f%10.2f", genlamP.get(i), genlamQ.get(i)); } } pw.printf("\n -------- --------"); pw.printf("\n Total: %9.2f%10.2f", gen.Pg.viewSelection(ong).zSum(), gen.Qg.viewSelection(ong).zSum()); pw.printf("\n"); if (onld.length > 1) { pw.printf("\n================================================================================"); pw.printf("\n| Dispatchable Load Data |"); pw.printf("\n================================================================================"); pw.printf("\n Gen Bus Status Pd Qd "); if (isOPF) { pw.printf(" Lambda ($/MVA-hr)"); } pw.printf("\n # # (MW) (MVAr) "); if (isOPF) { pw.printf(" P Q "); } pw.printf("\n---- ----- ------ -------- --------"); if (isOPF) { pw.printf(" -------- --------"); } for (k = 0; k < onld.length; k++) { i = onld[k]; pw.printf("\n%3d %6d %2d ", i, gen.gen_bus.get(i), gen.gen_status.get(i)); if (gen.gen_status.get(i) > 0 && (gen.Pg.get(i) > 0 || gen.Qg.get(i) > 0)) { pw.printf("%10.2f%10.2f", -gen.Pg.get(i), -gen.Qg.get(i)); } else { pw.printf(" - - "); } if (isOPF) { pw.printf("%10.2f%10.2f", genlamP.get(i), genlamQ.get(i)); } } pw.printf("\n -------- 
--------"); pw.printf("\n Total: %9.2f%10.2f", -gen.Pg.viewSelection(onld).zSum(), -gen.Qg.viewSelection(onld).zSum()); pw.printf("\n"); } } /* bus data */ if (OUT_BUS) { pw.printf("\n================================================================================"); pw.printf("\n| Bus Data |"); pw.printf("\n================================================================================"); pw.printf("\n Bus Voltage Generation Load "); if (isOPF) { pw.printf(" Lambda($/MVA-hr)"); } pw.printf("\n # Mag(pu) Ang(deg) P (MW) Q (MVAr) P (MW) Q (MVAr)"); if (isOPF) { pw.printf(" P Q "); } pw.printf("\n----- ------- -------- -------- -------- -------- --------"); if (isOPF) { pw.printf(" ------- -------"); } for (i = 0; i < nb; i++) { pw.printf("\n%5d%7.3f%9.3f", bus.bus_i.get(i), bus.Vm.get(i), bus.Va.get(i)); _g = gen.gen_bus.copy().assign(ifunc.equals(bus.bus_i.get(i))); g = _g.assign(gen.gen_status, ifunc.and).assign(notload, ifunc.and).toArray(); _vg = gen.gen_bus.copy().assign(ifunc.equals(bus.bus_i.get(i))); vg = _vg.assign(gen.gen_status, ifunc.and).assign(isload, ifunc.and).toArray(); if (g.length > 0) { pw.printf("%10.2f%10.2f", gen.Pg.viewSelection(g).zSum(), gen.Qg.viewSelection(g).zSum()); } else { pw.printf(" - - "); } if (bus.Pd.get(i) > 0 || bus.Qd.get(i) > 0 || vg.length > 0) { if (vg.length > 0) { pw.printf("%10.2f*%9.2f*", bus.Pd.get(i) - gen.Pg.viewSelection(vg).zSum(), bus.Qd.get(i) - gen.Qg.viewSelection(vg).zSum()); } else { pw.printf("%10.2f%10.2f ", bus.Pd.get(i), bus.Qd.get(i)); } } else { pw.printf(" - - "); } if (isOPF) { pw.printf("%9.3f", bus.lam_P.get(i)); if (dfunc.abs.apply(bus.lam_Q.get(i)) > ptol) { pw.printf("%8.3f", bus.lam_Q.get(i)); } else { pw.printf(" -"); } } } pw.printf("\n -------- -------- -------- --------"); pw.printf("\n Total: %9.2f %9.2f %9.2f %9.2f", gen.Pg.viewSelection(ong).zSum(), gen.Qg.viewSelection(ong).zSum(), bus.Pd.viewSelection(nzld).zSum() - gen.Pg.viewSelection(onld).zSum(), bus.Qd.viewSelection(nzld).zSum() - 
gen.Qg.viewSelection(onld).zSum() ); pw.printf("\n"); } /* branch data */ if (OUT_BRANCH) { pw.printf("\n================================================================================"); pw.printf("\n| Branch Data |"); pw.printf("\n================================================================================"); pw.printf("\nBrnch From To From Bus Injection To Bus Injection Loss (I^2 * Z) "); pw.printf("\n # Bus Bus P (MW) Q (MVAr) P (MW) Q (MVAr) P (MW) Q (MVAr)"); pw.printf("\n----- ----- ----- -------- -------- -------- -------- -------- --------"); for (i = 0; i < nl; i++) { pw.printf("\n%4d%7d%7d%10.2f%10.2f%10.2f%10.2f%10.3f%10.2f", i, branch.f_bus.get(i), branch.t_bus.get(i), branch.Pf.get(i), branch.Qf.get(i), branch.Pt.get(i), branch.Qt.get(i), loss.getRealPart().get(i), loss.getImaginaryPart().get(i) ); } pw.printf("\n -------- --------"); pw.printf("\n Total:%10.3f%10.2f", loss.getRealPart().zSum(), loss.getImaginaryPart().zSum()); pw.printf("\n"); } /* ----- constraint data ----- */ if (isOPF) { double ctol = jpopt.get("OPF_VIOLATION"); // constraint violation tolerance // voltage constraints if (!isDC && ( OUT_V_LIM == 2 || (OUT_V_LIM == 1 && (any( bus.Vm.copy().assign(bus.Vmin.assign(dfunc.plus(ctol)), dfunc.less) ) || any( bus.Vm.copy().assign(bus.Vmax.assign(dfunc.minus(ctol)), dfunc.greater) ) || any( bus.mu_Vmin.copy().assign(dfunc.greater(ptol)) ) || any( bus.mu_Vmax.copy().assign(dfunc.greater(ptol)) )))) ) { pw.printf("\n================================================================================"); pw.printf("\n| Voltage Constraints |"); pw.printf("\n================================================================================"); pw.printf("\nBus # Vmin mu Vmin |V| Vmax Vmax mu"); pw.printf("\n----- -------- ----- ----- ----- --------"); for (i = 0; i < nb; i++) { if (OUT_V_LIM == 2 || (OUT_V_LIM == 1 && (bus.Vm.get(i) < bus.Vmin.get(i) + ctol || bus.Vm.get(i) > bus.Vmax.get(i) - ctol || bus.mu_Vmin.get(i) > ptol || 
bus.mu_Vmax.get(i) > ptol)) ) { pw.printf("\n%5d", bus.bus_i.get(i)); if (bus.Vm.get(i) < bus.Vmin.get(i) + ctol || bus.mu_Vmin.get(i) > ptol) { pw.printf("%10.3f", bus.mu_Vmin.get(i)); } else { pw.printf(" - "); } pw.printf("%8.3f%7.3f%7.3f", bus.Vmin.get(i), bus.Vm.get(i), bus.Vmax.get(i)); if (bus.Vm.get(i) > bus.Vmax.get(i) - ctol || bus.mu_Vmax.get(i) > ptol) { pw.printf("%10.3f", bus.mu_Vmax.get(i)); } else { pw.printf(" - "); } } } pw.printf("\n"); } /* generator constraints */ anyP = ( any( gen.Pg.viewSelection(ong).copy().assign(gen.Pmin.viewSelection(ong).assign(dfunc.plus(ctol)) , dfunc.less) ) || any( gen.Pg.viewSelection(ong).copy().assign(gen.Pmax.viewSelection(ong).assign(dfunc.minus(ctol)), dfunc.less) ) ); if (gen.mu_Pmin != null) // FIXME Should add zeros for result fields when loading case anyP = anyP || any( gen.mu_Pmin.viewSelection(ong).assign(dfunc.greater(ptol)) ); if (gen.mu_Pmax != null) anyP = anyP || any( gen.mu_Pmax.viewSelection(ong).assign(dfunc.greater(ptol)) ); anyQ = ( any( gen.Qg.viewSelection(ong).copy().assign(gen.Qmin.viewSelection(ong).assign(dfunc.plus(ctol)) , dfunc.less) ) || any( gen.Qg.viewSelection(ong).copy().assign(gen.Qmax.viewSelection(ong).assign(dfunc.minus(ctol)), dfunc.less) ) ); if (gen.mu_Qmin != null) anyQ = anyQ || any( gen.mu_Qmin.viewSelection(ong).assign(dfunc.greater(ptol)) ); if (gen.mu_Qmax != null) anyQ = anyQ || any( gen.mu_Qmax.viewSelection(ong).assign(dfunc.greater(ptol)) ); if (OUT_PG_LIM == 2 || (OUT_PG_LIM == 1 && anyP) || ( !isDC && (OUT_QG_LIM == 2 || (OUT_QG_LIM == 1 && anyQ))) ) { pw.printf("\n================================================================================"); pw.printf("\n| Generation Constraints |"); pw.printf("\n================================================================================"); } /* generator P constraints */ if (OUT_PG_LIM == 2 || (OUT_PG_LIM == 1 && anyP)) { pw.printf("\n Gen Bus Active Power Limits"); pw.printf("\n # # Pmin mu Pmin Pg Pmax Pmax mu"); 
pw.printf("\n---- ----- ------- -------- -------- -------- -------"); for (k = 0; k < ong.length; k++) { i = ong[k]; if (OUT_PG_LIM == 2 || (OUT_PG_LIM == 1 && (gen.Pg.get(i) < gen.Pmin.get(i) + ctol || gen.Pg.get(i) > gen.Pmax.get(i) - ctol || gen.mu_Pmin.get(i) > ptol || gen.mu_Pmax.get(i) > ptol))) { pw.printf("\n%4d%6d ", i, gen.gen_bus.get(i)); if (gen.Pg.get(i) < gen.Pmin.get(i) + ctol || gen.mu_Pmin.get(i) > ptol) { pw.printf("%8.3f", gen.mu_Pmin.get(i)); } else { pw.printf(" - "); } if (gen.Pg.get(i) > 0) { pw.printf("%10.2f%10.2f%10.2f", gen.Pmin.get(i), gen.Pg.get(i), gen.Pmax.get(i)); } else { pw.printf("%10.2f - %10.2f", gen.Pmin.get(i), gen.Pmax.get(i)); } if (gen.Pg.get(i) > gen.Pmax.get(i) - ctol || gen.mu_Pmax.get(i) > ptol) { pw.printf("%9.3f", gen.mu_Pmax.get(i)); } else { pw.printf(" - "); } } } pw.printf("\n"); } /* generator Q constraints */ if (!isDC && (OUT_QG_LIM == 2 || (OUT_QG_LIM == 1 && anyQ))) { pw.printf("\nGen Bus Reactive Power Limits"); pw.printf("\n # # Qmin mu Qmin Qg Qmax Qmax mu"); pw.printf("\n--- --- ------- -------- -------- -------- -------"); for (k = 0; k < ong.length; k++) { i = ong[k]; if (OUT_QG_LIM == 2 || (OUT_QG_LIM == 1 && (gen.Qg.get(i) < gen.Qmin.get(i) + ctol || gen.Qg.get(i) > gen.Qmax.get(i) - ctol || gen.mu_Qmin.get(i) > ptol || gen.mu_Qmax.get(i) > ptol))) { pw.printf("\n%3d%5d", i, gen.gen_bus.get(i)); if (gen.Qg.get(i) < gen.Qmin.get(i) + ctol || gen.mu_Qmin.get(i) > ptol) { pw.printf("%8.3f", gen.mu_Qmin.get(i)); } else { pw.printf(" - "); } if (gen.Qg.get(i) > 0) { pw.printf("%10.2f%10.2f%10.2f", gen.Qmin.get(i), gen.Qg.get(i), gen.Qmax.get(i)); } else { pw.printf("%10.2f - %10.2f", gen.Qmin.get(i), gen.Qmax.get(i)); } if (gen.Qg.get(i) > gen.Qmax.get(i) - ctol || gen.mu_Qmax.get(i) > ptol) { pw.printf("%9.3f", gen.mu_Qmax.get(i)); } else { pw.printf(" - "); } } } pw.printf("\n"); } /* dispatchable load constraints */ anyP_ld = ( any( 
gen.Pg.viewSelection(onld).copy().assign(gen.Pmin.viewSelection(onld).assign(dfunc.plus(ctol)) , dfunc.less) ) || any( gen.Pg.viewSelection(onld).copy().assign(gen.Pmax.viewSelection(onld).assign(dfunc.minus(ctol)), dfunc.less) ) ); if (gen.mu_Pmin != null) // FIXME Should add zeros for result fields when loading case anyP_ld = anyP_ld || any( gen.mu_Pmin.viewSelection(onld).assign(dfunc.greater(ptol)) ); if (gen.mu_Pmax != null) anyP_ld = anyP_ld || any( gen.mu_Pmax.viewSelection(onld).assign(dfunc.greater(ptol)) ); anyQ_ld = ( any( gen.Qg.viewSelection(onld).copy().assign(gen.Qmin.viewSelection(onld).assign(dfunc.plus(ctol)) , dfunc.less) ) || any( gen.Qg.viewSelection(onld).copy().assign(gen.Qmax.viewSelection(onld).assign(dfunc.minus(ctol)), dfunc.less) ) ); if (gen.mu_Qmin != null) anyQ_ld = anyQ_ld || any( gen.mu_Qmin.viewSelection(onld).assign(dfunc.greater(ptol)) ); if (gen.mu_Qmax != null) anyQ_ld = anyQ_ld || any( gen.mu_Qmax.viewSelection(onld).assign(dfunc.greater(ptol)) ); if (OUT_PG_LIM == 2 || OUT_QG_LIM == 2 || (OUT_PG_LIM == 1 && anyP_ld) || (OUT_QG_LIM == 1 && (anyQ_ld))) { pw.printf("\n================================================================================"); pw.printf("\n| Dispatchable Load Constraints |"); pw.printf("\n================================================================================"); } /* dispatchable load P constraints */ if (OUT_PG_LIM == 2 || (OUT_PG_LIM == 1 && anyP_ld)) { pw.printf("\nGen Bus Active Power Limits"); pw.printf("\n # # Pmin mu Pmin Pg Pmax Pmax mu"); pw.printf("\n--- --- ------- -------- -------- -------- -------"); for (k = 0; k < onld.length; k++) { i = onld[k]; if (OUT_PG_LIM == 2 || (OUT_PG_LIM == 1 && (gen.Pg.get(i) < gen.Pmin.get(i) + ctol || gen.Pg.get(i) > gen.Pmax.get(i) - ctol || gen.mu_Pmin.get(i) > ptol || gen.mu_Pmax.get(i) > ptol))) { pw.printf("\n%3d%5d", i, gen.gen_bus.get(i)); if (gen.Pg.get(i) < gen.Pmin.get(i) + ctol || gen.mu_Pmin.get(i) > ptol) { pw.printf("%8.3f", 
gen.mu_Pmin.get(i)); } else { pw.printf(" - "); } if (gen.Pg.get(i) > 0) { pw.printf("%10.2f%10.2f%10.2f", gen.Pmin.get(i), gen.Pg.get(i), gen.Pmax.get(i)); } else { pw.printf("%10.2f - %10.2f", gen.Pmin.get(i), gen.Pmax.get(i)); } if (gen.Pg.get(i) > gen.Pmax.get(i) - ctol || gen.mu_Pmax.get(i) > ptol) { pw.printf("%9.3f", gen.mu_Pmax.get(i)); } else { pw.printf(" - "); } } } pw.printf("\n"); } /* dispatchable load Q constraints */ if (!isDC && (OUT_QG_LIM == 2 || (OUT_QG_LIM == 1 && anyQ_ld))) { pw.printf("\nGen Bus Reactive Power Limits"); pw.printf("\n # # Qmin mu Qmin Qg Qmax Qmax mu"); pw.printf("\n--- --- ------- -------- -------- -------- -------"); for (k = 0; k < onld.length; k++) { i = onld[k]; if (OUT_QG_LIM == 2 || (OUT_QG_LIM == 1 && (gen.Qg.get(i) < gen.Qmin.get(i) + ctol || gen.Qg.get(i) > gen.Qmax.get(i) - ctol || gen.mu_Qmin.get(i) > ptol || gen.mu_Qmax.get(i) > ptol))) { pw.printf("\n%3d%5d", i, gen.gen_bus.get(i)); if (gen.Qg.get(i) < gen.Qmin.get(i) + ctol || gen.mu_Qmin.get(i) > ptol) { pw.printf("%8.3f", gen.mu_Qmin.get(i)); } else { pw.printf(" - "); } if (gen.Qg.get(i) > 0) { pw.printf("%10.2f%10.2f%10.2f", gen.Qmin.get(i), gen.Qg.get(i), gen.Qmax.get(i)); } else { pw.printf("%10.2f - %10.2f", gen.Qmin.get(i), gen.Qmax.get(i)); } if (gen.Qg.get(i) > gen.Qmax.get(i) - ctol || gen.mu_Qmax.get(i) > ptol) { pw.printf("%9.3f", gen.mu_Qmax.get(i)); } else { pw.printf(" - "); } } } pw.printf("\n"); } /* line flow constraints */ if (jpopt.get("OPF_FLOW_LIM") == 1 || isDC) { // P limit Ff = branch.Pf.copy(); Ft = branch.Pt.copy(); str = "\n # Bus Pf mu Pf |Pmax| Pt Pt mu Bus"; } else if (jpopt.get("OPF_FLOW_LIM") == 2) { // |I| limit Sf = complex(branch.Pf, branch.Qf); St = complex(branch.Pt, branch.Qt); Sf.assign(V.viewSelection(e2i.viewSelection(branch.f_bus.toArray()).toArray()), cfunc.div); St.assign(V.viewSelection(e2i.viewSelection(branch.t_bus.toArray()).toArray()), cfunc.div); Ff = Sf.assign(cfunc.abs).getRealPart(); Ft = 
St.assign(cfunc.abs).getRealPart(); str = "\n # Bus |If| mu |If| |Imax| |It| |It| mu Bus"; } else { // |S| limit Sf = complex(branch.Pf, branch.Qf); St = complex(branch.Pt, branch.Qt); Ff = Sf.assign(cfunc.abs).getRealPart(); Ft = St.assign(cfunc.abs).getRealPart(); str = "\n # Bus |Sf| mu |Sf| |Smax| |St| |St| mu Bus"; } rated = intm( branch.rate_a.copy().assign(dfunc.equals(0)) ).assign(ifunc.equals(0)); F_tol = branch.rate_a.copy().assign(dfunc.minus(ctol)); Uf = intm(Ff.copy().assign(dfunc.abs).assign(F_tol, dfunc.greater)); // constrained from Ut = intm(Ft.copy().assign(dfunc.abs).assign(F_tol, dfunc.greater)); // constrained to anyF = (any( rated.copy().assign(Uf, ifunc.and) ) || any( rated.copy().assign(Ut, ifunc.and) )); if (branch.mu_Sf != null) // FIXME Should add zeros for result fields when loading case anyF = anyF || any( branch.mu_Sf.copy().assign(dfunc.greater(ptol)) ); if (branch.mu_St != null) anyF = anyF || any( branch.mu_St.copy().assign(dfunc.greater(ptol)) ); if (OUT_LINE_LIM == 2 || (OUT_LINE_LIM == 1 && anyF)) { pw.printf("\n================================================================================"); pw.printf("\n| Branch Flow Constraints |"); pw.printf("\n================================================================================"); pw.printf("\nBrnch From \"From\" End Limit \"To\" End To"); pw.printf(str); pw.printf("\n----- ----- ------- -------- -------- -------- ------- -----"); for (i = 0; i < nl; i++) { if (OUT_LINE_LIM == 2 || (OUT_LINE_LIM == 1 && ((branch.rate_a.get(i) != 0 && dfunc.abs.apply(Ff.get(i)) > branch.rate_a.get(i) - ctol) || (branch.rate_a.get(i) != 0 && dfunc.abs.apply(Ft.get(i)) > branch.rate_a.get(i) - ctol) || branch.mu_Sf.get(i) > ptol || branch.mu_St.get(i) > ptol))) { pw.printf("\n%4d%7d", i, branch.f_bus.get(i)); if (Ff.get(i) > branch.rate_a.get(i) - ctol || branch.mu_Sf.get(i) > ptol) { pw.printf("%10.3f", branch.mu_Sf.get(i)); } else { pw.printf(" - "); } pw.printf("%9.2f%10.2f%10.2f", Ff.get(i), 
branch.rate_a.get(i), Ft.get(i)); if (Ft.get(i) > branch.rate_a.get(i) - ctol || branch.mu_St.get(i) > ptol) { pw.printf("%10.3f", branch.mu_St.get(i)); } else { pw.printf(" - "); } pw.printf("%6d", branch.t_bus.get(i)); } } pw.printf("\n"); } } // TODO: execute userfcn callbacks for 'printpf' stage } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache.execute; import static org.junit.Assert.assertNotNull; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.io.Serializable; import java.util.HashMap; import java.util.Properties; import java.util.Set; import org.junit.Test; import org.apache.geode.DataSerializable; import org.apache.geode.DataSerializer; import org.apache.geode.cache.AttributesFactory; import org.apache.geode.cache.Cache; import org.apache.geode.cache.CacheFactory; import org.apache.geode.cache.EntryOperation; import org.apache.geode.cache.PartitionAttributes; import org.apache.geode.cache.PartitionAttributesFactory; import org.apache.geode.cache.PartitionResolver; import org.apache.geode.cache.Region; import org.apache.geode.distributed.DistributedSystem; import org.apache.geode.internal.cache.PartitionedRegion; import org.apache.geode.internal.cache.partitioned.RegionAdvisor; import org.apache.geode.test.awaitility.GeodeAwaitility; import org.apache.geode.test.dunit.Assert; import org.apache.geode.test.dunit.Host; import org.apache.geode.test.dunit.Invoke; import org.apache.geode.test.dunit.LogWriterUtils; import 
org.apache.geode.test.dunit.SerializableRunnable; import org.apache.geode.test.dunit.VM; import org.apache.geode.test.dunit.Wait; import org.apache.geode.test.dunit.WaitCriterion; import org.apache.geode.test.dunit.internal.JUnit4DistributedTestCase; public class ColocationFailoverDUnitTest extends JUnit4DistributedTestCase { private static final long serialVersionUID = 1L; protected static Cache cache = null; protected static VM dataStore1 = null; protected static VM dataStore2 = null; protected static VM dataStore3 = null; protected static VM dataStore4 = null; protected static Region customerPR = null; protected static Region orderPR = null; protected static Region shipmentPR = null; public static String customerPR_Name = "ColocationFailoverDUnitTest_CustomerPR"; public static String orderPR_Name = "ColocationFailoverDUnitTest_OrderPR"; public static String shipmentPR_Name = "ColocationFailoverDUnitTest_ShipmentPR"; @Override public final void postSetUp() throws Exception { Host host = Host.getHost(0); dataStore1 = host.getVM(0); dataStore2 = host.getVM(1); dataStore3 = host.getVM(2); dataStore4 = host.getVM(3); } @Test public void testPrimaryColocationFailover() throws Throwable { createCacheInAllVms(); createCustomerPR(); createOrderPR(); createShipmentPR(); putInPRs(); verifyColocationInAllVms(); dataStore1.invoke(ColocationFailoverDUnitTest::closeCache); verifyPrimaryColocationAfterFailover(); } @Test public void testColocationFailover() throws Throwable { createCacheInAllVms(); createCustomerPR(); createOrderPR(); createShipmentPR(); putInPRs(); verifyColocationInAllVms(); dataStore1.invoke(ColocationFailoverDUnitTest::closeCache); Wait.pause(5000); // wait for volunteering primary verifyColocationAfterFailover(); } private void verifyColocationInAllVms() { verifyColocation(); dataStore1.invoke(ColocationFailoverDUnitTest::verifyColocation); dataStore2.invoke(ColocationFailoverDUnitTest::verifyColocation); 
dataStore3.invoke(ColocationFailoverDUnitTest::verifyColocation); dataStore4.invoke(ColocationFailoverDUnitTest::verifyColocation); } private void verifyPrimaryColocationAfterFailover() { verifyPrimaryColocation(); dataStore2.invoke(ColocationFailoverDUnitTest::verifyPrimaryColocation); dataStore3.invoke(ColocationFailoverDUnitTest::verifyPrimaryColocation); dataStore4.invoke(ColocationFailoverDUnitTest::verifyPrimaryColocation); } private void verifyColocationAfterFailover() { verifyColocation(); dataStore2.invoke(ColocationFailoverDUnitTest::verifyColocation); dataStore3.invoke(ColocationFailoverDUnitTest::verifyColocation); dataStore4.invoke(ColocationFailoverDUnitTest::verifyColocation); } public static void closeCache() { if (cache != null) { cache.close(); } } protected static boolean tryVerifyPrimaryColocation() { HashMap customerPrimaryMap = new HashMap(); RegionAdvisor customeAdvisor = ((PartitionedRegion) customerPR).getRegionAdvisor(); for (final Integer bucketId : customeAdvisor.getBucketSet()) { if (customeAdvisor.isPrimaryForBucket(bucketId)) { customerPrimaryMap.put(bucketId, customeAdvisor.getPrimaryMemberForBucket(bucketId).getId()); } } HashMap orderPrimaryMap = new HashMap(); RegionAdvisor orderAdvisor = ((PartitionedRegion) orderPR).getRegionAdvisor(); for (final Integer bucketId : orderAdvisor.getBucketSet()) { if (orderAdvisor.isPrimaryForBucket(bucketId)) { orderPrimaryMap.put(bucketId, orderAdvisor.getPrimaryMemberForBucket(bucketId).getId()); } } HashMap shipmentPrimaryMap = new HashMap(); RegionAdvisor shipmentAdvisor = ((PartitionedRegion) shipmentPR).getRegionAdvisor(); for (final Integer bucketId : shipmentAdvisor.getBucketSet()) { if (shipmentAdvisor.isPrimaryForBucket(bucketId)) { shipmentPrimaryMap.put(bucketId, shipmentAdvisor.getPrimaryMemberForBucket(bucketId).getId()); } } // verification for primary int s1, s2; s1 = customerPrimaryMap.size(); s2 = orderPrimaryMap.size(); if (s1 != s2) { excuse = "customerPrimaryMap size (" + s1 
// NOTE(review): this span begins mid-way through tryVerifyPrimaryColocation();
// the opening of that method (and the enclosing class header) lies before this chunk.
    + ") != orderPrimaryMap size (" + s2 + ")";
    return false;
  }
  if (!customerPrimaryMap.entrySet().equals(orderPrimaryMap.entrySet())) {
    excuse = "customerPrimaryMap entrySet != orderPrimaryMap entrySet";
    return false;
  }
  if (!customerPrimaryMap.entrySet().equals(shipmentPrimaryMap.entrySet())) {
    excuse = "customerPrimaryMap entrySet != shipmentPrimaryMap entrySet";
    return false;
  }
  if (!customerPrimaryMap.equals(orderPrimaryMap)) {
    excuse = "customerPrimaryMap != orderPrimaryMap";
    return false;
  }
  if (!customerPrimaryMap.equals(shipmentPrimaryMap)) {
    excuse = "customerPrimaryMap != shipmentPrimaryMap";
    return false;
  }
  return true;
}

// Polls tryVerifyPrimaryColocation() until it reports success; on failure the
// description() callback dumps bucket state and returns the recorded excuse.
private static void verifyPrimaryColocation() {
  WaitCriterion wc = new WaitCriterion() {
    @Override
    public boolean done() {
      return tryVerifyPrimaryColocation();
    }

    @Override
    public String description() {
      dump();
      return excuse;
    }
  };
  GeodeAwaitility.await().untilAsserted(wc);
}

// Diagnostic helper: dumps all buckets and, for bucket ids 0..5, the
// bucket-to-node mapping of each of the three partitioned regions.
protected static void dump() {
  ((PartitionedRegion) customerPR).dumpAllBuckets(false);
  ((PartitionedRegion) orderPR).dumpAllBuckets(false);
  ((PartitionedRegion) shipmentPR).dumpAllBuckets(false);
  for (int i = 0; i < 6; i++) {
    ((PartitionedRegion) customerPR).dumpB2NForBucket(i);
  }
  for (int i = 0; i < 6; i++) {
    ((PartitionedRegion) orderPR).dumpB2NForBucket(i);
  }
  for (int i = 0; i < 6; i++) {
    ((PartitionedRegion) shipmentPR).dumpB2NForBucket(i);
  }
}

// Holds the most recent verification-failure explanation; written by the
// tryVerify* methods and read by the WaitCriterion description() callbacks.
protected static String excuse;

/**
 * @return true if verified
 */
protected static boolean tryVerifyColocation() {
  // Collect, per bucket, the owner set and (where this VM is primary) the
  // primary member id for the customer region.
  HashMap customerMap = new HashMap();
  HashMap customerPrimaryMap = new HashMap();
  RegionAdvisor customeAdvisor = ((PartitionedRegion) customerPR).getRegionAdvisor();
  for (final Integer bucketId : customeAdvisor.getBucketSet()) {
    Set someOwners = customeAdvisor.getBucketOwners(bucketId);
    customerMap.put(bucketId, someOwners);
    if (customeAdvisor.isPrimaryForBucket(bucketId)) {
      customerPrimaryMap.put(bucketId,
          customeAdvisor.getPrimaryMemberForBucket(bucketId).getId());
    }
  }
  // Same collection for the order region.
  HashMap orderMap = new HashMap();
  HashMap orderPrimaryMap = new HashMap();
  RegionAdvisor orderAdvisor = ((PartitionedRegion) orderPR).getRegionAdvisor();
  for (final Integer bucketId : orderAdvisor.getBucketSet()) {
    Set someOwners = orderAdvisor.getBucketOwners(bucketId);
    orderMap.put(bucketId, someOwners);
    if (orderAdvisor.isPrimaryForBucket(bucketId)) {
      orderPrimaryMap.put(bucketId,
          orderAdvisor.getPrimaryMemberForBucket(bucketId).getId());
    }
  }
  // For the shipment region, additionally check each bucket's owner set
  // against the customer and order owner sets while collecting.
  HashMap shipmentMap = new HashMap();
  HashMap shipmentPrimaryMap = new HashMap();
  RegionAdvisor shipmentAdvisor = ((PartitionedRegion) shipmentPR).getRegionAdvisor();
  for (final Integer bucketId : shipmentAdvisor.getBucketSet()) {
    Set someOwners = shipmentAdvisor.getBucketOwners(bucketId);
    shipmentMap.put(bucketId, someOwners);
    if (!customerMap.get(bucketId).equals(someOwners)) {
      excuse = "customerMap at " + bucketId + " has wrong owners";
      return false;
    }
    if (!orderMap.get(bucketId).equals(someOwners)) {
      excuse = "orderMap at " + bucketId + " has wrong owners";
      return false;
    }
    if (shipmentAdvisor.isPrimaryForBucket(bucketId)) {
      shipmentPrimaryMap.put(bucketId,
          shipmentAdvisor.getPrimaryMemberForBucket(bucketId).getId());
    }
  }
  // verification for primary
  if (customerPrimaryMap.size() != orderPrimaryMap.size()) {
    excuse = "customerPrimaryMap and orderPrimaryMap have different sizes";
    return false;
  }
  if (customerPrimaryMap.size() != shipmentPrimaryMap.size()) {
    excuse = "customerPrimaryMap and shipmentPrimaryMap have different sizes";
    return false;
  }
  if (!customerPrimaryMap.entrySet().equals(orderPrimaryMap.entrySet())) {
    excuse = "customerPrimaryMap and orderPrimaryMap have different entrySets";
    return false;
  }
  if (!customerPrimaryMap.entrySet().equals(shipmentPrimaryMap.entrySet())) {
    excuse = "customerPrimaryMap and shipmentPrimaryMap have different entrySets";
    return false;
  }
  if (!customerPrimaryMap.equals(orderPrimaryMap)) {
    excuse = "customerPrimaryMap and orderPrimaryMap not equal";
    return false;
  }
  if (!customerPrimaryMap.equals(shipmentPrimaryMap)) {
    excuse = "customerPrimaryMap and shipmentPrimaryMap not equal";
    return false;
  }
  // verification for all
  if (customerMap.size() != orderMap.size()) {
    excuse = "customerMap and orderMap have different sizes";
    return false;
  }
  if (customerMap.size() != shipmentMap.size()) {
    excuse = "customerMap and shipmentMap have different sizes";
    return false;
  }
  if (!customerMap.entrySet().equals(orderMap.entrySet())) {
    excuse = "customerMap and orderMap have different entrySets";
    return false;
  }
  if (!customerMap.entrySet().equals(shipmentMap.entrySet())) {
    excuse = "customerMap and shipmentMap have different entrySets";
    return false;
  }
  if (!customerMap.equals(orderMap)) {
    excuse = "customerMap and orderMap not equal";
    return false;
  }
  if (!customerMap.equals(shipmentMap)) {
    excuse = "customerMap and shipmentMap not equal";
    return false;
  }
  return true;
}

// Polls tryVerifyColocation() until it reports success.
private static void verifyColocation() {
  // TODO does having this WaitCriterion help?
  WaitCriterion wc = new WaitCriterion() {
    @Override
    public boolean done() {
      return tryVerifyColocation();
    }

    @Override
    public String description() {
      return excuse;
    }
  };
  GeodeAwaitility.await().untilAsserted(wc);
}

// Creates a cache locally and then in each of the four data-store VMs.
public static void createCacheInAllVms() {
  createCacheInVm();
  dataStore1.invoke(ColocationFailoverDUnitTest::createCacheInVm);
  dataStore2.invoke(ColocationFailoverDUnitTest::createCacheInVm);
  dataStore3.invoke(ColocationFailoverDUnitTest::createCacheInVm);
  dataStore4.invoke(ColocationFailoverDUnitTest::createCacheInVm);
}

// Entry point used by VM.invoke; delegates to the instance-level createCache().
public static void createCacheInVm() {
  new ColocationFailoverDUnitTest().createCache();
}

// Connects to the distributed system (disconnecting once first — NOTE(review):
// the reason for the disconnect/reconnect cycle is not evident from this chunk)
// and creates the cache, storing it in the static 'cache' field.
public void createCache() {
  try {
    Properties props = new Properties();
    DistributedSystem ds = getSystem(props);
    assertNotNull(ds);
    ds.disconnect();
    ds = getSystem(props);
    cache = CacheFactory.create(ds);
    assertNotNull(cache);
  } catch (Exception e) {
    Assert.fail("Failed while creating the cache", e);
  }
}

// Creates the customer PR locally and in all four data stores
// (redundancy 1, 50MB local max memory, 6 buckets, not colocated).
private static void createCustomerPR() {
  Object[] args = new Object[] {customerPR_Name, 1, 50, 6, null};
  createPR(customerPR_Name, 1, 50, 6, null);
  dataStore1.invoke(ColocationFailoverDUnitTest.class, "createPR", args);
  dataStore2.invoke(ColocationFailoverDUnitTest.class, "createPR", args);
  dataStore3.invoke(ColocationFailoverDUnitTest.class, "createPR", args);
  dataStore4.invoke(ColocationFailoverDUnitTest.class, "createPR", args);
}

// Creates the order PR everywhere, colocated with the customer PR.
private static void createOrderPR() {
  Object[] args = new Object[] {orderPR_Name, 1, 50, 6, customerPR_Name};
  createPR(orderPR_Name, 1, 50, 6, customerPR_Name);
  dataStore1.invoke(ColocationFailoverDUnitTest.class, "createPR", args);
  dataStore2.invoke(ColocationFailoverDUnitTest.class, "createPR", args);
  dataStore3.invoke(ColocationFailoverDUnitTest.class, "createPR", args);
  dataStore4.invoke(ColocationFailoverDUnitTest.class, "createPR", args);
}

// Creates the shipment PR everywhere, colocated with the order PR
// (forming the customer -> order -> shipment colocation chain).
private static void createShipmentPR() {
  Object[] args = new Object[] {shipmentPR_Name, 1, 50, 6, orderPR_Name};
  createPR(shipmentPR_Name, 1, 50, 6, orderPR_Name);
  dataStore1.invoke(ColocationFailoverDUnitTest.class, "createPR", args);
  dataStore2.invoke(ColocationFailoverDUnitTest.class, "createPR", args);
  dataStore3.invoke(ColocationFailoverDUnitTest.class, "createPR", args);
  dataStore4.invoke(ColocationFailoverDUnitTest.class, "createPR", args);
}

/**
 * Creates one of the three partitioned regions in the local cache and stores
 * it in the matching static field (customerPR / orderPR / shipmentPR).
 *
 * @param partitionedRegionName region name; also selects which static field is set
 * @param redundancy number of redundant copies
 * @param localMaxMemory local max memory for the PR
 * @param totalNumBuckets total bucket count
 * @param colocatedWith name of the region to colocate with, or null for none
 */
public static void createPR(String partitionedRegionName, Integer redundancy,
    Integer localMaxMemory, Integer totalNumBuckets, String colocatedWith) {
  PartitionAttributesFactory paf = new PartitionAttributesFactory();
  PartitionAttributes prAttr = paf.setRedundantCopies(redundancy)
      .setLocalMaxMemory(localMaxMemory).setTotalNumBuckets(totalNumBuckets)
      .setColocatedWith(colocatedWith).setPartitionResolver(new KeyPartitionResolver()).create();
  AttributesFactory attr = new AttributesFactory();
  attr.setPartitionAttributes(prAttr);
  assertNotNull(cache);
  if (partitionedRegionName.equals(customerPR_Name)) {
    customerPR = cache.createRegion(partitionedRegionName, attr.create());
    assertNotNull(customerPR);
    LogWriterUtils.getLogWriter().info(
        "Partitioned Region " + partitionedRegionName + " created Successfully :" + customerPR);
  }
  if (partitionedRegionName.equals(orderPR_Name)) {
    orderPR = cache.createRegion(partitionedRegionName, attr.create());
    assertNotNull(orderPR);
    LogWriterUtils.getLogWriter().info(
        "Partitioned Region " + partitionedRegionName + " created Successfully :" + orderPR);
  }
  if (partitionedRegionName.equals(shipmentPR_Name)) {
    shipmentPR = cache.createRegion(partitionedRegionName, attr.create());
    assertNotNull(shipmentPR);
    LogWriterUtils.getLogWriter().info(
        "Partitioned Region " + partitionedRegionName + " created Successfully :" + shipmentPR);
  }
}

// Puts test entries into the three PRs locally and in all four data stores.
private static void putInPRs() {
  put();
  dataStore1.invoke(ColocationFailoverDUnitTest::put);
  dataStore2.invoke(ColocationFailoverDUnitTest::put);
  dataStore3.invoke(ColocationFailoverDUnitTest::put);
  dataStore4.invoke(ColocationFailoverDUnitTest::put);
}

// Inserts 20 key/value pairs into each region; keys end in a digit, which
// KeyPartitionResolver below uses for routing.
public static void put() {
  for (int i = 0; i < 20; i++) {
    customerPR.put("CPing--" + i, "CPong--" + i);
    orderPR.put("OPing--" + i, "OPong--" + i);
    shipmentPR.put("SPing--" + i, "SPong--" + i);
  }
}

// Closes the cache locally and in every VM after each test.
@Override
public final void preTearDown() throws Exception {
  closeCache();
  Invoke.invokeInEveryVM(new SerializableRunnable() {
    @Override
    public void run() {
      closeCache();
    }
  });
}
}


// Partition resolver that routes an entry by the LAST character of its string
// key, so entries whose keys share a trailing character land in the same bucket.
class KeyPartitionResolver implements PartitionResolver {

  public KeyPartitionResolver() {}

  @Override
  public String getName() {
    return getClass().getName();
  }

  @Override
  public Serializable getRoutingObject(EntryOperation opDetails) {
    // Serializable routingbject = null;
    String key = (String) opDetails.getKey();
    return new RoutingObject("" + key.charAt(key.length() - 1));
  }

  @Override
  public void close() {}

  // Two resolvers are equal iff they report the same name.
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof KeyPartitionResolver)) {
      return false;
    }
    KeyPartitionResolver otherKeyPartitionResolver = (KeyPartitionResolver) o;
    return otherKeyPartitionResolver.getName().equals(getName());
  }
}


// Routing object whose hash code is the numeric value of the wrapped string.
// NOTE(review): hashCode() calls Integer.parseInt, so the wrapped value must be
// a decimal digit string (true for keys produced by put() above) — a
// non-numeric key character would throw NumberFormatException.
class RoutingObject implements DataSerializable {

  public RoutingObject(String value) {
    this.value = value;
  }

  private String value;

  @Override
  public void fromData(DataInput in) throws IOException, ClassNotFoundException {
    value = DataSerializer.readString(in);
  }

  @Override
  public void toData(DataOutput out) throws IOException {
    DataSerializer.writeString(value, out);
  }

  public int hashCode() {
    return Integer.parseInt(value);
  }
}
/*-------------------------------------------------------------------------+
|                                                                          |
| Copyright 2005-2011 the ConQAT Project                                   |
|                                                                          |
| Licensed under the Apache License, Version 2.0 (the "License");          |
| you may not use this file except in compliance with the License.         |
| You may obtain a copy of the License at                                  |
|                                                                          |
|    http://www.apache.org/licenses/LICENSE-2.0                            |
|                                                                          |
| Unless required by applicable law or agreed to in writing, software      |
| distributed under the License is distributed on an "AS IS" BASIS,        |
| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| See the License for the specific language governing permissions and      |
| limitations under the License.                                           |
+-------------------------------------------------------------------------*/
package org.conqat.engine.sourcecode.shallowparser;

import static org.conqat.engine.sourcecode.shallowparser.AbapShallowParser.EAbapParserStates.DECLARATIONS;
import static org.conqat.engine.sourcecode.shallowparser.AbapShallowParser.EAbapParserStates.STATEMENTS;
import static org.conqat.engine.sourcecode.shallowparser.AbapShallowParser.EAbapParserStates.TOPLEVEL;
import static org.conqat.lib.scanner.ETokenType.*;

import java.util.EnumSet;
import java.util.List;

import org.conqat.engine.sourcecode.shallowparser.framework.EShallowEntityType;
import org.conqat.engine.sourcecode.shallowparser.framework.ParserState;
import org.conqat.engine.sourcecode.shallowparser.framework.RecognizerBase;
import org.conqat.engine.sourcecode.shallowparser.framework.ShallowParserBase;
import org.conqat.lib.commons.region.Region;
import org.conqat.lib.scanner.ETokenType;
import org.conqat.lib.scanner.ETokenType.ETokenClass;
import org.conqat.lib.scanner.IToken;

/**
 * Shallow parser for ABAP. The following links are useful for writing the
 * parser:
 * <ul>
 * <li><a href="http://help.sap.com/abapdocu_702/en/">ABAP Keyword
 * Documentation</a></li>
 * <li><a
 * href="http://help.sap.com/abapdocu_702/en/abenabap_statements_overview.htm"
 * >ABAP Statements Overview</a></li>
 * </ul>
 *
 * @author $Author: hummelb $
 * @version $Rev: 46995 $
 * @ConQAT.Rating GREEN Hash: 59E954DD30BAF7C74912FAC5BD611F0E
 */
public class AbapShallowParser extends
        ShallowParserBase<AbapShallowParser.EAbapParserStates> {

    /** Tokens that can introduce a simple statement. */
    private static final EnumSet<ETokenType> SIMPLE_STATEMENT_START_TOKENS = EnumSet
            .of(ADD, ADD_CORRESPONDING, APPEND, ASSERT, ASSIGN,
                    AUTHORITY_CHECK, BACK, BREAK_POINT, CALL, CHECK, CLEAR,
                    CLOSE, COLLECT, COMMIT, COMMUNICATION, COMPUTE,
                    CONCATENATE, CONDENSE, CONSTANTS, CONTEXTS, CONTINUE,
                    CONTROLS, CONVERT, CREATE, DATA, DELETE, DEMAND, DESCRIBE,
                    DETAIL, DIVIDE, DIVIDE_CORRESPONDING, EDITOR_CALL,
                    ENHANCEMENT_POINT, EXISTS, EXIT, EXPORT, EXTRACT, FETCH,
                    FIELDS, FIND, FORMAT, FREE, GENERATE, GET, HIDE,
                    IDENTIFIER, IMPORT, INCLUDE, INFOTYPES, INPUT, INSERT,
                    LEAVE, LOAD, LOCAL, LOG_POINT, MAXIMUM, MESSAGE, MINIMUM,
                    MODIFY, MOVE, MOVE_CORRESPONDING, MULTIPLY_CORRESPONDING,
                    MULTIPLY, NAME, NEW_LINE, NEW_PAGE, NEW_SECTION, OPEN,
                    OVERLAY, PACK, PACKAGE, PERFORM, POSITION, PRINT_CONTROL,
                    PUT, RAISE, RANGES, READ, REFRESH, REJECT, REPLACE,
                    RESERVE, RESUME, RETURN, ROLLBACK, SCROLL, SEARCH, SET,
                    SHIFT, SKIP, SORT, SPLIT, STOP, SUBMIT, SUBTRACT,
                    SUBTRACT_CORRESPONDING, SUM, SUMMARY, SUMMING, SUPPLY,
                    SUPPRESS, SYNTAX_CHECK, TRANSFER, TRANSLATE, TRUNCATE,
                    TYPES, ULINE, UNPACK, UPDATE, WAIT, WINDOW, WRITE);

    /** The states used in this parser. */
    public static enum EAbapParserStates {

        /**
         * Top level state used for parsing constructs that are not nested in
         * other constructs.
         */
        TOPLEVEL,

        /**
         * A state to recognize declarations within classes. As many constructs
         * are allowed both top-level and in declarations, many rules are
         * registered for both.
         */
        DECLARATIONS,

        /** A state to recognize statements, i.e. plain code in functions, etc. */
        STATEMENTS
    }

    /**
     * Constructor. Registers all rule groups and a catch-all rule for empty
     * statements (a lone DOT) valid in any state.
     * NOTE(review): registration order presumably determines rule precedence —
     * confirm against RecognizerBase before reordering.
     */
    public AbapShallowParser() {
        super(EAbapParserStates.class, TOPLEVEL);
        createMetaRules();
        createTopLevelRules();
        createTypeRules();
        createMethodAndAttributeRules();
        createStatementRules();

        inAnyState()
                .sequence(DOT)
                .createNode(EShallowEntityType.STATEMENT,
                        SubTypeNames.EMPTY_STATEMENT).endNode();
    }

    /** Rules for parsing elements that are only expected top-level. */
    private void createTopLevelRules() {
        // set of keywords that start an event block (without keywords that
        // require a preceeding "at")
        EnumSet<ETokenType> eventBlocks = EnumSet.of(INITIALIZATION,
                START_OF_SELECTION, END_OF_SELECTION, TOP_OF_PAGE,
                END_OF_PAGE, LOAD_OF_PROGRAM, GET);

        // set of keywords that end an event block (possibly indicating the
        // start of the next one)
        EnumSet<ETokenType> eventBlocksEnd = EnumSet.of(AT, FORM, CLASS,
                INTERFACE);
        eventBlocksEnd.addAll(eventBlocks);

        // since the report is not really a method, its statements are still
        // parsed in the toplevel scope, not the statement scope
        inState(TOPLEVEL).sequence(REPORT)
                .createNode(EShallowEntityType.METHOD, 0, 1).skipTo(DOT)
                .parseUntilOrEof(TOPLEVEL).sequenceBefore(eventBlocksEnd)
                .endNode();

        inState(TOPLEVEL)
                .sequence(
                        EnumSet.of(SELECTION_SCREEN, PARAMETER, SELECT_OPTIONS))
                .createNode(EShallowEntityType.STATEMENT, 0).skipTo(DOT)
                .endNode();

        inState(TOPLEVEL).sequence(eventBlocks)
                .createNode(EShallowEntityType.METHOD, 0).skipTo(DOT)
                .parseUntilOrEof(STATEMENTS).sequenceBefore(eventBlocksEnd)
                .endNode();

        inState(TOPLEVEL)
                .sequence(
                        AT,
                        EnumSet.of(SELECTION_SCREEN, LINE_SELECTION,
                                USER_COMMAND))
                .createNode(EShallowEntityType.METHOD, new int[] { 0, 1 })
                .skipTo(DOT).parseUntilOrEof(STATEMENTS)
                .sequenceBefore(eventBlocksEnd).endNode();
    }

    /** Rules for parsing of meta elements. */
    private void createMetaRules() {
        inState(DECLARATIONS)
                .sequence(EnumSet.of(PUBLIC, PROTECTED, PRIVATE), SECTION, DOT)
                .createNode(EShallowEntityType.META, SubTypeNames.VISIBILITY, 0)
                .endNode();

        inAnyState().sequence(EnumSet.of(TYPE_POOLS, TABLES, PARAMETERS))
                .createNode(EShallowEntityType.META, 0).skipTo(DOT).endNode();

        inAnyState().sequence(DEFINE)
                .createNode(EShallowEntityType.META, SubTypeNames.MACRO)
                .skipTo(END_OF_DEFINITION, DOT).endNode();

        inState(DECLARATIONS).sequence(EnumSet.of(INTERFACES, ALIASES))
                .createNode(EShallowEntityType.META, 0).skipTo(DOT).endNode();
    }

    /** Rules for parsing types. */
    private void createTypeRules() {
        // classes
        RecognizerBase<EAbapParserStates> classDefinitionAlternative = inState(
                TOPLEVEL, DECLARATIONS).sequence(CLASS, IDENTIFIER, DEFINITION);
        // forward publications (LOAD/DEFERRED/LOCAL) carry no body
        classDefinitionAlternative
                .sequence(EnumSet.of(LOAD, DEFERRED, LOCAL))
                .createNode(EShallowEntityType.TYPE,
                        SubTypeNames.CLASS_PUBLICATION, 1).skipTo(DOT)
                .endNode();
        classDefinitionAlternative
                .createNode(EShallowEntityType.TYPE,
                        SubTypeNames.CLASS_DEFINITION, 1).skipTo(DOT)
                .parseUntil(DECLARATIONS).sequence(ENDCLASS, DOT).endNode();

        inState(TOPLEVEL, DECLARATIONS)
                .sequence(CLASS, IDENTIFIER, IMPLEMENTATION)
                .createNode(EShallowEntityType.TYPE,
                        SubTypeNames.CLASS_IMPLEMENTATION, 1).skipTo(DOT)
                .parseUntil(DECLARATIONS).sequence(ENDCLASS, DOT).endNode();

        // interfaces
        RecognizerBase<EAbapParserStates> interfaceAlternative = inState(
                TOPLEVEL, DECLARATIONS).sequence(INTERFACE, IDENTIFIER);
        interfaceAlternative
                .sequence(EnumSet.of(LOAD, DEFERRED, LOCAL))
                .createNode(EShallowEntityType.TYPE,
                        SubTypeNames.INTERFACE_PUBLICATION, 1).skipTo(DOT)
                .endNode();
        interfaceAlternative
                .createNode(EShallowEntityType.TYPE,
                        SubTypeNames.INTERFACE_DEFINITION, 1).skipTo(DOT)
                .parseUntil(DECLARATIONS).sequence(ENDINTERFACE, DOT).endNode();

        // types, events, class events
        inState(TOPLEVEL, DECLARATIONS)
                .sequence(EnumSet.of(TYPES, EVENTS, CLASS_EVENTS))
                .createNode(EShallowEntityType.ATTRIBUTE, 0).skipTo(DOT)
                .endNode();
    }

    /** Rules for parsing attributes/methods. */
    private void createMethodAndAttributeRules() {
        inState(TOPLEVEL, DECLARATIONS)
                .sequence(EnumSet.of(CONSTANTS, NODES, STATICS))
                .createNode(EShallowEntityType.ATTRIBUTE, 0, 1).skipTo(DOT)
                .endNode();

        inState(TOPLEVEL, DECLARATIONS)
                .sequence(EnumSet.of(DATA, FIELD_GROUPS, CLASS_DATA))
                .createNode(EShallowEntityType.ATTRIBUTE, 0, 1).skipTo(DOT)
                .endNode();

        inState(TOPLEVEL, DECLARATIONS, STATEMENTS)
                .sequence(EnumSet.of(FIELD_SYMBOLS))
                .createNode(EShallowEntityType.ATTRIBUTE, 0, 1).skipTo(DOT)
                .endNode();

        inState(DECLARATIONS)
                .sequence(EnumSet.of(METHODS, CLASS_METHODS))
                .createNode(EShallowEntityType.METHOD,
                        SubTypeNames.METHOD_DECLARATION, 1).skipTo(DOT)
                .endNode();

        inState(DECLARATIONS)
                .sequence(METHOD)
                .markStart()
                .skipTo(DOT)
                .createNode(EShallowEntityType.METHOD,
                        SubTypeNames.METHOD_IMPLEMENTATION, new Region(0, -2))
                .parseUntil(STATEMENTS).sequence(ENDMETHOD, DOT).endNode();

        inState(TOPLEVEL, DECLARATIONS)
                .sequence(FUNCTION)
                .markStart()
                .skipTo(DOT)
                .createNode(EShallowEntityType.METHOD, SubTypeNames.FUNCTION,
                        new Region(0, -2)).parseUntil(STATEMENTS)
                .sequence(ENDFUNCTION, DOT).endNode();

        inState(TOPLEVEL, DECLARATIONS)
                .sequence(MODULE)
                .markStart()
                .skipTo(DOT)
                .createNode(EShallowEntityType.METHOD, SubTypeNames.MODULE,
                        new Region(0, -2)).parseUntil(STATEMENTS)
                .sequence(ENDMODULE, DOT).endNode();

        inState(TOPLEVEL).sequence(FORM)
                .createNode(EShallowEntityType.METHOD, SubTypeNames.FORM, 1)
                .skipTo(DOT).parseUntil(STATEMENTS).sequence(ENDFORM, DOT)
                .endNode();
    }

    /** Rules for parsing statements. */
    private void createStatementRules() {
        // special rule that matches assignments to variables that have the same
        // name as keywords.
        inState(STATEMENTS).sequence(ETokenClass.KEYWORD, EQ)
                .createNode(EShallowEntityType.STATEMENT, 0).skipTo(DOT)
                .endNode();

        // if/elseif
        RecognizerBase<EAbapParserStates> ifAlternative = inState(TOPLEVEL,
                STATEMENTS).sequence(EnumSet.of(IF, ELSEIF))
                .createNode(EShallowEntityType.STATEMENT, 0).skipTo(DOT)
                .parseUntil(STATEMENTS)
                .sequenceBefore(EnumSet.of(ELSEIF, ELSE, ENDIF));
        ifAlternative.sequence(ENDIF, DOT).endNode();
        ifAlternative.endNodeWithContinuation();

        // else
        inState(TOPLEVEL, STATEMENTS).sequence(ELSE)
                .createNode(EShallowEntityType.STATEMENT, 0).skipTo(DOT)
                .parseUntil(STATEMENTS).sequence(EnumSet.of(ENDIF, ENDON), DOT)
                .endNode();

        // case/when
        inState(TOPLEVEL, STATEMENTS).sequence(CASE)
                .createNode(EShallowEntityType.STATEMENT, 0).skipTo(DOT)
                .parseUntil(STATEMENTS).sequence(ENDCASE, DOT).endNode();
        // we parse when as meta, so we add no additional nesting
        inState(STATEMENTS).sequence(WHEN)
                .createNode(EShallowEntityType.META, 0).skipTo(DOT).endNode();

        // on change
        RecognizerBase<EAbapParserStates> changeAlternative = inAnyState()
                .sequence(ON, CHANGE, OF)
                .createNode(EShallowEntityType.STATEMENT,
                        SubTypeNames.ON_CHANGE).skipTo(DOT)
                .parseUntil(STATEMENTS).sequenceBefore(EnumSet.of(ELSE, ENDON));
        changeAlternative.sequence(ENDON, DOT).endNode();
        changeAlternative.endNodeWithContinuation();

        // loops
        inState(TOPLEVEL, STATEMENTS).sequence(LOOP)
                .createNode(EShallowEntityType.STATEMENT, 0).skipTo(DOT)
                .parseUntil(STATEMENTS).sequence(ENDLOOP, DOT).endNode();
        inState(TOPLEVEL, STATEMENTS).sequence(DO)
                .createNode(EShallowEntityType.STATEMENT, 0).skipTo(DOT)
                .parseUntil(STATEMENTS).sequence(ENDDO, DOT).endNode();
        inState(TOPLEVEL, STATEMENTS).sequence(WHILE)
                .createNode(EShallowEntityType.STATEMENT, 0).skipTo(DOT)
                .parseUntil(STATEMENTS).sequence(ENDWHILE, DOT).endNode();
        inState(STATEMENTS).sequence(AT)
                .createNode(EShallowEntityType.STATEMENT, 0).skipTo(DOT)
                .parseUntil(STATEMENTS).sequence(ENDAT, DOT).endNode();

        // loop likes
        inAnyState().sequence(PROVIDE)
                .createNode(EShallowEntityType.STATEMENT, 0).skipTo(DOT)
                .parseUntil(STATEMENTS).sequence(ENDPROVIDE, DOT).endNode();
        inAnyState().sequence(ENHANCEMENT)
                .createNode(EShallowEntityType.STATEMENT, 0).skipTo(DOT)
                .parseUntil(STATEMENTS).sequence(ENDENHANCEMENT, DOT).endNode();
        inAnyState().sequence(ENHANCEMENT_SECTION)
                .createNode(EShallowEntityType.STATEMENT, 0).skipTo(DOT)
                .parseUntil(STATEMENTS).sequence(END_ENHANCEMENT_SECTION, DOT)
                .endNode();

        // try/catch
        RecognizerBase<EAbapParserStates> tryAlternative = inState(TOPLEVEL,
                STATEMENTS).sequence(EnumSet.of(TRY, CATCH, CLEANUP))
                .createNode(EShallowEntityType.STATEMENT, 0).skipTo(DOT)
                .parseUntil(STATEMENTS)
                .sequenceBefore(EnumSet.of(ENDTRY, CATCH, ENDCATCH, CLEANUP));
        tryAlternative.sequence(EnumSet.of(ENDTRY, ENDCATCH), DOT).endNode();
        tryAlternative.endNodeWithContinuation();

        createSelectRules();

        // exec
        inState(TOPLEVEL, STATEMENTS)
                .sequence(EXEC, SQL)
                .createNode(EShallowEntityType.STATEMENT,
                        SubTypeNames.NATIVE_SQL).skipTo(ENDEXEC, DOT).endNode();

        inState(TOPLEVEL, STATEMENTS).sequence(SIMPLE_STATEMENT_START_TOKENS)
                .createNode(EShallowEntityType.STATEMENT, 0).skipTo(DOT)
                .endNode();
    }

    /**
     * Creates the parsing rules for the select clause. This is tricky, because
     * the rules whether a select block or a single statement select is
     * expected, are not trivial.
     */
    private void createSelectRules() {
        RecognizerBase<EAbapParserStates> selectAlternative = inState(TOPLEVEL,
                STATEMENTS).sequence(SELECT);
        // "select(" is a method call on a variable named like the keyword
        selectAlternative.sequence(LPAREN)
                .createNode(EShallowEntityType.STATEMENT, "method call")
                .skipToWithNesting(RPAREN, LPAREN, RPAREN).skipTo(DOT)
                .endNode();
        selectAlternative
                .subRecognizer(new SingleSelectRecognizer(), 1, 1)
                .createNode(EShallowEntityType.STATEMENT,
                        SubTypeNames.SINGLE_SELECT).endNode();
        selectAlternative
                .createNode(EShallowEntityType.STATEMENT,
                        SubTypeNames.SELECT_BLOCK).skipTo(DOT)
                .parseUntil(STATEMENTS).sequence(ENDSELECT, DOT).endNode();
    }

    /**
     * Recognizer that matches single statements selects according to the rules
     * found <a
     * href="http://help.sap.com/abapdocu_702/en/abapselect.htm">here</a>. The
     * recognozer should be called directly after finding the SELECT keyword.
     */
    private static class SingleSelectRecognizer extends
            RecognizerBase<EAbapParserStates> {

        /**
         * Token types to be skipped from the select start to reach the result
         * description.
         */
        private static final EnumSet<ETokenType> SELECT_TO_RESULTS_SKIP_TOKENS = EnumSet
                .of(SINGLE, FOR, UPDATE, DISTINCT);

        /** Token types for aggregate functions. */
        private static final EnumSet<ETokenType> AGGREGATE_FUNCTIONS = EnumSet
                .of(MIN, MAX, SUM, AVG, COUNT);

        /** Token types that terminate the aggregate functions. */
        private static final EnumSet<ETokenType> AGGREGATE_TERMINATOR = EnumSet
                .of(FROM, INTO);

        /** {@inheritDoc} */
        @Override
        protected int matchesLocally(
                ParserState<EAbapParserStates> parserState,
                List<IToken> tokens, int startOffset) {
            // locate the DOT that terminates the select statement
            int dotOffset = startOffset;
            while (dotOffset < tokens.size()
                    && tokens.get(dotOffset).getType() != DOT) {
                dotOffset += 1;
            }

            // no match if closing dot was not found
            if (dotOffset >= tokens.size()) {
                return NO_MATCH;
            }

            int matchSingleSelect = dotOffset + 1;

            // the following is statements correspond directly to the rules in
            // http://help.sap.com/abapdocu_702/en/abapselect.htm, where a
            // result of matchSingleSelect means that no ENDSELECT is expected,
            // while a NO_MATCH indicates that an ENDSELECT is required
            if (!hasIntoAppendingTable(tokens, startOffset, dotOffset)) {
                if (isSingle(tokens, startOffset)
                        || (hasOnlyAggregateFunctions(tokens, startOffset,
                                dotOffset) && !hasGroupBy(tokens, startOffset,
                                dotOffset))) {
                    return matchSingleSelect;
                }
                return NO_MATCH;
            }

            if (hasPackageSize(tokens, startOffset, dotOffset)) {
                return NO_MATCH;
            }
            return matchSingleSelect;
        }

        /** Returns whether the SINGLE keyword was found right at the start. */
        private boolean isSingle(List<IToken> tokens, int startOffset) {
            return tokens.get(startOffset).getType() == SINGLE;
        }

        /** Returns whether this has the INTO|APPEND ... TABLE clause. */
        private boolean hasIntoAppendingTable(List<IToken> tokens,
                int startOffset, int endOffset) {
            return TokenStreamUtils.containsAny(tokens, startOffset,
                    endOffset, INTO, APPENDING)
                    && TokenStreamUtils.containsAny(tokens, startOffset,
                            endOffset, TABLE);
        }

        /** Returns whether this has the PACKAGE SIZE clause. */
        private boolean hasPackageSize(List<IToken> tokens, int startOffset,
                int endOffset) {
            return TokenStreamUtils.containsSequence(tokens, startOffset,
                    endOffset, PACKAGE, SIZE);
        }

        /** Returns whether this has the GROUP BY clause. */
        private boolean hasGroupBy(List<IToken> tokens, int startOffset,
                int endOffset) {
            return TokenStreamUtils.containsSequence(tokens, startOffset,
                    endOffset, GROUP, BY);
        }

        /** Returns whether this only contains aggregate functions. */
        private boolean hasOnlyAggregateFunctions(List<IToken> tokens,
                int startOffset, int endOffset) {
            // skip modifiers (SINGLE/FOR/UPDATE/DISTINCT) before the result list
            while (startOffset < endOffset
                    && SELECT_TO_RESULTS_SKIP_TOKENS.contains(tokens.get(
                            startOffset).getType())) {
                startOffset += 1;
            }

            while (startOffset < endOffset
                    && !AGGREGATE_TERMINATOR.contains(tokens.get(startOffset)
                            .getType())) {
                if (!AGGREGATE_FUNCTIONS.contains(tokens.get(startOffset)
                        .getType())) {
                    // found non-aggregate
                    return false;
                }
                startOffset = skipAggregate(tokens, startOffset + 1, endOffset);
            }
            return true;
        }

        /**
         * Skips the remainder of an aggregate function, i.e. a block in
         * parentheses and the optional AS part. Returns the new startOffset.
         */
        private int skipAggregate(List<IToken> tokens, int startOffset,
                int endOffset) {
            if (startOffset >= endOffset
                    || tokens.get(startOffset).getType() != LPAREN) {
                return startOffset;
            }
            int rparenPos = TokenStreamUtils.find(tokens, RPAREN,
                    startOffset, endOffset);
            if (rparenPos == TokenStreamUtils.NOT_FOUND) {
                return startOffset;
            }
            startOffset = rparenPos + 1;

            // optionally skip AS part
            if (startOffset < endOffset
                    && tokens.get(startOffset).getType() == AS) {
                startOffset += 2;
            }
            return startOffset;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.jmeter.save;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.List;

import org.apache.jmeter.junit.JMeterTestCase;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.collections.HashTree;
import org.junit.Test;

/**
 * Round-trip tests for {@link SaveService}: load each test plan, save it back,
 * and compare size/line counts; plus version/class consistency checks.
 */
public class TestSaveService extends JMeterTestCase {

    // testLoadAndSave test files
    private static final String[] FILES = new String[] {
        "AssertionTestPlan.jmx",
        "AuthManagerTestPlan.jmx",
        "HeaderManagerTestPlan.jmx",
        "InterleaveTestPlan2.jmx",
        "InterleaveTestPlan.jmx",
        "LoopTestPlan.jmx",
        "Modification Manager.jmx",
        "OnceOnlyTestPlan.jmx",
        "proxy.jmx",
        "ProxyServerTestPlan.jmx",
        "SimpleTestPlan.jmx",
        "GuiTest.jmx",
        "GuiTest231.jmx",
        // autogenerated test files
        "GenTest27.jmx", // 2.7
        "GenTest210.jmx", // 2.10
        "GenTest2_13.jmx", // 2.13
        };

    // Test files for testLoadAndSave; output will generally be different in size but same number of lines
    private static final String[] FILES_LINES = new String[] {
        "GuiTest231_original.jmx",
        "GenTest25.jmx", // GraphAccumVisualizer obsolete, BSFSamplerGui now a TestBean
        "GenTest251.jmx", // GraphAccumVisualizer obsolete, BSFSamplerGui now a TestBean
        "GenTest26.jmx", // GraphAccumVisualizer now obsolete
        "GenTest27_original.jmx", // CTT changed to use intProp for mode
        };

    // Test files for testLoad; output will generally be different in size and line count
    private static final String[] FILES_LOAD_ONLY = new String[] {
        "GuiTest_original.jmx",
        "GenTest22.jmx",
        "GenTest231.jmx",
        "GenTest24.jmx",
        };

    // When true, the re-saved bytes of mismatching files are written to
    // testfiles/<name>.out to ease diffing.
    private static final boolean saveOut = JMeterUtils.getPropDefault("testsaveservice.saveout", false);

    @Test
    public void testPropfile1() throws Exception {
        assertEquals("Property Version mismatch, ensure you update SaveService#PROPVERSION field with _version property value from saveservice.properties",
                SaveService.PROPVERSION, SaveService.getPropertyVersion());
    }

    @Test
    public void testPropfile2() throws Exception {
        assertEquals("Property File Version mismatch, ensure you update SaveService#FILEVERSION field with revision id of saveservice.properties",
                SaveService.FILEVERSION, SaveService.getFileVersion());
    }

    @Test
    public void testVersions() throws Exception {
        assertTrue("Unexpected version found", SaveService.checkVersions());
    }

    @Test
    public void testLoadAndSave() throws Exception {
        boolean failed = false; // Did a test fail?

        for (final String fileName : FILES) {
            final File testFile = findTestFile("testfiles/" + fileName);
            failed |= loadAndSave(testFile, fileName, true);
        }
        for (final String fileName : FILES_LINES) {
            final File testFile = findTestFile("testfiles/" + fileName);
            failed |= loadAndSave(testFile, fileName, false);
        }
        if (failed) // TODO make these separate tests?
        {
            fail("One or more failures detected");
        }
    }

    /**
     * Loads a test plan, saves it back in memory and compares the size/line
     * counts of original and re-saved output.
     *
     * @param testFile file to round-trip
     * @param fileName name used in diagnostics and for the optional .out dump
     * @param checkSize whether the non-header character count must also match
     * @return true if the comparison failed
     */
    private boolean loadAndSave(File testFile, String fileName, boolean checkSize) throws Exception {
        boolean failed = false;

        // Use an explicit charset so the character counts are identical across
        // platforms (JMX files are XML; previously the platform default was used).
        int[] orig;
        try (BufferedReader br = Files.newBufferedReader(testFile.toPath(), StandardCharsets.UTF_8)) {
            orig = readFile(br);
        }

        HashTree tree = SaveService.loadTree(testFile);

        ByteArrayOutputStream out = new ByteArrayOutputStream(1000000);
        SaveService.saveTree(tree, out); // in-memory stream; nothing to flush or close

        int[] output;
        try (BufferedReader br = new BufferedReader(new InputStreamReader(
                new ByteArrayInputStream(out.toByteArray()), StandardCharsets.UTF_8))) {
            output = readFile(br);
        }

        // We only check the length of the result. Comparing the
        // actual result (out.toByteArray==original) will usually
        // fail, because the order of the properties within each
        // test element may change. Comparing the lengths should be
        // enough to detect most problem cases...
        if ((checkSize && (orig[0] != output[0])) || orig[1] != output[1]) {
            failed = true;
            System.out.println();
            System.out.println("Loading file testfiles/" + fileName + " and "
                    + "saving it back changes its size from " + orig[0] + " to " + output[0] + ".");
            if (orig[1] != output[1]) {
                System.out.println("Number of lines changes from " + orig[1] + " to " + output[1]);
            }
            if (saveOut) {
                final File outFile = findTestFile("testfiles/" + fileName + ".out");
                System.out.println("Write " + outFile);
                try (FileOutputStream outf = new FileOutputStream(outFile)) {
                    outf.write(out.toByteArray());
                }
                System.out.println("Wrote " + outFile);
            }
        }

        // Note this test will fail if a property is added or
        // removed to any of the components used in the test
        // files. The way to solve this is to appropriately change
        // the test file.
        return failed;
    }

    /**
     * Calculate size and line count ignoring EOL and
     * "jmeterTestPlan" element which may vary because of
     * different attributes/attribute lengths.
     *
     * The caller owns (and closes) the reader.
     *
     * @return {length-excluding-header-lines, line count}
     */
    private int[] readFile(BufferedReader br) throws Exception {
        int length = 0;
        int lines = 0;
        String line;
        while ((line = br.readLine()) != null) {
            lines++;
            if (!line.startsWith("<jmeterTestPlan")) {
                length += line.length();
            }
        }
        return new int[] { length, lines };
    }

    @Test
    public void testLoad() throws Exception {
        for (String fileName : FILES_LOAD_ONLY) {
            File file = findTestFile("testfiles/" + fileName);
            try {
                HashTree tree = SaveService.loadTree(file);
                assertNotNull(tree);
            } catch (IllegalArgumentException ex) {
                fail("Exception loading " + file.getAbsolutePath());
            }
        }
    }

    @Test
    public void testClasses() {
        List<String> missingClasses = SaveService.checkClasses();
        if (!missingClasses.isEmpty()) {
            fail("One or more classes not found:" + missingClasses);
        }
    }
}
package com.peiliping.web.server.dbtools.datasource;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.lang3.tuple.MutablePair;
import org.apache.commons.lang3.tuple.Pair;

/**
 * Lightweight, regex-based extraction of the primary table name (and statement
 * type) from a raw SQL string. This is a pre-parser, not a full SQL parser: it
 * only understands the common update / delete / insert / replace / select
 * shapes, after stripping optimizer-hint comments.
 *
 * @link https://github.com/alibaba/tb_tddl/blob/master/tddl-common/src/main/java/com/taobao/tddl/common/SQLPreParser.java
 */
public class SQLParser {

    // Table identifier characters: letters, digits, '_', '@', '.', '"', '$'
    // (covers db-links like tb@lnk, quoted names, and Oracle $-tables).
    private static Pattern ptable        = Pattern.compile("\\s+([a-z0-9_@\\.\"$]+)\\s+");
    private static Pattern pinsert_into  = Pattern.compile("\\s+into\\s+([a-z0-9_@\\.\"$]+)[\\s(]+");
    private static Pattern pdelete_from  = Pattern.compile("\\s+from\\s+([a-z0-9_@\\.\"$]+)\\s+");
    private static Pattern pselect_from  = Pattern.compile("\\s+from\\s+([a-z0-9_@\\.\"$]+)[\\s)]+");
    private static Pattern preplace_from = Pattern.compile("\\s+into\\s+([a-z0-9_@\\.\"$]+)[\\s(]+");
    private static Pattern pfrom_where   = Pattern.compile("\\s+from\\s+(.*)\\s+where\\s+");

    /** Hint/comment blocks (/* ... *&#47;) are stripped before matching. */
    private static String hintregx = "/\\*.*?\\*/";

    /**
     * Returns the first table name found in {@code sql0}, or {@code null} if
     * none can be determined.
     *
     * <p>Delegates to {@link #findTableNameAndType(String)} and discards the
     * statement type. The two methods previously duplicated the entire
     * branch-by-branch matching logic; delegating keeps them from drifting
     * apart (every return path produced the same table string in both).
     *
     * @param sql0 raw SQL text, may be {@code null}
     * @return lower-cased table name, or {@code null} if not recognized
     */
    public static String findTableName(String sql0) {
        Pair<String, String> result = findTableNameAndType(sql0);
        return result == null ? null : result.getLeft();
    }

    /**
     * Returns the first table name found in {@code sql0} together with the
     * statement type (one of the {@code TablenameHandler.SQL_TYPE_*}
     * constants), or {@code null} if the statement is not recognized.
     *
     * @param sql0 raw SQL text, may be {@code null}
     */
    public static Pair<String, String> findTableNameAndType(String sql0) {
        if (sql0 == null)
            return null;
        sql0 = sql0.trim();
        if (sql0.length() < 7) { // shorter than the shortest recognizable statement
            return null;
        }
        if (sql0.indexOf("/*") != -1) {
            sql0 = sql0.replaceAll(hintregx, "").trim(); // drop optimizer hints / comments
        }
        sql0 = sql0.toLowerCase();
        // Trailing space so the \s+ / [\s(]+ tails of the patterns can match at end-of-string.
        sql0 = sql0 + " ";
        if (sql0.startsWith("update")) {
            Matcher m = ptable.matcher(sql0);
            // find(6): start matching just past the leading keyword ("update" is 6 chars).
            if (m.find(6)) {
                return new MutablePair<String, String>(m.group(1), TablenameHandler.SQL_TYPE_UPDATE);
            }
            return null;
        }
        if (sql0.startsWith("delete")) {
            Matcher m = pdelete_from.matcher(sql0);
            if (m.find(6)) {
                return new MutablePair<String, String>(m.group(1), TablenameHandler.SQL_TYPE_DELETE);
            }
            // Fallback: "DELETE tablename WHERE ..." (no FROM keyword).
            m = ptable.matcher(sql0);
            if (m.find(6)) {
                return new MutablePair<String, String>(m.group(1), TablenameHandler.SQL_TYPE_DELETE);
            }
            return null;
        }
        if (sql0.startsWith("insert")) {
            Matcher m = pinsert_into.matcher(sql0);
            if (m.find(6)) {
                return new MutablePair<String, String>(m.group(1), TablenameHandler.SQL_TYPE_INSERT);
            }
            return null;
        }
        if (sql0.startsWith("replace")) {
            Matcher m = preplace_from.matcher(sql0);
            if (m.find(6)) {
                return new MutablePair<String, String>(m.group(1), TablenameHandler.SQL_TYPE_OTHER);
            }
            return null;
        }
        if (!sql0.startsWith("select")) {
            return null;
        }
        // Plain "select ... from <table> ..." case.
        Matcher m = pselect_from.matcher(sql0);
        if (m.find(6)) {
            return new MutablePair<String, String>(m.group(1), TablenameHandler.SQL_TYPE_SELECT);
        }
        // FROM-list case: scan the comma-separated items between FROM and WHERE.
        m = pfrom_where.matcher(sql0);
        if (m.find(6)) {
            String from2where = m.group(1);
            String[] tables = from2where.split(",");
            // NOTE(review): index 0 is intentionally skipped — presumably because a
            // plain first item would already have matched pselect_from above; confirm.
            for (int i = 1; i < tables.length; i++) {
                if (tables[i].indexOf('(') == -1) {
                    // Simple item: take the identifier before any alias.
                    return new MutablePair<String, String>(tables[i].trim().split("\\s")[0], TablenameHandler.SQL_TYPE_SELECT);
                } else {
                    // Subquery / function item: recurse into it.
                    Pair<String, String> r = findTableNameAndType(tables[i]);
                    if (r != null) {
                        return r;
                    }
                }
            }
        }
        // Repair ")from" (no space after a closing paren) and retry once.
        if (sql0.indexOf(")from") != -1) {
            sql0 = sql0.replaceAll("\\)from", ") from");
            return findTableNameAndType(sql0);
        }
        return null;
    }

    /** Ad-hoc manual test driver; the commented lines document supported inputs. */
    public static void main(String[] args) throws IOException {
        List<String> sqls = new ArrayList<String>();
        // sqls.add(" \r \r\n \n update t_a$ble0 set a=1");
        // sqls.add("delete from t_a$ble0\r\n t where t.id = 0");
        // sqls.add("delete from t_a$ble0");
        // sqls.add("insert into t_a$ble0 t values(?,?) where t.id = 0");
        // sqls.add("insert into t_a$ble0(col_a, col_b) values(?,?) where id = 0");
        // sqls.add("select count(*) from t_a$ble0");
        // sqls.add("select 1 from t_a$ble0 t where t.id=0");
        // sqls.add("select 1 from (select id from t_a$ble0) t where t.id = 5");
        // sqls.add("select 1 from(select id from t_a$ble0) t where t.id = 5");
        // sqls.add("select 1 from (select id from table2) t, t_a$ble0 a where t.id = a.id");
        // sqls.add("select 1 from t_a$ble0 a, (select id from table2) t where t.id = a.id");
        // sqls.add("select count(*) from CRM_KNOWLEDGE_DETAIL kc,CRM_KNOWLEDGE_BASE a where a.id=kc.KNOWLEDGE_ID");
        // sqls.add("SELECT * FROM (SELECT CAST(STR2NUMLIST(#in#) AS NUMTABLETYPE) FROM dual) WHERE rownum <= 200");
        // sqls.add("insert into ic_cache@lnk_icdb0 values (:b1 , sysdate) ");
        // sqls.add("select a ,r from icuser.tb0 where spu_id=:f1 and auction_type <> 'a' ");
        // sqls.add("select id from tb0 a, table(cast(str2numlist(:1) as numtabletype )) t where a.id=:2");
        // sqls.add("select id from table(cast(str2numlist(:1) as numtabletype )) t, tb0 a where a.id=:2");
        // sqls.add("select id from table(cast(str2numlist(:1) as numtabletype )) t, table(cast(str2numlist(:1) as numtabletype )) b, tb0 a where a.id=:2");
        // sqls.add("select id from table(cast(str2numlist(:1) as numtabletype )) t, (select col1 from tb2) b, tb0 a where a.id=:2");
        // sqls.add("select id from table(cast(str2numlist(:1) as numtabletype )) t, (select col1,col2 from tb2) b, tb0 a where a.id=:2");
        // sqls.add("select id from table(cast(str2numlist(:1) as numtabletype )) t, (select col1,col2 from tb2 where tb2.id=0) b, tb0 a where a.id=:2");
        // sqls.add("select max(mod(nvl(option$,0),2))from objauth$ where obj#=:1 group by grantee# order by grantee# ");
        // sqls.add("select output from table(dbms_workload_repository.awr_report_html(:dbid, :inst, :bid, :eid, :rpt_options))");
        // sqls.add("DELETE crm_adgroup_detail WHERE status = 1 AND adgroupno = :1");
        // sqls.add("SELECT * FROM \"ALIMM\".\"ADZONESCORE\"");
        // sqls.add("select nvl(min(ts#), -1) \"sysauxts#\" from sys.ts$ where name = 'sysaux'");
        // sqls.add("/* oracleoem */ select nvl(min(ts#), -1) \"sysauxts#\" from sys.ts$ where name = 'sysaux'");
        // sqls.add("/* oracleoem */ select /* sss */nvl(min(ts#), -1) \"sysauxts#\" from sys.ts$ where name = 'sysaux'");
        // sqls.add("failed:select u.id from (table(str2numlist(:1))) n join et_airsupply_users u on n.column_value = u.id");
        // sqls.add("replace into t (i,c,d,ui) values (?,?,?,?)");
        // sqls.add(" SELECT /*+ ordered use_nl(acc,rb) */ rb.ID,rb.USER_ID,rb.DATABASE_CODE,EVENT_EXTEND FROM (SELECT /*+index(crb,IDX_RA_SC_BILL_STAT) */ crb.USER_ID, min(crb.id) dt FROM RA_SC_BILL crb WHERE crb.status = 1 and crb.process_mode = 0 and rownum <= 20000 and DATABASE_CODE in (1, 2, 3) GROUP BY crb.USER_ID) acc, RA_SC_BILL rb WHERE rb.Id = acc.dt and rownum <= 123 and not exists (select 1 from RA_SC_BILL up where up.status = 2 and up.USER_ID = acc.USER_ID)");
        // sqls.add("select k,v from kv where k = ?#");
        // sqls.add("select id,name,sex from people force index (name) where id=5");
        sqls.add("insert into medd values (?,?),(?,?)");
        for (String sql : sqls) {
            System.out.println(findTableNameAndType(sql) + " <-- " + sql);
        }
    }
}
/*
 * Copyright (c) SiteWhere LLC. All rights reserved. http://www.sitewhere.com
 *
 * The software in this package is published under the terms of the MIT
 * license, a copy of which has been included with this distribution in the
 * LICENSE.txt file.
 */
package com.sitewhere.agent;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.gson.JsonElement;
import com.sitewhere.agent.BaseCommandProcessor;
import com.sitewhere.agent.ISiteWhereEventDispatcher;
import com.sitewhere.agent.SiteWhereAgentException;
import com.sitewhere.rest.model.device.communication.DecodedDeviceRequest;
import com.sitewhere.spi.device.event.IDeviceEventOriginator;
import com.sitewhere.agent.*;
import com.ericsson.common.AbstractAgentMeasurement;
import com.ericsson.common.AbstractAgentCommand;

/**
 * Command processor that registers the device with SiteWhere and then forwards
 * queued {@code AgentMeasurement} events to the server in a background thread.
 *
 * <p>Adapted from the SiteWhere protobuf example processor; the protobuf
 * ack-state handling has been removed, so every registration ack is treated
 * as a confirmation.
 *
 * @author Derek
 */
public class Qi20HealthCommandProcessor extends BaseCommandProcessor {

    /** Static logger instance */
    private static final Logger LOG = LoggerFactory.getLogger(Qi20HealthCommandProcessor.class);

    /** Executor for background processing (single worker thread). */
    private ExecutorService executor = Executors.newSingleThreadExecutor();

    /*
     * (non-Javadoc)
     *
     * @see com.sitewhere.agent.BaseCommandProcessor#executeStartupLogic(java.lang.String,
     * java.lang.String, com.sitewhere.agent.ISiteWhereEventDispatcher)
     */
    @Override
    public void executeStartupLogic(String hardwareId, String specificationToken,
            ISiteWhereEventDispatcher dispatcher) throws SiteWhereAgentException {
        sendRegistration(hardwareId, specificationToken);
        LOG.info("Sent registration information.");
    }

    /**
     * Handles a registration ack from SiteWhere. The original example inspected
     * the protobuf ack state (new / already registered / error); here every ack
     * is treated as a successful registration.
     *
     * @param request decoded registration ack request
     */
    @Override
    public void handleRegistrationAck(DecodedDeviceRequest request) {
        onRegistrationConfirmed(request);
    }

    /**
     * Handle logic that should execute once registration is confirmed: start
     * draining the measurement queue.
     *
     * @param request the confirmed registration request
     */
    public void onRegistrationConfirmed(DecodedDeviceRequest request) {
        processSensorData();
    }

    /**
     * Starts a background thread that polls the event queue and forwards each
     * queued measurement to SiteWhere. Runs until the worker is interrupted.
     */
    public void processSensorData() {
        LOG.info("Starting measurement handling thread.");
        executor.execute(new Runnable() {
            @Override
            public void run() {
                while (!Thread.currentThread().isInterrupted()) {
                    try {
                        AgentMeasurement meas =
                                (AgentMeasurement) getEventQueue().poll(50, TimeUnit.MILLISECONDS);
                        if (meas != null) {
                            LOG.debug("sendMeasurement: {}", meas);
                            sendMeasurement(meas.getHardwareId(), meas.getSensorKVs(), meas.getEventDate(),
                                    meas.getMetadata(), null);
                        }
                    } catch (InterruptedException e) {
                        // Fix: previously swallowed by the generic catch, which kept the
                        // loop spinning forever. Restore the flag and stop the worker.
                        Thread.currentThread().interrupt();
                        return;
                    } catch (Exception e) {
                        LOG.error("Sitewhere agent process data error.", e);
                    }
                }
            }
        });
    }

    /**
     * This is an example of creating a thread that will send data to SiteWhere every so
     * often, sleeping between cycles.
     */
    public void sendDataAtInterval() {
        LOG.info("Starting JVM memory statistics sender thread.");
        executor.execute(new Runnable() {
            @Override
            public void run() {
                while (!Thread.currentThread().isInterrupted()) {
                    // Get Java memory values from the runtime.
                    long free = Runtime.getRuntime().freeMemory();
                    long max = Runtime.getRuntime().maxMemory();
                    long total = Runtime.getRuntime().totalMemory();
                    try {
                        // Send events to SiteWhere.
                        sendMeasurement(getHardwareId(), "jvmFreeMemory", free, null);
                        sendMeasurement(getHardwareId(), "jvmMaxMemory", max, null);
                        sendMeasurement(getHardwareId(), "jvmTotalMemory", total, null);
                        LOG.info("Sent a batch of JVM memory statistics.");
                        // Wait five seconds before sending the next batch.
                        Thread.sleep(5000);
                    } catch (SiteWhereAgentException e) {
                        LOG.warn("Unable to send measurements to SiteWhere.", e);
                    } catch (InterruptedException e) {
                        // Fix: previously logged "shut down" but kept looping; actually
                        // honor the interrupt and exit the sender thread.
                        LOG.warn("Event sender thread shut down.", e);
                        Thread.currentThread().interrupt();
                        return;
                    }
                }
            }
        });
    }

    /**
     * Handler for 'helloWorld(String, boolean)' command.
     *
     * @param greeting greeting text to echo back
     * @param loud if true, the response is upper-cased
     * @param originator event originator for the ack
     * @throws SiteWhereAgentException if the ack cannot be sent
     */
    public void helloWorld(String greeting, Boolean loud, IDeviceEventOriginator originator)
            throws SiteWhereAgentException {
        String response = greeting + " World!";
        if (loud) {
            response = response.toUpperCase();
        }
        sendAck(getHardwareId(), response, originator);
        LOG.info("Sent response to 'helloWorld' command.");
    }

    /**
     * Handler for 'ping()' command.
     *
     * @param originator event originator for the ack
     * @throws SiteWhereAgentException if the ack cannot be sent
     */
    public void ping(IDeviceEventOriginator originator) throws SiteWhereAgentException {
        sendAck(getHardwareId(), "Acknowledged.", originator);
        LOG.info("Sent response to 'ping' command.");
    }

    /**
     * Handler for 'testEvents()' command. Event sending is currently disabled;
     * the command is only logged.
     *
     * @param originator event originator
     * @throws SiteWhereAgentException declared for interface compatibility
     */
    public void testEvents(IDeviceEventOriginator originator) throws SiteWhereAgentException {
        LOG.info("Receive 'testEvents' command.");
    }

    /**
     * Handler for 'testCommand()' command. Event sending is currently disabled;
     * the command is only logged.
     *
     * @param originator event originator
     * @throws SiteWhereAgentException declared for interface compatibility
     */
    public void testCommand(IDeviceEventOriginator originator) throws SiteWhereAgentException {
        LOG.info("Receive 'testCommand' command.");
    }
}
/*
 * Copyright 2015 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.javascript.jscomp;

import com.google.javascript.jscomp.CompilerOptions.LanguageMode;

/**
 * Tests for the pass that converts TypeScript-style type syntax (ES6_TYPED)
 * into JSDoc-annotated plain ES6.
 */
public final class Es6TypedToEs6ConverterTest extends CompilerTestCase {

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    setAcceptedLanguage(LanguageMode.ECMASCRIPT6_TYPED);
  }

  @Override
  protected CompilerOptions getOptions() {
    CompilerOptions options = super.getOptions();
    options.setLanguageIn(LanguageMode.ECMASCRIPT6_TYPED);
    options.setLanguageOut(LanguageMode.ECMASCRIPT6);
    return options;
  }

  @Override
  protected CompilerPass getProcessor(Compiler compiler) {
    PhaseOptimizer optimizer = new PhaseOptimizer(compiler, null, null);
    optimizer.addOneTimePass(new PassFactory("convertDeclaredTypesToJSDoc", true) {
      // To make sure types copied.
      @Override
      CompilerPass create(AbstractCompiler compiler) {
        return new Es6TypedToEs6Converter(compiler);
      }
    });
    return optimizer;
  }

  // Typed member variables become /** @type */ declarations on the prototype.
  public void testMemberVariable() {
    test(LINE_JOINER.join(
        "class C {",
        " mv: number;",
        " constructor() {",
        " this.f = 1;",
        " }",
        "}"),
        LINE_JOINER.join(
        "class C {",
        " constructor() {",
        " this.f = 1;",
        " }",
        "}",
        "/** @type {number} */ C.prototype.mv;"));
    test(LINE_JOINER.join(
        "class C {",
        " on: {",
        " p: string;",
        " }",
        "}"),
        LINE_JOINER.join(
        "class C {}",
        "/** @type {{p: string}} */ C.prototype.on;"));
  }

  public void testMemberVariable_noCtor() {
    test("class C { mv: number; }",
        "class C {} /** @type {number} */ C.prototype.mv;");
  }

  public void testMemberVariable_static() {
    test("class C { static smv; }", "class C {} C.smv;");
  }

  // Anonymous classes cannot host member variables (nowhere to hang the prototype decl).
  public void testMemberVariable_anonymousClass() {
    testSame("(class {})");
    testSame("(class { f() {}})");
    testError("(class { x: number; })",
        Es6TypedToEs6Converter.CANNOT_CONVERT_MEMBER_VARIABLES);
  }

  public void testComputedPropertyVariable() {
    test(
        LINE_JOINER.join(
        "class C {",
        " ['mv']: number;",
        " ['mv' + 2]: number;",
        " constructor() {",
        " this.f = 1;",
        " }",
        "}"),
        LINE_JOINER.join(
        "class C {",
        " constructor() {",
        " this.f = 1;",
        " }",
        "}",
        "/** @type {number} */ C.prototype['mv'];",
        "/** @type {number} */ C.prototype['mv' + 2];"));
  }

  public void testComputedPropertyVariable_static() {
    test("class C { static ['smv' + 2]: number; }",
        "class C {} /** @type {number} */ C['smv' + 2];");
  }

  public void testUnionType() {
    test("var x: string | number;", "var /** string | number */ x;");
  }

  // TypeQuery is currently not supported.
  public void testTypeQuery() {
    testError("var x: typeof y | number;",
        Es6TypedToEs6Converter.TYPE_QUERY_NOT_SUPPORTED);
    testError("var x: (p1: typeof y) => number;",
        Es6TypedToEs6Converter.TYPE_QUERY_NOT_SUPPORTED);
  }

  public void testTypedParameter() {
    test("function f(p1: number) {}", "function f(/** number */ p1) {}");
  }

  public void testOptionalParameter() {
    test("function f(p1?: number) {}", "function f(/** number= */ p1) {}");
    test("function f(p1?) {}", "function f(/** ?= */ p1) {}");
  }

  public void testRestParameter() {
    test("function f(...p1: number[]) {}", "function f(/** ...number */ ...p1) {}");
    test("function f(...p1) {}", "function f(...p1) {}");
  }

  public void testReturnType() {
    test("function f(...p1: number[]): void {}",
        "/** @return{void} */ function f(/** ...number */ ...p1) {}");
    test("function f(...p1) {}", "function f(...p1) {}");
  }

  // Built-in TypeScript primitives map directly onto JSDoc primitives.
  public void testBuiltins() {
    test("var x: any;", "var /** ? */ x;");
    test("var x: number;", "var /** number */ x;");
    test("var x: boolean;", "var /** boolean */ x;");
    test("var x: string;", "var /** string */ x;");
    test("var x: void;", "var /** void */ x;");
  }

  // Named (non-primitive) types become non-nullable (!) references.
  public void testNamedType() {
    test("var x: foo;", "var /** !foo */ x;");
    test("var x: foo.bar.Baz;", "var /** !foo.bar.Baz */ x;");
  }

  public void testArrayType() {
    test("var x: string[];", "var /** !Array.<string> */ x;");
    test("var x: string[][];", "var /** !Array.<!Array.<string>> */ x;");
    test("var x: test.Type[];", "var /** !Array.<!test.Type> */ x;");
  }

  public void testRecordType() {
    test("var x: {p: string; q: number};", "var /** {p: string, q: number} */ x;");
    test("var x: {p: string, q: number};", "var /** {p: string, q: number} */ x;");
    test("var x: {p: string; q: {p: string; q: number}};",
        "var /** {p: string, q: {p: string, q: number}}*/ x;");
    test(LINE_JOINER.join(
        "var x: {",
        " p: string;",
        "};"),
        "var /** {p: string} */ x;");
    testError("var x: {constructor(); q: number};",
        Es6TypedToEs6Converter.UNSUPPORTED_RECORD_TYPE);
  }

  public void testParameterizedType() {
    test("var x: test.Type<string>;", "var /** !test.Type<string> */ x;");
    test("var x: test.Type<A, B>;", "var /** !test.Type<!A, !B> */ x;");
    test("var x: test.Type<A<X>, B>;", "var /** !test.Type<!A<!X>, !B> */ x;");
  }

  public void testParameterizedArrayType() {
    test("var x: test.Type<number>[];", "var /** !Array.<!test.Type<number>> */ x;");
  }

  public void testFunctionType() {
    test("var x: (foo: number) => boolean;", "var /** function(number): boolean */ x;");
    test("var x: (foo?: number) => boolean;", "var /** function(number=): boolean */ x;");
    test("var x: (...foo: number[]) => boolean;", "var /** function(...number): boolean */ x;");
    test("var x: (foo, bar?: number) => boolean;", "var /** function(?, number=): boolean */ x;");
    test("var x: (foo: string, ...bar) => boolean;", "var /** function(string, ...?): boolean */ x;");
  }

  public void testGenericClass() {
    test("class Foo<T> {}", "/** @template T */ class Foo {}");
    test("class Foo<U, V> {}", "/** @template U, V */ class Foo {}");
    test("var Foo = class<T> {};", "var Foo = /** @template T */ class {};");

    // Currently, bounded generics are not supported.
    testError("class Foo<U extends () => boolean, V> {}",
        Es6TypedToEs6Converter.CANNOT_CONVERT_BOUNDED_GENERICS);
  }

  public void testGenericFunction() {
    test("function foo<T>() {}", "/** @template T */ function foo() {}");
    test("var x = <K, V>(p) => 3;", "var x = /** @template K, V */ (p) => 3");
    test("class Foo { f<T>() {} }", "class Foo { /** @template T */ f() {} }");
    test("(function<T>() {})();", "(/** @template T */ function() {})();");
    test("function* foo<T>() {}", "/** @template T */ function* foo() {}");
  }

  public void testGenericInterface() {
    test("interface I<T> { foo: T; }",
        "/** @interface @template T */ class I {} /** @type {!T} */ I.prototype.foo;");
  }

  public void testImplements() {
    test("class Foo implements Bar, Baz {}",
        "/** @implements {Bar} @implements {Baz} */ class Foo {}");
    // The "extends" clause is handled by @link {Es6ToEs3Converter}
    test("class Foo extends Bar implements Baz {}",
        "/** @implements {Baz} */ class Foo extends Bar {}");
  }

  public void testEnum() {
    test("enum E { Foo, Bar }", "/** @enum {number} */ var E = { Foo: 0, Bar: 1 }");
  }

  // Interfaces become @interface classes; members move to the prototype.
  public void testInterface() {
    test("interface I { foo: string; }",
        "/** @interface */ class I {} /** @type {string} */ I.prototype.foo;");
    test("interface Foo extends Bar, Baz {}",
        "/** @interface @extends {Bar} @extends {Baz} */ class Foo {}");
    test("interface I { foo(p: string): boolean; }",
        "/** @interface */ class I { /** @return {boolean} */ foo(/** string */ p) {} }");
  }

  public void testTypeAlias() {
    test("type Foo = number;", "/** @typedef{number} */ var Foo;");
    testError("type Foo = number; var Foo = 3; ",
        Es6TypedToEs6Converter.TYPE_ALIAS_ALREADY_DECLARED);
    testError("let Foo = 3; type Foo = number;",
        Es6TypedToEs6Converter.TYPE_ALIAS_ALREADY_DECLARED);
  }

  public void testAmbientDeclaration() {
    test("declare var x;", "/** @suppress {duplicate} */ var x;");
    test("declare let x;", "/** @suppress {duplicate} */ var x;");
    test("declare const x;", "/** @suppress {duplicate} @const */ var x;");
    test("declare function f();", "/** @suppress {duplicate} */ function f() {}");
    test("declare enum Foo {}", "/** @suppress {duplicate} @enum {number} */ var Foo = {}");
    test("declare class C { constructor(); };",
        "/** @suppress {duplicate} */ class C { constructor() {} }");
  }

  // Index signatures map onto @extends {IObject<key, value>}.
  public void testIndexSignature() {
    test("interface I { [foo: string]: Bar<Baz>; }",
        "/** @interface @extends {IObject<string, !Bar<!Baz>>} */ class I {}");
    test("interface I extends J { [foo: string]: Bar<Baz>; }",
        "/** @interface @extends {J} @extends {IObject<string, !Bar<!Baz>>} */ class I {}");
    test("class C implements D { [foo: string]: number; }",
        "/** @implements {D} @implements {IObject<string, number>} */ class C {}");
    testError("var x: { [foo: string]: number; };",
        Es6TypedToEs6Converter.UNSUPPORTED_RECORD_TYPE);
  }
}
/*******************************************************************************
 * Copyright (c) 2004 IBM Corporation and others.
 * All rights reserved. The initial API is made available under the terms of
 * the Common Public License v1.0 which is available at:
 * http://www.eclipse.org/legal/cpl-v10.html
 * Subsequent modifications are made available under the Apache 2.0 license.
 *
 * Contributors:
 * IBM - Initial API and implementation
 * Groovy community - subsequent modifications
 ******************************************************************************/
package org.codehaus.groovy.classgen;

import java.util.List;

import org.codehaus.groovy.ast.*;
import org.codehaus.groovy.ast.expr.BinaryExpression;
import org.codehaus.groovy.ast.expr.ConstantExpression;
import org.codehaus.groovy.ast.expr.DeclarationExpression;
import org.codehaus.groovy.ast.expr.Expression;
import org.codehaus.groovy.ast.expr.GStringExpression;
import org.codehaus.groovy.ast.expr.MapEntryExpression;
import org.codehaus.groovy.ast.expr.MethodCallExpression;
import org.codehaus.groovy.ast.expr.PropertyExpression;
import org.codehaus.groovy.ast.expr.TupleExpression;
import org.codehaus.groovy.ast.expr.VariableExpression;
import org.codehaus.groovy.ast.stmt.CatchStatement;
import org.codehaus.groovy.control.SourceUnit;
import org.codehaus.groovy.runtime.MetaClassHelper;
import org.codehaus.groovy.syntax.Types;

import static java.lang.reflect.Modifier.*;
import static org.objectweb.asm.Opcodes.*;

/**
 * ClassCompletionVerifier
 *
 * AST visitor that checks classes, methods, fields and properties for illegal
 * modifier combinations, illegal inheritance, and duplicate/conflicting
 * declarations, reporting each violation as a compile error.
 */
public class ClassCompletionVerifier extends ClassCodeVisitorSupport {

    // Class currently being visited; restored when a nested visit finishes.
    private ClassNode currentClass;
    private SourceUnit source;
    private boolean inConstructor = false;
    private boolean inStaticConstructor = false;

    public ClassCompletionVerifier(SourceUnit source) {
        this.source = source;
    }

    public ClassNode getClassNode() {
        return currentClass;
    }

    // Entry point for a class: run all class-level checks, then visit members.
    // Most checks are skipped if extends/implements checks already produced errors.
    public void visitClass(ClassNode node) {
        ClassNode oldClass = currentClass;
        currentClass = node;
        checkImplementsAndExtends(node);
        if (source != null && !source.getErrorCollector().hasErrors()) {
            checkClassForIncorrectModifiers(node);
            checkInterfaceMethodVisibility(node);
            checkClassForOverwritingFinal(node);
            checkMethodsForIncorrectModifiers(node);
            checkMethodsForWeakerAccess(node);
            checkMethodsForOverridingFinal(node);
            checkNoAbstractMethodsNonabstractClass(node);
            checkGenericsUsage(node, node.getUnresolvedInterfaces());
            checkGenericsUsage(node, node.getUnresolvedSuperClass());
        }
        super.visitClass(node);
        currentClass = oldClass;
    }

    // Interface methods must be public (not private or protected).
    private void checkInterfaceMethodVisibility(ClassNode node) {
        if (!node.isInterface()) return;
        for (MethodNode method : node.getMethods()) {
            if (method.isPrivate()) {
                addError("Method '" + method.getName() + "' is private but should be public in " + getDescription(currentClass) + ".", method);
            } else if (method.isProtected()) {
                addError("Method '" + method.getName() + "' is protected but should be public in " + getDescription(currentClass) + ".", method);
            }
        }
    }

    // A non-abstract class may not declare (or inherit unimplemented) abstract methods.
    private void checkNoAbstractMethodsNonabstractClass(ClassNode node) {
        if (isAbstract(node.getModifiers())) return;
        List<MethodNode> abstractMethods = node.getAbstractMethods();
        if (abstractMethods == null) return;
        for (MethodNode method : abstractMethods) {
            addError("Can't have an abstract method in a non-abstract class." +
                    " The " + getDescription(node) + " must be declared abstract or" +
                    " the " + getDescription(method) + " must be implemented.", node);
        }
    }

    private void checkClassForIncorrectModifiers(ClassNode node) {
        checkClassForAbstractAndFinal(node);
        checkClassForOtherModifiers(node);
    }

    // abstract + final is contradictory (interfaces are implicitly abstract).
    private void checkClassForAbstractAndFinal(ClassNode node) {
        if (!isAbstract(node.getModifiers())) return;
        if (!isFinal(node.getModifiers())) return;
        if (node.isInterface()) {
            addError("The " + getDescription(node) + " must not be final. It is by definition abstract.", node);
        } else {
            addError("The " + getDescription(node) + " must not be both final and abstract.", node);
        }
    }

    private void checkClassForOtherModifiers(ClassNode node) {
        checkClassForModifier(node, isTransient(node.getModifiers()), "transient");
        checkClassForModifier(node, isVolatile(node.getModifiers()), "volatile");
        checkClassForModifier(node, isNative(node.getModifiers()), "native");
        // static/private only make sense for inner classes.
        if (!(node instanceof InnerClassNode)) {
            checkClassForModifier(node, isStatic(node.getModifiers()), "static");
            checkClassForModifier(node, isPrivate(node.getModifiers()), "private");
        }
        // don't check synchronized here as it overlaps with ACC_SUPER
    }

    private void checkMethodForModifier(MethodNode node, boolean condition, String modifierName) {
        if (!condition) return;
        addError("The " + getDescription(node) + " has an incorrect modifier " + modifierName + ".", node);
    }

    private void checkClassForModifier(ClassNode node, boolean condition, String modifierName) {
        if (!condition) return;
        addError("The " + getDescription(node) + " has an incorrect modifier " + modifierName + ".", node);
    }

    private String getDescription(ClassNode node) {
        return (node.isInterface() ? "interface" : "class") + " '" + node.getName() + "'";
    }

    private String getDescription(MethodNode node) {
        return "method '" + node.getTypeDescriptor() + "'";
    }

    private String getDescription(FieldNode node) {
        return "field '" + node.getName() + "'";
    }

    // An abstract method is only legal inside an abstract class.
    private void checkAbstractDeclaration(MethodNode methodNode) {
        if (!methodNode.isAbstract()) return;
        if (isAbstract(currentClass.getModifiers())) return;
        addError("Can't have an abstract method in a non-abstract class." +
                " The " + getDescription(currentClass) + " must be declared abstract or the method '" +
                methodNode.getTypeDescriptor() + "' must not be abstract.", methodNode);
    }

    // A final class may not be extended.
    private void checkClassForOverwritingFinal(ClassNode cn) {
        ClassNode superCN = cn.getSuperClass();
        if (superCN == null) return;
        if (!isFinal(superCN.getModifiers())) return;
        StringBuilder msg = new StringBuilder();
        msg.append("You are not allowed to overwrite the final ");
        msg.append(getDescription(superCN));
        msg.append(".");
        addError(msg.toString(), cn);
    }

    // extends must target a class; implements must target interfaces.
    private void checkImplementsAndExtends(ClassNode node) {
        ClassNode cn = node.getSuperClass();
        if (cn.isInterface() && !node.isInterface()) {
            addError("You are not allowed to extend the " + getDescription(cn) + ", use implements instead.", node);
        }
        for (ClassNode anInterface : node.getInterfaces()) {
            cn = anInterface;
            if (!cn.isInterface()) {
                addError("You are not allowed to implement the " + getDescription(cn) + ", use extends instead.", node);
            }
        }
    }

    // Interface methods may not be final or static (except the static initializer).
    private void checkMethodsForIncorrectModifiers(ClassNode cn) {
        if (!cn.isInterface()) return;
        for (MethodNode method : cn.getMethods()) {
            if (method.isFinal()) {
                addError("The " + getDescription(method) + " from " + getDescription(cn) +
                        " must not be final. It is by definition abstract.", method);
            }
            if (method.isStatic() && !isConstructor(method)) {
                addError("The " + getDescription(method) + " from " + getDescription(cn) +
                        " must not be static. Only fields may be static in an interface.", method);
            }
        }
    }

    private void checkMethodsForWeakerAccess(ClassNode cn) {
        for (MethodNode method : cn.getMethods()) {
            checkMethodForWeakerAccessPrivileges(method, cn);
        }
    }

    // NOTE(review): despite the name, this matches the *static initializer*
    // "<clinit>", not an instance constructor "<init>" — confirm intent before
    // renaming or changing.
    private boolean isConstructor(MethodNode method) {
        return method.getName().equals("<clinit>");
    }

    // A method may not override a final super method with the same parameter types.
    private void checkMethodsForOverridingFinal(ClassNode cn) {
        for (MethodNode method : cn.getMethods()) {
            Parameter[] params = method.getParameters();
            for (MethodNode superMethod : cn.getSuperClass().getMethods(method.getName())) {
                Parameter[] superParams = superMethod.getParameters();
                if (!hasEqualParameterTypes(params, superParams)) continue;
                if (!superMethod.isFinal()) break;
                addInvalidUseOfFinalError(method, params, superMethod.getDeclaringClass());
                return;
            }
        }
    }

    private void addInvalidUseOfFinalError(MethodNode method, Parameter[] parameters, ClassNode superCN) {
        StringBuilder msg = new StringBuilder();
        msg.append("You are not allowed to override the final method ").append(method.getName());
        msg.append("(");
        boolean needsComma = false;
        for (Parameter parameter : parameters) {
            if (needsComma) {
                msg.append(",");
            } else {
                needsComma = true;
            }
            msg.append(parameter.getType());
        }
        msg.append(") from ").append(getDescription(superCN));
        msg.append(".");
        addError(msg.toString(), method);
    }

    private void addWeakerAccessError(ClassNode cn, MethodNode method, Parameter[] parameters, MethodNode superMethod) {
        StringBuilder msg = new StringBuilder();
        msg.append(method.getName());
        msg.append("(");
        boolean needsComma = false;
        for (Parameter parameter : parameters) {
            if (needsComma) {
                msg.append(",");
            } else {
                needsComma = true;
            }
            msg.append(parameter.getType());
        }
        msg.append(") in ");
        msg.append(cn.getName());
        msg.append(" cannot override ");
        msg.append(superMethod.getName());
        msg.append(" in ");
        msg.append(superMethod.getDeclaringClass().getName());
        msg.append("; attempting to assign weaker access privileges; was ");
        msg.append(superMethod.isPublic() ? "public" : "protected");
        addError(msg.toString(), method);
    }

    // Compares parameter lists by type *name* only.
    private boolean hasEqualParameterTypes(Parameter[] first, Parameter[] second) {
        if (first.length != second.length) return false;
        for (int i = 0; i < first.length; i++) {
            String ft = first[i].getType().getName();
            String st = second[i].getType().getName();
            if (ft.equals(st)) continue;
            return false;
        }
        return true;
    }

    protected SourceUnit getSourceUnit() {
        return source;
    }

    public void visitMethod(MethodNode node) {
        inConstructor = false;
        inStaticConstructor = node.isStaticConstructor();
        checkAbstractDeclaration(node);
        checkRepetitiveMethod(node);
        checkOverloadingPrivateAndPublic(node);
        checkMethodModifiers(node);
        checkGenericsUsage(node, node.getParameters());
        checkGenericsUsage(node, node.getReturnType());
        super.visitMethod(node);
    }

    private void checkMethodModifiers(MethodNode node) {
        // don't check volatile here as it overlaps with ACC_BRIDGE
        // additional modifiers not allowed for interfaces
        if ((this.currentClass.getModifiers() & ACC_INTERFACE) != 0) {
            checkMethodForModifier(node, isStrict(node.getModifiers()), "strictfp");
            checkMethodForModifier(node, isSynchronized(node.getModifiers()), "synchronized");
            checkMethodForModifier(node, isNative(node.getModifiers()), "native");
        }
    }

    // An override may not reduce visibility (e.g. public -> protected/private).
    private void checkMethodForWeakerAccessPrivileges(MethodNode mn, ClassNode cn) {
        if (mn.isPublic()) return;
        Parameter[] params = mn.getParameters();
        for (MethodNode superMethod : cn.getSuperClass().getMethods(mn.getName())) {
            Parameter[] superParams = superMethod.getParameters();
            if (!hasEqualParameterTypes(params, superParams)) continue;
            if ((mn.isPrivate() && !superMethod.isPrivate()) ||
                    (mn.isProtected() && superMethod.isPublic())) {
                addWeakerAccessError(cn, mn, params, superMethod);
                return;
            }
        }
    }

    // Mixing private and public/protected overloads of one name disables multimethods.
    private void checkOverloadingPrivateAndPublic(MethodNode node) {
        if (isConstructor(node)) return;
        boolean hasPrivate = node.isPrivate();
        boolean hasPublic = node.isPublic();
        for (MethodNode method : currentClass.getMethods(node.getName())) {
            if (method == node) continue;
            if (!method.getDeclaringClass().equals(node.getDeclaringClass())) continue;
            if (method.isPublic() || method.isProtected()) {
                hasPublic = true;
            } else {
                hasPrivate = true;
            }
            if (hasPrivate && hasPublic) break;
        }
        if (hasPrivate && hasPublic) {
            addError("Mixing private and public/protected methods of the same name causes multimethods to be disabled and is forbidden to avoid surprising behaviour. Renaming the private methods will solve the problem.", node);
        }
    }

    // Two declarations with the same name, parameter types and return type are an error.
    private void checkRepetitiveMethod(MethodNode node) {
        if (isConstructor(node)) return;
        for (MethodNode method : currentClass.getMethods(node.getName())) {
            if (method == node) continue;
            if (!method.getDeclaringClass().equals(node.getDeclaringClass())) continue;
            Parameter[] p1 = node.getParameters();
            Parameter[] p2 = method.getParameters();
            if (p1.length != p2.length) continue;
            addErrorIfParamsAndReturnTypeEqual(p2, p1, node, method);
        }
    }

    private void addErrorIfParamsAndReturnTypeEqual(Parameter[] p2, Parameter[] p1,
                                                    MethodNode node, MethodNode element) {
        boolean isEqual = true;
        for (int i = 0; i < p2.length; i++) {
            isEqual &= p1[i].getType().equals(p2[i].getType());
            if (!isEqual) break;
        }
        isEqual &= node.getReturnType().equals(element.getReturnType());
        if (isEqual) {
            addError("Repetitive method name/signature for " + getDescription(node) +
                    " in " + getDescription(currentClass) + ".", node);
        }
    }

    public void visitField(FieldNode node) {
        // getDeclaredField returns the *first* field of that name; a mismatch
        // means this node is a duplicate declaration.
        if (currentClass.getDeclaredField(node.getName()) != node) {
            addError("The " + getDescription(node) + " is declared multiple times.", node);
        }
        checkInterfaceFieldModifiers(node);
        checkGenericsUsage(node, node.getType());
        super.visitField(node);
    }

    public void visitProperty(PropertyNode node) {
        checkDuplicateProperties(node);
        checkGenericsUsage(node, node.getType());
        super.visitProperty(node);
    }

    private void checkDuplicateProperties(PropertyNode node) {
        ClassNode cn = node.getDeclaringClass();
        String name =
node.getName(); String getterName = "get" + MetaClassHelper.capitalize(name); if (Character.isUpperCase(name.charAt(0))) { for (PropertyNode propNode : cn.getProperties()) { String otherName = propNode.getField().getName(); String otherGetterName = "get" + MetaClassHelper.capitalize(otherName); if (node != propNode && getterName.equals(otherGetterName)) { String msg = "The field " + name + " and " + otherName + " on the class " + cn.getName() + " will result in duplicate JavaBean properties, which is not allowed"; addError(msg, node); } } } } private void checkInterfaceFieldModifiers(FieldNode node) { if (!currentClass.isInterface()) return; if ((node.getModifiers() & (ACC_PUBLIC | ACC_STATIC | ACC_FINAL)) == 0 || (node.getModifiers() & (ACC_PRIVATE | ACC_PROTECTED)) != 0) { addError("The " + getDescription(node) + " is not 'public static final' but is defined in " + getDescription(currentClass) + ".", node); } } public void visitBinaryExpression(BinaryExpression expression) { if (expression.getOperation().getType() == Types.LEFT_SQUARE_BRACKET && expression.getRightExpression() instanceof MapEntryExpression) { addError("You tried to use a map entry for an index operation, this is not allowed. 
" + "Maybe something should be set in parentheses or a comma is missing?", expression.getRightExpression()); } super.visitBinaryExpression(expression); switch (expression.getOperation().getType()) { case Types.EQUAL: // = assignment case Types.BITWISE_AND_EQUAL: case Types.BITWISE_OR_EQUAL: case Types.BITWISE_XOR_EQUAL: case Types.PLUS_EQUAL: case Types.MINUS_EQUAL: case Types.MULTIPLY_EQUAL: case Types.DIVIDE_EQUAL: case Types.INTDIV_EQUAL: case Types.MOD_EQUAL: case Types.POWER_EQUAL: case Types.LEFT_SHIFT_EQUAL: case Types.RIGHT_SHIFT_EQUAL: case Types.RIGHT_SHIFT_UNSIGNED_EQUAL: checkFinalFieldAccess(expression.getLeftExpression()); break; default: break; } } private void checkFinalFieldAccess(Expression expression) { if (!(expression instanceof VariableExpression) && !(expression instanceof PropertyExpression)) return; Variable v = null; if (expression instanceof VariableExpression) { VariableExpression ve = (VariableExpression) expression; v = ve.getAccessedVariable(); } else { PropertyExpression propExp = ((PropertyExpression) expression); Expression objectExpression = propExp.getObjectExpression(); if (objectExpression instanceof VariableExpression) { VariableExpression varExp = (VariableExpression) objectExpression; if (varExp.isThisExpression()) { v = currentClass.getDeclaredField(propExp.getPropertyAsString()); } } } if (v instanceof FieldNode) { FieldNode fn = (FieldNode) v; /* * if it is static final but not accessed inside a static constructor, or, * if it is an instance final but not accessed inside a instance constructor, it is an error */ boolean isFinal = fn.isFinal(); boolean isStatic = fn.isStatic(); boolean error = isFinal && ((isStatic && !inStaticConstructor) || (!isStatic && !inConstructor)); if (error) addError("cannot modify" + (isStatic ? " static" : "") + " final field '" + fn.getName() + "' outside of " + (isStatic ? "static initialization block." 
: "constructor."), expression); } } public void visitConstructor(ConstructorNode node) { inConstructor = true; inStaticConstructor = node.isStaticConstructor(); checkGenericsUsage(node, node.getParameters()); super.visitConstructor(node); } public void visitCatchStatement(CatchStatement cs) { if (!(cs.getExceptionType().isDerivedFrom(ClassHelper.make(Throwable.class)))) { addError("Catch statement parameter type is not a subclass of Throwable.", cs); } super.visitCatchStatement(cs); } public void visitMethodCallExpression(MethodCallExpression mce) { super.visitMethodCallExpression(mce); Expression aexp = mce.getArguments(); if (aexp instanceof TupleExpression) { TupleExpression arguments = (TupleExpression) aexp; for (Expression e : arguments.getExpressions()) { checkForInvalidDeclaration(e); } } else { checkForInvalidDeclaration(aexp); } } @Override public void visitDeclarationExpression(DeclarationExpression expression) { super.visitDeclarationExpression(expression); if (expression.isMultipleAssignmentDeclaration()) return; checkInvalidDeclarationModifier(expression, ACC_ABSTRACT, "abstract"); checkInvalidDeclarationModifier(expression, ACC_NATIVE, "native"); checkInvalidDeclarationModifier(expression, ACC_PRIVATE, "private"); checkInvalidDeclarationModifier(expression, ACC_PROTECTED, "protected"); checkInvalidDeclarationModifier(expression, ACC_PUBLIC, "public"); checkInvalidDeclarationModifier(expression, ACC_STATIC, "static"); checkInvalidDeclarationModifier(expression, ACC_STRICT, "strictfp"); checkInvalidDeclarationModifier(expression, ACC_SYNCHRONIZED, "synchronized"); checkInvalidDeclarationModifier(expression, ACC_TRANSIENT, "transient"); checkInvalidDeclarationModifier(expression, ACC_VOLATILE, "volatile"); } private void checkInvalidDeclarationModifier(DeclarationExpression expression, int modifier, String modName) { if ((expression.getVariableExpression().getModifiers() & modifier) != 0) { addError("Modifier '" + modName + "' not allowed here.", 
expression); } } private void checkForInvalidDeclaration(Expression exp) { if (!(exp instanceof DeclarationExpression)) return; addError("Invalid use of declaration inside method call.", exp); } public void visitConstantExpression(ConstantExpression expression) { super.visitConstantExpression(expression); checkStringExceedingMaximumLength(expression); } public void visitGStringExpression(GStringExpression expression) { super.visitGStringExpression(expression); for (ConstantExpression ce : expression.getStrings()) { checkStringExceedingMaximumLength(ce); } } private void checkStringExceedingMaximumLength(ConstantExpression expression) { Object value = expression.getValue(); if (value instanceof String) { String s = (String) value; if (s.length() > 65535) { addError("String too long. The given string is " + s.length() + " Unicode code units long, but only a maximum of 65535 is allowed.", expression); } } } private void checkGenericsUsage(ASTNode ref, ClassNode[] nodes) { for (ClassNode node : nodes) { checkGenericsUsage(ref, node); } } private void checkGenericsUsage(ASTNode ref, Parameter[] params) { for (Parameter p : params) { checkGenericsUsage(ref, p.getType()); } } private void checkGenericsUsage(ASTNode ref, ClassNode node) { if (node.isArray()) { checkGenericsUsage(ref, node.getComponentType()); } else if (!node.isRedirectNode() && node.isUsingGenerics()) { addError( "A transform used a generics containing ClassNode "+ node + " " + "for "+getRefDescriptor(ref) + "directly. You are not suppposed to do this. " + "Please create a new ClassNode refering to the old ClassNode " + "and use the new ClassNode instead of the old one. Otherwise " + "the compiler will create wrong descriptors and a potential " + "NullPointerException in TypeResolver in the OpenJDK. 
If this is " + "not your own doing, please report this bug to the writer of the " + "transform.", ref); } } private String getRefDescriptor(ASTNode ref) { if (ref instanceof FieldNode) { FieldNode f = (FieldNode) ref; return "the field "+f.getName()+" "; } else if (ref instanceof PropertyNode) { PropertyNode p = (PropertyNode) ref; return "the property "+p.getName()+" "; } else if (ref instanceof ConstructorNode) { return "the constructor "+ref.getText()+" "; } else if (ref instanceof MethodNode) { return "the method "+ref.getText()+" "; } else if (ref instanceof ClassNode) { return "the super class "+ref+" "; } return "<unknown with class "+ref.getClass()+"> "; } }
/*
 * Copyright 2015 Fabian Schultis, Hauke Oldsen
 * Copyright 2016 Hauke Oldsen
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.gebatzens.sia.fragment;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.widget.CardView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ScrollView;
import android.widget.TextView;

import java.io.IOException;
import java.io.OutputStream;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

import de.gebatzens.sia.SIAApp;
import de.gebatzens.sia.MainActivity;
import de.gebatzens.sia.R;
import de.gebatzens.sia.data.Mensa;

/**
 * Fragment that renders the cafeteria ("Mensa") menu plan as a scrollable list
 * of cards, one per {@link Mensa.MensaItem}. Each card shows the meal texts and
 * loads its picture asynchronously, backed by a simple file cache.
 */
public class MensaFragment extends RemoteDataFragment {

    // Set from outside (orientation handling); when true the card image area is enlarged.
    // NOTE(review): boxed Boolean kept for compatibility with existing writers — confirm
    // nothing assigns null before changing to primitive boolean.
    Boolean screenOrientationHorizontal = false;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup group, Bundle bundle) {
        ((MainActivity) getActivity()).updateMenu(R.menu.toolbar_menu);
        ViewGroup vg = (ViewGroup) inflater.inflate(R.layout.fragment_mensa, group, false);
        // Only build the card list when remote data has already arrived.
        if (getFragment().getData() != null) {
            createRootView(inflater, vg);
        }
        return vg;
    }

    @Override
    public void createView(LayoutInflater inflater, ViewGroup view) {
        LinearLayout lroot = (LinearLayout) view.findViewById(R.id.mensa_content);
        ScrollView sv = new ScrollView(getActivity());
        sv.setLayoutParams(new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
        sv.setTag("gg_scroll");
        LinearLayout l = new LinearLayout(getActivity());
        createRootLayout(l);
        lroot.addView(sv);
        sv.addView(l);

        Mensa mensa = (Mensa) getFragment().getData();
        if (mensa.isEmpty()) {
            createNoEntriesCard(l, inflater);
        } else {
            // Entries whose date already passed are not shown.
            for (Mensa.MensaItem item : mensa) {
                if (!item.isPast()) {
                    l.addView(createCardItem(item, inflater));
                }
            }
        }
    }

    @Override
    public ViewGroup getContentView() {
        return (ViewGroup) getView().findViewById(R.id.mensa_content);
    }

    /**
     * Builds one menu card: inflates the card layout, fills in the meal texts,
     * picks the vegetarian/meat badge and kicks off the async image load.
     */
    private CardView createCardItem(Mensa.MensaItem mensa_item, LayoutInflater i) {
        CardView mcv = (CardView) i.inflate(R.layout.basic_cardview, null);
        mcv.setLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
        mcv.setContentPadding(0, 0, 0, 0);
        i.inflate(R.layout.mensa_cardview_entry, mcv, true);
        if (mensa_item.isPast()) {
            mcv.setAlpha(0.65f);
        }
        ((TextView) mcv.findViewById(R.id.mcv_date)).setText(getFormatedDate(mensa_item.date));
        ((TextView) mcv.findViewById(R.id.mcv_meal)).setText(mensa_item.meal);
        ((TextView) mcv.findViewById(R.id.mcv_garnish)).setText(getResources().getString(R.string.garnish) + ": " + mensa_item.garnish.replace("mit ", "").replace("mit", ""));
        ((TextView) mcv.findViewById(R.id.mcv_dessert)).setText(getResources().getString(R.string.dessert) + ": " + mensa_item.dessert);
        ((TextView) mcv.findViewById(R.id.mcv_day)).setText(getDayByDate(mensa_item.date));
        // vegetarian == 1 selects the vegetarian badge, anything else the meat badge
        ((ImageView) mcv.findViewById(R.id.mcv_imgvegi)).setImageBitmap((Integer.valueOf(mensa_item.vegetarian) == 1)
                ? BitmapFactory.decodeResource(getResources(), R.drawable.ic_vegetarian)
                : BitmapFactory.decodeResource(getResources(), R.drawable.ic_meat));

        if (screenOrientationHorizontal) {
            LinearLayout mcvImageContainer = (LinearLayout) mcv.findViewById(R.id.mcv_image_container);
            ViewGroup.LayoutParams mcvImageContainerLayoutParams = mcvImageContainer.getLayoutParams();
            mcvImageContainerLayoutParams.height = toPixels(240);
        }

        ViewHolder vh = new ViewHolder();
        vh.imgview = (ImageView) mcv.findViewById(R.id.mcv_image);
        vh.filename = mensa_item.image;

        // Load the meal picture off the UI thread: cache hit first, otherwise
        // fetch via the API and store in the cache for next time.
        new AsyncTask<ViewHolder, Void, ViewHolder>() {
            @Override
            protected ViewHolder doInBackground(ViewHolder... params) {
                try {
                    Bitmap bitmap = cacheGetBitmap(params[0].filename);
                    if (bitmap != null) {
                        params[0].bitmap = bitmap;
                    } else {
                        bitmap = SIAApp.SIA_APP.api.getMensaImage(params[0].filename);
                        cacheSaveBitmap(params[0].filename, bitmap);
                        params[0].bitmap = bitmap;
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                    params[0].bitmap = null;
                }
                return params[0];
            }

            @Override
            protected void onPostExecute(ViewHolder result) {
                try {
                    ImageView imgView = result.imgview;
                    if (result.bitmap != null) {
                        imgView.setImageBitmap(result.bitmap);
                    } else {
                        // Fallback placeholder when the download failed.
                        imgView.setImageBitmap(BitmapFactory.decodeResource(getResources(), R.drawable.no_content));
                    }
                    imgView.setScaleType(ImageView.ScaleType.CENTER_CROP);
                } catch (Exception e) {
                    // The fragment may be detached by the time the task finishes.
                    e.printStackTrace();
                }
            }
        }.execute(vh);

        return mcv;
    }

    /**
     * Parses a server-side date string of the form {@code yyyy-MM-dd}.
     * Uses {@link Locale#US} so parsing is independent of the device locale.
     *
     * @return the parsed date, or {@code null} if the string is malformed
     */
    private Date parseServerDate(String date) {
        DateFormat parser = new SimpleDateFormat("yyyy-MM-dd", Locale.US);
        try {
            return parser.parse(date);
        } catch (ParseException e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Returns the localized short weekday name ("Mon", "Mo.", ...) for a
     * {@code yyyy-MM-dd} date string, or "" if the date cannot be parsed.
     */
    private String getDayByDate(String date) {
        Date parsedDate = parseServerDate(date);
        if (parsedDate == null) {
            return "";
        }
        // Default locale is intentional here: the day name should be localized.
        return new SimpleDateFormat("EEE").format(parsedDate);
    }

    /**
     * Formats a {@code yyyy-MM-dd} date string for display, choosing a pattern
     * by device language (German, English, or ISO fallback).
     * Returns "" if the date cannot be parsed.
     */
    private String getFormatedDate(String date) {
        DateFormat dateFormatter;
        switch (Locale.getDefault().getLanguage()) {
            case "de":
                dateFormatter = new SimpleDateFormat("d. MMM");
                break;
            case "en":
                dateFormatter = new SimpleDateFormat("MM/dd/yyyy");
                break;
            default:
                dateFormatter = new SimpleDateFormat("yyyy-MM-dd");
                break;
        }
        Date parsedDate = parseServerDate(date);
        if (parsedDate == null) {
            return "";
        }
        return dateFormatter.format(parsedDate);
    }

    /**
     * Reads a previously cached bitmap from app-private storage.
     *
     * @return the cached bitmap, or {@code null} when absent or unreadable
     */
    public Bitmap cacheGetBitmap(String filename) {
        try {
            return BitmapFactory.decodeStream(getActivity().openFileInput("cache_" + filename));
        } catch (Exception e) {
            return null;
        }
    }

    /**
     * Writes a bitmap to the app-private cache as PNG. The stream is closed in
     * a finally block so a failure during compress() no longer leaks the file
     * descriptor. Failures are logged, not propagated (best-effort cache).
     */
    private void cacheSaveBitmap(String filename, Bitmap image) {
        OutputStream fos = null;
        try {
            fos = getActivity().openFileOutput("cache_" + filename, Context.MODE_PRIVATE);
            // Quality is ignored for PNG (lossless), kept for parity with other call sites.
            image.compress(Bitmap.CompressFormat.PNG, 90, fos);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (fos != null) {
                try {
                    fos.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    // Carries the target view, file name and decoded bitmap through the AsyncTask.
    private class ViewHolder {
        ImageView imgview;
        Bitmap bitmap;
        String filename;
    }
}
package com.mapbox.rctmgl.components.styles.sources;

import android.content.Context;
import android.graphics.PointF;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;

import com.facebook.react.bridge.ReadableMap;
import com.mapbox.mapboxsdk.annotations.Marker;
import com.mapbox.mapboxsdk.annotations.MarkerOptions;
import com.mapbox.mapboxsdk.annotations.MarkerView;
import com.mapbox.mapboxsdk.annotations.MarkerViewOptions;
import com.mapbox.mapboxsdk.maps.MapboxMap;
import com.mapbox.mapboxsdk.style.sources.GeoJsonOptions;
import com.mapbox.mapboxsdk.style.sources.GeoJsonSource;
import com.mapbox.rctmgl.components.annotation.RCTMGLCallout;
import com.mapbox.rctmgl.components.annotation.RCTMGLPointAnnotationOptions;
import com.mapbox.rctmgl.components.mapview.RCTMGLMapView;
import com.mapbox.rctmgl.components.styles.layers.RCTLayer;
import com.mapbox.rctmgl.events.FeatureClickEvent;
import com.mapbox.rctmgl.events.IEvent;
import com.mapbox.rctmgl.utils.DownloadMapImageTask;
import com.mapbox.rctmgl.utils.GeoJSONUtils;
import com.mapbox.services.commons.geojson.Feature;
import com.mapbox.services.commons.geojson.FeatureCollection;
import com.mapbox.services.commons.geojson.Geometry;
import com.mapbox.services.commons.geojson.Point;

import java.net.URL;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Created by nickitaliano on 9/19/17.
 */

/**
 * GeoJSON-backed map source. The shape comes either from an inline GeoJSON
 * string or from a URL; optional map icons (bundled drawables and remote
 * downloads) are registered on the map before the source itself is added.
 */
public class RCTMGLShapeSource extends RCTSource<GeoJsonSource> {
    private URL mURL;
    private RCTMGLShapeSourceManager mManager;
    private String mShape;

    private Boolean mCluster;
    private Integer mClusterRadius;
    private Integer mClusterMaxZoom;

    private Integer mMaxZoom;
    private Integer mBuffer;
    private Double mTolerance;

    // Flipped when the source is removed while remote images are still downloading,
    // so the async callback knows not to add the source afterwards.
    private boolean mRemoved = false;

    private List<Map.Entry<String, String>> mImages;
    private List<Map.Entry<String, BitmapDrawable>> mNativeImages;

    public RCTMGLShapeSource(Context context, RCTMGLShapeSourceManager manager) {
        super(context);
        mManager = manager;
    }

    @Override
    public void addToMap(final RCTMGLMapView mapView) {
        mRemoved = false;

        boolean wantsRemoteImages = hasImages();
        boolean wantsBundledImages = hasNativeImages();

        // No icons at all: the source can go straight onto the map.
        if (!wantsBundledImages && !wantsRemoteImages) {
            super.addToMap(mapView);
            return;
        }

        MapboxMap mapboxMap = mapView.getMapboxMap();

        // Bundled drawables are available immediately.
        if (wantsBundledImages) {
            for (Map.Entry<String, BitmapDrawable> bundled : mNativeImages) {
                mapboxMap.addImage(bundled.getKey(), bundled.getValue().getBitmap());
            }
        }

        if (!wantsRemoteImages) {
            super.addToMap(mapView);
            return;
        }

        // Remote images must finish downloading before the source is added;
        // the callback is skipped when the source was removed in the meantime.
        DownloadMapImageTask.OnAllImagesLoaded onLoaded = new DownloadMapImageTask.OnAllImagesLoaded() {
            @Override
            public void onAllImagesLoaded() {
                if (mRemoved) return;
                RCTMGLShapeSource.super.addToMap(mapView);
            }
        };
        DownloadMapImageTask downloader = new DownloadMapImageTask(getContext(), mapboxMap, onLoaded);
        downloader.execute(mImages.toArray(new Map.Entry[mImages.size()]));
    }

    @Override
    public void removeFromMap(RCTMGLMapView mapView) {
        super.removeFromMap(mapView);
        mRemoved = true;

        if (mMap == null) {
            return;
        }
        if (hasImages()) {
            removeImageKeys(mImages);
        }
        if (hasNativeImages()) {
            removeImageKeys(mNativeImages);
        }
    }

    // Unregisters every icon in the list from the map by its key.
    private void removeImageKeys(List<? extends Map.Entry<String, ?>> entries) {
        for (Map.Entry<String, ?> entry : entries) {
            mMap.removeImage(entry.getKey());
        }
    }

    @Override
    public GeoJsonSource makeSource() {
        GeoJsonOptions opts = getOptions();
        // Inline shape wins over URL when both are configured.
        return (mShape != null)
                ? new GeoJsonSource(mID, mShape, opts)
                : new GeoJsonSource(mID, mURL, opts);
    }

    public void setURL(URL url) {
        mURL = url;
        if (isSourceLive()) {
            ((GeoJsonSource) mSource).setUrl(mURL);
        }
    }

    public void setShape(String geoJSONStr) {
        mShape = geoJSONStr;
        if (isSourceLive()) {
            ((GeoJsonSource) mSource).setGeoJson(mShape);
        }
    }

    // True when the underlying source exists and its map view is still usable.
    private boolean isSourceLive() {
        return mSource != null && mMapView != null && !mMapView.isDestroyed();
    }

    public void setCluster(boolean cluster) {
        mCluster = cluster;
    }

    public void setClusterRadius(int clusterRadius) {
        mClusterRadius = clusterRadius;
    }

    public void setClusterMaxZoom(int clusterMaxZoom) {
        mClusterMaxZoom = clusterMaxZoom;
    }

    public void setMaxZoom(int maxZoom) {
        mMaxZoom = maxZoom;
    }

    public void setBuffer(int buffer) {
        mBuffer = buffer;
    }

    public void setTolerance(double tolerance) {
        mTolerance = tolerance;
    }

    public void setImages(List<Map.Entry<String, String>> images) {
        mImages = images;
    }

    public void setNativeImages(List<Map.Entry<String, BitmapDrawable>> nativeImages) {
        mNativeImages = nativeImages;
    }

    public void onPress(Feature feature) {
        mManager.handleEvent(FeatureClickEvent.makeShapeSourceEvent(this, feature));
    }

    // Translates only the properties that were explicitly set into GeoJsonOptions.
    private GeoJsonOptions getOptions() {
        GeoJsonOptions opts = new GeoJsonOptions();

        if (mCluster != null) {
            opts.withCluster(mCluster);
        }
        if (mClusterRadius != null) {
            opts.withClusterRadius(mClusterRadius);
        }
        if (mClusterMaxZoom != null) {
            opts.withClusterMaxZoom(mClusterMaxZoom);
        }
        if (mMaxZoom != null) {
            opts.withMaxZoom(mMaxZoom);
        }
        if (mBuffer != null) {
            opts.withBuffer(mBuffer);
        }
        if (mTolerance != null) {
            opts.withTolerance(mTolerance.floatValue());
        }

        return opts;
    }

    private boolean hasImages() {
        return mImages != null && !mImages.isEmpty();
    }

    private boolean hasNativeImages() {
        return mNativeImages != null && !mNativeImages.isEmpty();
    }
}
/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the
 * Free Software Foundation, Inc.,
 * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 */
package org.jab.docsearch.gui;

import java.awt.Event;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.print.PageFormat;
import java.awt.print.PrinterException;
import java.awt.print.PrinterJob;
import java.io.IOException;
import java.util.Vector;

import javax.swing.ButtonGroup;
import javax.swing.JButton;
import javax.swing.JEditorPane;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JRadioButtonMenuItem;
import javax.swing.JScrollPane;
import javax.swing.JTextField;
import javax.swing.KeyStroke;
import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;

/**
 * Uses JFrame to create a simple browser with printing. User passes String URL
 * (e.g., http://www.rbi.com) to set opening location. User then may follow
 * hyperlinks or type new preferred location into provided JTextField. Scaling
 * options are demonstrated in this example. Scaling options may be set from a
 * submenu in the File menu or by specified KeyStroke. A second menu tracks the
 * last visited websites, which the user can reselect as destination.
 *
 * @version $Id: JBrowser.java 172 2012-09-14 15:24:32Z henschel $
 */
public class JBrowser extends JFrame {
    /** Number of sites listed in the "Last 20" history menu. */
    private static final int kNumSites = 20;
    private static final int kDefaultX = 640;
    private static final int kDefaultY = 480;

    private static final String kScale2Label = "2X Scale";
    private static final String kScaleFitLabel = "Scale to Fit";
    private static final String kScaleHalfLabel = "1/2 Scale";
    private static final String kScaleOffLabel = "Scaling Off";
    private static final String kScaleXLabel = "Scale by Width";
    private static final String kScaleYLabel = "Scale by Length";

    private JEditorPane mainPane;
    private String path;
    private final JButton goButton = new JButton("Go");
    private JComponentVista vista;
    private final JMenu fileMenu = new JMenu("File", true);
    private final JMenu prefMenu = new JMenu("Print Preferences", true);
    private final JMenu siteMenu = new JMenu("Last 20", true);
    private final JRadioButtonMenuItem scale2RadioBut = new JRadioButtonMenuItem(kScale2Label);
    private final JRadioButtonMenuItem scaleFitRadioBut = new JRadioButtonMenuItem(kScaleFitLabel);
    private final JRadioButtonMenuItem scaleHalfRadioBut = new JRadioButtonMenuItem(kScaleHalfLabel);
    private final JRadioButtonMenuItem scaleOffRadioBut = new JRadioButtonMenuItem(kScaleOffLabel, true);
    private final JRadioButtonMenuItem scaleXRadioBut = new JRadioButtonMenuItem(kScaleXLabel);
    private final JRadioButtonMenuItem scaleYRadioBut = new JRadioButtonMenuItem(kScaleYLabel);
    private final JTextField pathField = new JTextField(30);
    /** History model backing the "Last 20" menu; index 0 is the most recent site. */
    private final Vector<JMenuItem> siteMIVector = new Vector<JMenuItem>();

    /**
     * Builds the browser window, loads the given URL into the editor pane and
     * wires up all menus, accelerators and listeners.
     *
     * @param url the initial page to open
     */
    public JBrowser(String url) {
        super("JBrowser HTML Printing Demo");
        path = url;
        addSite(path);
        try {
            mainPane = new JEditorPane(path);
        } catch (IOException ioe) {
            ioe.printStackTrace(System.err);
            System.exit(1);
        }

        JMenuBar menuBar = new JMenuBar();
        JMenuItem printMI = new JMenuItem("Print");
        JMenuItem exitMI = new JMenuItem("Exit");

        printMI.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P, Event.CTRL_MASK));
        exitMI.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_X, Event.CTRL_MASK));
        scale2RadioBut.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_D, Event.CTRL_MASK));
        scaleFitRadioBut.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_F, Event.CTRL_MASK));
        scaleHalfRadioBut.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_H, Event.CTRL_MASK));
        scaleOffRadioBut.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_O, Event.CTRL_MASK));
        scaleXRadioBut.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_W, Event.CTRL_MASK));
        scaleYRadioBut.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_L, Event.CTRL_MASK));

        printMI.addActionListener(new PrintMIListener());
        exitMI.addActionListener(new ExitMIListener());
        scaleXRadioBut.addActionListener(new ScaleXListener());
        scaleYRadioBut.addActionListener(new ScaleYListener());
        scaleFitRadioBut.addActionListener(new ScaleFitListener());
        scaleHalfRadioBut.addActionListener(new ScaleHalfListener());
        scale2RadioBut.addActionListener(new Scale2Listener());
        // BUG FIX: ScaleOffListener existed but was never registered, so choosing
        // "Scaling Off" did not reset the print scaling.
        scaleOffRadioBut.addActionListener(new ScaleOffListener());
        pathField.addActionListener(new PathFieldListener());
        pathField.setText(path);
        goButton.addActionListener(new PathFieldListener());

        // Radio group: exactly one scaling mode can be active.
        ButtonGroup scaleSetGroup = new ButtonGroup();
        scaleSetGroup.add(scale2RadioBut);
        scaleSetGroup.add(scaleFitRadioBut);
        scaleSetGroup.add(scaleHalfRadioBut);
        scaleSetGroup.add(scaleOffRadioBut);
        scaleSetGroup.add(scaleXRadioBut);
        scaleSetGroup.add(scaleYRadioBut);

        prefMenu.add(scaleXRadioBut);
        prefMenu.add(scaleYRadioBut);
        prefMenu.add(scaleFitRadioBut);
        prefMenu.add(scaleHalfRadioBut);
        prefMenu.add(scale2RadioBut);
        prefMenu.addSeparator();
        prefMenu.add(scaleOffRadioBut);
        fileMenu.add(prefMenu);
        fileMenu.add(printMI);
        fileMenu.addSeparator();
        fileMenu.add(exitMI);
        menuBar.add(fileMenu);
        menuBar.add(siteMenu);
        menuBar.add(pathField);
        menuBar.add(goButton);

        mainPane.setEditable(false);
        mainPane.addHyperlinkListener(new LinkListener());
        vista = new JComponentVista(mainPane, new PageFormat());
        setContentPane(new JScrollPane(mainPane));
        setJMenuBar(menuBar);
        setSize(kDefaultX, kDefaultY);
        // Show the window only once everything is assembled (the original
        // called setVisible(true) twice, once before the menu bar was set).
        setVisible(true);
    }

    public static void main(String[] args) {
        // Guard against a missing argument instead of throwing
        // ArrayIndexOutOfBoundsException.
        if (args.length < 1) {
            System.err.println("Usage: java JBrowser <url>");
            System.exit(1);
        }
        new JBrowser(args[0]);
    }

    /**
     * Records a visited URL in the "Last 20" history. A URL already present is
     * moved to the front; otherwise a new entry is inserted at the front, and
     * the oldest entry is dropped once the menu holds {@code kNumSites} items.
     *
     * @param url the visited location
     */
    public void addSite(String url) {
        boolean beenThere = false;
        // Look for an existing entry with the same label; if found, move it
        // to the front of the history and refresh the menu.
        for (int i = 0; (i < siteMenu.getItemCount()) && !beenThere; i++) {
            JMenuItem site = siteMenu.getItem(i);
            if (site.getText().equals(url)) {
                siteMIVector.removeElementAt(i);
                siteMIVector.insertElementAt(site, 0);
                updateMenu(siteMenu);
                beenThere = true;
            }
        }

        if (!beenThere) {
            // At capacity: drop the oldest entry to make room.
            if (siteMenu.getItemCount() >= kNumSites) {
                siteMIVector.removeElementAt(siteMIVector.size() - 1);
            }
            JMenuItem site = new JMenuItem(url);
            site.addActionListener(new SiteMenuListener(url));
            siteMIVector.insertElementAt(site, 0);
            System.out.println("\n  Connected to " + url);
            updateMenu(siteMenu);
        }
    }

    /** Rebuilds the given menu from the current contents of {@code siteMIVector}. */
    public void updateMenu(JMenu menu) {
        menu.removeAll();
        for (int i = 0; i < siteMIVector.size(); i++) {
            JMenuItem mi = siteMIVector.elementAt(i);
            menu.add(mi);
        }
    }

    /*
     * The ActionListener implementations.
     */

    /** Terminates the application. */
    public class ExitMIListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent evt) {
            System.out.println("\n Killing JBrowser...");
            System.out.println("  ...AHHHHHHHHHhhhhhhh...ya got me...ugh");
            System.exit(0);
        }
    }

    /** Follows activated hyperlinks and records the new location in history. */
    public class LinkListener implements HyperlinkListener {
        @Override
        public void hyperlinkUpdate(HyperlinkEvent ev) {
            try {
                if (ev.getEventType() == HyperlinkEvent.EventType.ACTIVATED) {
                    mainPane.setPage(ev.getURL());
                    path = ev.getURL().toString();
                    pathField.setText(path);
                    addSite(path);
                }
            } catch (IOException ex) {
                ex.printStackTrace(System.err);
            }
        }
    }

    /** Navigates to the URL typed into the address field (or Go button). */
    public class PathFieldListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent evt) {
            System.out.println("\n Switching from " + path + " to " + pathField.getText() + ".");
            path = pathField.getText();
            try {
                mainPane.setPage(path);
            } catch (IOException ex) {
                ex.printStackTrace(System.err);
            }
            if (!path.equals("")) {
                addSite(path);
            }
        }
    }

    /** Opens the system print dialog and prints the current page via {@code vista}. */
    public class PrintMIListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent evt) {
            PrinterJob pj = PrinterJob.getPrinterJob();
            pj.setPageable(vista);
            try {
                if (pj.printDialog()) {
                    pj.print();
                }
            } catch (PrinterException pe) {
                System.out.println(pe);
            }
        }
    }

    /** Print at double size. */
    public class Scale2Listener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent evt) {
            vista = new JComponentVista(mainPane, new PageFormat());
            vista.setScale(2.0, 2.0);
        }
    }

    /** Shrink-to-fit the page. */
    public class ScaleFitListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent evt) {
            vista = new JComponentVista(mainPane, new PageFormat());
            vista.scaleToFit(false);
        }
    }

    /** Print at half size. */
    public class ScaleHalfListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent evt) {
            vista = new JComponentVista(mainPane, new PageFormat());
            vista.setScale(0.5, 0.5);
        }
    }

    /** Reset to unscaled printing. */
    public class ScaleOffListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent evt) {
            vista = new JComponentVista(mainPane, new PageFormat());
        }
    }

    /** Scale to the page width. */
    public class ScaleXListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent evt) {
            vista = new JComponentVista(mainPane, new PageFormat());
            vista.scaleToFitX();
        }
    }

    /** Scale to the page length. */
    public class ScaleYListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent evt) {
            vista = new JComponentVista(mainPane, new PageFormat());
            vista.scaleToFitY();
        }
    }

    /** Navigates to a site chosen from the "Last 20" history menu. */
    public class SiteMenuListener implements ActionListener {
        private final String site;

        public SiteMenuListener(String url) {
            site = url;
        }

        @Override
        public void actionPerformed(ActionEvent evt) {
            System.out.println("\n Switching from " + path + " to " + site + ".");
            path = site;
            try {
                mainPane.setPage(path);
            } catch (IOException ioe) {
                ioe.printStackTrace(System.err);
            }
            if (!path.equals("")) {
                addSite(path);
            }
            pathField.setText(path);
        }
    }
}
package net.avalara.avatax.rest.client.models;

import net.avalara.avatax.rest.client.enums.*;
import net.avalara.avatax.rest.client.serializer.JsonSerializer;

import java.lang.Override;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;

/*
 * AvaTax Software Development Kit for Java JRE based environments
 *
 * (c) 2004-2018 Avalara, Inc.
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 *
 * @author Dustin Welden <dustin.welden@avalara.com>
 * @copyright 2004-2018 Avalara, Inc.
 * @license https://www.apache.org/licenses/LICENSE-2.0
 * @link https://github.com/avadev/AvaTax-REST-V2-JRE-SDK
 * Swagger name: AvaTaxClient
 */

/**
 * Represents estimated financial results from responding to a tax notice.
 *
 * <p>Generated SDK model: plain mutable property bag serialized to/from the
 * AvaTax REST API via {@link JsonSerializer}.
 */
public class NoticeFinanceModel {

    private Integer id;

    /** Getter for id: the unique id of this finance model. */
    public Integer getId() { return this.id; }

    /** Setter for id: the unique id of this finance model. */
    public void setId(Integer value) { this.id = value; }

    private Integer noticeId;

    /** Getter for noticeId: the unique ID of the tax notice associated with the finance detail. */
    public Integer getNoticeId() { return this.noticeId; }

    /** Setter for noticeId: the unique ID of the tax notice associated with the finance detail. */
    public void setNoticeId(Integer value) { this.noticeId = value; }

    private Date noticeDate;

    /** Getter for noticeDate: the date of the notice. */
    public Date getNoticeDate() { return this.noticeDate; }

    /** Setter for noticeDate: the date of the notice. */
    public void setNoticeDate(Date value) { this.noticeDate = value; }

    private Date dueDate;

    /** Getter for dueDate: the due date of the notice. */
    public Date getDueDate() { return this.dueDate; }

    /** Setter for dueDate: the due date of the notice. */
    public void setDueDate(Date value) { this.dueDate = value; }

    private String noticeNumber;

    /** Getter for noticeNumber: the sequential number of the notice. */
    public String getNoticeNumber() { return this.noticeNumber; }

    /** Setter for noticeNumber: the sequential number of the notice. */
    public void setNoticeNumber(String value) { this.noticeNumber = value; }

    private BigDecimal taxDue;

    /** Getter for taxDue: the amount of tax due on the notice. */
    public BigDecimal getTaxDue() { return this.taxDue; }

    /** Setter for taxDue: the amount of tax due on the notice. */
    public void setTaxDue(BigDecimal value) { this.taxDue = value; }

    private BigDecimal penalty;

    /** Getter for penalty: the amount of penalty listed on the notice. */
    public BigDecimal getPenalty() { return this.penalty; }

    /** Setter for penalty: the amount of penalty listed on the notice. */
    public void setPenalty(BigDecimal value) { this.penalty = value; }

    private BigDecimal interest;

    /** Getter for interest: the amount of interest listed on the notice. */
    public BigDecimal getInterest() { return this.interest; }

    /** Setter for interest: the amount of interest listed on the notice. */
    public void setInterest(BigDecimal value) { this.interest = value; }

    private BigDecimal credits;

    /** Getter for credits: the amount of credits listed on the notice. */
    public BigDecimal getCredits() { return this.credits; }

    /** Setter for credits: the amount of credits listed on the notice. */
    public void setCredits(BigDecimal value) { this.credits = value; }

    private BigDecimal taxAbated;

    /** Getter for taxAbated: the amount of tax abated on the notice. */
    public BigDecimal getTaxAbated() { return this.taxAbated; }

    /** Setter for taxAbated: the amount of tax abated on the notice. */
    public void setTaxAbated(BigDecimal value) { this.taxAbated = value; }

    private BigDecimal customerPenalty;

    /** Getter for customerPenalty: the amount of customer penalty on the notice. */
    public BigDecimal getCustomerPenalty() { return this.customerPenalty; }

    /** Setter for customerPenalty: the amount of customer penalty on the notice. */
    public void setCustomerPenalty(BigDecimal value) { this.customerPenalty = value; }

    private BigDecimal customerInterest;

    /** Getter for customerInterest: the amount of customer interest on the notice. */
    public BigDecimal getCustomerInterest() { return this.customerInterest; }

    /** Setter for customerInterest: the amount of customer interest on the notice. */
    public void setCustomerInterest(BigDecimal value) { this.customerInterest = value; }

    private BigDecimal cspFeeRefund;

    /** Getter for cspFeeRefund: the amount of CSP Fee Refund on the notice. */
    public BigDecimal getCspFeeRefund() { return this.cspFeeRefund; }

    /** Setter for cspFeeRefund: the amount of CSP Fee Refund on the notice. */
    public void setCspFeeRefund(BigDecimal value) { this.cspFeeRefund = value; }

    private String fileName;

    /** Getter for fileName: the name of the file attached to the finance detail. */
    public String getFileName() { return this.fileName; }

    /** Setter for fileName: the name of the file attached to the finance detail. */
    public void setFileName(String value) { this.fileName = value; }

    private Long resourceFileId;

    /** Getter for resourceFileId: the ResourceFileId of the finance detail attachment. */
    public Long getResourceFileId() { return this.resourceFileId; }

    /** Setter for resourceFileId: the ResourceFileId of the finance detail attachment. */
    public void setResourceFileId(Long value) { this.resourceFileId = value; }

    private Date createdDate;

    /** Getter for createdDate: the date when this record was created. */
    public Date getCreatedDate() { return this.createdDate; }

    /** Setter for createdDate: the date when this record was created. */
    public void setCreatedDate(Date value) { this.createdDate = value; }

    private Integer createdUserId;

    /** Getter for createdUserId: the User ID of the user who created this record. */
    public Integer getCreatedUserId() { return this.createdUserId; }

    /** Setter for createdUserId: the User ID of the user who created this record. */
    public void setCreatedUserId(Integer value) { this.createdUserId = value; }

    private Date modifiedDate;

    /** Getter for modifiedDate: the date/time when this record was last modified. */
    public Date getModifiedDate() { return this.modifiedDate; }

    /** Setter for modifiedDate: the date/time when this record was last modified. */
    public void setModifiedDate(Date value) { this.modifiedDate = value; }

    private Integer modifiedUserId;

    /** Getter for modifiedUserId: the user ID of the user who last modified this record. */
    public Integer getModifiedUserId() { return this.modifiedUserId; }

    /** Setter for modifiedUserId: the user ID of the user who last modified this record. */
    public void setModifiedUserId(Integer value) { this.modifiedUserId = value; }

    private ResourceFileUploadRequestModel attachmentUploadRequest;

    /**
     * Getter for attachmentUploadRequest.
     * NOTE(review): no description in the upstream schema — presumably the
     * upload request for the finance detail attachment; confirm with the API docs.
     */
    public ResourceFileUploadRequestModel getAttachmentUploadRequest() { return this.attachmentUploadRequest; }

    /**
     * Setter for attachmentUploadRequest.
     * NOTE(review): no description in the upstream schema — presumably the
     * upload request for the finance detail attachment; confirm with the API docs.
     */
    public void setAttachmentUploadRequest(ResourceFileUploadRequestModel value) { this.attachmentUploadRequest = value; }

    /**
     * Returns a JSON string representation of NoticeFinanceModel
     */
    @Override
    public String toString() {
        return JsonSerializer.SerializeObject(this);
    }
}
/*
 * Copyright (c) 2009 University of Durham, England
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *
 * * Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 *
 * * Neither the name of 'SynergySpace' nor the names of its contributors
 * may be used to endorse or promote products derived from this software
 * without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package synergynet.table.apps.mysteries.projector;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import com.jme.scene.Spatial;

import synergynet.contentsystem.ContentSystem;
import synergynet.contentsystem.items.OrthoContentItem;
import synergynet.contentsystem.items.ContentItem;
import synergynet.contentsystem.items.QuadContentItem;
import synergynet.contentsystem.items.innernotecontroller.InnerNoteController;
import synergynet.contentsystem.items.innernotecontroller.InnerNoteEditor;
import synergynet.services.ServiceManager;
import synergynet.services.exceptions.CouldNotStartServiceException;
import synergynet.services.net.networkedcontentmanager.NetworkedContentListener;
import synergynet.services.net.networkedcontentmanager.NetworkedContentManager;
import synergynet.services.net.networkedcontentmanager.messagehandler.DefaultMessageHandler;
import synergynet.services.net.networkedcontentmanager.synchroniseddatarender.SynchronisedDataRender;
import synergynet.services.net.networkedcontentmanager.utils.RemoteDesktop;
import synergynet.services.net.tablecomms.client.TableCommsClientService;
import synergynet.services.net.tablecomms.messages.control.fromclient.ApplicationCommsRequest;
import synergynet.table.SynergyNetAppUtils;
import synergynet.table.appregistry.ApplicationInfo;
import synergynet.table.apps.DefaultSynergyNetApp;
import synergynet.table.apps.mysteries.controller.MysteriesControllerApp;

/**
 * Projector-side application for the Mysteries suite. Mirrors content driven
 * by {@link MysteriesControllerApp} over table comms, renders synchronised
 * item state, and periodically logs item positions so a session can be
 * restored after an unclean exit.
 */
public class MysteryProjectorApp extends DefaultSynergyNetApp implements NetworkedContentListener {

	/** Comms client; lazily obtained on first activation. */
	private TableCommsClientService comms;
	protected ContentSystem contentSystem;
	protected DefaultMessageHandler messageHandler;
	protected NetworkedContentManager networkedContentManager;
	protected InnerNoteController innerNoteController;
	/** Directory holding per-mystery position logs used for crash recovery. */
	private File restoreFolder;
	/** Properties file recording whether the last exit was clean. */
	private File exitSettingsFile;
	/** True when the previous run did not exit cleanly and state should be restored. */
	public static boolean restore = true;
	protected String currentSubApp = "";
	private boolean isLogEnabled = false;

	public MysteryProjectorApp(ApplicationInfo info) {
		super(info);
		innerNoteController = new InnerNoteController();
		restoreFolder = new File(getApplicationDataDirectory(), "restore");
		exitSettingsFile = new File(getApplicationDataDirectory(), "safeExitSettings.properties");
		checkLastExitSettings();
		/*
		 * if(restore){ for(String mysteryID: mysteryIDToXMLPath.keySet())
		 * mysteriesToRestore.add(mysteryID); }
		 */
	}

	@Override
	public void addContent() {
		setDefaultSteadfastLimit(1);
		contentSystem = ContentSystem.getContentSystemForSynergyNetApp(this);
		SynergyNetAppUtils.addTableOverlay(this);
	}

	/**
	 * On first activation, wires up the comms client, the networked content
	 * manager (controller/projector topology) and the message handler; on
	 * every activation, announces this app over comms.
	 */
	@Override
	public void onActivate() {
		if (comms == null) {
			try {
				comms = (TableCommsClientService) ServiceManager.getInstance().get(TableCommsClientService.class);
			} catch (CouldNotStartServiceException e1) {
				e1.printStackTrace();
			}
			List<Class<?>> receiverClasses = new ArrayList<Class<?>>();
			receiverClasses.add(MysteriesControllerApp.class);
			receiverClasses.add(MysteryProjectorApp.class);
			this.networkedContentManager = new NetworkedContentManager(contentSystem, comms, receiverClasses);
			this.networkedContentManager.addNetworkedContentListener(this);
			ArrayList<Class<?>> controllerClasses = new ArrayList<Class<?>>();
			ArrayList<Class<?>> projectorClasses = new ArrayList<Class<?>>();
			controllerClasses.add(MysteriesControllerApp.class);
			projectorClasses.add(this.getClass());
			this.networkedContentManager.createProjectorController(controllerClasses, projectorClasses);
			messageHandler = new DefaultMessageHandler(contentSystem, this.networkedContentManager);
			try {
				if (comms != null) comms.register(this, messageHandler);
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
		try {
			if (comms != null) comms.sendMessage(new ApplicationCommsRequest(MysteryProjectorApp.class.getName()));
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/** Per-frame update: pumps the content manager/system and, if enabled, logs item state. */
	@Override
	public void stateUpdate(float tpf) {
		if (networkedContentManager != null) networkedContentManager.stateUpdate(tpf);
		if (contentSystem != null) contentSystem.update(tpf);
		if (currentSubApp != null && isLogEnabled) {
			try {
				logContentState(currentSubApp);
			} catch (FileNotFoundException e) {
				e.printStackTrace();
			}
		}
	}

	@Override
	public void onDeactivate() {}

	/** Clears current items and loads the named content set from {@code filePath}. */
	public void loadContent(String filePath, String name) {
		this.removeAllItems();
		networkedContentManager.loadLocalContent(filePath, name);
		this.contentLoaded();
	}

	@Override
	public void contentLoaded() {}

	/** Removes every note editor and every networked content item. */
	public void removeAllItems() {
		this.innerNoteController.removeAllNoteEditors();
		networkedContentManager.removeAllItems();
	}

	/**
	 * Applies synchronised attributes to {@code item} and disables direct
	 * interaction with any note editor attached to it (the projector is
	 * display-only; edits come from the controller).
	 */
	@Override
	public void renderSynchronisedDate(ContentItem item, Map<String, String> itemAttrs) {
		SynchronisedDataRender.render((OrthoContentItem) item, itemAttrs, this.innerNoteController);
		SynchronisedDataRender.renderNote((OrthoContentItem) item, itemAttrs, this.innerNoteController);
		if (innerNoteController.getNodeEditor((QuadContentItem) item) != null) {
			InnerNoteEditor innerNoteEditor = innerNoteController.getNodeEditor((QuadContentItem) item);
			innerNoteEditor.removeInnerNoteEventListeners();
			innerNoteEditor.getNoteNode().setRotateTranslateScalable(false);
			innerNoteEditor.getNoteNode().removeItemListerners();
		}
	}

	/**
	 * Writes the transform (translation, scale, rotation quaternion, z-order)
	 * of every content item to a per-mystery restore file.
	 *
	 * @param mysteryID name of the restore file to (re)write
	 * @throws FileNotFoundException if the restore file cannot be opened
	 */
	private void logContentState(String mysteryID) throws FileNotFoundException {
		if (!restoreFolder.exists()) restoreFolder.mkdir();
		File restoreFile = new File(restoreFolder, mysteryID);
		PrintWriter pw = new PrintWriter(new FileOutputStream(restoreFile));
		// FIX: close the writer in a finally block so the file handle is not
		// leaked if any write throws (original closed it only on success).
		try {
			pw.println("# Last state for app ID: " + mysteryID);
			pw.println("# Storing started at " + new Date().toString());
			pw.println("# Format is as follows:");
			pw.println("# content Item name, location x, location y, location z, scale x, scale y, scale z, rotation x, rotation y, rotation z, rotation w, zOrder");
			for (ContentItem item : contentSystem.getAllContentItems().values()) {
				Spatial spatial = (Spatial) item.getImplementationObject();
				pw.print(spatial.getName() + ",");
				pw.print(spatial.getLocalTranslation().x + ",");
				pw.print(spatial.getLocalTranslation().y + ",");
				pw.print(spatial.getLocalTranslation().z + ",");
				pw.print(spatial.getLocalScale().x + ",");
				pw.print(spatial.getLocalScale().y + ",");
				pw.print(spatial.getLocalScale().z + ",");
				pw.print(spatial.getLocalRotation().x + ",");
				pw.print(spatial.getLocalRotation().y + ",");
				pw.print(spatial.getLocalRotation().z + ",");
				pw.print(spatial.getLocalRotation().w + ",");
				pw.println(spatial.getZOrder());
			}
		} finally {
			pw.close();
		}
	}

	/**
	 * Reads the safe-exit flag left by the previous run. If the flag says the
	 * last exit was clean ("restore" != "1"), disables restore and re-arms the
	 * flag to "1" so a crash before the next clean exit triggers a restore.
	 */
	private void checkLastExitSettings() {
		try {
			if (!exitSettingsFile.exists()) exitSettingsFile.createNewFile();
			Properties properties = new Properties();
			// FIX: close streams in finally blocks so handles are not leaked
			// when load/store throws (original closed them only on success).
			FileInputStream is = new FileInputStream(exitSettingsFile);
			try {
				properties.load(is);
			} finally {
				is.close();
			}
			String isRestore = properties.getProperty("restore");
			if (isRestore != null && isRestore.equals("1")) {
				MysteryProjectorApp.restore = true;
			} else {
				MysteryProjectorApp.restore = false;
				properties.setProperty("restore", "1");
				FileOutputStream os = new FileOutputStream(exitSettingsFile);
				try {
					properties.store(os, null);
				} finally {
					os.close();
				}
			}
		} catch (IOException exp) {
			exp.printStackTrace();
		}
	}

	/*
	 * private void setSafeExit(){ try{ if(!exitSettingsFile.exists())
	 * exitSettingsFile.createNewFile(); FileOutputStream os = new
	 * FileOutputStream(exitSettingsFile); Properties properties = new
	 * Properties(); properties.setProperty("restore", "0");
	 * properties.store(os, "Safe exit on "+ new Date()); os.close(); }
	 * catch(IOException exp){ exp.printStackTrace(); } }
	 */

	@Override
	public void channelSwitched() {
	}

	@Override
	public void contentItemLoaded(ContentItem item) {
	}

	@Override
	public void remoteContentLoaded(RemoteDesktop remoteDesktop) {
	}

	@Override
	public void renderRemoteDesktop(RemoteDesktop remoteDesktop, OrthoContentItem item, Map<String, String> map) {
	}
}
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.vectordrawable.graphics.drawable.tests;

import static androidx.vectordrawable.graphics.drawable.tests.DrawableUtils.saveVectorDrawableIntoPNG;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import static java.lang.Thread.sleep;

import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.Drawable.ConstantState;
import android.util.AttributeSet;
import android.util.Xml;
import android.view.View;
import android.widget.ImageButton;

import androidx.annotation.DrawableRes;
import androidx.core.view.ViewCompat;
import androidx.test.annotation.UiThreadTest;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.FlakyTest;
import androidx.test.filters.LargeTest;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.rule.ActivityTestRule;
import androidx.vectordrawable.animated.test.R;
import androidx.vectordrawable.graphics.drawable.Animatable2Compat.AnimationCallback;
import androidx.vectordrawable.graphics.drawable.AnimatedVectorDrawableCompat;

import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;

import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

/**
 * Instrumented tests for {@link AnimatedVectorDrawableCompat}: inflation,
 * rendering consistency, constant-state/mutate semantics, animated color
 * fills, animation callbacks and path morphing.
 */
@LargeTest
@RunWith(AndroidJUnit4.class)
public class AnimatedVectorDrawableTest {
    @Rule
    public final ActivityTestRule<DrawableStubActivity> mActivityTestRule;

    private static final float PIXEL_ERROR_THRESHOLD = 0.3f;
    private static final float PIXEL_DIFF_THRESHOLD = 0.03f;
    private static final float PIXEL_DIFF_COUNT_THRESHOLD = 0.1f;

    private static final String LOGTAG = AnimatedVectorDrawableTest.class.getSimpleName();

    private static final int IMAGE_WIDTH = 64;
    private static final int IMAGE_HEIGHT = 64;
    @DrawableRes
    private static final int DRAWABLE_RES_ID = R.drawable.animation_vector_drawable_grouping_1;

    private Context mContext;
    private Resources mResources;
    // Flip to true to dump rendered frames as PNGs for manual inspection.
    private static final boolean DBG_DUMP_PNG = false;

    // States to check for animation callback tests.
    private boolean mAnimationStarted = false;
    private boolean mAnimationEnded = false;

    // Animation callback used for all callback related tests.
    private AnimationCallback mAnimationCallback = new AnimationCallback() {
        @Override
        public void onAnimationStart(Drawable drawable) {
            mAnimationStarted = true;
        }

        @Override
        public void onAnimationEnd(Drawable drawable) {
            mAnimationEnded = true;
        }
    };

    public AnimatedVectorDrawableTest() {
        mActivityTestRule = new ActivityTestRule<>(DrawableStubActivity.class);
    }

    @Before
    public void setup() throws Exception {
        mContext = mActivityTestRule.getActivity();
        mResources = mContext.getResources();
    }

    /** Inflates the AVD from XML, draws one frame and spot-checks two pixels. */
    @Test
    public void testInflate() throws Exception {
        // Setup AnimatedVectorDrawableCompat from xml file
        XmlPullParser parser = mResources.getXml(DRAWABLE_RES_ID);
        AttributeSet attrs = Xml.asAttributeSet(parser);

        int type;
        while ((type = parser.next()) != XmlPullParser.START_TAG
                && type != XmlPullParser.END_DOCUMENT) {
            // Empty loop
        }

        if (type != XmlPullParser.START_TAG) {
            throw new XmlPullParserException("No start tag found");
        }
        Bitmap bitmap = Bitmap.createBitmap(IMAGE_WIDTH, IMAGE_HEIGHT, Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(bitmap);
        AnimatedVectorDrawableCompat animatedVectorDrawable = AnimatedVectorDrawableCompat.create(
                mContext, DRAWABLE_RES_ID);
        animatedVectorDrawable.inflate(mResources, parser, attrs);
        animatedVectorDrawable.setBounds(0, 0, IMAGE_WIDTH, IMAGE_HEIGHT);
        bitmap.eraseColor(0);
        animatedVectorDrawable.draw(canvas);
        int sunColor = bitmap.getPixel(IMAGE_WIDTH / 2, IMAGE_HEIGHT / 2);
        int earthColor = bitmap.getPixel(IMAGE_WIDTH * 3 / 4 + 2, IMAGE_HEIGHT / 2);
        // assertEquals gives an informative failure message, unlike
        // assertTrue(a == b).
        assertEquals(0xFFFF8000, sunColor);
        assertEquals(0xFF5656EA, earthColor);

        if (DBG_DUMP_PNG) {
            saveVectorDrawableIntoPNG(mResources, bitmap, DRAWABLE_RES_ID, null);
        }
    }

    /**
     * Render AVD sequence in an bitmap for several frames with the same content, and make sure
     * there is no image corruptions.
     *
     * @throws IOException only if DBG_DUMP_PNG is true when dumping images for debugging purpose.
     */
    @Test
    public void testRenderCorrectness() throws IOException {
        final int numTests = 5;
        final Bitmap bitmap = Bitmap.createBitmap(IMAGE_WIDTH, IMAGE_WIDTH,
                Bitmap.Config.ARGB_8888);
        final Canvas c = new Canvas(bitmap);
        final AnimatedVectorDrawableCompat avd = AnimatedVectorDrawableCompat.create(mContext,
                R.drawable.animation_vector_drawable_circle);
        avd.setBounds(0, 0, IMAGE_WIDTH, IMAGE_HEIGHT);

        InstrumentationRegistry.getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                avd.start();
            }
        });

        // First make sure the content is drawn into the bitmap.
        // Then save the first frame as the golden images.
        bitmap.eraseColor(0);
        InstrumentationRegistry.getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                avd.draw(c);
            }
        });
        int centerColor = bitmap.getPixel(IMAGE_WIDTH / 2, IMAGE_WIDTH / 2);
        assertTrue(centerColor != 0);
        Bitmap firstFrame = Bitmap.createBitmap(bitmap);
        if (DBG_DUMP_PNG) {
            saveVectorDrawableIntoPNG(mResources, firstFrame, -1, "firstframe");
        }

        // Now compare the following frames with the 1st frames. Expect some minor difference like
        // Anti-Aliased edges, so the compare is fuzzy.
        for (int i = 0; i < numTests; i++) {
            bitmap.eraseColor(0);
            InstrumentationRegistry.getInstrumentation().runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    avd.draw(c);
                }
            });
            if (DBG_DUMP_PNG) {
                saveVectorDrawableIntoPNG(mResources, bitmap, -1, "correctness_" + i);
            }
            compareImages(firstFrame, bitmap, "correctness_" + i);
        }
    }

    /**
     * Utility function for fuzzy image comparison b/t 2 bitmap. Failed if the difference is bigger
     * than a threshold.
     */
    private void compareImages(Bitmap ideal, Bitmap given, String filename) {
        int idealWidth = ideal.getWidth();
        int idealHeight = ideal.getHeight();

        assertEquals(idealWidth, given.getWidth());
        assertEquals(idealHeight, given.getHeight());

        int totalDiffPixelCount = 0;
        float totalPixelCount = idealWidth * idealHeight;
        for (int x = 0; x < idealWidth; x++) {
            for (int y = 0; y < idealHeight; y++) {
                int idealColor = ideal.getPixel(x, y);
                int givenColor = given.getPixel(x, y);
                if (idealColor == givenColor) {
                    continue;
                }

                float totalError = 0;
                totalError += Math.abs(Color.red(idealColor) - Color.red(givenColor));
                totalError += Math.abs(Color.green(idealColor) - Color.green(givenColor));
                totalError += Math.abs(Color.blue(idealColor) - Color.blue(givenColor));
                totalError += Math.abs(Color.alpha(idealColor) - Color.alpha(givenColor));

                if ((totalError / 1024.0f) >= PIXEL_ERROR_THRESHOLD) {
                    fail((filename + ": totalError is " + totalError));
                }

                if ((totalError / 1024.0f) >= PIXEL_DIFF_THRESHOLD) {
                    totalDiffPixelCount++;
                }
            }
        }
        if ((totalDiffPixelCount / totalPixelCount) >= PIXEL_DIFF_COUNT_THRESHOLD) {
            fail((filename + ": totalDiffPixelCount is " + totalDiffPixelCount));
        }
    }

    /** Drawable and constant-state changing-configuration bookkeeping. */
    @Test
    public void testGetChangingConfigurations() {
        AnimatedVectorDrawableCompat d1 = AnimatedVectorDrawableCompat.create(mContext,
                R.drawable.animated_color_fill_copy);
        ConstantState constantState = d1.getConstantState();
        if (constantState != null) {
            // default
            assertEquals(0, constantState.getChangingConfigurations());
            assertEquals(0, d1.getChangingConfigurations());

            // change the drawable's configuration does not affect the state's configuration
            d1.setChangingConfigurations(0xff);
            assertEquals(0xff, d1.getChangingConfigurations());
            assertEquals(0, constantState.getChangingConfigurations());

            // the state's configuration get refreshed
            constantState = d1.getConstantState();
            assertEquals(0xff, constantState.getChangingConfigurations());

            // set a new configuration to drawable
            d1.setChangingConfigurations(0xff00);
            assertEquals(0xff, constantState.getChangingConfigurations());
            assertEquals(0xffff, d1.getChangingConfigurations());
        }
    }

    @Test
    public void testGetConstantState() {
        AnimatedVectorDrawableCompat animatedVectorDrawableCompat =
                AnimatedVectorDrawableCompat.create(mContext, DRAWABLE_RES_ID);
        ConstantState constantState = animatedVectorDrawableCompat.getConstantState();
        if (constantState != null) {
            assertEquals(0, constantState.getChangingConfigurations());

            animatedVectorDrawableCompat.setChangingConfigurations(1);
            constantState = animatedVectorDrawableCompat.getConstantState();
            assertNotNull(constantState);
            assertEquals(1, constantState.getChangingConfigurations());
        }
    }

    /** Samples the animating fill/stroke several times and verifies only red is present. */
    @Test
    public void testAnimateColor() throws Throwable {
        final ImageButton imageButton =
                (ImageButton) mActivityTestRule.getActivity().findViewById(R.id.imageButton);
        final int viewW = imageButton.getWidth();
        final int viewH = imageButton.getHeight();
        int pixelX = viewW / 2;
        int pixelY = viewH / 2;
        final int numTests = 5;
        final Bitmap bitmap = Bitmap.createBitmap(imageButton.getWidth(), imageButton.getHeight(),
                Bitmap.Config.ARGB_8888);
        final Canvas c = new Canvas(bitmap);
        CountDownLatch latch = new CountDownLatch(numTests);

        mActivityTestRule.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                AnimatedVectorDrawableCompat avd = AnimatedVectorDrawableCompat.create(mContext,
                        R.drawable.animated_color_fill);
                ViewCompat.setBackground(imageButton, avd);
                avd.start();
            }
        });
        // Check the view several times during the animation to verify that it only
        // has red color in it
        for (int i = 0; i < numTests; ++i) {
            sleep(100);
            // check fill
            verifyRedOnly(pixelX, pixelY, imageButton, bitmap, c, latch);
            // check stroke
            verifyRedOnly(1, 1, imageButton, bitmap, c, latch);
        }
        latch.await(1000, TimeUnit.MILLISECONDS);
    }

    /**
     * Utility method to verify that the pixel at the given location has only red values.
     */
    private void verifyRedOnly(final int pixelX, final int pixelY, final View button,
            final Bitmap bitmap, final Canvas canvas, final CountDownLatch latch) throws Throwable {
        mActivityTestRule.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                button.draw(canvas);
                int pixel = bitmap.getPixel(pixelX, pixelY);
                int blue = pixel & 0xff;
                // BUG FIX: the original "pixel & 0xff00 >> 8" parses as
                // "pixel & (0xff00 >> 8)" because shift binds tighter than
                // bitwise AND in Java, so it re-read the BLUE channel and the
                // green channel was never actually verified.
                int green = (pixel >> 8) & 0xff;
                assertEquals("Blue channel not zero", 0, blue);
                assertEquals("Green channel not zero", 0, green);
                latch.countDown();
            }
        });
    }

    /** mutate() isolation: alpha changes on one instance must not leak to siblings. */
    @Test
    public void testMutate() {
        AnimatedVectorDrawableCompat d1 = AnimatedVectorDrawableCompat.create(mContext,
                DRAWABLE_RES_ID);
        AnimatedVectorDrawableCompat d2 = AnimatedVectorDrawableCompat.create(mContext,
                DRAWABLE_RES_ID);
        AnimatedVectorDrawableCompat d3 = AnimatedVectorDrawableCompat.create(mContext,
                DRAWABLE_RES_ID);

        if (d1.getConstantState() != null) {
            int originalAlpha = d2.getAlpha();
            int newAlpha = (originalAlpha + 1) % 255;

            // AVD is different than VectorDrawable. Every instance of it is a deep copy
            // of the VectorDrawable.
            // So every setAlpha operation will happen only to that specific object.
            d1.setAlpha(newAlpha);
            assertEquals(newAlpha, d1.getAlpha());
            assertEquals(originalAlpha, d2.getAlpha());
            assertEquals(originalAlpha, d3.getAlpha());

            d1.mutate();
            d1.setAlpha(0x40);
            assertEquals(0x40, d1.getAlpha());
            assertEquals(originalAlpha, d2.getAlpha());
            assertEquals(originalAlpha, d3.getAlpha());

            d2.setAlpha(0x20);
            assertEquals(0x40, d1.getAlpha());
            assertEquals(0x20, d2.getAlpha());
            assertEquals(originalAlpha, d3.getAlpha());
        } else {
            assertEquals(d1.mutate(), d1);
        }
    }

    /**
     * A helper function to setup the AVDC for callback tests.
     */
    private AnimatedVectorDrawableCompat setupAnimatedVectorDrawableCompat() {
        final ImageButton imageButton =
                (ImageButton) mActivityTestRule.getActivity().findViewById(R.id.imageButton);
        mAnimationStarted = false;
        mAnimationEnded = false;

        AnimatedVectorDrawableCompat avd = AnimatedVectorDrawableCompat.create(mContext,
                R.drawable.animation_vector_drawable_grouping_1); // Duration is 50 ms.
        ViewCompat.setBackground(imageButton, avd);
        return avd;
    }

    /**
     * Tests that a registered callback fires for both start and end.
     * Note that this test requires screen is on.
     */
    @Test
    @FlakyTest(bugId = 190193710)
    public void testRegisterCallback() throws Throwable {
        InstrumentationRegistry.getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                AnimatedVectorDrawableCompat avd = setupAnimatedVectorDrawableCompat();
                avd.registerAnimationCallback(mAnimationCallback);
                avd.start();
            }
        });
        Thread.sleep(500);
        assertTrue(mAnimationStarted);
        assertTrue(mAnimationEnded);
    }

    /**
     * Tests that clearAnimationCallbacks removes a registered callback.
     * Note that this test requires screen is on.
     */
    @Test
    public void testClearCallback() throws Throwable {
        InstrumentationRegistry.getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                AnimatedVectorDrawableCompat avd = setupAnimatedVectorDrawableCompat();
                avd.registerAnimationCallback(mAnimationCallback);
                avd.clearAnimationCallbacks();
                avd.start();
            }
        });
        Thread.sleep(500);
        assertFalse(mAnimationStarted);
        assertFalse(mAnimationEnded);
    }

    /**
     * Tests that a callback can be individually unregistered.
     * Note that this test requires screen is on.
     */
    @Test
    public void testUnregisterCallback() throws Throwable {
        InstrumentationRegistry.getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                AnimatedVectorDrawableCompat avd = setupAnimatedVectorDrawableCompat();
                avd.registerAnimationCallback(mAnimationCallback);
                avd.unregisterAnimationCallback(mAnimationCallback);
                avd.start();
            }
        });
        Thread.sleep(500);
        assertFalse(mAnimationStarted);
        assertFalse(mAnimationEnded);
    }

    /**
     * Render AVD with path morphing, make sure the bitmap is different when it render at the start
     * and the end.
     *
     * @throws Exception for time out or I/O problem while dumping debug images.
     */
    @Test
    public void testPathMorphing() throws Exception {
        final Object lock = new Object();
        final Bitmap bitmap = Bitmap.createBitmap(IMAGE_WIDTH, IMAGE_WIDTH,
                Bitmap.Config.ARGB_8888);
        final Canvas c = new Canvas(bitmap);

        final AnimatedVectorDrawableCompat avd = AnimatedVectorDrawableCompat.create(mContext,
                R.drawable.animation_path_morphing_rect2);
        avd.setBounds(0, 0, IMAGE_WIDTH, IMAGE_HEIGHT);

        bitmap.eraseColor(0);
        avd.draw(c);
        int centerColor = bitmap.getPixel(IMAGE_WIDTH / 2, IMAGE_WIDTH / 2);
        assertEquals(0xffff0000, centerColor);

        if (DBG_DUMP_PNG) {
            saveVectorDrawableIntoPNG(mResources, bitmap, -1, "start");
        }

        avd.registerAnimationCallback(new AnimationCallback() {
            @Override
            public void onAnimationStart(Drawable drawable) {
                // Nothing to do.
            }

            @Override
            public void onAnimationEnd(Drawable drawable) {
                bitmap.eraseColor(0);
                drawable.draw(c);
                int centerColor = bitmap.getPixel(IMAGE_WIDTH / 2, IMAGE_WIDTH / 2);
                assertEquals(0, centerColor);

                synchronized (lock) {
                    lock.notify();
                }
            }
        });

        InstrumentationRegistry.getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                avd.start();
            }
        });

        synchronized (lock) {
            lock.wait(1000);
        }

        if (DBG_DUMP_PNG) {
            saveVectorDrawableIntoPNG(mResources, bitmap, -1, "ended");
        }
    }

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    /**
     * Make sure when path didn't match, we got an exception.
     */
    @Test
    @UiThreadTest
    public void testPathMorphingException() throws Exception {
        thrown.expect(RuntimeException.class);
        // Creating the drawable is itself expected to throw.
        final AnimatedVectorDrawableCompat avd = AnimatedVectorDrawableCompat.create(mContext,
                R.drawable.animation_path_morphing_rect_exception);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.netty4.http.springboot; import javax.annotation.Generated; import org.apache.camel.LoggingLevel; import org.apache.camel.component.netty4.http.SecurityAuthenticator; import org.apache.camel.component.netty4.http.SecurityConstraint; import org.apache.camel.spring.boot.ComponentConfigurationPropertiesCommon; import org.springframework.boot.context.properties.ConfigurationProperties; /** * Netty HTTP server and client using the Netty 4.x library. * * Generated by camel-package-maven-plugin - do not edit this file! */ @Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo") @ConfigurationProperties(prefix = "camel.component.netty4-http") public class NettyHttpComponentConfiguration extends ComponentConfigurationPropertiesCommon { /** * Whether to enable auto configuration of the netty4-http component. This * is enabled by default. */ private Boolean enabled; /** * To use a custom org.apache.camel.component.netty4.http.NettyHttpBinding * for binding to/from Netty and Camel Message API. The option is a * org.apache.camel.component.netty4.http.NettyHttpBinding type.
 */ private String nettyHttpBinding; /** * To use the NettyConfiguration as configuration when creating endpoints. */ private NettyHttpConfigurationNestedConfiguration configuration; /** * To use a custom org.apache.camel.spi.HeaderFilterStrategy to filter * headers. The option is a org.apache.camel.spi.HeaderFilterStrategy type. */ private String headerFilterStrategy; /** * Refers to a * org.apache.camel.component.netty4.http.NettyHttpSecurityConfiguration for * configuring secure web resources. */ private NettyHttpSecurityConfigurationNestedConfiguration securityConfiguration; /** * Enable usage of global SSL context parameters. */ private Boolean useGlobalSslContextParameters = false; /** * The thread pool size for the EventExecutorGroup if its in use. The * default value is 16. */ private Integer maximumPoolSize = 16; /** * To use the given EventExecutorGroup. The option is a * io.netty.util.concurrent.EventExecutorGroup type. */ private String executorService; /** * To configure security using SSLContextParameters. The option is a * org.apache.camel.support.jsse.SSLContextParameters type. */ private String sslContextParameters; /** * Whether the component should resolve property placeholders on itself when * starting. Only properties which are of String type can use property * placeholders.
 */ private Boolean resolvePropertyPlaceholders = true; public String getNettyHttpBinding() { return nettyHttpBinding; } public void setNettyHttpBinding(String nettyHttpBinding) { this.nettyHttpBinding = nettyHttpBinding; } public NettyHttpConfigurationNestedConfiguration getConfiguration() { return configuration; } public void setConfiguration( NettyHttpConfigurationNestedConfiguration configuration) { this.configuration = configuration; } public String getHeaderFilterStrategy() { return headerFilterStrategy; } public void setHeaderFilterStrategy(String headerFilterStrategy) { this.headerFilterStrategy = headerFilterStrategy; } public NettyHttpSecurityConfigurationNestedConfiguration getSecurityConfiguration() { return securityConfiguration; } public void setSecurityConfiguration( NettyHttpSecurityConfigurationNestedConfiguration securityConfiguration) { this.securityConfiguration = securityConfiguration; } public Boolean getUseGlobalSslContextParameters() { return useGlobalSslContextParameters; } public void setUseGlobalSslContextParameters( Boolean useGlobalSslContextParameters) { this.useGlobalSslContextParameters = useGlobalSslContextParameters; } public Integer getMaximumPoolSize() { return maximumPoolSize; } public void setMaximumPoolSize(Integer maximumPoolSize) { this.maximumPoolSize = maximumPoolSize; } public String getExecutorService() { return executorService; } public void setExecutorService(String executorService) { this.executorService = executorService; } public String getSslContextParameters() { return sslContextParameters; } public void setSslContextParameters(String sslContextParameters) { this.sslContextParameters = sslContextParameters; } public Boolean getResolvePropertyPlaceholders() { return resolvePropertyPlaceholders; } public void setResolvePropertyPlaceholders( Boolean resolvePropertyPlaceholders) { this.resolvePropertyPlaceholders = resolvePropertyPlaceholders; } public static class NettyHttpConfigurationNestedConfiguration {
 // NOTE(review): raw Class type — presumably should be Class<?>; this file is generated
 // ("do not edit"), so any such fix belongs in SpringBootAutoConfigurationMojo, not here.
 public static final Class CAMEL_NESTED_CLASS = org.apache.camel.component.netty4.http.NettyHttpConfiguration.class; /** * The protocol to use which is either http or https */ private String protocol; /** * The local hostname such as localhost, or 0.0.0.0 when being a * consumer. The remote HTTP server hostname when using producer. */ private String host; /** * The port number. Is default 80 for http and 443 for https. */ private Integer port; /** * Allow using gzip/deflate for compression on the Netty HTTP server if * the client supports it from the HTTP headers. */ private Boolean compression = false; /** * Option to disable throwing the HttpOperationFailedException in case * of failed responses from the remote server. This allows you to get * all responses regardless of the HTTP status code. */ private Boolean throwExceptionOnFailure = true; /** * If enabled and an Exchange failed processing on the consumer side, * and if the caused Exception was send back serialized in the response * as a application/x-java-serialized-object content type. On the * producer side the exception will be deserialized and thrown as is, * instead of the HttpOperationFailedException. The caused exception is * required to be serialized. This is by default turned off. If you * enable this then be aware that Java will deserialize the incoming * data from the request to Java and that can be a potential security * risk. */ private Boolean transferException = false; /** * If this option is enabled, then during binding from Netty to Camel * Message then the header values will be URL decoded (eg %20 will be a * space character. Notice this option is used by the default * org.apache.camel.component.netty.http.NettyHttpBinding and therefore * if you implement a custom * org.apache.camel.component.netty4.http.NettyHttpBinding then you * would need to decode the headers accordingly to this option.
 */ private Boolean urlDecodeHeaders = false; /** * If this option is enabled, then during binding from Netty to Camel * Message then the headers will be mapped as well (eg added as header * to the Camel Message as well). You can turn off this option to * disable this. The headers can still be accessed from the * org.apache.camel.component.netty.http.NettyHttpMessage message with * the method getHttpRequest() that returns the Netty HTTP request * io.netty.handler.codec.http.HttpRequest instance. */ private Boolean mapHeaders = true; /** * Whether or not Camel should try to find a target consumer by matching * the URI prefix if no exact match is found. */ private Boolean matchOnUriPrefix = false; /** * If the option is true, the producer will ignore the Exchange.HTTP_URI * header, and use the endpoint's URI for request. You may also set the * throwExceptionOnFailure to be false to let the producer send all the * fault response back. The consumer working in the bridge mode will * skip the gzip compression and WWW URL form encoding (by adding the * Exchange.SKIP_GZIP_ENCODING and Exchange.SKIP_WWW_FORM_URLENCODED * headers to the consumed exchange). */ private Boolean bridgeEndpoint = false; /** * Resource path */ private String path; /** * Determines whether or not the raw input stream from Netty * HttpRequest#getContent() or HttpResponset#getContent() is cached or * not (Camel will read the stream into a in light-weight memory based * Stream caching) cache. By default Camel will cache the Netty input * stream to support reading it multiple times to ensure it Camel can * retrieve all data from the stream. However you can set this option to * true when you for example need to access the raw stream, such as * streaming it directly to a file or other persistent store. Mind that * if you enable this option, then you cannot read the Netty stream * multiple times out of the box, and you would need manually to reset * the reader index on the Netty raw stream.
 Also Netty will auto-close * the Netty stream when the Netty HTTP server/HTTP client is done * processing, which means that if the asynchronous routing engine is in * use then any asynchronous thread that may continue routing the * org.apache.camel.Exchange may not be able to read the Netty stream, * because Netty has closed it. */ private Boolean disableStreamCache = false; /** * Whether to send back HTTP status code 503 when the consumer has been * suspended. If the option is false then the Netty Acceptor is unbound * when the consumer is suspended, so clients cannot connect anymore. */ private Boolean send503whenSuspended = true; /** * Value in bytes the max content length per chunked frame received on * the Netty HTTP server. */ private Integer chunkedMaxContentLength = 1048576; /** * The maximum length of all headers. If the sum of the length of each * header exceeds this value, a * io.netty.handler.codec.TooLongFrameException will be raised. */ private Integer maxHeaderSize = 8192;
 // NOTE(review): undocumented in the generated source — presumably whether the default
 // codec may be used; TODO confirm semantics against NettyHttpConfiguration.
 private Boolean allowDefaultCodec; /** * The status codes which are considered a success response. The values * are inclusive. Multiple ranges can be defined, separated by comma, * e.g. 200-204,209,301-304. Each range must be a single number or * from-to with the dash included. The default range is 200-299 */ private String okStatusCodeRange = "200-299"; /** * Sets whether to use a relative path in HTTP requests.
 */ private Boolean useRelativePath = false; public String getProtocol() { return protocol; } public void setProtocol(String protocol) { this.protocol = protocol; } public String getHost() { return host; } public void setHost(String host) { this.host = host; } public Integer getPort() { return port; } public void setPort(Integer port) { this.port = port; } public Boolean getCompression() { return compression; } public void setCompression(Boolean compression) { this.compression = compression; } public Boolean getThrowExceptionOnFailure() { return throwExceptionOnFailure; } public void setThrowExceptionOnFailure(Boolean throwExceptionOnFailure) { this.throwExceptionOnFailure = throwExceptionOnFailure; } public Boolean getTransferException() { return transferException; } public void setTransferException(Boolean transferException) { this.transferException = transferException; } public Boolean getUrlDecodeHeaders() { return urlDecodeHeaders; } public void setUrlDecodeHeaders(Boolean urlDecodeHeaders) { this.urlDecodeHeaders = urlDecodeHeaders; } public Boolean getMapHeaders() { return mapHeaders; } public void setMapHeaders(Boolean mapHeaders) { this.mapHeaders = mapHeaders; } public Boolean getMatchOnUriPrefix() { return matchOnUriPrefix; } public void setMatchOnUriPrefix(Boolean matchOnUriPrefix) { this.matchOnUriPrefix = matchOnUriPrefix; } public Boolean getBridgeEndpoint() { return bridgeEndpoint; } public void setBridgeEndpoint(Boolean bridgeEndpoint) { this.bridgeEndpoint = bridgeEndpoint; } public String getPath() { return path; } public void setPath(String path) { this.path = path; } public Boolean getDisableStreamCache() { return disableStreamCache; } public void setDisableStreamCache(Boolean disableStreamCache) { this.disableStreamCache = disableStreamCache; } public Boolean getSend503whenSuspended() { return send503whenSuspended; } public void setSend503whenSuspended(Boolean send503whenSuspended) { this.send503whenSuspended = send503whenSuspended; } public Integer getChunkedMaxContentLength() { return chunkedMaxContentLength; } public void setChunkedMaxContentLength(Integer chunkedMaxContentLength) { this.chunkedMaxContentLength = chunkedMaxContentLength; } public Integer getMaxHeaderSize() { return maxHeaderSize; } public void setMaxHeaderSize(Integer maxHeaderSize) { this.maxHeaderSize = maxHeaderSize; } public Boolean getAllowDefaultCodec() { return allowDefaultCodec; } public void setAllowDefaultCodec(Boolean allowDefaultCodec) { this.allowDefaultCodec = allowDefaultCodec; } public String getOkStatusCodeRange() { return okStatusCodeRange; } public void setOkStatusCodeRange(String okStatusCodeRange) { this.okStatusCodeRange = okStatusCodeRange; } public Boolean getUseRelativePath() { return useRelativePath; } public void setUseRelativePath(Boolean useRelativePath) { this.useRelativePath = useRelativePath; } } public static class NettyHttpSecurityConfigurationNestedConfiguration {
 // NOTE(review): raw Class type — same generated pattern as above; fix in the generator.
 public static final Class CAMEL_NESTED_CLASS = org.apache.camel.component.netty4.http.NettyHttpSecurityConfiguration.class; /** * Whether to enable authentication * <p/> * This is by default enabled. */ private Boolean authenticate; /** * The supported restricted. * <p/> * Currently only Basic is supported. */ private String constraint; /** * Sets the name of the realm to use. */ private String realm; /** * Sets a {@link SecurityConstraint} to use for checking if a web * resource is restricted or not * <p/> * By default this is <tt>null</tt>, which means all resources is * restricted. */ private SecurityConstraint securityConstraint; /** * Sets the {@link SecurityAuthenticator} to use for authenticating the * {@link HttpPrincipal} . */ private SecurityAuthenticator securityAuthenticator; /** * Sets a logging level to use for logging denied login attempts (incl * stacktraces) * <p/> * This level is by default DEBUG.
 */ private LoggingLevel loginDeniedLoggingLevel;
 // NOTE(review): undocumented in the generated source — presumably the class name used for
 // role principals during authentication; TODO confirm against NettyHttpSecurityConfiguration.
 private String roleClassName; public Boolean getAuthenticate() { return authenticate; } public void setAuthenticate(Boolean authenticate) { this.authenticate = authenticate; } public String getConstraint() { return constraint; } public void setConstraint(String constraint) { this.constraint = constraint; } public String getRealm() { return realm; } public void setRealm(String realm) { this.realm = realm; } public SecurityConstraint getSecurityConstraint() { return securityConstraint; } public void setSecurityConstraint(SecurityConstraint securityConstraint) { this.securityConstraint = securityConstraint; } public SecurityAuthenticator getSecurityAuthenticator() { return securityAuthenticator; } public void setSecurityAuthenticator( SecurityAuthenticator securityAuthenticator) { this.securityAuthenticator = securityAuthenticator; } public LoggingLevel getLoginDeniedLoggingLevel() { return loginDeniedLoggingLevel; } public void setLoginDeniedLoggingLevel( LoggingLevel loginDeniedLoggingLevel) { this.loginDeniedLoggingLevel = loginDeniedLoggingLevel; } public String getRoleClassName() { return roleClassName; } public void setRoleClassName(String roleClassName) { this.roleClassName = roleClassName; } } }
/******************************************************************************* * Copyright 2014 United States Government as represented by the * Administrator of the National Aeronautics and Space Administration. * All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ /** * <copyright> * </copyright> * * $Id$ */ package gov.nasa.ensemble.core.model.plan.impl; import gov.nasa.ensemble.common.CommonUtils; import gov.nasa.ensemble.core.model.plan.EDay; import gov.nasa.ensemble.core.model.plan.PlanPackage; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.eclipse.emf.ecore.impl.MinimalEObjectImpl; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>EDay</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * <ul> * <li>{@link gov.nasa.ensemble.core.model.plan.impl.EDayImpl#getBubbleFormattedDate <em>Bubble Formatted Date</em>}</li> * <li>{@link gov.nasa.ensemble.core.model.plan.impl.EDayImpl#getDate <em>Date</em>}</li> * <li>{@link gov.nasa.ensemble.core.model.plan.impl.EDayImpl#getNotes <em>Notes</em>}</li> * </ul> * </p> * * @generated */ public class EDayImpl extends MinimalEObjectImpl.Container implements EDay { /** * The default value of the '{@link #getBubbleFormattedDate() <em>Bubble Formatted Date</em>}' attribute.
 * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getBubbleFormattedDate() * @generated * @ordered */ protected static final String BUBBLE_FORMATTED_DATE_EDEFAULT = null; /** * The cached value of the '{@link #getBubbleFormattedDate() <em>Bubble Formatted Date</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getBubbleFormattedDate() * @generated * @ordered */ protected String bubbleFormattedDate = BUBBLE_FORMATTED_DATE_EDEFAULT; /** * The default value of the '{@link #getDate() <em>Date</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getDate() * @generated * @ordered */ protected static final String DATE_EDEFAULT = null; /** * The cached value of the '{@link #getDate() <em>Date</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getDate() * @generated * @ordered */ protected String date = DATE_EDEFAULT; /** * The default value of the '{@link #getNotes() <em>Notes</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getNotes() * @generated * @ordered */ protected static final String NOTES_EDEFAULT = null; /** * The cached value of the '{@link #getNotes() <em>Notes</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getNotes() * @generated * @ordered */ protected String notes = NOTES_EDEFAULT; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected EDayImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return PlanPackage.Literals.EDAY; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public String getBubbleFormattedDate() { return bubbleFormattedDate; } /** * <!-- begin-user-doc --> * Only does it once because the bubble format is specified once.
 * <!-- end-user-doc --> * @generated NOT */ public void setBubbleFormattedDate(String newBubbleFormattedDate) {
 // Hand-edited (@generated NOT): write-once — only assigns while the field is still null,
 // because the bubble format is specified once; later calls are silently ignored.
 if (bubbleFormattedDate == null) { String oldBubbleFormattedDate = bubbleFormattedDate; bubbleFormattedDate = newBubbleFormattedDate; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, PlanPackage.EDAY__BUBBLE_FORMATTED_DATE, oldBubbleFormattedDate, bubbleFormattedDate)); } } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public String getDate() { return date; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setDate(String newDate) { String oldDate = date; date = newDate; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, PlanPackage.EDAY__DATE, oldDate, date)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public String getNotes() { return notes; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setNotes(String newNotes) { String oldNotes = notes; notes = newNotes; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, PlanPackage.EDAY__NOTES, oldNotes, notes)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case PlanPackage.EDAY__BUBBLE_FORMATTED_DATE: return getBubbleFormattedDate(); case PlanPackage.EDAY__DATE: return getDate(); case PlanPackage.EDAY__NOTES: return getNotes(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case PlanPackage.EDAY__BUBBLE_FORMATTED_DATE: setBubbleFormattedDate((String)newValue); return; case PlanPackage.EDAY__DATE: setDate((String)newValue); return; case PlanPackage.EDAY__NOTES: setNotes((String)newValue); return; }
 super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case PlanPackage.EDAY__BUBBLE_FORMATTED_DATE: setBubbleFormattedDate(BUBBLE_FORMATTED_DATE_EDEFAULT); return; case PlanPackage.EDAY__DATE: setDate(DATE_EDEFAULT); return; case PlanPackage.EDAY__NOTES: setNotes(NOTES_EDEFAULT); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case PlanPackage.EDAY__BUBBLE_FORMATTED_DATE: return BUBBLE_FORMATTED_DATE_EDEFAULT == null ? bubbleFormattedDate != null : !BUBBLE_FORMATTED_DATE_EDEFAULT.equals(bubbleFormattedDate); case PlanPackage.EDAY__DATE: return DATE_EDEFAULT == null ? date != null : !DATE_EDEFAULT.equals(date); case PlanPackage.EDAY__NOTES: return NOTES_EDEFAULT == null ? notes != null : !NOTES_EDEFAULT.equals(notes); } return super.eIsSet(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String toString() {
 // Generated EMF pattern: StringBuffer retained as-is (@generated); do not hand-optimize,
 // it would be overwritten on the next regeneration.
 if (eIsProxy()) return super.toString(); StringBuffer result = new StringBuffer(super.toString()); result.append(" (bubbleFormattedDate: "); result.append(bubbleFormattedDate); result.append(", date: "); result.append(date); result.append(", notes: "); result.append(notes); result.append(')'); return result.toString(); } } //EDayImpl
/****************************************************************************
 * DeliveryReceipt.java
 *
 * Copyright (C) Selenium Software Ltd 2006
 *
 * This file is part of SMPPSim.
 *
 * SMPPSim is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * SMPPSim is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with SMPPSim; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 *
 * @author martin@seleniumsoftware.com
 * http://www.woolleynet.com
 * http://www.seleniumsoftware.com
 * $Header: /var/cvsroot/SMPPSim2/src/java/com/seleniumsoftware/SMPPSim/pdu/DeliveryReceipt.java,v 1.14 2011/05/24 18:57:04 martin Exp $
 ****************************************************************************/
package com.seleniumsoftware.SMPPSim.pdu;

import com.seleniumsoftware.SMPPSim.SMPPSim;

import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * A DELIVER_SM PDU that carries a delivery receipt for a previously
 * submitted short message. The receipt body follows the conventional
 * SMPP 3.4 Appendix B text format
 * ("id:... sub:... dlvrd:... submit date:... done date:... stat:... err:... Text:...").
 */
public class DeliveryReceipt extends DeliverSM {

	private static Logger logger = Logger.getLogger("com.seleniumsoftware.smppsim");

	// Receipt fields; err defaults to "000" (no error).
	private String message_id = "";

	private String sub = "";

	private String dlvrd = "";

	private String submit_date = "";

	private String done_date = "";

	private String err = "000";

	private String stat = "";

	private String text = "";

	/**
	 * Builds a delivery receipt PDU from the original SUBMIT_SM.
	 *
	 * @param msg the original submitted message this receipt acknowledges
	 * @param esm_class the esm_class value to set on this DELIVER_SM
	 */
	public DeliveryReceipt(SubmitSM msg, int esm_class) {
		super(msg);
		setEsm_class(esm_class);
		setValidity_period("");
		setRegistered_delivery_flag(0);
		deriveUssd_service_op(msg);
	}

	/**
	 * Assembles the short_message receipt text from the current field
	 * values and the supplied message state, then updates sm_length.
	 *
	 * Format: id:IIIIIIIIII sub:SSS dlvrd:DDD submit date:YYMMDDhhmm
	 *         done date:YYMMDDhhmm stat:DDDDDDD err:E Text: .........
	 *
	 * @param state one of the PduConstants message state codes
	 */
	public void setDeliveryReceiptMessage(byte state) {
		setStateText(state);
		StringBuilder receipt = new StringBuilder();
		receipt.append("id:").append(message_id);
		receipt.append(" sub:").append(sub);
		receipt.append(" dlvrd:").append(dlvrd);
		receipt.append(" submit date:").append(submit_date);
		receipt.append(" done date:").append(done_date);
		receipt.append(" stat:").append(stat);
		receipt.append(" err:").append(err);
		receipt.append(" Text:").append(text);
		setShort_message(receipt.toString().getBytes());
		setSm_length(getShort_message().length);
	}

	/**
	 * Maps a PduConstants message state code to the seven-character
	 * receipt state string (e.g. DELIVRD, EXPIRED). Unrecognised codes
	 * map to "BADSTAT".
	 *
	 * @param state one of the PduConstants message state codes
	 */
	public void setStateText(byte state) {
		if (state == PduConstants.DELIVERED)
			stat = "DELIVRD";
		else if (state == PduConstants.EXPIRED)
			stat = "EXPIRED";
		else if (state == PduConstants.DELETED)
			stat = "DELETED";
		else if (state == PduConstants.UNDELIVERABLE)
			stat = "UNDELIV";
		else if (state == PduConstants.ACCEPTED)
			stat = "ACCEPTD";
		else if (state == PduConstants.UNKNOWN)
			stat = "UNKNOWN";
		else if (state == PduConstants.REJECTED)
			stat = "REJECTD";
		else if (state == PduConstants.ENROUTE)
			stat = "ENROUTE";
		else
			stat = "BADSTAT";
	}

	// ussd_service_op request -> response mapping:
	//   0 = PSSD indication  -> 16 = PSSD response
	//   1 = PSSR indication  -> 17 = PSSR response
	//   2 = USSR request     -> 18 = USSR confirm
	//   3 = USSN request     -> 19 = USSN confirm

	/**
	 * If the simulator is configured to include the ussd_service_op TLV in
	 * DELIVER_SM PDUs, derives the response/confirm service op value from
	 * the request service op carried in the original SUBMIT_SM.
	 */
	private void deriveUssd_service_op(SubmitSM msg) {
		if (SMPPSim.isDeliver_sm_includes_ussd_service_op()) {
			Tlv ussd_service_op = msg.getUssd_service_op();
			if (ussd_service_op != null && ussd_service_op.getValue().length == 1) {
				try {
					byte uso = ussd_service_op.getValue()[0];
					switch (uso) {
					case 0:
						setUssd_service_op((byte) 16); // PSSD response
						break;
					case 1:
						setUssd_service_op((byte) 17); // PSSR response
						break;
					case 2:
						setUssd_service_op((byte) 18); // USSR confirm
						break;
					case 3:
						setUssd_service_op((byte) 19); // USSN confirm
						break;
					}
				} catch (Exception e) {
					// Defensive only: the length check above makes this unreachable
					// in practice; log rather than dump to stderr.
					logger.log(Level.WARNING, "Failed to derive ussd_service_op", e);
				}
			}
		}
	}

	/** @return the dlvrd (messages delivered) count string */
	public String getDlvrd() {
		return dlvrd;
	}

	/** @return the done date (YYMMDDhhmm) */
	public String getDone_date() {
		return done_date;
	}

	/** @return the error code string */
	public String getErr() {
		return err;
	}

	/** @return the receipted message id */
	public String getMessage_id() {
		return message_id;
	}

	/** @return the final message state string */
	public String getStat() {
		return stat;
	}

	/** @return the sub (messages submitted) count string */
	public String getSub() {
		return sub;
	}

	/** @return the submit date (YYMMDDhhmm) */
	public String getSubmit_date() {
		return submit_date;
	}

	/** @return the first characters of the original message text */
	public String getText() {
		return text;
	}

	/** @param string the dlvrd (messages delivered) count string */
	public void setDlvrd(String string) {
		dlvrd = string;
	}

	/** @param string the done date (YYMMDDhhmm) */
	public void setDone_date(String string) {
		logger.finest("Setting done_date=" + string);
		done_date = string;
	}

	/** @param string the error code string */
	public void setErr(String string) {
		err = string;
	}

	/** @param string the receipted message id */
	public void setMessage_id(String string) {
		message_id = string;
	}

	/** @param string the final message state string */
	public void setStat(String string) {
		stat = string;
	}

	/** @param string the sub (messages submitted) count string */
	public void setSub(String string) {
		sub = string;
	}

	/** @param string the submit date (YYMMDDhhmm) */
	public void setSubmit_date(String string) {
		submit_date = string;
	}

	/** @param string the first characters of the original message text */
	public void setText(String string) {
		text = string;
	}

	/**
	 * @return String representation of this PDU
	 */
	public String toString() {
		return super.toString();
	}
}
/******************************************************************************* * Copyright 2015, 2016 Junichi Tatemura * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package com.nec.congenio.json; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.StringReader; import java.math.BigDecimal; import java.math.BigInteger; import java.util.Map; import javax.json.Json; import javax.json.JsonArray; import javax.json.JsonArrayBuilder; import javax.json.JsonNumber; import javax.json.JsonObject; import javax.json.JsonObjectBuilder; import javax.json.JsonString; import javax.json.JsonValue; import javax.json.JsonValue.ValueType; import javax.json.stream.JsonParser; import javax.json.stream.JsonParser.Event; import javax.json.stream.JsonParsingException; import com.nec.congenio.ConfigException; /** * Utility to create a JsonValue. * * @author tatemura * */ public final class JsonValueUtil { private JsonValueUtil() { } public static JsonNumber create(double val) { return new JsonNumberImpl(BigDecimal.valueOf(val)); } public static JsonNumber create(long val) { return new JsonNumberImpl(BigDecimal.valueOf(val)); } /** * Creates a JSON number for the given number. * @param val the number. * @return a JSON number that represents * the given number. 
*/ public static JsonNumber create(Number val) { if (val instanceof Long || val instanceof Integer) { return create(val.longValue()); } else if (val instanceof BigDecimal) { return new JsonNumberImpl((BigDecimal) val); } else if (val instanceof BigInteger) { return create(((BigInteger) val).longValue()); } else { return create(val.doubleValue()); } } public static JsonString create(String val) { return new JsonStringImpl(val); } /** * Parses the content of the given file as a JSON * object. * @param file the file that contains a JSON object. * @return a parsed JSON object. */ public static JsonObject parseObject(File file) { try { JsonParser parser = Json.createParser(new FileInputStream(file)); return (JsonObject) new JsonValueParser(parser).parse(); } catch (FileNotFoundException ex) { throw new ConfigException("json file not found", ex); } } public static JsonNumber number(String val) { return create(new BigDecimal(val)); } public static JsonObject object(String val) { return (JsonObject) parse(val); } public static JsonValue parse(String val) { return new JsonValueParser(val).parse(); } public static JsonObject emptyObject() { return Json.createObjectBuilder().build(); } static class JsonValueParser { private final JsonParser parser; JsonValueParser(JsonParser parser) { this.parser = parser; } JsonValueParser(String text) { this.parser = Json.createParser(new StringReader(text)); } public JsonValue parse() { JsonValue val = parseValue(assertEvent()); assertEnd(); parser.close(); return val; } private JsonObject parseObject() { JsonObjectBuilder builder = Json.createObjectBuilder(); String key; while ((key = assertKeyOrEndObject()) != null) { JsonValue value = parseValue(); builder.add(key, value); } return builder.build(); } private JsonArray parseArray() { JsonArrayBuilder builder = Json.createArrayBuilder(); JsonValue val; while ((val = assertValueOrEndArray()) != null) { builder.add(val); } return builder.build(); } private JsonValue parseValue() { return 
parseValue(assertEvent()); } private JsonValue parseValue(Event event) { switch (event) { case START_OBJECT: return parseObject(); case END_OBJECT: throw error("unexpected end of object"); case START_ARRAY: return parseArray(); case END_ARRAY: throw error("unexpected end of array"); case VALUE_NUMBER: return JsonValueUtil.create(parser.getBigDecimal()); case VALUE_STRING: return JsonValueUtil.create(parser.getString()); case VALUE_TRUE: return JsonValue.TRUE; case VALUE_FALSE: return JsonValue.FALSE; case KEY_NAME: throw error("unexpected key name"); default: throw error("unknown event"); } } private String assertKeyOrEndObject() { Event event = assertEvent(); if (event == Event.KEY_NAME) { return parser.getString(); } else if (event == Event.END_OBJECT) { return null; } else { throw error("key expected"); } } private JsonValue assertValueOrEndArray() { Event event = assertEvent(); if (event == Event.END_ARRAY) { return null; } else { return parseValue(event); } } private void assertEnd() { if (parser.hasNext()) { throw error("unexpected text after expression"); } } private Event assertEvent() { if (parser.hasNext()) { return parser.next(); } else { throw error("unexpected end of text"); } } private JsonParsingException error(String msg) { return new JsonParsingException(msg, parser.getLocation()); } } static class JsonNumberImpl implements JsonNumber { private final BigDecimal value; public JsonNumberImpl(BigDecimal value) { this.value = value; } @Override public ValueType getValueType() { return ValueType.NUMBER; } @Override public boolean isIntegral() { return value.scale() == 0; } @Override public int intValue() { return value.intValue(); } @Override public int intValueExact() { return value.intValueExact(); } @Override public long longValue() { return value.longValue(); } @Override public long longValueExact() { return value.longValueExact(); } @Override public BigInteger bigIntegerValue() { return value.toBigInteger(); } @Override public BigInteger 
bigIntegerValueExact() { return value.toBigIntegerExact(); } @Override public double doubleValue() { return value.doubleValue(); } @Override public BigDecimal bigDecimalValue() { return value; } @Override public String toString() { return value.toString(); } @Override public int hashCode() { return value.hashCode(); } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj instanceof JsonNumber) { JsonNumber val = (JsonNumber) obj; return value.equals(val.bigDecimalValue()); } return false; } } static class JsonStringImpl implements JsonString { private final String value; JsonStringImpl(String value) { this.value = value; } @Override public ValueType getValueType() { return ValueType.STRING; } @Override public String getString() { return value; } @Override public CharSequence getChars() { return value; } @Override public int hashCode() { return value.hashCode(); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj instanceof JsonString) { JsonString str = (JsonString) obj; return value.equals(str.getString()); } return false; } @Override public String toString() { if (value == null) { return "\"\""; } StringBuilder sb = new StringBuilder(); sb.append('"'); for (int i = 0; i < value.length(); i++) { char chr = value.charAt(i); switch (chr) { case '\\': case '"': sb.append('\\').append(chr); break; case '\t': sb.append('\\').append('t'); break; case '\n': sb.append('\\').append('n'); break; case '\r': sb.append('\\').append('r'); break; case '\b': sb.append('\\').append('b'); break; case '\f': sb.append('\\').append('f'); break; default: if (chr < ' ') { String hexStr = "000" + Integer.toHexString(chr); sb.append("\\u") .append(hexStr.substring(hexStr.length() - UNICODE_HEX_LEN)); } else { sb.append(chr); } } } sb.append('"'); return sb.toString(); } private static final int UNICODE_HEX_LEN = 4; } /** * Merges (unions) a list of Json objects into one object. 
* @param values an array of Json objects. * @return the merged object. */ public static JsonObject union(JsonObject... values) { JsonObjectBuilder builder = Json.createObjectBuilder(); for (JsonObject v : values) { for (Map.Entry<String, JsonValue> e : v.entrySet()) { builder.add(e.getKey(), e.getValue()); } } return builder.build(); } /** * Converts a JSON value to an indented text representation. * * @param json a JSON value to be converted. * @return an indented text. */ public static String toString(JsonValue json) { StringBuilder sb = new StringBuilder(); toString(json, sb, 0); return sb.toString(); } private static void toString(JsonObject json, StringBuilder sb, int indent) { sb.append("{"); boolean contd = false; for (Map.Entry<String, JsonValue> e : json.entrySet()) { if (contd) { sb.append(",\n"); } else { contd = true; sb.append("\n"); } indent(indent + 1, sb); sb.append("\"").append(e.getKey()).append("\": "); toString(e.getValue(), sb, indent + 1); } if (contd) { sb.append("\n"); indent(indent, sb); } sb.append("}"); } private static void toString(JsonArray json, StringBuilder sb, int indent) { sb.append("["); boolean contd = false; for (JsonValue v : json) { if (contd) { sb.append(",\n"); } else { contd = true; sb.append("\n"); } indent(indent + 1, sb); toString(v, sb, indent + 1); } sb.append("]"); } private static void toString(JsonValue json, StringBuilder sb, int indent) { if (json.getValueType() == ValueType.OBJECT) { toString((JsonObject) json, sb, indent); } else if (json.getValueType() == ValueType.ARRAY) { toString((JsonArray) json, sb, indent); } else { sb.append(json.toString()); } } private static void indent(int indent, StringBuilder sb) { for (int i = 0; i < indent; i++) { sb.append(INDENT); } } private static final String INDENT = " "; }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.sjms;

import java.util.Map;
import java.util.concurrent.ExecutorService;

import javax.jms.ConnectionFactory;

import org.apache.camel.CamelException;
import org.apache.camel.Endpoint;
import org.apache.camel.ExchangePattern;
import org.apache.camel.component.sjms.jms.ConnectionFactoryResource;
import org.apache.camel.component.sjms.jms.ConnectionResource;
import org.apache.camel.component.sjms.jms.DefaultJmsKeyFormatStrategy;
import org.apache.camel.component.sjms.jms.DestinationCreationStrategy;
import org.apache.camel.component.sjms.jms.JmsKeyFormatStrategy;
import org.apache.camel.component.sjms.jms.MessageCreatedStrategy;
import org.apache.camel.component.sjms.taskmanager.TimedTaskManager;
import org.apache.camel.impl.UriEndpointComponent;
import org.apache.camel.spi.HeaderFilterStrategy;
import org.apache.camel.spi.HeaderFilterStrategyAware;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The <a href="http://camel.apache.org/sjms">Simple JMS</a> component.
 */
public class SjmsComponent extends UriEndpointComponent implements HeaderFilterStrategyAware {

    private static final Logger LOGGER = LoggerFactory.getLogger(SjmsComponent.class);

    // Connection configuration: either a raw ConnectionFactory (pooled by
    // this component) or a pre-built ConnectionResource supplied by the user.
    private ConnectionFactory connectionFactory;
    private ConnectionResource connectionResource;
    private HeaderFilterStrategy headerFilterStrategy = new SjmsHeaderFilterStrategy();
    private JmsKeyFormatStrategy jmsKeyFormatStrategy = new DefaultJmsKeyFormatStrategy();
    private Integer connectionCount = 1;
    private TransactionCommitStrategy transactionCommitStrategy;
    private TimedTaskManager timedTaskManager;
    private DestinationCreationStrategy destinationCreationStrategy;
    private ExecutorService asyncStartStopExecutorService;
    private MessageCreatedStrategy messageCreatedStrategy;

    public SjmsComponent() {
        super(SjmsEndpoint.class);
    }

    /**
     * Creates an SjmsEndpoint for the given URI, copying the component-level
     * strategies onto the endpoint when they have been configured.
     * Transacted endpoints are forced to synchronous processing.
     */
    @Override
    protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
        validateMepAndReplyTo(parameters);
        SjmsEndpoint endpoint = new SjmsEndpoint(uri, this, remaining);
        setProperties(endpoint, parameters);
        if (endpoint.isTransacted()) {
            endpoint.setSynchronous(true);
        }
        if (transactionCommitStrategy != null) {
            endpoint.setTransactionCommitStrategy(transactionCommitStrategy);
        }
        if (destinationCreationStrategy != null) {
            endpoint.setDestinationCreationStrategy(destinationCreationStrategy);
        }
        if (headerFilterStrategy != null) {
            endpoint.setHeaderFilterStrategy(headerFilterStrategy);
        }
        if (messageCreatedStrategy != null) {
            endpoint.setMessageCreatedStrategy(messageCreatedStrategy);
        }
        return endpoint;
    }

    /**
     * Helper method used to verify that when there is a namedReplyTo value we
     * are using the InOut MEP. If namedReplyTo is defined and the MEP is InOnly
     * the endpoint won't be expecting a reply so throw an error to alert the
     * user.
     *
     * @param parameters {@link Endpoint} parameters
     * @throws Exception throws a {@link CamelException} when MEP equals InOnly
     *             and namedReplyTo is defined.
     */
    private static void validateMepAndReplyTo(Map<String, Object> parameters) throws Exception {
        boolean namedReplyToSet = parameters.containsKey("namedReplyTo");
        boolean mepSet = parameters.containsKey("exchangePattern");
        if (namedReplyToSet && mepSet) {
            if (!parameters.get("exchangePattern").equals(ExchangePattern.InOut.toString())) {
                String namedReplyTo = (String) parameters.get("namedReplyTo");
                ExchangePattern mep = ExchangePattern.valueOf((String) parameters.get("exchangePattern"));
                throw new CamelException("Setting parameter namedReplyTo=" + namedReplyTo
                        + " requires a MEP of type InOut. Parameter exchangePattern is set to " + mep);
            }
        }
    }

    /**
     * Starts the component: creates the task manager and, when no
     * ConnectionResource was supplied, builds and fills a connection pool
     * from the configured ConnectionFactory.
     */
    @Override
    protected void doStart() throws Exception {
        super.doStart();
        timedTaskManager = new TimedTaskManager();
        LOGGER.trace("Verify ConnectionResource");
        if (getConnectionResource() == null) {
            LOGGER.debug("No ConnectionResource provided. Initialize the ConnectionFactoryResource.");
            // We always use a connection pool, even for a pool of 1
            ConnectionFactoryResource connections = new ConnectionFactoryResource(getConnectionCount(), getConnectionFactory());
            connections.fillPool();
            setConnectionResource(connections);
        } else if (getConnectionResource() instanceof ConnectionFactoryResource) {
            ((ConnectionFactoryResource) getConnectionResource()).fillPool();
        }
    }

    /**
     * Stops the component: cancels pending timed tasks and drains the
     * connection pool if this component owns one.
     */
    @Override
    protected void doStop() throws Exception {
        if (timedTaskManager != null) {
            timedTaskManager.cancelTasks();
        }
        if (getConnectionResource() != null) {
            if (getConnectionResource() instanceof ConnectionFactoryResource) {
                ((ConnectionFactoryResource) getConnectionResource()).drainPool();
            }
        }
        super.doStop();
    }

    /**
     * Shuts down the async start/stop executor (if it was ever created)
     * before delegating to the superclass shutdown.
     */
    @Override
    protected void doShutdown() throws Exception {
        if (asyncStartStopExecutorService != null) {
            getCamelContext().getExecutorServiceManager().shutdownNow(asyncStartStopExecutorService);
            asyncStartStopExecutorService = null;
        }
        super.doShutdown();
    }

    /**
     * Lazily creates the executor used for async start/stop of listeners.
     * Synchronized so concurrent callers share a single executor.
     */
    protected synchronized ExecutorService getAsyncStartStopExecutorService() {
        if (asyncStartStopExecutorService == null) {
            // use a cached thread pool for async start tasks as they can run for a while, and we need a dedicated thread
            // for each task, and the thread pool will shrink when no more tasks running
            asyncStartStopExecutorService = getCamelContext().getExecutorServiceManager().newCachedThreadPool(this, "AsyncStartStopListener");
        }
        return asyncStartStopExecutorService;
    }

    /**
     * A ConnectionFactory is required to enable the SjmsComponent.
     * It can be set directly or set as part of a ConnectionResource.
     */
    public void setConnectionFactory(ConnectionFactory connectionFactory) {
        this.connectionFactory = connectionFactory;
    }

    public ConnectionFactory getConnectionFactory() {
        return connectionFactory;
    }

    @Override
    public HeaderFilterStrategy getHeaderFilterStrategy() {
        return this.headerFilterStrategy;
    }

    /**
     * To use a custom HeaderFilterStrategy to filter header to and from Camel message.
     */
    @Override
    public void setHeaderFilterStrategy(HeaderFilterStrategy headerFilterStrategy) {
        this.headerFilterStrategy = headerFilterStrategy;
    }

    /**
     * A ConnectionResource is an interface that allows for customization and container control of the ConnectionFactory.
     * See Plugable Connection Resource Management for further details.
     */
    public void setConnectionResource(ConnectionResource connectionResource) {
        this.connectionResource = connectionResource;
    }

    public ConnectionResource getConnectionResource() {
        return connectionResource;
    }

    /**
     * The maximum number of connections available to endpoints started under this component
     */
    public void setConnectionCount(Integer maxConnections) {
        this.connectionCount = maxConnections;
    }

    public Integer getConnectionCount() {
        return connectionCount;
    }

    /**
     * Pluggable strategy for encoding and decoding JMS keys so they can be compliant with the JMS specification.
     * Camel provides one implementation out of the box: default.
     * The default strategy will safely marshal dots and hyphens (. and -).
     * Can be used for JMS brokers which do not care whether JMS header keys contain illegal characters.
     * You can provide your own implementation of the org.apache.camel.component.jms.JmsKeyFormatStrategy
     * and refer to it using the # notation.
     */
    public void setJmsKeyFormatStrategy(JmsKeyFormatStrategy jmsKeyFormatStrategy) {
        this.jmsKeyFormatStrategy = jmsKeyFormatStrategy;
    }

    public JmsKeyFormatStrategy getJmsKeyFormatStrategy() {
        return jmsKeyFormatStrategy;
    }

    public TransactionCommitStrategy getTransactionCommitStrategy() {
        return transactionCommitStrategy;
    }

    /**
     * To configure which kind of commit strategy to use. Camel provides two implementations out
     * of the box, default and batch.
     */
    public void setTransactionCommitStrategy(TransactionCommitStrategy commitStrategy) {
        this.transactionCommitStrategy = commitStrategy;
    }

    public DestinationCreationStrategy getDestinationCreationStrategy() {
        return destinationCreationStrategy;
    }

    /**
     * To use a custom DestinationCreationStrategy.
     */
    public void setDestinationCreationStrategy(DestinationCreationStrategy destinationCreationStrategy) {
        this.destinationCreationStrategy = destinationCreationStrategy;
    }

    public TimedTaskManager getTimedTaskManager() {
        return timedTaskManager;
    }

    /**
     * To use a custom TimedTaskManager
     */
    public void setTimedTaskManager(TimedTaskManager timedTaskManager) {
        this.timedTaskManager = timedTaskManager;
    }

    public MessageCreatedStrategy getMessageCreatedStrategy() {
        return messageCreatedStrategy;
    }

    /**
     * To use the given MessageCreatedStrategy which are invoked when Camel creates new instances of <tt>javax.jms.Message</tt>
     * objects when Camel is sending a JMS message.
     */
    public void setMessageCreatedStrategy(MessageCreatedStrategy messageCreatedStrategy) {
        this.messageCreatedStrategy = messageCreatedStrategy;
    }
}
/*-
 * See the file LICENSE for redistribution information.
 *
 * Copyright (c) 2002,2008 Oracle.  All rights reserved.
 *
 * $Id: SampleViews.java 63573 2008-05-23 21:43:21Z trent.nelson $
 */

package collections.ship.sentity;

import com.sleepycat.bind.EntityBinding;
import com.sleepycat.bind.EntryBinding;
import com.sleepycat.bind.serial.ClassCatalog;
import com.sleepycat.bind.serial.TupleSerialBinding;
import com.sleepycat.bind.tuple.TupleBinding;
import com.sleepycat.bind.tuple.TupleInput;
import com.sleepycat.bind.tuple.TupleOutput;
import com.sleepycat.collections.StoredSortedMap;
import com.sleepycat.collections.StoredSortedValueSet;

/**
 * SampleViews defines the data bindings and collection views for the sample
 * database.
 *
 * @author Mark Hayes
 */
public class SampleViews {

    // Map views over the three primary stores and the three secondary
    // (index) databases; all are sorted because the stores use B-trees.
    private StoredSortedMap partMap;
    private StoredSortedMap supplierMap;
    private StoredSortedMap shipmentMap;
    private StoredSortedMap shipmentByPartMap;
    private StoredSortedMap shipmentBySupplierMap;
    private StoredSortedMap supplierByCityMap;

    /**
     * Create the data bindings and collection views.
     */
    public SampleViews(SampleDatabase db) {

        // Create the data bindings.
        // In this sample, EntityBinding classes are used to bind the stored
        // key/data entry pair to a combined data object; a "tricky" binding
        // that uses transient fields is used--see PartBinding, etc, for
        // details.  For keys, a one-to-one binding is implemented with
        // EntryBinding classes to bind the stored tuple entry to a key Object.
        //
        ClassCatalog catalog = db.getClassCatalog();
        EntryBinding partKeyBinding =
            new PartKeyBinding();
        EntityBinding partDataBinding =
            new PartBinding(catalog, Part.class);
        EntryBinding supplierKeyBinding =
            new SupplierKeyBinding();
        EntityBinding supplierDataBinding =
            new SupplierBinding(catalog, Supplier.class);
        EntryBinding shipmentKeyBinding =
            new ShipmentKeyBinding();
        EntityBinding shipmentDataBinding =
            new ShipmentBinding(catalog, Shipment.class);
        // City keys are plain strings, so the built-in String binding is used.
        EntryBinding cityKeyBinding =
            TupleBinding.getPrimitiveBinding(String.class);

        // Create map views for all stores and indices.
        // StoredSortedMap is used since the stores and indices are ordered
        // (they use the DB_BTREE access method).
        //
        partMap =
            new StoredSortedMap(db.getPartDatabase(),
                                partKeyBinding, partDataBinding, true);
        supplierMap =
            new StoredSortedMap(db.getSupplierDatabase(),
                                supplierKeyBinding, supplierDataBinding, true);
        shipmentMap =
            new StoredSortedMap(db.getShipmentDatabase(),
                                shipmentKeyBinding, shipmentDataBinding, true);
        shipmentByPartMap =
            new StoredSortedMap(db.getShipmentByPartDatabase(),
                                partKeyBinding, shipmentDataBinding, true);
        shipmentBySupplierMap =
            new StoredSortedMap(db.getShipmentBySupplierDatabase(),
                                supplierKeyBinding, shipmentDataBinding, true);
        supplierByCityMap =
            new StoredSortedMap(db.getSupplierByCityDatabase(),
                                cityKeyBinding, supplierDataBinding, true);
    }

    // The views returned below can be accessed using the java.util.Map or
    // java.util.Set interfaces, or using the StoredSortedMap and
    // StoredValueSet classes, which provide additional methods.  The entity
    // sets could be obtained directly from the Map.values() method but
    // convenience methods are provided here to return them in order to avoid
    // down-casting elsewhere.

    /**
     * Return a map view of the part storage container.
     */
    public StoredSortedMap getPartMap() {
        return partMap;
    }

    /**
     * Return a map view of the supplier storage container.
     */
    public StoredSortedMap getSupplierMap() {
        return supplierMap;
    }

    /**
     * Return a map view of the shipment storage container.
     */
    public StoredSortedMap getShipmentMap() {
        return shipmentMap;
    }

    /**
     * Return an entity set view of the part storage container.
     */
    public StoredSortedValueSet getPartSet() {
        return (StoredSortedValueSet) partMap.values();
    }

    /**
     * Return an entity set view of the supplier storage container.
     */
    public StoredSortedValueSet getSupplierSet() {
        return (StoredSortedValueSet) supplierMap.values();
    }

    /**
     * Return an entity set view of the shipment storage container.
     */
    public StoredSortedValueSet getShipmentSet() {
        return (StoredSortedValueSet) shipmentMap.values();
    }

    /**
     * Return a map view of the shipment-by-part index.
     */
    public StoredSortedMap getShipmentByPartMap() {
        return shipmentByPartMap;
    }

    /**
     * Return a map view of the shipment-by-supplier index.
     */
    public StoredSortedMap getShipmentBySupplierMap() {
        return shipmentBySupplierMap;
    }

    /**
     * Return a map view of the supplier-by-city index.
     */
    public final StoredSortedMap getSupplierByCityMap() {
        return supplierByCityMap;
    }

    /**
     * PartKeyBinding is used to bind the stored key tuple entry for a part to
     * a key object representation.
     */
    private static class PartKeyBinding extends TupleBinding {

        /**
         * Construct the binding object.
         */
        private PartKeyBinding() {
        }

        /**
         * Create the key object from the stored key tuple entry.
         */
        public Object entryToObject(TupleInput input) {

            String number = input.readString();
            return new PartKey(number);
        }

        /**
         * Create the stored key tuple entry from the key object.
         */
        public void objectToEntry(Object object, TupleOutput output) {

            PartKey key = (PartKey) object;
            output.writeString(key.getNumber());
        }
    }

    /**
     * PartBinding is used to bind the stored key/data entry pair for a part
     * to a combined data object (entity).
     *
     * <p> The binding is "tricky" in that it uses the Part class for both the
     * stored data entry and the combined entity object.  To do this, Part's
     * key field(s) are transient and are set by the binding after the data
     * object has been deserialized. This avoids the use of a PartData class
     * completely. </p>
     */
    private static class PartBinding extends TupleSerialBinding {

        /**
         * Construct the binding object.
         */
        private PartBinding(ClassCatalog classCatalog, Class dataClass) {

            super(classCatalog, dataClass);
        }

        /**
         * Create the entity by combining the stored key and data.
         * This "tricky" binding returns the stored data as the entity, but
         * first it sets the transient key fields from the stored key.
         */
        public Object entryToObject(TupleInput keyInput, Object dataInput) {

            String number = keyInput.readString();
            Part part = (Part) dataInput;
            part.setKey(number);
            return part;
        }

        /**
         * Create the stored key from the entity.
         */
        public void objectToKey(Object object, TupleOutput output) {

            Part part = (Part) object;
            output.writeString(part.getNumber());
        }

        /**
         * Return the entity as the stored data.  There is nothing to do here
         * since the entity's key fields are transient.
         */
        public Object objectToData(Object object) {

            return object;
        }
    }

    /**
     * SupplierKeyBinding is used to bind the stored key tuple entry for a
     * supplier to a key object representation.
     */
    private static class SupplierKeyBinding extends TupleBinding {

        /**
         * Construct the binding object.
         */
        private SupplierKeyBinding() {
        }

        /**
         * Create the key object from the stored key tuple entry.
         */
        public Object entryToObject(TupleInput input) {

            String number = input.readString();
            return new SupplierKey(number);
        }

        /**
         * Create the stored key tuple entry from the key object.
         */
        public void objectToEntry(Object object, TupleOutput output) {

            SupplierKey key = (SupplierKey) object;
            output.writeString(key.getNumber());
        }
    }

    /**
     * SupplierBinding is used to bind the stored key/data entry pair for a
     * supplier to a combined data object (entity).
     *
     * <p> The binding is "tricky" in that it uses the Supplier class for both
     * the stored data entry and the combined entity object.  To do this,
     * Supplier's key field(s) are transient and are set by the binding after
     * the data object has been deserialized. This avoids the use of a
     * SupplierData class completely. </p>
     */
    private static class SupplierBinding extends TupleSerialBinding {

        /**
         * Construct the binding object.
         */
        private SupplierBinding(ClassCatalog classCatalog, Class dataClass) {

            super(classCatalog, dataClass);
        }

        /**
         * Create the entity by combining the stored key and data.
         * This "tricky" binding returns the stored data as the entity, but
         * first it sets the transient key fields from the stored key.
         */
        public Object entryToObject(TupleInput keyInput, Object dataInput) {

            String number = keyInput.readString();
            Supplier supplier = (Supplier) dataInput;
            supplier.setKey(number);
            return supplier;
        }

        /**
         * Create the stored key from the entity.
         */
        public void objectToKey(Object object, TupleOutput output) {

            Supplier supplier = (Supplier) object;
            output.writeString(supplier.getNumber());
        }

        /**
         * Return the entity as the stored data.  There is nothing to do here
         * since the entity's key fields are transient.
         */
        public Object objectToData(Object object) {

            return object;
        }
    }

    /**
     * ShipmentKeyBinding is used to bind the stored key tuple entry for a
     * shipment to a key object representation.
     */
    private static class ShipmentKeyBinding extends TupleBinding {

        /**
         * Construct the binding object.
         */
        private ShipmentKeyBinding() {
        }

        /**
         * Create the key object from the stored key tuple entry.
         * A shipment key is composite: part number then supplier number.
         */
        public Object entryToObject(TupleInput input) {

            String partNumber = input.readString();
            String supplierNumber = input.readString();
            return new ShipmentKey(partNumber, supplierNumber);
        }

        /**
         * Create the stored key tuple entry from the key object.
         */
        public void objectToEntry(Object object, TupleOutput output) {

            ShipmentKey key = (ShipmentKey) object;
            output.writeString(key.getPartNumber());
            output.writeString(key.getSupplierNumber());
        }
    }

    /**
     * ShipmentBinding is used to bind the stored key/data entry pair for a
     * shipment to a combined data object (entity).
     *
     * <p> The binding is "tricky" in that it uses the Shipment class for both
     * the stored data entry and the combined entity object.  To do this,
     * Shipment's key field(s) are transient and are set by the binding after
     * the data object has been deserialized. This avoids the use of a
     * ShipmentData class completely. </p>
     */
    private static class ShipmentBinding extends TupleSerialBinding {

        /**
         * Construct the binding object.
         */
        private ShipmentBinding(ClassCatalog classCatalog, Class dataClass) {

            super(classCatalog, dataClass);
        }

        /**
         * Create the entity by combining the stored key and data.
         * This "tricky" binding returns the stored data as the entity, but
         * first it sets the transient key fields from the stored key.
         */
        public Object entryToObject(TupleInput keyInput, Object dataInput) {

            String partNumber = keyInput.readString();
            String supplierNumber = keyInput.readString();
            Shipment shipment = (Shipment) dataInput;
            shipment.setKey(partNumber, supplierNumber);
            return shipment;
        }

        /**
         * Create the stored key from the entity.
         */
        public void objectToKey(Object object, TupleOutput output) {

            Shipment shipment = (Shipment) object;
            output.writeString(shipment.getPartNumber());
            output.writeString(shipment.getSupplierNumber());
        }

        /**
         * Return the entity as the stored data.  There is nothing to do here
         * since the entity's key fields are transient.
         */
        public Object objectToData(Object object) {

            return object;
        }
    }
}
package org.broadinstitute.hellbender.tools.walkers.variantutils;

import htsjdk.variant.utils.GeneralUtils;
import htsjdk.variant.variantcontext.*;
import htsjdk.variant.vcf.VCFConstants;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.tools.walkers.genotyper.GenotypeAssignmentMethod;
import org.broadinstitute.hellbender.utils.MathUtils;
import org.broadinstitute.hellbender.utils.QualityUtils;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.samples.Sample;
import org.broadinstitute.hellbender.utils.samples.SampleDB;
import org.broadinstitute.hellbender.utils.variant.GATKVCFConstants;
import org.broadinstitute.hellbender.utils.variant.GATKVariantContextUtils;

import java.util.*;

/**
 * Utility to compute genotype posteriors given family priors.
 *
 * <p>For each trio (mother, father, child) found in the pedigree, the per-sample
 * diploid genotype likelihoods (HOM_REF/HET/HOM_VAR) are combined with a
 * Mendelian-violation prior (the de novo prior) over all 27 genotype
 * configurations, then marginalized to per-sample posteriors.</p>
 *
 * <p>NOTE(review): not thread-safe — {@code configurationLikelihoodsMatrix} is
 * shared mutable state reused per variant.</p>
 */
public final class FamilyLikelihoods {

    private static final Logger logger = LogManager.getLogger(FamilyLikelihoods.class);

    //Matrix of priors for all genotype combinations
    //mvCountMatrix[mother][father][child] = number of Mendelian violations for that combination
    private final EnumMap<GenotypeType,EnumMap<GenotypeType,EnumMap<GenotypeType,Integer>>> mvCountMatrix = new EnumMap<>(GenotypeType.class);

    static final int NUM_CALLED_GENOTYPETYPES = 3; //HOM_REF, HET, and HOM_VAR

    // Flattened 3x3x3 matrix of log10 configuration likelihoods, indexed via getLikelihoodMatrixIndex.
    // Rewritten in place by updateFamilyGenotypes for every trio/variant.
    double[] configurationLikelihoodsMatrix = new double[NUM_CALLED_GENOTYPETYPES*NUM_CALLED_GENOTYPETYPES*NUM_CALLED_GENOTYPETYPES];

    // Children of complete trios (both parents present in the VCF), selected by setTrios.
    private List<Sample> trios = new ArrayList<>();

    // Sentinel meaning "no joint value computed" for the trio (incomplete trio or uncalled member).
    public final double NO_JOINT_VALUE = -1.0;

    // Prior probability of a de novo mutation; overridden by the constructor argument.
    private double deNovoPrior = 1e-8;

    private static final double ONE_THIRD = 0.333333333333333333;
    private static final double LOG10_OF_ONE_THIRD = -0.4771213;

    // Role of a sample within a trio; ordinal order (0,1,2) is referenced in error messages.
    private enum FamilyMember {
        MOTHER,
        FATHER,
        CHILD
    }

    /**
     * Build the family-priors engine.
     *
     * @param DNprior    prior probability of a de novo mutation
     * @param vcfSamples samples actually present in the VCF; families are subset to these
     * @param families   family id -> members, as produced by the SampleDB
     */
    public FamilyLikelihoods(final SampleDB sampleDB, final double DNprior, final Set<String> vcfSamples, final Map<String,Set<Sample>> families){
        this.deNovoPrior = DNprior;
        Arrays.fill(configurationLikelihoodsMatrix,0);
        buildMatrices();
        trios = setTrios(sampleDB, vcfSamples, families);
    }

    /**
     * Applies the trio genotype combination to the given trio.
     *
     * <p>Assumes updateFamilyGenotypes has just filled configurationLikelihoodsMatrix
     * for this trio — the per-member posteriors read here come from that shared state.</p>
     *
     * @param motherGenotype: Original genotype of the mother
     * @param fatherGenotype: Original genotype of the father
     * @param childGenotype: Original genotype of the child
     * @param updatedGenotypes: An ArrayList&lt;Genotype&gt; to which the newly updated genotypes are added in the following order: Mother, Father, Child
     */
    public void getUpdatedGenotypes(final VariantContext vc, final Genotype motherGenotype, final Genotype fatherGenotype, final Genotype childGenotype, final ArrayList<Genotype> updatedGenotypes){
        //genotypes here can be no call
        final boolean fatherIsCalled = fatherGenotype != null && hasCalledGT(fatherGenotype.getType()) && fatherGenotype.hasLikelihoods();
        final boolean motherIsCalled = motherGenotype != null && hasCalledGT(motherGenotype.getType()) && motherGenotype.hasLikelihoods();
        final boolean childIsCalled = childGenotype != null && hasCalledGT(childGenotype.getType()) && childGenotype.hasLikelihoods();

        //default to posteriors equal to likelihoods (flat priors) in case input genotypes are not called
        final double[] uninformativeLikelihoods = {ONE_THIRD, ONE_THIRD, ONE_THIRD};

        // Normalized (linear-space) likelihoods per member, or flat 1/3 each if uncalled.
        final double[] motherLikelihoods = motherIsCalled? GeneralUtils.normalizeFromLog10(motherGenotype.getLikelihoods().getAsVector()) : uninformativeLikelihoods;
        final double[] fatherLikelihoods = fatherIsCalled? GeneralUtils.normalizeFromLog10(fatherGenotype.getLikelihoods().getAsVector()) : uninformativeLikelihoods;
        final double[] childLikelihoods = childIsCalled? GeneralUtils.normalizeFromLog10(childGenotype.getLikelihoods().getAsVector()) : uninformativeLikelihoods;

        //these are also in log10 space
        final double[] motherLog10Posteriors = getPosteriors(FamilyMember.MOTHER);
        final double[] fatherLog10Posteriors = getPosteriors(FamilyMember.FATHER);
        final double[] childLog10Posteriors = getPosteriors(FamilyMember.CHILD);

        final double[] motherPosteriors = GeneralUtils.normalizeFromLog10(motherLog10Posteriors);
        final double[] fatherPosteriors = GeneralUtils.normalizeFromLog10(fatherLog10Posteriors);
        final double[] childPosteriors = GeneralUtils.normalizeFromLog10(childLog10Posteriors);

        double jointPosteriorProbability = -1;
        //jointTrioLikelihood is combined likelihoods (before prior) of best configuration after applying prior
        double jointTrioLikelihood = -1;
        if(childIsCalled && motherIsCalled && fatherIsCalled) {
            // Product over the three members, each evaluated at its posterior-best genotype index.
            jointTrioLikelihood = motherLikelihoods[MathUtils.maxElementIndex(motherPosteriors)]*fatherLikelihoods[MathUtils.maxElementIndex(fatherPosteriors)]*childLikelihoods[MathUtils.maxElementIndex(childPosteriors)];
            jointPosteriorProbability = MathUtils.arrayMax(motherPosteriors)*MathUtils.arrayMax(fatherPosteriors)*MathUtils.arrayMax(childPosteriors);
        }

        // Output order is fixed: mother, father, child (documented in the javadoc above).
        updatedGenotypes.add(getUpdatedGenotype(vc, motherGenotype, jointTrioLikelihood, jointPosteriorProbability, motherLog10Posteriors));
        updatedGenotypes.add(getUpdatedGenotype(vc, fatherGenotype, jointTrioLikelihood, jointPosteriorProbability, fatherLog10Posteriors));
        updatedGenotypes.add(getUpdatedGenotype(vc, childGenotype, jointTrioLikelihood, jointPosteriorProbability, childLog10Posteriors));
    }

    /**
     * Rebuild a single genotype with family-informed annotations.
     *
     * <p>Adds phred-scaled joint likelihood (JL) and joint posterior (JP) attributes
     * (capped at Byte.MAX_VALUE, -1 when unavailable), re-calls the genotype from the
     * supplied log10 posteriors, and stores those posteriors as phred-scaled values.</p>
     *
     * @return the original genotype unchanged when null or not confidently called
     */
    private Genotype getUpdatedGenotype(final VariantContext vc, final Genotype genotype, final double jointLikelihood, final double jointPosteriorProb, final double[] log10Posteriors){
        //Don't update null, missing or unavailable genotypes
        if(genotype == null || !hasCalledGT(genotype.getType())) {
            return genotype;
        }

        int phredScaledJL = -1;
        int phredScaledJP = -1;
        if(jointLikelihood != NO_JOINT_VALUE){
            // Convert P(correct) to a phred-scaled error probability; cap at Byte.MAX_VALUE (127).
            final double dphredScaledJL = QualityUtils.phredScaleLog10ErrorRate(Math.log10(1-jointLikelihood));
            phredScaledJL = dphredScaledJL < Byte.MAX_VALUE ? (byte)dphredScaledJL : Byte.MAX_VALUE;
        }
        if(jointPosteriorProb != NO_JOINT_VALUE){
            final double dphredScaledJP = QualityUtils.phredScaleLog10ErrorRate(Math.log10(1-jointPosteriorProb));
            phredScaledJP = dphredScaledJP < Byte.MAX_VALUE ? (byte)dphredScaledJP : Byte.MAX_VALUE;
        }

        //Add the joint trio calculations
        final Map<String, Object> genotypeAttributes = new LinkedHashMap<>();
        genotypeAttributes.putAll(genotype.getExtendedAttributes());
        genotypeAttributes.put(GATKVCFConstants.JOINT_LIKELIHOOD_TAG_NAME, phredScaledJL);
        genotypeAttributes.put(GATKVCFConstants.JOINT_POSTERIOR_TAG_NAME, phredScaledJP);

        final GenotypeBuilder builder = new GenotypeBuilder(genotype);

        //update genotype types based on posteriors
        GATKVariantContextUtils.makeGenotypeCall(vc.getMaxPloidy(2), builder, GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, log10Posteriors, vc.getAlleles(), null);

        builder.attribute(GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY,
                Utils.listFromPrimitives(GenotypeLikelihoods.fromLog10Likelihoods(log10Posteriors).getAsPLs()));
        builder.attributes(genotypeAttributes);
        return builder.make();
    }

    //marginalize over the configurationLikelihoodsMatrix and normalize to get the posteriors
    // Returns log10 posteriors for {HOM_REF, HET, HOM_VAR} of the requested member,
    // scaled for numerical stability (NOT normalized to sum to 1).
    private double[] getPosteriors(final FamilyMember recalcInd) {
        // One 9-slot accumulator per possible genotype of the member being recalculated.
        final double[] marginalOverChangedHR = new double[NUM_CALLED_GENOTYPETYPES*NUM_CALLED_GENOTYPETYPES];
        final double[] marginalOverChangedHET = new double[NUM_CALLED_GENOTYPETYPES*NUM_CALLED_GENOTYPETYPES];
        final double[] marginalOverChangedHV = new double[NUM_CALLED_GENOTYPETYPES*NUM_CALLED_GENOTYPETYPES];
        final double[] recalcPosteriors = new double[NUM_CALLED_GENOTYPETYPES];

        final GenotypeType[] calledTypes = {GenotypeType.HOM_REF, GenotypeType.HET, GenotypeType.HOM_VAR};

        int counter = 0;

        switch (recalcInd) {
            case MOTHER:
                // Sum over father x child while pinning the mother's genotype.
                for(final GenotypeType father : calledTypes) {
                    for(final GenotypeType child : calledTypes) {
                        GenotypeType mother;
                        mother = GenotypeType.HOM_REF;
                        marginalOverChangedHR[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
                        mother = GenotypeType.HET;
                        marginalOverChangedHET[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
                        mother = GenotypeType.HOM_VAR;
                        marginalOverChangedHV[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
                        counter++;
                    }
                }
                break;
            case FATHER:
                // Sum over mother x child while pinning the father's genotype.
                for(final GenotypeType mother : calledTypes){
                    for (final GenotypeType child : calledTypes){
                        GenotypeType father;
                        father = GenotypeType.HOM_REF;
                        marginalOverChangedHR[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
                        father = GenotypeType.HET;
                        marginalOverChangedHET[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
                        father = GenotypeType.HOM_VAR;
                        marginalOverChangedHV[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
                        counter++;
                    }
                }
                break;
            case CHILD:
                // Sum over mother x father while pinning the child's genotype.
                for(final GenotypeType mother : calledTypes){
                    for (final GenotypeType father: calledTypes){
                        GenotypeType child;
                        child = GenotypeType.HOM_REF;
                        marginalOverChangedHR[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
                        child = GenotypeType.HET;
                        marginalOverChangedHET[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
                        child = GenotypeType.HOM_VAR;
                        marginalOverChangedHV[counter] = configurationLikelihoodsMatrix[getLikelihoodMatrixIndex(mother, father, child)];
                        counter++;
                    }
                }
                break;
            default:
                throw new UserException(String.format("%d does not indicate a valid trio FamilyMember -- use 0 for mother, 1 for father, 2 for child",recalcInd.ordinal()));
        }

        // log10-sum-exp marginalization over the 9 configurations for each pinned genotype.
        recalcPosteriors[0] = MathUtils.log10sumLog10(marginalOverChangedHR,0);
        recalcPosteriors[1] = MathUtils.log10sumLog10(marginalOverChangedHET,0);
        recalcPosteriors[2] = MathUtils.log10sumLog10(marginalOverChangedHV,0);

        return MathUtils.scaleLogSpaceArrayForNumericalStability(recalcPosteriors);
    }

    /**
     * Computes phred-scaled genotype posteriors given the data in the given variant context and family priors given by this object.
     *
     * <p>Only genotypes of samples that form complete trios (see setTrios) are updated;
     * all others are passed through unchanged.</p>
     */
    public GenotypesContext calculatePosteriorGLs(final VariantContext vc){
        Utils.nonNull(vc);
        final GenotypesContext genotypesContext = GenotypesContext.copy(vc.getGenotypes());

        for (final Sample sample : trios) {
            final Genotype mother = vc.getGenotype(sample.getMaternalID());
            final Genotype father = vc.getGenotype(sample.getPaternalID());
            final Genotype child = vc.getGenotype(sample.getID());

            //Keep only trios and parent/child pairs
            //NOTE(review): && binds tighter than ||, so this skips only when the child is
            //missing or BOTH parents are missing — confirm that precedence is intended.
            if(mother == null && father == null || child == null) {
                logger.warn("Null genotypes in variant: "+vc.toStringDecodeGenotypes());
                continue;
            }

            final ArrayList<Genotype> trioGenotypes = new ArrayList<>(3);
            updateFamilyGenotypes(vc, mother, father, child, trioGenotypes);

            //replace uses sample names to match genotypes, so order doesn't matter
            if (!trioGenotypes.isEmpty()) {
                genotypesContext.replace(trioGenotypes.get(0));
                genotypesContext.replace(trioGenotypes.get(1));
                genotypesContext.replace(trioGenotypes.get(2));
            }
        }

        return genotypesContext;
    }

    /**
     * Select trios and parent/child pairs only
     *
     * @return the child sample of each family where child and both parents are present in the VCF
     */
    private List<Sample> setTrios(final SampleDB sampleDB, final Set<String> vcfSamples, final Map<String, Set<Sample>> families){
        final List<Sample> trios = new ArrayList<>();
        for(final Map.Entry<String,Set<Sample>> familyEntry : families.entrySet()){
            Set<Sample> family = familyEntry.getValue();

            // Since getFamilies(vcfSamples) above still returns parents of samples in the VCF even if those parents are not in the VCF, need to subset down here:
            final Set<Sample> familyMembersInVCF = new TreeSet<>();
            for(final Sample familyMember : family){
                if (vcfSamples.contains(familyMember.getID())) {
                    familyMembersInVCF.add(familyMember);
                }
            }
            family = familyMembersInVCF;

            // Only complete trios qualify: exactly 3 members, and the member whose two
            // parents are both in the family is recorded as the trio's child.
            if(family.size() == 3){
                for(final Sample familyMember : family){
                    final List<Sample> parents = sampleDB.getParents(familyMember);
                    if(parents.size()==2){
                        if(family.containsAll(parents)) {
                            trios.add(familyMember);
                        }
                    }
                }
            }
        }
        return trios;
    }

    //Create a lookup matrix to find the number of MVs for each family genotype combination
    //Covers ALL GenotypeType values (including NO_CALL/UNAVAILABLE/MIXED), so lookups never NPE.
    private void buildMatrices(){
        for(final GenotypeType mother : GenotypeType.values()){
            mvCountMatrix.put(mother, new EnumMap<>(GenotypeType.class));
            for(final GenotypeType father : GenotypeType.values()){
                mvCountMatrix.get(mother).put(father, new EnumMap<>(GenotypeType.class));
                for(final GenotypeType child : GenotypeType.values()){
                    mvCountMatrix.get(mother).get(father).put(child, getCombinationMVCount(mother, father, child));
                }
            }
        }
    }

    //Returns the number of Mendelian Violations for a given genotype combination.
    //If one of the parents' genotypes is missing, it will consider it as a parent/child pair
    //If the child genotype or both parents genotypes are missing, 0 is returned.
    private int getCombinationMVCount(final GenotypeType mother, final GenotypeType father, final GenotypeType child){
        //Child is no call => No MV
        if(child == GenotypeType.NO_CALL || child == GenotypeType.UNAVAILABLE) {
            return 0;
        }

        //Add parents with genotypes for the evaluation
        final ArrayList<GenotypeType> parents = new ArrayList<>();
        if (!(mother == GenotypeType.NO_CALL || mother == GenotypeType.UNAVAILABLE)) {
            parents.add(mother);
        }
        if (!(father == GenotypeType.NO_CALL || father == GenotypeType.UNAVAILABLE)) {
            parents.add(father);
        }

        //Both parents no calls => No MV
        if (parents.isEmpty()) {
            return 0;
        }

        //If at least one parent had a genotype, then count the number of ref and alt alleles that can be passed
        int parentsNumRefAlleles = 0;
        int parentsNumAltAlleles = 0;
        for(final GenotypeType parent : parents){
            if(parent == GenotypeType.HOM_REF){ parentsNumRefAlleles++; }
            else if(parent == GenotypeType.HET){ parentsNumRefAlleles++; parentsNumAltAlleles++; }
            else if(parent == GenotypeType.HOM_VAR){ parentsNumAltAlleles++; }
        }

        //Case Child is HomRef
        //Number of MVs = number of parents unable to contribute a ref allele.
        if(child == GenotypeType.HOM_REF){
            if(parentsNumRefAlleles == parents.size()) {
                return 0;
            } else {
                return (parents.size() - parentsNumRefAlleles);
            }
        }

        //Case child is HomVar
        if(child == GenotypeType.HOM_VAR){
            if(parentsNumAltAlleles == parents.size()) {
                return 0;
            } else {
                return parents.size() - parentsNumAltAlleles;
            }
        }

        //Case child is Het
        //No MV when both allele kinds are available, or when only one parent is known.
        if(child == GenotypeType.HET && ((parentsNumRefAlleles > 0 && parentsNumAltAlleles > 0) || parents.size()<2)) {
            return 0;
        }

        //MV
        return 1;
    }

    /**
     * Updates the genotypes of the given trio. If one of the parents is null, it is considered a parent/child pair.
     * @param vc: Input variant context
     * @param mother: Mother's genotype from vc input
     * @param father: Father's genotype from vc input
     * @param child: Child's genotype from vc input
     * @param finalGenotypes: An ArrayList&lt;Genotype&gt; containing the updated genotypes
     */
    private void updateFamilyGenotypes(final VariantContext vc, final Genotype mother, final Genotype father, final Genotype child, final ArrayList<Genotype> finalGenotypes) {
        //If one of the parents is not called, fill in with uninformative likelihoods
        final Map<GenotypeType,Double> motherLikelihoods = getLikelihoodsAsMapSafeNull(mother);
        final Map<GenotypeType,Double> fatherLikelihoods = getLikelihoodsAsMapSafeNull(father);
        final Map<GenotypeType,Double> childLikelihoods = getLikelihoodsAsMapSafeNull(child);

        //if the child isn't called or neither parent is called, there's no extra inheritance information in that trio so return
        //NOTE(review): dereferences mother/father here — relies on the caller's null check
        //and setTrios guaranteeing both parents are in the VCF; confirm for parent/child pairs.
        if (!hasCalledGT(child.getType()) || (!hasCalledGT(mother.getType()) && !hasCalledGT(father.getType()))) {
            return;
        }

        //Fill the configurationLikelihoodsMatrix for each genotype combination
        int matInd;
        int mvCount;
        double jointLikelihood;
        double mvCoeff;
        double configurationLikelihood;
        for(final Map.Entry<GenotypeType,Double> childGenotype : childLikelihoods.entrySet()){
            for(final Map.Entry<GenotypeType,Double> motherGenotype : motherLikelihoods.entrySet()){
                for(final Map.Entry<GenotypeType,Double> fatherGenotype : fatherLikelihoods.entrySet()){
                    mvCount = mvCountMatrix.get(motherGenotype.getKey()).get(fatherGenotype.getKey()).get(childGenotype.getKey());
                    // Values are log10 likelihoods, so addition = product of probabilities.
                    jointLikelihood = motherGenotype.getValue()+fatherGenotype.getValue()+childGenotype.getValue();
                    // Penalize each MV by the de novo prior; the no-MV coefficient keeps the prior roughly normalized.
                    mvCoeff = mvCount>0 ? Math.pow(deNovoPrior,mvCount) : (1.0-10*deNovoPrior-deNovoPrior*deNovoPrior);
                    configurationLikelihood = Math.log10(mvCoeff) + jointLikelihood;
                    matInd = getLikelihoodMatrixIndex(motherGenotype.getKey(), fatherGenotype.getKey(), childGenotype.getKey());
                    configurationLikelihoodsMatrix[matInd] = configurationLikelihood;
                }
            }
        }

        getUpdatedGenotypes(vc, mother, father, child, finalGenotypes);
    }

    //Get a Map of genotype (log10)likelihoods
    //Priority: phred-scaled posteriors attribute (GP-style) > PLs > flat log10(1/3) for
    //null/uncalled genotypes. Always returns exactly the three called genotype types.
    private EnumMap<GenotypeType,Double> getLikelihoodsAsMapSafeNull(final Genotype genotype){
        final EnumMap<GenotypeType,Double> likelihoodsMap = new EnumMap<>(GenotypeType.class);
        final double[] likelihoods;

        if (genotype != null && hasCalledGT(genotype.getType()) && genotype.hasExtendedAttribute(GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY)) {
            final Object GPfromVCF = genotype.getExtendedAttribute(GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY);
            //parse the GPs into a vector of probabilities
            final String[] likelihoodsAsStringVector = ((String)GPfromVCF).split(",");
            final double[] likelihoodsAsVector = new double[likelihoodsAsStringVector.length];
            for ( int i = 0; i < likelihoodsAsStringVector.length; i++ ) {
                // phred -> log10 probability: divide by -10.
                likelihoodsAsVector[i] = Double.parseDouble(likelihoodsAsStringVector[i]) / -10.0;
            }
            //keep in log10 space for large GQs
            likelihoods = GeneralUtils.normalizeFromLog10(likelihoodsAsVector, true, true);
        }
        //In case of null, unavailable or no call, all likelihoods are log10(1/3)
        else if(genotype == null || !hasCalledGT(genotype.getType()) || genotype.getLikelihoods() == null){
            likelihoods = new double[NUM_CALLED_GENOTYPETYPES];
            likelihoods[0] = LOG10_OF_ONE_THIRD;
            likelihoods[1] = LOG10_OF_ONE_THIRD;
            likelihoods[2] = LOG10_OF_ONE_THIRD;
        }
        //No posteriors in VC, use PLs
        else {
            likelihoods = GeneralUtils.normalizeFromLog10(genotype.getLikelihoods().getAsVector(), true, true);
        }

        // Only the diploid biallelic case (3 values) is supported.
        if (likelihoods.length != NUM_CALLED_GENOTYPETYPES) {
            final String key = genotype.hasExtendedAttribute(GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY) ? GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY : VCFConstants.GENOTYPE_PL_KEY;
            throw new UserException(genotype + " has " + likelihoods.length + " " + key + " values, should be " + NUM_CALLED_GENOTYPETYPES + " since only the diploid case is supported when applying family priors.");
        }

        likelihoodsMap.put(GenotypeType.HOM_REF,likelihoods[genotypeTypeToValue(GenotypeType.HOM_REF)]);
        likelihoodsMap.put(GenotypeType.HET,likelihoods[genotypeTypeToValue(GenotypeType.HET)]);
        likelihoodsMap.put(GenotypeType.HOM_VAR, likelihoods[genotypeTypeToValue(GenotypeType.HOM_VAR)]);
        return likelihoodsMap;
    }

    // Map a (mother, father, child) genotype triple to the flat 3x3x3 matrix index;
    // returns -1 if any member's genotype is not one of the three called types.
    private int getLikelihoodMatrixIndex(final GenotypeType mother, final GenotypeType father, final GenotypeType child){
        final int childInd = genotypeTypeToValue(child);
        final int motherInd;
        final int fatherInd;
        final int INVALID = -1;
        motherInd = genotypeTypeToValue(mother);
        fatherInd = genotypeTypeToValue(father);

        if (childInd == INVALID || motherInd == INVALID || fatherInd == INVALID) //any of the genotypes are NO_CALL, UNAVAILABLE or MIXED
        {
            return INVALID;
        }

        //index into array playing the part of a 3x3x3 matrix (where 3=NUM_CALLED_GENOTYPETYPES)
        return motherInd*NUM_CALLED_GENOTYPETYPES*NUM_CALLED_GENOTYPETYPES + fatherInd*NUM_CALLED_GENOTYPETYPES + childInd;
    }

    // HOM_REF=0, HET=1, HOM_VAR=2; anything else (NO_CALL/UNAVAILABLE/MIXED) = -1.
    private int genotypeTypeToValue(final GenotypeType input){
        if (input == GenotypeType.HOM_REF) {
            return 0;
        }
        if (input == GenotypeType.HET) {
            return 1;
        }
        if (input == GenotypeType.HOM_VAR) {
            return 2;
        }
        return -1;
    }

    //this excludes mixed genotypes, whereas the htsjdk Genotype.isCalled() will return true if the GenotypeType is mixed
    private boolean hasCalledGT(final GenotypeType genotype){
        return genotype == GenotypeType.HOM_REF || genotype == GenotypeType.HET || genotype == GenotypeType.HOM_VAR;
    }
}
package com.github.traviolia.logic_engine.rules.fluents;

import java.io.Reader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import com.github.traviolia.logic_engine.rules.Init;
import com.github.traviolia.logic_engine.rules.Version;
import com.github.traviolia.logic_engine.rules.io.IO;
import com.github.traviolia.logic_engine.rules.io.Parser;
import com.github.traviolia.logic_engine.rules.runtime.ClauseCompiler;
import com.github.traviolia.logic_engine.rules.runtime.ClauseRuntime;
import com.github.traviolia.logic_engine.rules.runtime.LogicEngineImpl;
import com.github.traviolia.logic_engine.rules.runtime.RuleEngine;
import com.github.traviolia.logic_engine.rules.runtime.Structure;
import com.github.traviolia.logic_engine.rules.runtime.Term;
import com.github.traviolia.logic_engine.rules.terms.AtomAstNode;
import com.github.traviolia.logic_engine.rules.terms.ClauseAstNode;
import com.github.traviolia.logic_engine.rules.terms.ConsAstNode;
import com.github.traviolia.logic_engine.rules.terms.StructureAstNode;
import com.github.traviolia.logic_engine.rules.terms.TermAstNode;

/**
 * Implements a Term and Clause objects based blackboard (database).
 *
 * <p>Keys map to FIFO lists of Term/Clause objects. Used both as the Prolog
 * predicate store (consulted clauses) and as a Linda-style tuple space
 * (out/take operations). Exposed as a process-wide singleton via
 * {@link #getInstance()}.</p>
 *
 * <p>NOTE(review): despite the commented-out {@code synchronized} markers,
 * none of the mutating methods are synchronized — confirm single-threaded use.</p>
 */
public class DataBase extends HashMap<String,LinkedList<Term>> {

    private static final long serialVersionUID = 1L;

    // Initialization-on-demand holder idiom: lazy, thread-safe singleton creation.
    private static class DataBaseHolder {
        private static final DataBase INSTANCE= new DataBase();
    }

    /** Returns the process-wide singleton blackboard. */
    public static DataBase getInstance() {
        return DataBaseHolder.INSTANCE;
    }

    Logger logger = LogManager.getLogger();

    public DataBase(){
        super();
    }

    // Version-1 answer atoms: yes/no results for the AST-based engine.
    private static AtomAstNode yes=AtomAstNode.aYes;
    private static AtomAstNode no=AtomAstNode.aNo;

    // /**
    //    Removes a matching Term from the blackboards and
    //    signals failure if no such term is found.
    // */
    // public Term cin(String k,Term pattern) {
    //     Term found=take(k,pattern);
    //     // if(found!=null) {
    //     //     found=found.matching_copy(pattern);
    //     // }
    //     if(found==null)
    //         found=(Version.getInstance().getNumber() == 1) ? no : null;
    //     else
    //         found=new StructureAstNode("the",found.copy());
    //     return found;
    // }

    /** Adds a Term to the blackboard */
    //public TermAstNode out(String k,TermAstNode pattern,boolean copying) {
    //    add(k,copying?pattern.copy():pattern);
    //    return (Version.getInstance().getNumber() == 1) ? yes : null;
    //}

    /** Adds a copy of a Term to the blackboard */
    // synchronized
    public TermAstNode out(String key,Term pattern) {
        return out(key,pattern,true); // copies pattern
    }

    /**
     * Adds a Term to the blackboard.
     *
     * @param copying when true, a copy of the pattern is stored instead of the
     *                pattern itself (callers mutating the term won't affect the store)
     * @return the atom 'yes' for the version-1 engine, null otherwise
     */
    public TermAstNode out(String k,Term pattern,boolean copying) {
        add(k,copying?pattern.copy():pattern);
        return (Version.getInstance().getNumber() == 1) ? yes : null;
    }

    /** Adds a copy of a Term to the blackboard */
    // synchronized
    //public TermAstNode out(String key,TermAstNode pattern) {
    //    return out(key,pattern,true); // copies pattern
    //}

    /**
     * Collects into To up to max matching copies of terms stored under key k.
     * max == 0 means "no limit" (internally mapped to -1 so the countdown never hits 0).
     */
    private void all0(int max,ArrayList<Term> To,String k,Term FXs) {
        if(0==max) max=-1;
        LinkedList<Term> Q = get(k);
        if(Q==null) return;
        // todo: use always the same "server's" trail
        for(Iterator<Term> e=Q.iterator();e.hasNext();) {
            Term t= e.next();
            if(null==t) break;
            // matching_copy returns null when t does not unify with the pattern FXs
            t=t.matching_copy(FXs);
            if(t!=null&&0!=max--) To.add(t);
        }
    }

    // private TermAstNode all1(int max,Term FXs) {
    //     ArrayList<Term> To=new ArrayList<>();
    //     for(Iterator<String> e=keySet().iterator();e.hasNext();) {
    //         all0(max,To,e.next(),FXs);
    //     }
    //     Structure R=new StructureAstNode("$",(Object[])To.toArray());
    //     // IO.mes("RR"+R);
    //     // To.copyInto(R.args);
    //     return (TermAstNode) ((ConsAstNode)R.listify()).args[1];
    // }
    //
    // private TermAstNode all2(int max,String k,Term FXs) {
    //     if(k==null) {
    //         // IO.mes("expensive operation: all/2 with unknown key");
    //         return all1(max,FXs);
    //     }
    //     ArrayList<Term> To=new ArrayList<>();
    //     all0(max,To,k,FXs);
    //     if(To.size()==0)
    //         return AtomAstNode.aNil;
    //     Structure R=new StructureAstNode("$",(Object[])To.toArray());
    //     // To.copyInto(R.args);
    //     TermAstNode T= (TermAstNode) ((ConsAstNode)R.listify()).args[1];
    //     return T;
    // }
    //
    // /**
    //    Returns a (possibly empty) list of matching Term objects
    // */
    // public Term all(String k,Term FX) {
    //     FX=all2(0,k,FX);
    //     return FX;
    // }

    /**
       Gives an Iterator view to the LinkedList of Term or Clause
       objects stored at key k
       @see LinkedList
       @see TermAstNode
       @see ClauseAstNode
    */
    // public Iterator toEnumerationFor(String k) {
    //     Iterator E=super.toEnumerationFor(k);
    //     return E;
    // }

    /**
     * Returns a formatted String representation of this PrologBlackboard object:
     * every stored predicate pretty-printed, one block per key.
     */
    public String pprint() {
        StringBuffer s=new StringBuffer(name());
        Iterator<String> e=keySet().iterator();
        while(e.hasNext()) {
            s.append(pred_to_string((String)e.next()));
            s.append("\n");
        }
        return s.toString();
    }

    /**
     * Pretty-prints all clauses stored under the given key as Prolog source
     * (one clause per line, terminated by '.'); returns null for unknown keys.
     */
    public String pred_to_string(String key) {
        LinkedList<Term> Q=get(key);
        if(null==Q) return null;
        Iterator<Term> e=Q.iterator();
        StringBuffer s=new StringBuffer("% "+key+"\n\n");
        while(e.hasNext()) {
            s.append(((TermAstNode)e.next()).pprint());
            s.append(".\n");
        }
        s.append("\n");
        return s.toString();
    }

    /**
       consults or reconsults a Prolog file by adding or overriding
       existing predicates to be extended to load from URLs transparently
    */
    public boolean fromFile(String f, RuleEngine engine, boolean overwrite) {
        IO.trace("last consulted file was: "+lastFile);
        boolean ok=fileToProg(f, engine, overwrite);
        if(ok) {
            IO.trace("last consulted file set to: "+f);
            // lastFile only advances on success, so a failed consult can be retried
            lastFile=f;
        } else
            IO.errmes("error in consulting file: "+f);
        return ok;
    }

    /**
       reconsults a file by overwritting similar predicates in memory
    */
    public boolean fromFile(String f, RuleEngine engine) {
        return fromFile(f,engine,true);
    }

    // Most recently (successfully) consulted file; starts at the default library.
    private static String lastFile=Init.default_lib;

    /**
       reconsults the last reconsulted file
    */
    public boolean fromFile(RuleEngine engine) {
        IO.println("begin('"+lastFile+"')");
        boolean ok=fromFile(lastFile, engine);
        if(ok)
            IO.println("end('"+lastFile+"')");
        return ok;
    }

    /** Opens the named file and feeds its clauses to the parser pipeline. */
    private boolean fileToProg(String fname, RuleEngine engine, boolean overwrite) {
        Reader sname=IO.toFileReader(fname);
        if(null==sname)
            return false;
        return streamToProg(fname,sname,engine,overwrite);
    }

    /**
       Reads a set of clauses from a stream and adds them
       to the blackboard. Overwrites old predicates if asked to.
       Returns true if all went well.
    */
    public boolean streamToProg(Reader sname,RuleEngine engine,boolean overwrite) {
        return streamToProg(sname.toString(),sname,engine, overwrite);
    }

    /**
     * Parses all clauses from the reader and loads them. When overwrite is true,
     * a snapshot of the current database is used to detect predicates that must
     * be cleared before their first redefinition (see addClause).
     */
    private boolean streamToProg(String fname,Reader sname, RuleEngine engine, boolean overwrite) {
        @SuppressWarnings("unchecked")
        HashMap<String,LinkedList<Term>> ktable = overwrite ? (HashMap<String,LinkedList<Term>>) DataBase.getInstance().clone() : null;
        // Clause Err=new Clause(new Const("error"),new Var());
        try {
            Parser p=new Parser(sname, engine);
            apply_parser(p,fname,ktable);
        } catch(Exception e) { // already catched by readClause
            IO.errmes("unexpected error in streamToProg",e);
            return false;
        }
        return true;
    }

    /**
     * Reads clauses until EOF, compiling and storing each one.
     * Only the version-2 (compiled runtime) path is active; any other
     * engine version is a programming error.
     */
    private void apply_parser(Parser p,String fname, HashMap<String,LinkedList<Term>> ktable) {
        switch (Version.getInstance().getNumber()) {
            // case 1: {
            //     for(;;) {
            //         if(p.atEOF())
            //             return;
            //         int begins_at=p.lineno();
            //         ClauseAstNode C=p.readClause();
            //         if(null==C)
            //             return;
            //         if(Parser.isError(C))
            //             Parser.showError(C);
            //         else {
            //             // IO.mes("ADDING= "+C.pprint());
            //             processClause(C,ktable);
            //             C.setFile(fname,begins_at,p.lineno());
            //         }
            //     }
            // }
            case 2: {
                for(;;) {
                    if(p.atEOF())
                        return;
                    int begins_at=p.lineno();
                    ClauseAstNode C=p.readClause();
                    if(null==C)
                        return;
                    if(Parser.isError(C))
                        Parser.showError(C);
                    else {
                        // IO.mes("ADDING= "+C.pprint());
                        // compile the AST clause to its runtime representation before storing
                        ClauseCompiler compiler = new ClauseCompiler();
                        C.inspect(compiler);
                        processClause((ClauseRuntime) compiler.getReturnValue(),ktable);
                        C.setFile(fname,begins_at,p.lineno());
                    }
                }
            }
            default:
                throw new RuntimeException();
        }
    }

    /**
       adds a Clause to the joint Linda and Predicate table
    */
    // static public void addClause(ClauseAstNode C,HashMap<String,LinkedList<Term>> ktable) {
    //     String k=C.getKey();
    //     // overwrites previous definitions
    //     if(null!=ktable&&null!=ktable.get(k)) {
    //         ktable.remove(k);
    //         DataBase.getInstance().remove(k);
    //     }
    //     DataBase.getInstance().out(k,C,false);
    // }
    static public void addClause(ClauseRuntime C,HashMap<String,LinkedList<Term>> ktable) {
        String k=C.getKey();
        // overwrites previous definitions: the first clause of a predicate seen in
        // this consult clears the old definition; later clauses of the same predicate
        // append (the key is gone from ktable after the first hit)
        if(null!=ktable&&null!=ktable.get(k)) {
            ktable.remove(k);
            DataBase.getInstance().remove(k);
        }
        DataBase.getInstance().out(k,C,false);
    }

    /**
       adds a Clause to the joint Linda and Predicate table
       @see ClauseAstNode
    */
    // static public void processClause(ClauseAstNode C, HashMap<String,LinkedList<Term>> ktable) {
    //     if(C.getHead().matches(new AtomAstNode("init"))) {
    //         IO.mes("init: "+C.getBody());
    //         LogicEngineImpl.firstSolution(C.getHead(),C.getBody());
    //     } else {
    //         IO.mes("ADDING= "+C.pprint());
    //         addClause(C,ktable);
    //     }
    // }
    public void processClause(ClauseRuntime C, HashMap<String,LinkedList<Term>> ktable) {
        // if(C.getHead().matches(new AtomRuntime("init"))) {
        //     IO.mes("init: "+C.getBody());
        //     RuleEngineRuntime.firstSolution(C.getHead(),C.getBody());
        // } else {
        logger.trace("ADDING= "+C.pprint());
        addClause(C,ktable);
        // }
    }

    // blackboard methods

    /** Instance-unique display name used as the pprint header. */
    public String name() {
        return getClass().getName()+hashCode();
    }

    /**
       Removes the first Term having key k or the first enumerated key
       if k is null. Empty lists are removed from the map so that keySet()
       only ever shows non-empty keys.
    */
    // synchronized
    private final Term pick(String k) {
        if(k==null) {
            Iterator<String> e=this.keySet().iterator();
            if(!e.hasNext())
                return null;
            k=e.next();
            // IO.trace("$$Got key:"+k+this);
        }
        LinkedList<Term> Q=get(k);
        if(Q==null)
            return null;
        Term T= Q.removeFirst();
        if(Q.isEmpty()) {
            remove(k);
            // IO.trace("$$Removed key:"+k+this);
        }
        return T;
    }

    // Re-appends terms that pick() removed but take() rejected (non-matching).
    // NOTE(review): re-appended terms end up at the tail, so relative order
    // with remaining terms is not preserved — confirm this is acceptable.
    private final void addBack(String k,ArrayList<Term> V) {
        for(Iterator<Term> e=V.iterator();e.hasNext();) {
            // cannot be here if k==null
            add(k,e.next());
        }
    }

    /**
       Removes the first matching Term or Clause from the blackboard,
       to be used by Linda in/1 operation in PrologBlackBoard
       @see PrologBlackBoard#in()
    */
    // synchronized
    protected final Term take(String k,Term pattern) {
        ArrayList<Term> V=new ArrayList<>();
        Term t;
        while(true) {
            t=pick(k);
            if(null==t) break;
            // IO.trace("$$After pick: t="+t+this);
            if(t.matches(pattern)) break;
            else V.add(t);
        }
        addBack(k,V);
        return t;
    }

    /**
       Adds a Term or Clause to the the blackboard,
       to be used by Linda out/1 operation
       @see PrologBlackBoard
    */
    // synchronized
    // protected final void add(String k,TermAstNode value) {
    //     LinkedList<Term> Q = get(k);
    //     if(Q==null) {
    //         Q=new LinkedList<>();
    //         put(k,Q);
    //     }
    //     Q.addLast(value);
    //     // IO.trace("$$Added key/val:"+k+"/"+value+"=>"+this);
    // }
    protected final void add(String k,Term value) {
        LinkedList<Term> Q=get(k);
        if(Q==null) {
            Q=new LinkedList<>();
            put(k,Q);
        }
        Q.addLast(value);
        // IO.errmes("LinkedList full, key:"+k);
        // IO.trace("$$Added key/val:"+k+"/"+value+"=>"+this);
    }

    /**
       This gives an enumeration view for the sequence of objects kept
       under key k.
       NOTE(review): NPEs for unknown keys (get(k) returns null) — callers
       must ensure the key exists.
    */
    // synchronized
    public Iterator<Term> toEnumerationFor(String k) {
        LinkedList<Term> Q= get(k);
        return Q.iterator();
    }

    /** Returns the raw clause list stored under key k (null if absent). */
    public LinkedList<Term> getClauses(String k) {
        LinkedList<Term> Q=get(k);
        return Q;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.linkedin.api; import java.io.IOException; import java.net.URI; import java.net.URL; import java.net.URLDecoder; import java.net.URLEncoder; import java.security.SecureRandom; import java.util.HashMap; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.annotation.Priority; import javax.ws.rs.Priorities; import javax.ws.rs.client.ClientRequestContext; import javax.ws.rs.client.ClientRequestFilter; import javax.ws.rs.ext.Provider; import com.fasterxml.jackson.databind.ObjectMapper; import com.gargoylesoftware.htmlunit.BrowserVersion; import com.gargoylesoftware.htmlunit.FailingHttpStatusCodeException; import com.gargoylesoftware.htmlunit.HttpMethod; import com.gargoylesoftware.htmlunit.Page; import com.gargoylesoftware.htmlunit.ProxyConfig; import com.gargoylesoftware.htmlunit.WebClient; import com.gargoylesoftware.htmlunit.WebClientOptions; import com.gargoylesoftware.htmlunit.WebRequest; import com.gargoylesoftware.htmlunit.WebResponse; import com.gargoylesoftware.htmlunit.html.HtmlDivision; import com.gargoylesoftware.htmlunit.html.HtmlForm; import 
com.gargoylesoftware.htmlunit.html.HtmlPage;
import com.gargoylesoftware.htmlunit.html.HtmlPasswordInput;
import com.gargoylesoftware.htmlunit.html.HtmlSubmitInput;
import com.gargoylesoftware.htmlunit.html.HtmlTextInput;
import com.gargoylesoftware.htmlunit.util.WebConnectionWrapper;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpHost;
import org.apache.http.HttpStatus;
import org.apache.http.conn.params.ConnRoutePNames;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * LinkedIn OAuth request filter to handle OAuth token.
 * Drives a headless HtmlUnit browser through LinkedIn's OAuth2 authorization
 * flow (login form + redirect) to obtain an authorization code, exchanges it
 * for an access token, and appends that token as a query parameter to every
 * outgoing JAX-RS request.
 */
@Provider
@Priority(Priorities.AUTHENTICATION)
public final class LinkedInOAuthRequestFilter implements ClientRequestFilter {

    public static final String BASE_ADDRESS = "https://api.linkedin.com/v1";

    private static final Logger LOG = LoggerFactory.getLogger(LinkedInOAuthRequestFilter.class);

    private static final String AUTHORIZATION_URL = "https://www.linkedin.com/uas/oauth2/authorization?"
        + "response_type=code&client_id=%s&state=%s&redirect_uri=%s";
    private static final String AUTHORIZATION_URL_WITH_SCOPE = "https://www.linkedin.com/uas/oauth2/authorization?"
        + "response_type=code&client_id=%s&state=%s&scope=%s&redirect_uri=%s";
    private static final String ACCESS_TOKEN_URL = "https://www.linkedin.com/uas/oauth2/accessToken?"
        + "grant_type=authorization_code&code=%s&redirect_uri=%s&client_id=%s&client_secret=%s";

    // parses "k1=v1&k2=v2" redirect query strings into key/value pairs
    private static final Pattern QUERY_PARAM_PATTERN = Pattern.compile("&?([^=]+)=([^&]+)");

    private final WebClient webClient;
    private final OAuthParams oAuthParams;
    private OAuthToken oAuthToken;

    /**
     * Creates the filter and, unless {@code lazyAuth} is set, performs the full
     * OAuth login immediately.
     *
     * @param oAuthParams      credentials, client id/secret, redirect URI and scopes
     * @param httpParams       optional HttpClient params; only the proxy settings are honored
     * @param lazyAuth         when true, defer authorization until the first request
     * @param enabledProtocols SSL protocols to enable on the embedded browser
     * @throws IllegalArgumentException if eager authorization fails
     */
    @SuppressWarnings("deprecation")
    public LinkedInOAuthRequestFilter(OAuthParams oAuthParams, Map<String, Object> httpParams,
                                      boolean lazyAuth, String[] enabledProtocols) {

        this.oAuthParams = oAuthParams;
        this.oAuthToken = null;

        // create HtmlUnit client
        webClient = new WebClient(BrowserVersion.FIREFOX_38);
        final WebClientOptions options = webClient.getOptions();
        options.setRedirectEnabled(true);
        options.setJavaScriptEnabled(false);
        options.setThrowExceptionOnFailingStatusCode(true);
        options.setThrowExceptionOnScriptError(true);
        options.setPrintContentOnFailingStatusCode(LOG.isDebugEnabled());
        options.setSSLClientProtocols(enabledProtocols);

        // add HTTP proxy if set
        if (httpParams != null && httpParams.get(ConnRoutePNames.DEFAULT_PROXY) != null) {
            final HttpHost proxyHost = (HttpHost) httpParams.get(ConnRoutePNames.DEFAULT_PROXY);
            final Boolean socksProxy = (Boolean) httpParams.get("http.route.socks-proxy");
            final ProxyConfig proxyConfig = new ProxyConfig(proxyHost.getHostName(), proxyHost.getPort(),
                socksProxy != null ? socksProxy : false);
            options.setProxyConfig(proxyConfig);
        }

        // disable default gzip compression, as error pages are sent with no compression and htmlunit doesn't negotiate
        // (the wrapper registers itself with webClient in its constructor, so the
        // apparently-unused instance is intentional)
        new WebConnectionWrapper(webClient) {
            @Override
            public WebResponse getResponse(WebRequest request) throws IOException {
                request.setAdditionalHeader(HttpHeaders.ACCEPT_ENCODING, "identity");
                return super.getResponse(request);
            }
        };

        if (!lazyAuth) {
            try {
                updateOAuthToken();
            } catch (IOException e) {
                throw new IllegalArgumentException(
                    String.format("Error authorizing user %s: %s", oAuthParams.getUserName(), e.getMessage()), e);
            }
        }
    }

    /**
     * Performs the interactive (scripted) OAuth2 authorization: opens the
     * authorization URL, submits the login form, validates the CSRF state and
     * returns the authorization code from the redirect.
     * NOTE(review): redirects stay disabled on the shared webClient after this
     * method returns — confirm no later caller relies on them being re-enabled.
     *
     * @return the OAuth2 authorization code
     * @throws IllegalArgumentException wrapping any underlying failure
     * @throws SecurityException on CAPTCHA challenge or CSRF state mismatch
     */
    private String getRefreshToken() {
        // disable redirect to avoid loading error redirect URL
        webClient.getOptions().setRedirectEnabled(false);

        try {
            final String csrfId = String.valueOf(new SecureRandom().nextLong());
            final String encodedRedirectUri = URLEncoder.encode(oAuthParams.getRedirectUri(), "UTF-8");
            final OAuthScope[] scopes = oAuthParams.getScopes();

            final String url;
            if (scopes == null || scopes.length == 0) {
                url = String.format(AUTHORIZATION_URL, oAuthParams.getClientId(), csrfId, encodedRedirectUri);
            } else {
                // join scopes with the URL-encoded space "%20"
                final int nScopes = scopes.length;
                final StringBuilder builder = new StringBuilder();
                int i = 0;
                for (OAuthScope scope : scopes) {
                    builder.append(scope.getValue());
                    if (++i < nScopes) {
                        builder.append("%20");
                    }
                }
                url = String.format(AUTHORIZATION_URL_WITH_SCOPE, oAuthParams.getClientId(), csrfId,
                    builder.toString(), encodedRedirectUri);
            }

            HtmlPage authPage = null;
            try {
                authPage = webClient.getPage(url);
            } catch (FailingHttpStatusCodeException e) {
                // only handle errors returned with redirects
                boolean done = false;
                do {
                    if (e.getStatusCode() == HttpStatus.SC_MOVED_TEMPORARILY
                        || e.getStatusCode() == HttpStatus.SC_SEE_OTHER) {
                        final URL location = new URL(e.getResponse().getResponseHeaderValue(HttpHeaders.LOCATION));
                        final String locationQuery = location.getQuery();
                        if (locationQuery != null && locationQuery.contains("error=")) {
                            // fix: use the explicit-charset decode overload instead of the
                            // deprecated platform-charset URLDecoder.decode(String)
                            throw new IOException(URLDecoder.decode(locationQuery, "UTF-8").replaceAll("&", ", "));
                        } else {
                            // follow the redirect to login form
                            try {
                                authPage = webClient.getPage(location);
                                done = true;
                            } catch (FailingHttpStatusCodeException e1) {
                                e = e1;
                            }
                        }
                    } else {
                        throw e;
                    }
                } while (!done);
            }

            // look for <div role="alert">
            final HtmlDivision div = authPage.getFirstByXPath("//div[@role='alert']");
            if (div != null) {
                throw new IllegalArgumentException("Error authorizing application: " + div.getTextContent());
            }

            // submit login credentials
            final HtmlForm loginForm = authPage.getForms().get(0);
            final HtmlTextInput login = loginForm.getInputByName("session_key");
            login.setText(oAuthParams.getUserName());
            final HtmlPasswordInput password = loginForm.getInputByName("session_password");
            password.setText(oAuthParams.getUserPassword());
            final HtmlSubmitInput submitInput = (HtmlSubmitInput) loginForm.getElementsByAttribute(
                "input", "type", "submit").get(0);

            // validate CSRF and get authorization code
            String redirectQuery;
            try {
                final Page redirectPage = submitInput.click();
                redirectQuery = redirectPage.getUrl().getQuery();
            } catch (FailingHttpStatusCodeException e) {
                // escalate non redirect errors
                if (e.getStatusCode() != HttpStatus.SC_MOVED_TEMPORARILY) {
                    throw e;
                }
                final String location = e.getResponse().getResponseHeaderValue("Location");
                redirectQuery = new URL(location).getQuery();
            }
            if (redirectQuery == null) {
                throw new IllegalArgumentException("Redirect response query is null, check username, password and permissions");
            }
            final Map<String, String> params = new HashMap<>();
            final Matcher matcher = QUERY_PARAM_PATTERN.matcher(redirectQuery);
            while (matcher.find()) {
                params.put(matcher.group(1), matcher.group(2));
            }

            // check if we got caught in a Captcha!
            if (params.get("challengeId") != null) {
                throw new SecurityException("Unable to login due to CAPTCHA, use with a valid accessToken instead!");
            }

            final String state = params.get("state");
            if (!csrfId.equals(state)) {
                throw new SecurityException("Invalid CSRF code!");
            } else {
                // return authorization code
                // TODO check results??
                return params.get("code");
            }
        } catch (Exception e) {
            throw new IllegalArgumentException("Error authorizing application: " + e.getMessage(), e);
        }
    }

    /** Releases the embedded HtmlUnit browser. */
    public void close() {
        webClient.close();
    }

    /**
     * Exchanges an authorization code for an access token via the token endpoint.
     *
     * @param refreshToken the authorization code from {@link #getRefreshToken()}
     * @return token with absolute expiry time derived from "expires_in"
     * @throws IOException if the endpoint returns a non-200 status
     */
    private OAuthToken getAccessToken(String refreshToken) throws IOException {
        final String tokenUrl = String.format(ACCESS_TOKEN_URL, refreshToken, oAuthParams.getRedirectUri(),
            oAuthParams.getClientId(), oAuthParams.getClientSecret());
        final WebRequest webRequest = new WebRequest(new URL(tokenUrl), HttpMethod.POST);

        final WebResponse webResponse = webClient.loadWebResponse(webRequest);
        if (webResponse.getStatusCode() != HttpStatus.SC_OK) {
            throw new IOException(String.format("Error getting access token: [%s: %s]",
                webResponse.getStatusCode(), webResponse.getStatusMessage()));
        }
        final long currentTime = System.currentTimeMillis();
        final ObjectMapper mapper = new ObjectMapper();
        // fix: wildcard generics instead of a raw Map type
        final Map<?, ?> map = mapper.readValue(webResponse.getContentAsStream(), Map.class);
        final String accessToken = map.get("access_token").toString();
        final Integer expiresIn = Integer.valueOf(map.get("expires_in").toString());
        return new OAuthToken(refreshToken, accessToken,
            currentTime + TimeUnit.MILLISECONDS.convert(expiresIn, TimeUnit.SECONDS));
    }

    /** Returns the current token; may be null until the first authorization. */
    public synchronized OAuthToken getOAuthToken() {
        return oAuthToken;
    }

    /**
     * JAX-RS hook: ensures a valid token exists, then appends it to the request
     * URI as the "oauth2_access_token" query parameter.
     */
    @Override
    public void filter(ClientRequestContext requestContext) throws IOException {
        updateOAuthToken();

        // add OAuth query param
        final String requestUri = requestContext.getUri().toString();
        final StringBuilder builder = new StringBuilder(requestUri);
        if (requestUri.contains("?")) {
            builder.append('&');
        } else {
            builder.append('?');
        }
        builder.append("oauth2_access_token=").append(oAuthToken.getAccessToken());
        requestContext.setUri(URI.create(builder.toString()));
    }

    /**
     * Creates or refreshes the cached token when missing or expired: first asks
     * the optional secure storage, then falls back to a full re-authorization
     * (LinkedIn does not support grant_type=refresh_token).
     *
     * @throws IOException if the token exchange fails
     */
    private synchronized void updateOAuthToken() throws IOException {

        // check whether an update is needed
        final long currentTime = System.currentTimeMillis();
        if (oAuthToken == null || oAuthToken.getExpiryTime() < currentTime) {
            LOG.info("OAuth token doesn't exist or has expired");

            // check whether a secure store is provided
            final OAuthSecureStorage secureStorage = oAuthParams.getSecureStorage();
            if (secureStorage != null) {
                oAuthToken = secureStorage.getOAuthToken();
                // if it returned a valid token, we are done, otherwise fall through and generate a new token
                if (oAuthToken != null && oAuthToken.getExpiryTime() > currentTime) {
                    return;
                }
                LOG.info("OAuth secure storage returned a null or expired token, creating a new token...");

                // throw an exception if a user password is not set for authorization
                if (oAuthParams.getUserPassword() == null || oAuthParams.getUserPassword().isEmpty()) {
                    throw new IllegalArgumentException("Missing password for LinkedIn authorization");
                }
            }

            // need new OAuth token, authorize user, LinkedIn does not support OAuth2 grant_type=refresh_token
            final String refreshToken = getRefreshToken();
            this.oAuthToken = getAccessToken(refreshToken);
            LOG.info("OAuth token created!");

            // notify secure storage
            if (secureStorage != null) {
                secureStorage.saveOAuthToken(this.oAuthToken);
            }
        }
    }
}
/* * Copyright (c) 2009-2020 jMonkeyEngine * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'jMonkeyEngine' nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.jme3.bounding; import com.jme3.collision.Collidable; import com.jme3.collision.CollisionResults; import com.jme3.export.JmeExporter; import com.jme3.export.JmeImporter; import com.jme3.export.Savable; import com.jme3.math.*; import com.jme3.util.TempVars; import java.io.IOException; import java.nio.FloatBuffer; /** * <code>BoundingVolume</code> defines an interface for dealing with * containment of a collection of points. 
* * @author Mark Powell * @version $Id: BoundingVolume.java,v 1.24 2007/09/21 15:45:32 nca Exp $ */ public abstract class BoundingVolume implements Savable, Cloneable, Collidable { /** * The type of bounding volume being used. */ public enum Type { /** * {@link BoundingSphere} */ Sphere, /** * {@link BoundingBox}. */ AABB, /** * Currently unsupported by jME3. */ Capsule; } protected int checkPlane = 0; protected Vector3f center = new Vector3f(); public BoundingVolume() { } public BoundingVolume(Vector3f center) { this.center.set(center); } /** * Grabs the checkplane we should check first. */ public int getCheckPlane() { return checkPlane; } /** * Sets the index of the plane that should be first checked during rendering. * * @param value */ public final void setCheckPlane(int value) { checkPlane = value; } /** * getType returns the type of bounding volume this is. */ public abstract Type getType(); /** * <code>transform</code> alters the location of the bounding volume by a * rotation, translation and a scalar. * * @param trans * the transform to affect the bound. * @return the new bounding volume. */ public final BoundingVolume transform(Transform trans) { return transform(trans, null); } /** * <code>transform</code> alters the location of the bounding volume by a * rotation, translation and a scalar. * * @param trans * the transform to affect the bound. * @param store * bounding volume to store result in * @return the new bounding volume. */ public abstract BoundingVolume transform(Transform trans, BoundingVolume store); public abstract BoundingVolume transform(Matrix4f trans, BoundingVolume store); /** * <code>whichSide</code> returns the side on which the bounding volume * lies on a plane. Possible values are POSITIVE_SIDE, NEGATIVE_SIDE, and * NO_SIDE. * * @param plane * the plane to check against this bounding volume. * @return the side on which this bounding volume lies. 
*/ public abstract Plane.Side whichSide(Plane plane); /** * <code>computeFromPoints</code> generates a bounding volume that * encompasses a collection of points. * * @param points * the points to contain. */ public abstract void computeFromPoints(FloatBuffer points); /** * <code>merge</code> combines two bounding volumes into a single bounding * volume that contains both this bounding volume and the parameter volume. * * @param volume * the volume to combine. * @return the new merged bounding volume. */ public abstract BoundingVolume merge(BoundingVolume volume); /** * <code>mergeLocal</code> combines two bounding volumes into a single * bounding volume that contains both this bounding volume and the parameter * volume. The result is stored locally. * * @param volume * the volume to combine. * @return this */ public abstract BoundingVolume mergeLocal(BoundingVolume volume); /** * <code>clone</code> creates a new BoundingVolume object containing the * same data as this one. * * @param store * where to store the cloned information. if null or wrong class, * a new store is created. * @return the new BoundingVolume */ public abstract BoundingVolume clone(BoundingVolume store); public final Vector3f getCenter() { return center; } public final Vector3f getCenter(Vector3f store) { store.set(center); return store; } public final void setCenter(Vector3f newCenter) { center.set(newCenter); } public final void setCenter(float x, float y, float z) { center.set(x, y, z); } /** * Find the distance from the center of this Bounding Volume to the given * point. * * @param point * The point to get the distance to * @return distance */ public final float distanceTo(Vector3f point) { return center.distance(point); } /** * Find the squared distance from the center of this Bounding Volume to the * given point. 
* * @param point * The point to get the distance to * @return distance */ public final float distanceSquaredTo(Vector3f point) { return center.distanceSquared(point); } /** * Find the distance from the nearest edge of this Bounding Volume to the given * point. * * @param point * The point to get the distance to * @return distance */ public abstract float distanceToEdge(Vector3f point); /** * determines if this bounding volume and a second given volume are * intersecting. Intersecting being: one volume contains another, one volume * overlaps another or one volume touches another. * * @param bv * the second volume to test against. * @return true if this volume intersects the given volume. */ public abstract boolean intersects(BoundingVolume bv); /** * determines if a ray intersects this bounding volume. * * @param ray * the ray to test. * @return true if this volume is intersected by a given ray. */ public abstract boolean intersects(Ray ray); /** * determines if this bounding volume and a given bounding sphere are * intersecting. * * @param bs * the bounding sphere to test against. * @return true if this volume intersects the given bounding sphere. */ public abstract boolean intersectsSphere(BoundingSphere bs); /** * determines if this bounding volume and a given bounding box are * intersecting. * * @param bb * the bounding box to test against. * @return true if this volume intersects the given bounding box. */ public abstract boolean intersectsBoundingBox(BoundingBox bb); /* * determines if this bounding volume and a given bounding box are * intersecting. * * @param bb * the bounding box to test against. * @return true if this volume intersects the given bounding box. */ // public abstract boolean intersectsOrientedBoundingBox(OrientedBoundingBox bb); /** * determines if a given point is contained within this bounding volume. * If the point is on the edge of the bounding volume, this method will * return false. 
Use intersects(Vector3f) to check for edge intersection. * * @param point * the point to check * @return true if the point lies within this bounding volume. */ public abstract boolean contains(Vector3f point); /** * Determines if a given point intersects (touches or is inside) this bounding volume. * * @param point the point to check * @return true if the point lies within this bounding volume. */ public abstract boolean intersects(Vector3f point); public abstract float getVolume(); @Override public BoundingVolume clone() { try { BoundingVolume clone = (BoundingVolume) super.clone(); clone.center = center.clone(); return clone; } catch (CloneNotSupportedException ex) { throw new AssertionError(); } } @Override public void write(JmeExporter e) throws IOException { e.getCapsule(this).write(center, "center", Vector3f.ZERO); } @Override public void read(JmeImporter e) throws IOException { center = (Vector3f) e.getCapsule(this).readSavable("center", Vector3f.ZERO.clone()); } public int collideWith(Collidable other) { TempVars tempVars = TempVars.get(); try { CollisionResults tempResults = tempVars.collisionResults; tempResults.clear(); return collideWith(other, tempResults); } finally { tempVars.release(); } } }
package com.taskadapter.redmineapi;

import com.taskadapter.redmineapi.bean.Group;
import com.taskadapter.redmineapi.bean.GroupFactory;
import com.taskadapter.redmineapi.bean.Role;
import com.taskadapter.redmineapi.bean.User;
import com.taskadapter.redmineapi.bean.UserFactory;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;

import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.fest.assertions.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Integration tests for user, group and role operations against a live
 * Redmine server (configured via IntegrationTestHelper). A non-admin user
 * is created once per class to exercise permission-restricted paths.
 */
public class UserIntegrationTest {
    private static final User OUR_USER = IntegrationTestHelper.getOurUser();

    private static UserManager userManager;

    private static Integer nonAdminUserId;
    private static String nonAdminUserLogin;
    private static String nonAdminPassword;

    @BeforeClass
    public static void oneTimeSetup() {
        RedmineManager mgr = IntegrationTestHelper.createRedmineManager();
        userManager = mgr.getUserManager();
        try {
            createNonAdminUser();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // creates the shared non-admin account used by the permission tests
    private static void createNonAdminUser() throws RedmineException {
        User user = UserGenerator.generateRandomUser();
        User nonAdminUser = userManager.createUser(user);
        nonAdminUserId = nonAdminUser.getId();
        nonAdminUserLogin = user.getLogin();
        nonAdminPassword = user.getPassword();
    }

    @Test
    public void usersAreLoadedByAdmin() throws RedmineException {
        List<User> users = userManager.getUsers();
        assertTrue(users.size() > 0);
    }

    @Test(expected = NotAuthorizedException.class)
    public void usersCannotBeLoadedByNotAdmin() throws RedmineException {
        getNonAdminManager().getUserManager().getUsers();
        fail("Must have failed with NotAuthorizedException.");
    }

    @Test
    public void userCanBeLoadedByIdByNonAdmin() throws RedmineException {
        User userById = getNonAdminManager().getUserManager().getUserById(nonAdminUserId);
        assertEquals(nonAdminUserId, userById.getId());
    }

    @Test
    public void testGetCurrentUser() throws RedmineException {
        User currentUser = userManager.getCurrentUser();
        assertEquals(OUR_USER.getId(), currentUser.getId());
        assertEquals(OUR_USER.getLogin(), currentUser.getLogin());
    }

    @Test
    public void testGetUserById() throws RedmineException {
        User loadedUser = userManager.getUserById(OUR_USER.getId());
        assertEquals(OUR_USER.getId(), loadedUser.getId());
        assertEquals(OUR_USER.getLogin(), loadedUser.getLogin());
        assertEquals(OUR_USER.getApiKey(), loadedUser.getApiKey());
    }

    @Test
    public void userCanBeFoundByFreeFormSearch() throws RedmineException {
        final User user = UserFactory.create();
        user.setLogin("somelogin");
        final String name = "FirstNameUnique";
        user.setFirstName(name);
        user.setLastName("LastNameUnique");
        user.setMail("aa@aaa.ccc");
        Integer id = null;
        try {
            final User created = userManager.createUser(user);
            id = created.getId();

            Map<String, String> params = new HashMap<String, String>();
            params.put("name", name);
            List<User> list = userManager.getUsers(params);
            assertThat(list.size()).isEqualTo(1);
            final User loaded = list.get(0);
            assertThat(loaded.getFirstName()).isEqualTo(name);
        } finally {
            // fix: only clean up if the user was actually created; deleting with a
            // null id here would mask the original failure from the try block
            if (id != null) {
                userManager.deleteUser(id);
            }
        }
    }

    @Test(expected = NotFoundException.class)
    public void testGetUserNonExistingId() throws RedmineException {
        userManager.getUserById(999999);
    }

    @Test
    public void testCreateUser() throws RedmineException {
        User createdUser = null;
        try {
            User userToCreate = UserGenerator.generateRandomUser();
            createdUser = userManager.createUser(userToCreate);
            assertNotNull(
                    "checking that a non-null user is returned", createdUser);
            assertEquals(userToCreate.getLogin(), createdUser.getLogin());
            assertEquals(userToCreate.getFirstName(), createdUser.getFirstName());
            assertEquals(userToCreate.getLastName(), createdUser.getLastName());
            Integer id = createdUser.getId();
            assertNotNull(id);
        } finally {
            if (createdUser != null) {
                userManager.deleteUser(createdUser.getId());
            }
        }
    }

    @Test
    public void testCreateUserWithAuthSource() throws RedmineException {
        User createdUser = null;
        try {
            User userToCreate = UserGenerator.generateRandomUser();
            userToCreate.setAuthSourceId(1);
            createdUser = userManager.createUser(userToCreate);
            assertNotNull("checking that a non-null user is returned", createdUser);
            // Redmine doesn't return it, so let's consider a non-exceptional return as success for now.
            // assertNotNull("checking that a non-null auth_source_id is returned", createdUser.getAuthSourceId());
            // assertEquals(1, createdUser.getAuthSourceId().intValue());
        } finally {
            if (createdUser != null) {
                userManager.deleteUser(createdUser.getId());
            }
        }
    }

    @Test
    public void testUpdateUser() throws RedmineException {
        User userToCreate = UserFactory.create();
        userToCreate.setFirstName("fname2");
        userToCreate.setLastName("lname2");
        long randomNumber = new Date().getTime();
        userToCreate.setLogin("login33" + randomNumber);
        userToCreate.setMail("email" + randomNumber + "@somedomain.com");
        userToCreate.setPassword("1234asdf");
        User createdUser = userManager.createUser(userToCreate);
        Integer userId = createdUser.getId();
        assertNotNull(
                "checking that a non-null project is returned", createdUser);
        String newFirstName = "fnameNEW";
        String newLastName = "lnameNEW";
        String newMail = "newmail" + randomNumber + "@asd.com";
        createdUser.setFirstName(newFirstName);
        createdUser.setLastName(newLastName);
        createdUser.setMail(newMail);
        userManager.update(createdUser);

        User updatedUser = userManager.getUserById(userId);
        assertEquals(newFirstName, updatedUser.getFirstName());
        assertEquals(newLastName, updatedUser.getLastName());
        assertEquals(newMail, updatedUser.getMail());
        assertEquals(userId, updatedUser.getId());
    }

    @Test
    public void userCanBeDeleted() throws RedmineException {
        User user = UserGenerator.generateRandomUser();
        User createdUser = userManager.createUser(user);
        Integer newUserId = createdUser.getId();
        try {
            userManager.deleteUser(newUserId);
        } catch (Exception e) {
            fail(e.getMessage());
        }
        try {
            userManager.getUserById(newUserId);
            fail("Must have failed with NotFoundException because we tried to delete the user");
        } catch (NotFoundException e) {
            // ignore: the user should not be found
        }
    }

    @Test(expected = NotFoundException.class)
    public void deletingNonExistingUserThrowsNFE() throws RedmineException {
        userManager.deleteUser(999999);
    }

    /**
     * Requires Redmine 2.1
     */
    @Test
    public void testAddUserToGroup() throws RedmineException {
        final Group template = GroupFactory.create();
        template.setName("testAddUserToGroup " + System.currentTimeMillis());
        final Group group = userManager.createGroup(template);
        try {
            final User newUser = userManager.createUser(UserGenerator.generateRandomUser());
            try {
                userManager.addUserToGroup(newUser, group);
                final Collection<Group> userGroups = userManager.getUserById(newUser.getId()).getGroups();
                assertTrue(userGroups.size() == 1);
                assertTrue(group.getName().equals(userGroups.iterator().next().getName()));
            } finally {
                userManager.deleteUser(newUser.getId());
            }
        } finally {
            userManager.deleteGroup(group);
        }
    }

    /**
     * "add to group" operation used to be safe (idempotent) for Redmine 2.6.x, but FAILS for Redmine 3.0.0.
     * I submitted a bug: http://www.redmine.org/issues/19363, which was closed as "invalid".
     * Marking this test as "Ignored" for now.
     */
    @Ignore
    @Test
    public void addingUserToGroupTwiceDoesNotGiveErrors() throws RedmineException {
        final Group template = GroupFactory.create();
        template.setName("some test " + System.currentTimeMillis());
        final Group group = userManager.createGroup(template);
        try {
            final User newUser = userManager.createUser(UserGenerator.generateRandomUser());
            try {
                userManager.addUserToGroup(newUser, group);
                userManager.addUserToGroup(newUser, group);
                assertTrue(userManager.getUserById(newUser.getId()).getGroups().size() == 1);
            } finally {
                userManager.deleteUser(newUser.getId());
            }
        } finally {
            userManager.deleteGroup(group);
        }
    }

    @Test
    public void testGroupCRUD() throws RedmineException {
        final Group template = GroupFactory.create();
        template.setName("Template group " + System.currentTimeMillis());
        final Group created = userManager.createGroup(template);

        try {
            assertEquals(template.getName(), created.getName());
            final Group loaded = userManager.getGroupById(created.getId());
            assertEquals(template.getName(), loaded.getName());

            final Group update = GroupFactory.create(loaded.getId());
            update.setName("Group update " + System.currentTimeMillis());

            userManager.update(update);

            final Group loaded2 = userManager.getGroupById(created.getId());
            assertEquals(update.getName(), loaded2.getName());
        } finally {
            userManager.deleteGroup(created);
        }

        try {
            userManager.getGroupById(created.getId());
            fail("Group should be deleted but was found");
        } catch (NotFoundException e) {
            // OK!
        }
    }

    @Test
    public void testGetRoleById() throws RedmineException {
        final Collection<Role> roles = userManager.getRoles();
        for (Role r : roles) {
            final Role loaded = userManager.getRoleById(r.getId());
            assertEquals(r.getName(), loaded.getName());
            assertEquals(r.getInherited(), loaded.getInherited());
        }
    }

    @Test
    public void testRolesHasPermissions() throws RedmineException {
        final Collection<Role> roles = userManager.getRoles();
        for (Role r : roles) {
            final Role loaded = userManager.getRoleById(r.getId());
            if (loaded.getPermissions() != null && !loaded.getPermissions().isEmpty())
                return;
        }
        fail("Failed to find a role with a permissions");
    }

    @Test
    public void testGetRoles() throws RedmineException {
        assertTrue(userManager.getRoles().size() > 0);
    }

    @Test
    public void testUserDefaults() throws RedmineException {
        final User template = UserFactory.create();
        template.setFirstName("first name");
        template.setLastName("last name");
        template.setMail("root@globalhost.ru");
        template.setPassword("aslkdj32jnrfds7asdfn23()[]:kajsdf");
        template.setLogin("asdNnadnNasd");
        final User result = userManager.createUser(template);
        try {
            Assert.assertNotNull(result.getId());
            Assert.assertEquals("asdNnadnNasd", result.getLogin());
            Assert.assertNull(result.getPassword());
            Assert.assertEquals("first name", result.getFirstName());
            Assert.assertEquals("last name", result.getLastName());
            Assert.assertEquals("root@globalhost.ru", result.getMail());
            Assert.assertNotNull(result.getCreatedOn());
            Assert.assertNull(result.getLastLoginOn());
            Assert.assertNotNull(result.getCustomFields());
        } finally {
            userManager.deleteUser(result.getId());
        }
    }

    @Test
    public void testLockUser() throws RedmineException {
        User user = userManager.getUserById(nonAdminUserId);
        user.setStatus(User.STATUS_LOCKED);
        userManager.update(user);

        user = userManager.getUserById(nonAdminUserId);
        Assert.assertEquals(User.STATUS_LOCKED, user.getStatus());

        user.setStatus(User.STATUS_ACTIVE);
        userManager.update(user);

        user = userManager.getUserById(nonAdminUserId);
        Assert.assertEquals(User.STATUS_ACTIVE, user.getStatus());
    }

    // manager authenticated as the shared non-admin user
    private RedmineManager getNonAdminManager() {
        return RedmineManagerFactory.createWithUserAuth(IntegrationTestHelper.getTestConfig().getURI(),
                nonAdminUserLogin, nonAdminPassword);
    }
}
/* * Copyright 2002-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.orm.hibernate4; import java.sql.Connection; import java.sql.ResultSet; import javax.sql.DataSource; import org.hibernate.ConnectionReleaseMode; import org.hibernate.FlushMode; import org.hibernate.HibernateException; import org.hibernate.Interceptor; import org.hibernate.Session; import org.hibernate.SessionFactory; import org.hibernate.Transaction; import org.hibernate.engine.spi.SessionImplementor; import org.hibernate.engine.transaction.spi.TransactionContext; import org.springframework.beans.BeansException; import org.springframework.beans.factory.BeanFactory; import org.springframework.beans.factory.BeanFactoryAware; import org.springframework.beans.factory.InitializingBean; import org.springframework.dao.DataAccessException; import org.springframework.dao.DataAccessResourceFailureException; import org.springframework.jdbc.datasource.ConnectionHolder; import org.springframework.jdbc.datasource.DataSourceUtils; import org.springframework.jdbc.datasource.JdbcTransactionObjectSupport; import org.springframework.jdbc.datasource.TransactionAwareDataSourceProxy; import org.springframework.transaction.CannotCreateTransactionException; import org.springframework.transaction.IllegalTransactionStateException; import org.springframework.transaction.InvalidIsolationLevelException; import org.springframework.transaction.TransactionDefinition; import 
org.springframework.transaction.TransactionSystemException; import org.springframework.transaction.support.AbstractPlatformTransactionManager; import org.springframework.transaction.support.DefaultTransactionStatus; import org.springframework.transaction.support.ResourceTransactionManager; import org.springframework.transaction.support.TransactionSynchronizationManager; /** * {@link org.springframework.transaction.PlatformTransactionManager} * implementation for a single Hibernate {@link org.hibernate.SessionFactory}. * Binds a Hibernate Session from the specified factory to the thread, * potentially allowing for one thread-bound Session per factory. * {@code SessionFactory.getCurrentSession()} is required for Hibernate * access code that needs to support this transaction handling mechanism, * with the SessionFactory being configured with {@link SpringSessionContext}. * * <p>Supports custom isolation levels, and timeouts that get applied as * Hibernate transaction timeouts. * * <p>This transaction manager is appropriate for applications that use a single * Hibernate SessionFactory for transactional data access, but it also supports * direct DataSource access within a transaction (i.e. plain JDBC code working * with the same DataSource). This allows for mixing services which access Hibernate * and services which use plain JDBC (without being aware of Hibernate)! * Application code needs to stick to the same simple Connection lookup pattern as * with {@link org.springframework.jdbc.datasource.DataSourceTransactionManager} * (i.e. {@link org.springframework.jdbc.datasource.DataSourceUtils#getConnection} * or going through a * {@link org.springframework.jdbc.datasource.TransactionAwareDataSourceProxy}). * * <p>Note: To be able to register a DataSource's Connection for plain JDBC code, * this instance needs to be aware of the DataSource ({@link #setDataSource}). * The given DataSource should obviously match the one used by the given SessionFactory. 
* * <p>JTA (usually through {@link org.springframework.transaction.jta.JtaTransactionManager}) * is necessary for accessing multiple transactional resources within the same * transaction. The DataSource that Hibernate uses needs to be JTA-enabled in * such a scenario (see container setup). * * <p>This transaction manager supports nested transactions via JDBC 3.0 Savepoints. * The {@link #setNestedTransactionAllowed} "nestedTransactionAllowed"} flag defaults * to "false", though, as nested transactions will just apply to the JDBC Connection, * not to the Hibernate Session and its cached entity objects and related context. * You can manually set the flag to "true" if you want to use nested transactions * for JDBC access code which participates in Hibernate transactions (provided that * your JDBC driver supports Savepoints). <i>Note that Hibernate itself does not * support nested transactions! Hence, do not expect Hibernate access code to * semantically participate in a nested transaction.</i> * * <p><b>NOTE: Hibernate 4.2+ is strongly recommended for efficient transaction * management with Spring, in particular for transactional Spring JDBC access.</b> * * @author Juergen Hoeller * @since 3.1 * @see #setSessionFactory * @see #setDataSource * @see org.hibernate.SessionFactory#getCurrentSession() * @see org.springframework.jdbc.datasource.DataSourceUtils#getConnection * @see org.springframework.jdbc.datasource.DataSourceUtils#releaseConnection * @see org.springframework.jdbc.core.JdbcTemplate * @see org.springframework.jdbc.datasource.DataSourceTransactionManager * @see org.springframework.transaction.jta.JtaTransactionManager */ @SuppressWarnings("serial") public class HibernateTransactionManager extends AbstractPlatformTransactionManager implements ResourceTransactionManager, BeanFactoryAware, InitializingBean { private SessionFactory sessionFactory; private DataSource dataSource; private boolean autodetectDataSource = true; private boolean prepareConnection = true; 
private boolean allowResultAccessAfterCompletion = false; private boolean hibernateManagedSession = false; private Object entityInterceptor; /** * Just needed for entityInterceptorBeanName. * @see #setEntityInterceptorBeanName */ private BeanFactory beanFactory; /** * Create a new HibernateTransactionManager instance. * A SessionFactory has to be set to be able to use it. * @see #setSessionFactory */ public HibernateTransactionManager() { } /** * Create a new HibernateTransactionManager instance. * @param sessionFactory SessionFactory to manage transactions for */ public HibernateTransactionManager(SessionFactory sessionFactory) { this.sessionFactory = sessionFactory; afterPropertiesSet(); } /** * Set the SessionFactory that this instance should manage transactions for. */ public void setSessionFactory(SessionFactory sessionFactory) { this.sessionFactory = sessionFactory; } /** * Return the SessionFactory that this instance should manage transactions for. */ public SessionFactory getSessionFactory() { return this.sessionFactory; } /** * Set the JDBC DataSource that this instance should manage transactions for. * The DataSource should match the one used by the Hibernate SessionFactory: * for example, you could specify the same JNDI DataSource for both. * <p>If the SessionFactory was configured with LocalDataSourceConnectionProvider, * i.e. by Spring's LocalSessionFactoryBean with a specified "dataSource", * the DataSource will be auto-detected: You can still explicitly specify the * DataSource, but you don't need to in this case. * <p>A transactional JDBC Connection for this DataSource will be provided to * application code accessing this DataSource directly via DataSourceUtils * or JdbcTemplate. The Connection will be taken from the Hibernate Session. * <p>The DataSource specified here should be the target DataSource to manage * transactions for, not a TransactionAwareDataSourceProxy. 
Only data access * code may work with TransactionAwareDataSourceProxy, while the transaction * manager needs to work on the underlying target DataSource. If there's * nevertheless a TransactionAwareDataSourceProxy passed in, it will be * unwrapped to extract its target DataSource. * @see #setAutodetectDataSource * @see org.springframework.jdbc.datasource.TransactionAwareDataSourceProxy * @see org.springframework.jdbc.datasource.DataSourceUtils * @see org.springframework.jdbc.core.JdbcTemplate */ public void setDataSource(DataSource dataSource) { if (dataSource instanceof TransactionAwareDataSourceProxy) { // If we got a TransactionAwareDataSourceProxy, we need to perform transactions // for its underlying target DataSource, else data access code won't see // properly exposed transactions (i.e. transactions for the target DataSource). this.dataSource = ((TransactionAwareDataSourceProxy) dataSource).getTargetDataSource(); } else { this.dataSource = dataSource; } } /** * Return the JDBC DataSource that this instance manages transactions for. */ public DataSource getDataSource() { return this.dataSource; } /** * Set whether to autodetect a JDBC DataSource used by the Hibernate SessionFactory, * if set via LocalSessionFactoryBean's {@code setDataSource}. Default is "true". * <p>Can be turned off to deliberately ignore an available DataSource, in order * to not expose Hibernate transactions as JDBC transactions for that DataSource. * @see #setDataSource */ public void setAutodetectDataSource(boolean autodetectDataSource) { this.autodetectDataSource = autodetectDataSource; } /** * Set whether to prepare the underlying JDBC Connection of a transactional * Hibernate Session, that is, whether to apply a transaction-specific * isolation level and/or the transaction's read-only flag to the underlying * JDBC Connection. * <p>Default is "true". If you turn this flag off, the transaction manager * will not support per-transaction isolation levels anymore. 
It will not * call {@code Connection.setReadOnly(true)} for read-only transactions * anymore either. If this flag is turned off, no cleanup of a JDBC Connection * is required after a transaction, since no Connection settings will get modified. * @see java.sql.Connection#setTransactionIsolation * @see java.sql.Connection#setReadOnly */ public void setPrepareConnection(boolean prepareConnection) { this.prepareConnection = prepareConnection; } /** * Set whether to allow result access after completion, typically via Hibernate's * ScrollableResults mechanism. * <p>Default is "false". Turning this flag on enforces over-commit holdability on the * underlying JDBC Connection (if {@link #prepareConnection "prepareConnection"} is on) * and skips the disconnect-on-completion step. * @since 4.1.2 * @see java.sql.Connection#setHoldability * @see ResultSet#HOLD_CURSORS_OVER_COMMIT * @see #disconnectOnCompletion(Session) */ public void setAllowResultAccessAfterCompletion(boolean allowResultAccessAfterCompletion) { this.allowResultAccessAfterCompletion = allowResultAccessAfterCompletion; } /** * Set whether to operate on a Hibernate-managed Session instead of a * Spring-managed Session, that is, whether to obtain the Session through * Hibernate's {@link org.hibernate.SessionFactory#getCurrentSession()} * instead of {@link org.hibernate.SessionFactory#openSession()} (with a Spring * {@link org.springframework.transaction.support.TransactionSynchronizationManager} * check preceding it). * <p>Default is "false", i.e. using a Spring-managed Session: taking the current * thread-bound Session if available (e.g. in an Open-Session-in-View scenario), * creating a new Session for the current transaction otherwise. * <p>Switch this flag to "true" in order to enforce use of a Hibernate-managed Session. 
* Note that this requires {@link org.hibernate.SessionFactory#getCurrentSession()} * to always return a proper Session when called for a Spring-managed transaction; * transaction begin will fail if the {@code getCurrentSession()} call fails. * <p>This mode will typically be used in combination with a custom Hibernate * {@link org.hibernate.context.spi.CurrentSessionContext} implementation that stores * Sessions in a place other than Spring's TransactionSynchronizationManager. * It may also be used in combination with Spring's Open-Session-in-View support * (using Spring's default {@link SpringSessionContext}), in which case it subtly * differs from the Spring-managed Session mode: The pre-bound Session will <i>not</i> * receive a {@code clear()} call (on rollback) or a {@code disconnect()} * call (on transaction completion) in such a scenario; this is rather left up * to a custom CurrentSessionContext implementation (if desired). */ public void setHibernateManagedSession(boolean hibernateManagedSession) { this.hibernateManagedSession = hibernateManagedSession; } /** * Set the bean name of a Hibernate entity interceptor that allows to inspect * and change property values before writing to and reading from the database. * Will get applied to any new Session created by this transaction manager. * <p>Requires the bean factory to be known, to be able to resolve the bean * name to an interceptor instance on session creation. Typically used for * prototype interceptors, i.e. a new interceptor instance per session. * <p>Can also be used for shared interceptor instances, but it is recommended * to set the interceptor reference directly in such a scenario. 
* @param entityInterceptorBeanName the name of the entity interceptor in * the bean factory * @see #setBeanFactory * @see #setEntityInterceptor */ public void setEntityInterceptorBeanName(String entityInterceptorBeanName) { this.entityInterceptor = entityInterceptorBeanName; } /** * Set a Hibernate entity interceptor that allows to inspect and change * property values before writing to and reading from the database. * Will get applied to any new Session created by this transaction manager. * <p>Such an interceptor can either be set at the SessionFactory level, * i.e. on LocalSessionFactoryBean, or at the Session level, i.e. on * HibernateTransactionManager. * @see LocalSessionFactoryBean#setEntityInterceptor */ public void setEntityInterceptor(Interceptor entityInterceptor) { this.entityInterceptor = entityInterceptor; } /** * Return the current Hibernate entity interceptor, or {@code null} if none. * Resolves an entity interceptor bean name via the bean factory, * if necessary. * @throws IllegalStateException if bean name specified but no bean factory set * @throws BeansException if bean name resolution via the bean factory failed * @see #setEntityInterceptor * @see #setEntityInterceptorBeanName * @see #setBeanFactory */ public Interceptor getEntityInterceptor() throws IllegalStateException, BeansException { if (this.entityInterceptor instanceof Interceptor) { return (Interceptor) entityInterceptor; } else if (this.entityInterceptor instanceof String) { if (this.beanFactory == null) { throw new IllegalStateException("Cannot get entity interceptor via bean name if no bean factory set"); } String beanName = (String) this.entityInterceptor; return this.beanFactory.getBean(beanName, Interceptor.class); } else { return null; } } /** * The bean factory just needs to be known for resolving entity interceptor * bean names. It does not need to be set for any other mode of operation. 
* @see #setEntityInterceptorBeanName */ @Override public void setBeanFactory(BeanFactory beanFactory) { this.beanFactory = beanFactory; } @Override public void afterPropertiesSet() { if (getSessionFactory() == null) { throw new IllegalArgumentException("Property 'sessionFactory' is required"); } if (this.entityInterceptor instanceof String && this.beanFactory == null) { throw new IllegalArgumentException("Property 'beanFactory' is required for 'entityInterceptorBeanName'"); } // Check for SessionFactory's DataSource. if (this.autodetectDataSource && getDataSource() == null) { DataSource sfds = SessionFactoryUtils.getDataSource(getSessionFactory()); if (sfds != null) { // Use the SessionFactory's DataSource for exposing transactions to JDBC code. if (logger.isInfoEnabled()) { logger.info("Using DataSource [" + sfds + "] of Hibernate SessionFactory for HibernateTransactionManager"); } setDataSource(sfds); } } } @Override public Object getResourceFactory() { return getSessionFactory(); } @Override protected Object doGetTransaction() { HibernateTransactionObject txObject = new HibernateTransactionObject(); txObject.setSavepointAllowed(isNestedTransactionAllowed()); SessionHolder sessionHolder = (SessionHolder) TransactionSynchronizationManager.getResource(getSessionFactory()); if (sessionHolder != null) { if (logger.isDebugEnabled()) { logger.debug("Found thread-bound Session [" + sessionHolder.getSession() + "] for Hibernate transaction"); } txObject.setSessionHolder(sessionHolder); } else if (this.hibernateManagedSession) { try { Session session = this.sessionFactory.getCurrentSession(); if (logger.isDebugEnabled()) { logger.debug("Found Hibernate-managed Session [" + session + "] for Spring-managed transaction"); } txObject.setExistingSession(session); } catch (HibernateException ex) { throw new DataAccessResourceFailureException( "Could not obtain Hibernate-managed Session for Spring-managed transaction", ex); } } if (getDataSource() != null) { ConnectionHolder 
conHolder = (ConnectionHolder) TransactionSynchronizationManager.getResource(getDataSource()); txObject.setConnectionHolder(conHolder); } return txObject; } @Override protected boolean isExistingTransaction(Object transaction) { HibernateTransactionObject txObject = (HibernateTransactionObject) transaction; return (txObject.hasSpringManagedTransaction() || (this.hibernateManagedSession && txObject.hasHibernateManagedTransaction())); } @Override protected void doBegin(Object transaction, TransactionDefinition definition) { HibernateTransactionObject txObject = (HibernateTransactionObject) transaction; if (txObject.hasConnectionHolder() && !txObject.getConnectionHolder().isSynchronizedWithTransaction()) { throw new IllegalTransactionStateException( "Pre-bound JDBC Connection found! HibernateTransactionManager does not support " + "running within DataSourceTransactionManager if told to manage the DataSource itself. " + "It is recommended to use a single HibernateTransactionManager for all transactions " + "on a single DataSource, no matter whether Hibernate or JDBC access."); } Session session = null; try { if (txObject.getSessionHolder() == null || txObject.getSessionHolder().isSynchronizedWithTransaction()) { Interceptor entityInterceptor = getEntityInterceptor(); Session newSession = (entityInterceptor != null ? getSessionFactory().withOptions().interceptor(entityInterceptor).openSession() : getSessionFactory().openSession()); if (logger.isDebugEnabled()) { logger.debug("Opened new Session [" + newSession + "] for Hibernate transaction"); } txObject.setSession(newSession); } session = txObject.getSessionHolder().getSession(); if (this.prepareConnection && isSameConnectionForEntireSession(session)) { // We're allowed to change the transaction settings of the JDBC Connection. 
if (logger.isDebugEnabled()) { logger.debug("Preparing JDBC Connection of Hibernate Session [" + session + "]"); } Connection con = ((SessionImplementor) session).connection(); Integer previousIsolationLevel = DataSourceUtils.prepareConnectionForTransaction(con, definition); txObject.setPreviousIsolationLevel(previousIsolationLevel); if (this.allowResultAccessAfterCompletion && !txObject.isNewSession()) { int currentHoldability = con.getHoldability(); if (currentHoldability != ResultSet.HOLD_CURSORS_OVER_COMMIT) { txObject.setPreviousHoldability(currentHoldability); con.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); } } } else { // Not allowed to change the transaction settings of the JDBC Connection. if (definition.getIsolationLevel() != TransactionDefinition.ISOLATION_DEFAULT) { // We should set a specific isolation level but are not allowed to... throw new InvalidIsolationLevelException( "HibernateTransactionManager is not allowed to support custom isolation levels: " + "make sure that its 'prepareConnection' flag is on (the default) and that the " + "Hibernate connection release mode is set to 'on_close' (the default for JDBC)."); } if (logger.isDebugEnabled()) { logger.debug("Not preparing JDBC Connection of Hibernate Session [" + session + "]"); } } if (definition.isReadOnly() && txObject.isNewSession()) { // Just set to MANUAL in case of a new Session for this transaction. session.setFlushMode(FlushMode.MANUAL); } if (!definition.isReadOnly() && !txObject.isNewSession()) { // We need AUTO or COMMIT for a non-read-only transaction. FlushMode flushMode = session.getFlushMode(); if (session.getFlushMode().equals(FlushMode.MANUAL)) { session.setFlushMode(FlushMode.AUTO); txObject.getSessionHolder().setPreviousFlushMode(flushMode); } } Transaction hibTx; // Register transaction timeout. 
int timeout = determineTimeout(definition); if (timeout != TransactionDefinition.TIMEOUT_DEFAULT) { // Use Hibernate's own transaction timeout mechanism on Hibernate 3.1+ // Applies to all statements, also to inserts, updates and deletes! hibTx = session.getTransaction(); hibTx.setTimeout(timeout); hibTx.begin(); } else { // Open a plain Hibernate transaction without specified timeout. hibTx = session.beginTransaction(); } // Add the Hibernate transaction to the session holder. txObject.getSessionHolder().setTransaction(hibTx); // Register the Hibernate Session's JDBC Connection for the DataSource, if set. if (getDataSource() != null) { Connection con = ((SessionImplementor) session).connection(); ConnectionHolder conHolder = new ConnectionHolder(con); if (timeout != TransactionDefinition.TIMEOUT_DEFAULT) { conHolder.setTimeoutInSeconds(timeout); } if (logger.isDebugEnabled()) { logger.debug("Exposing Hibernate transaction as JDBC transaction [" + con + "]"); } TransactionSynchronizationManager.bindResource(getDataSource(), conHolder); txObject.setConnectionHolder(conHolder); } // Bind the session holder to the thread. 
if (txObject.isNewSessionHolder()) { TransactionSynchronizationManager.bindResource(getSessionFactory(), txObject.getSessionHolder()); } txObject.getSessionHolder().setSynchronizedWithTransaction(true); } catch (Throwable ex) { if (txObject.isNewSession()) { try { if (session.getTransaction().isActive()) { session.getTransaction().rollback(); } } catch (Throwable ex2) { logger.debug("Could not rollback Session after failed transaction begin", ex); } finally { SessionFactoryUtils.closeSession(session); txObject.setSessionHolder(null); } } throw new CannotCreateTransactionException("Could not open Hibernate Session for transaction", ex); } } @Override protected Object doSuspend(Object transaction) { HibernateTransactionObject txObject = (HibernateTransactionObject) transaction; txObject.setSessionHolder(null); SessionHolder sessionHolder = (SessionHolder) TransactionSynchronizationManager.unbindResource(getSessionFactory()); txObject.setConnectionHolder(null); ConnectionHolder connectionHolder = null; if (getDataSource() != null) { connectionHolder = (ConnectionHolder) TransactionSynchronizationManager.unbindResource(getDataSource()); } return new SuspendedResourcesHolder(sessionHolder, connectionHolder); } @Override protected void doResume(Object transaction, Object suspendedResources) { SuspendedResourcesHolder resourcesHolder = (SuspendedResourcesHolder) suspendedResources; if (TransactionSynchronizationManager.hasResource(getSessionFactory())) { // From non-transactional code running in active transaction synchronization // -> can be safely removed, will be closed on transaction completion. 
TransactionSynchronizationManager.unbindResource(getSessionFactory()); } TransactionSynchronizationManager.bindResource(getSessionFactory(), resourcesHolder.getSessionHolder()); if (getDataSource() != null) { TransactionSynchronizationManager.bindResource(getDataSource(), resourcesHolder.getConnectionHolder()); } } @Override protected void doCommit(DefaultTransactionStatus status) { HibernateTransactionObject txObject = (HibernateTransactionObject) status.getTransaction(); if (status.isDebug()) { logger.debug("Committing Hibernate transaction on Session [" + txObject.getSessionHolder().getSession() + "]"); } try { txObject.getSessionHolder().getTransaction().commit(); } catch (org.hibernate.TransactionException ex) { // assumably from commit call to the underlying JDBC connection throw new TransactionSystemException("Could not commit Hibernate transaction", ex); } catch (HibernateException ex) { // assumably failed to flush changes to database throw convertHibernateAccessException(ex); } } @Override protected void doRollback(DefaultTransactionStatus status) { HibernateTransactionObject txObject = (HibernateTransactionObject) status.getTransaction(); if (status.isDebug()) { logger.debug("Rolling back Hibernate transaction on Session [" + txObject.getSessionHolder().getSession() + "]"); } try { txObject.getSessionHolder().getTransaction().rollback(); } catch (org.hibernate.TransactionException ex) { throw new TransactionSystemException("Could not roll back Hibernate transaction", ex); } catch (HibernateException ex) { // Shouldn't really happen, as a rollback doesn't cause a flush. throw convertHibernateAccessException(ex); } finally { if (!txObject.isNewSession() && !this.hibernateManagedSession) { // Clear all pending inserts/updates/deletes in the Session. // Necessary for pre-bound Sessions, to avoid inconsistent state. 
txObject.getSessionHolder().getSession().clear(); } } } @Override protected void doSetRollbackOnly(DefaultTransactionStatus status) { HibernateTransactionObject txObject = (HibernateTransactionObject) status.getTransaction(); if (status.isDebug()) { logger.debug("Setting Hibernate transaction on Session [" + txObject.getSessionHolder().getSession() + "] rollback-only"); } txObject.setRollbackOnly(); } @Override protected void doCleanupAfterCompletion(Object transaction) { HibernateTransactionObject txObject = (HibernateTransactionObject) transaction; // Remove the session holder from the thread. if (txObject.isNewSessionHolder()) { TransactionSynchronizationManager.unbindResource(getSessionFactory()); } // Remove the JDBC connection holder from the thread, if exposed. if (getDataSource() != null) { TransactionSynchronizationManager.unbindResource(getDataSource()); } Session session = txObject.getSessionHolder().getSession(); if (this.prepareConnection && session.isConnected() && isSameConnectionForEntireSession(session)) { // We're running with connection release mode "on_close": We're able to reset // the isolation level and/or read-only flag of the JDBC Connection here. // Else, we need to rely on the connection pool to perform proper cleanup. 
try { Connection con = ((SessionImplementor) session).connection(); Integer previousHoldability = txObject.getPreviousHoldability(); if (previousHoldability != null) { con.setHoldability(previousHoldability); } DataSourceUtils.resetConnectionAfterTransaction(con, txObject.getPreviousIsolationLevel()); } catch (HibernateException ex) { logger.debug("Could not access JDBC Connection of Hibernate Session", ex); } catch (Throwable ex) { logger.debug("Could not reset JDBC Connection after transaction", ex); } } if (txObject.isNewSession()) { if (logger.isDebugEnabled()) { logger.debug("Closing Hibernate Session [" + session + "] after transaction"); } SessionFactoryUtils.closeSession(session); } else { if (logger.isDebugEnabled()) { logger.debug("Not closing pre-bound Hibernate Session [" + session + "] after transaction"); } if (txObject.getSessionHolder().getPreviousFlushMode() != null) { session.setFlushMode(txObject.getSessionHolder().getPreviousFlushMode()); } if (!this.allowResultAccessAfterCompletion && !this.hibernateManagedSession) { disconnectOnCompletion(session); } } txObject.getSessionHolder().clear(); } /** * Disconnect a pre-existing Hibernate Session on transaction completion, * returning its database connection but preserving its entity state. * <p>The default implementation simply calls {@link Session#disconnect()}. * Subclasses may override this with a no-op or with fine-tuned disconnection logic. * @param session the Hibernate Session to disconnect * @since 4.1.2 * @see org.hibernate.Session#disconnect() */ protected void disconnectOnCompletion(Session session) { session.disconnect(); } /** * Return whether the given Hibernate Session will always hold the same * JDBC Connection. This is used to check whether the transaction manager * can safely prepare and clean up the JDBC Connection used for a transaction. * <p>The default implementation checks the Session's connection release mode * to be "on_close". 
* @param session the Hibernate Session to check * @see org.hibernate.engine.transaction.spi.TransactionContext#getConnectionReleaseMode() * @see org.hibernate.ConnectionReleaseMode#ON_CLOSE */ protected boolean isSameConnectionForEntireSession(Session session) { if (!(session instanceof TransactionContext)) { // The best we can do is to assume we're safe. return true; } ConnectionReleaseMode releaseMode = ((TransactionContext) session).getConnectionReleaseMode(); return ConnectionReleaseMode.ON_CLOSE.equals(releaseMode); } /** * Convert the given HibernateException to an appropriate exception * from the {@code org.springframework.dao} hierarchy. * <p>Will automatically apply a specified SQLExceptionTranslator to a * Hibernate JDBCException, else rely on Hibernate's default translation. * @param ex HibernateException that occurred * @return a corresponding DataAccessException * @see SessionFactoryUtils#convertHibernateAccessException */ protected DataAccessException convertHibernateAccessException(HibernateException ex) { return SessionFactoryUtils.convertHibernateAccessException(ex); } /** * Hibernate transaction object, representing a SessionHolder. * Used as transaction object by HibernateTransactionManager. 
*/ private class HibernateTransactionObject extends JdbcTransactionObjectSupport { private SessionHolder sessionHolder; private boolean newSessionHolder; private boolean newSession; private Integer previousHoldability; public void setSession(Session session) { this.sessionHolder = new SessionHolder(session); this.newSessionHolder = true; this.newSession = true; } public void setExistingSession(Session session) { this.sessionHolder = new SessionHolder(session); this.newSessionHolder = true; this.newSession = false; } public void setSessionHolder(SessionHolder sessionHolder) { this.sessionHolder = sessionHolder; this.newSessionHolder = false; this.newSession = false; } public SessionHolder getSessionHolder() { return this.sessionHolder; } public boolean isNewSessionHolder() { return this.newSessionHolder; } public boolean isNewSession() { return this.newSession; } public void setPreviousHoldability(Integer previousHoldability) { this.previousHoldability = previousHoldability; } public Integer getPreviousHoldability() { return this.previousHoldability; } public boolean hasSpringManagedTransaction() { return (this.sessionHolder != null && this.sessionHolder.getTransaction() != null); } public boolean hasHibernateManagedTransaction() { return (this.sessionHolder != null && this.sessionHolder.getSession().getTransaction().isActive()); } public void setRollbackOnly() { this.sessionHolder.setRollbackOnly(); if (hasConnectionHolder()) { getConnectionHolder().setRollbackOnly(); } } @Override public boolean isRollbackOnly() { return this.sessionHolder.isRollbackOnly() || (hasConnectionHolder() && getConnectionHolder().isRollbackOnly()); } @Override public void flush() { try { this.sessionHolder.getSession().flush(); } catch (HibernateException ex) { throw convertHibernateAccessException(ex); } } } /** * Holder for suspended resources. * Used internally by {@code doSuspend} and {@code doResume}. 
*/ private static class SuspendedResourcesHolder { private final SessionHolder sessionHolder; private final ConnectionHolder connectionHolder; private SuspendedResourcesHolder(SessionHolder sessionHolder, ConnectionHolder conHolder) { this.sessionHolder = sessionHolder; this.connectionHolder = conHolder; } private SessionHolder getSessionHolder() { return this.sessionHolder; } private ConnectionHolder getConnectionHolder() { return this.connectionHolder; } } }
package com.github.shynixn.astraledit.bukkit.logic.business.nms.v1_9_R2; import com.github.shynixn.astraledit.api.bukkit.business.entity.PacketArmorstand; import com.github.shynixn.astraledit.bukkit.logic.business.nms.NMSRegistry; import com.github.shynixn.astraledit.bukkit.logic.lib.ItemStackBuilder; import net.minecraft.server.v1_9_R2.*; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.craftbukkit.v1_9_R2.CraftWorld; import org.bukkit.craftbukkit.v1_9_R2.entity.CraftArmorStand; import org.bukkit.craftbukkit.v1_9_R2.entity.CraftPlayer; import org.bukkit.craftbukkit.v1_9_R2.inventory.CraftItemStack; import org.bukkit.entity.ArmorStand; import org.bukkit.entity.Player; import org.bukkit.util.EulerAngle; import java.util.Set; /** * Copyright 2017 Shynixn * <p> * Do not remove this header! * <p> * Version 1.0 * <p> * MIT License * <p> * Copyright (c) 2017 * <p> * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * <p> * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * <p> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/
/**
 * Client-side ("packet") armorstand for the v1_9_R2 NMS revision: the NMS
 * entity is never added to the world — its existence is communicated to the
 * owning player and the watcher set purely via spawn/teleport/destroy packets.
 */
public class DisplayArmorstand implements PacketArmorstand {
    // Owning player; nulled out in close() to release the reference.
    private Player player;
    private final EntityArmorStand armorStand;
    // Numeric block id represented by this stand (pre-1.13 magic value).
    private int storedId;
    // Block data value represented by this stand.
    private byte storedData;
    // Additional players that receive every packet besides the owner.
    private final Set<Player> watchers;

    /**
     * Initializes the armorstand
     *
     * @param player player
     * @param location location
     * @param id id
     * @param data data
     * @param watchers watchers
     */
    public DisplayArmorstand(Player player, Location location, int id, byte data, Set<Player> watchers) {
        super();
        this.watchers = watchers;
        this.player = player;
        // NOTE(review): the stand is created in the PLAYER's world, not
        // location.getWorld() — confirm callers never pass a location from a
        // different world.
        this.armorStand = new EntityArmorStand(((CraftWorld) player.getWorld()).getHandle());
        final NBTTagCompound compound = new NBTTagCompound();
        // NOTE(review): vanilla NBT spells this tag "Invulnerable" (capital I);
        // the lowercase key below is likely ignored on load — confirm intended.
        compound.setBoolean("invulnerable", true);
        compound.setBoolean("Invisible", true);
        compound.setBoolean("PersistenceRequired", true);
        compound.setBoolean("NoBasePlate", true);
        // a(NBTTagCompound) loads the entity state from the compound.
        this.armorStand.a(compound);
        this.armorStand.setLocation(location.getX(), location.getY(), location.getZ(), 0, 0);
        this.storedId = id;
        this.storedData = data;
        // Uses deprecated numeric material ids; the helmet shows the block
        // this stand represents.
        ItemStackBuilder stackBuilder = new ItemStackBuilder(Material.getMaterial(id), 1, data);
        this.getCraftEntity().setHelmet(stackBuilder.build());
        // Fold the body/legs out of the way so only the "block" helmet shows.
        this.getCraftEntity().setBodyPose(new EulerAngle(3.15, 0, 0));
        this.getCraftEntity().setLeftLegPose(new EulerAngle(3.15, 0, 0));
        this.getCraftEntity().setRightLegPose(new EulerAngle(3.15, 0, 0));
        this.getCraftEntity().setGlowing(true);
        // Non-item blocks (e.g. liquids) cannot be helmets; fall back to a
        // custom-skinned skull instead.
        if (((ArmorStand) this.armorStand.getBukkitEntity()).getHelmet().getType() == Material.AIR) {
            stackBuilder = new ItemStackBuilder(Material.SKULL_ITEM, 1, (short) 3);
            if (id == Material.WATER.getId() || id == Material.STATIONARY_WATER.getId()) {
                stackBuilder.setSkin(NMSRegistry.WATER_HEAD);
            } else if (id == Material.LAVA.getId() || id == Material.STATIONARY_LAVA.getId()) {
                stackBuilder.setSkin(NMSRegistry.LAVA_HEAD);
            } else {
                stackBuilder.setSkin(NMSRegistry.NOT_FOUND);
            }
            ((ArmorStand) this.armorStand.getBukkitEntity()).setHelmet(stackBuilder.build());
        }
    }

    /**
     * Spawns the armorstand by sending a spawn packet plus the helmet
     * equipment packet to the owner and all watchers.
     */
    @Override
    public void spawn() {
        final PacketPlayOutSpawnEntityLiving packetSpawn = new PacketPlayOutSpawnEntityLiving(this.armorStand);
        final PacketPlayOutEntityEquipment packetHead = new PacketPlayOutEntityEquipment(this.armorStand.getId(),
                EnumItemSlot.HEAD,
                CraftItemStack.asNMSCopy(((ArmorStand) this.armorStand.getBukkitEntity()).getHelmet()));
        this.sendPacket(packetSpawn);
        this.sendPacket(packetHead);
    }

    /**
     * Teleports the armorstand to the given location
     *
     * @param location location
     */
    @Override
    public void teleport(Location location) {
        this.armorStand.setPositionRotation(location.getX(), location.getY(), location.getZ(), location.getYaw(), location.getPitch());
        final PacketPlayOutEntityTeleport teleportPacket = new PacketPlayOutEntityTeleport(this.armorStand);
        this.sendPacket(teleportPacket);
    }

    /**
     * Removes the armorstand by sending a destroy packet to all viewers.
     */
    @Override
    public void remove() {
        final PacketPlayOutEntityDestroy destroyPacket = new PacketPlayOutEntityDestroy(this.armorStand.getId());
        this.sendPacket(destroyPacket);
    }

    /**
     * Returns the location of the armorstand
     *
     * @return location
     */
    @Override
    public Location getLocation() {
        return this.armorStand.getBukkitEntity().getLocation();
    }

    /**
     * Sets the pose of the head
     *
     * @param angle angle
     */
    @Override
    public void setHeadPose(EulerAngle angle) {
        ((ArmorStand) this.armorStand.getBukkitEntity()).setHeadPose(angle);
    }

    /**
     * Returns the pose of the head
     *
     * @return angle
     */
    @Override
    public EulerAngle getHeadPose() {
        return ((ArmorStand) this.armorStand.getBukkitEntity()).getHeadPose();
    }

    /**
     * Returns the stored block id
     *
     * @return id
     */
    @Override
    public int getStoredBlockId() {
        return this.storedId;
    }

    /**
     * Sets the stored block id
     *
     * @param id id
     */
    @Override
    public void setStoreBlockId(int id) {
        this.storedId = id;
    }

    /**
     * Returns the stored block data
     *
     * @return data
     */
    @Override
    public byte getStoredBlockData() {
        return this.storedData;
    }

    /**
     * Sets the stored block data
     *
     * @param data data
     */
    @Override
    public void setStoredBlockData(byte data) {
        this.storedData = data;
    }

    /**
     * Sends the packet to the owning player and every watcher.
     *
     * @param packet packet
     */
    private void sendPacket(Packet<?> packet) {
        this.sendPacket(packet, this.player);
        for (final Player player : this.watchers) {
            this.sendPacket(packet, player);
        }
    }

    /**
     * Sends the packet to a single player's connection.
     *
     * @param player player
     * @param packet packet
     */
    private void sendPacket(Packet<?> packet, Player player) {
        ((CraftPlayer) player).getHandle().playerConnection.sendPacket(packet);
    }

    /**
     * Returns the craftArmorstand (Bukkit wrapper around the NMS entity).
     *
     * @return stand
     */
    private CraftArmorStand getCraftEntity() {
        return (CraftArmorStand) this.armorStand.getBukkitEntity();
    }

    /**
     * Closes this resource, relinquishing any underlying resources.
     * This method is invoked automatically on objects managed by the
     * {@code try}-with-resources statement.
     * @throws Exception if this resource cannot be closed
     */
    @Override
    public void close() throws Exception {
        // Destroy the client-side entity, then drop the player reference.
        this.remove();
        this.player = null;
    }
}
/*
 * This file is generated by jOOQ.
 */
package sow.db.tables.records;


import java.sql.Timestamp;

import javax.annotation.Generated;

import org.jooq.Field;
import org.jooq.Record6;
import org.jooq.Row6;
import org.jooq.impl.TableRecordImpl;

import sow.db.tables.Proxies;


/**
 * This class is generated by jOOQ.
 *
 * NOTE(review): generated code — do not hand-edit; changes will be lost on the
 * next jOOQ code generation run. The "adress" spelling mirrors the database
 * column name and can only be fixed by renaming the column and regenerating.
 */
@Generated(
    value = {
        "http://www.jooq.org",
        "jOOQ version:3.11.11"
    },
    comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class ProxiesRecord extends TableRecordImpl<ProxiesRecord> implements Record6<Integer, String, Integer, Timestamp, Timestamp, Integer> {

    private static final long serialVersionUID = 169996803;

    /**
     * Setter for <code>public.proxies.id</code>.
     */
    public ProxiesRecord setId(Integer value) {
        set(0, value);
        return this;
    }

    /**
     * Getter for <code>public.proxies.id</code>.
     */
    public Integer getId() {
        return (Integer) get(0);
    }

    /**
     * Setter for <code>public.proxies.adress</code>.
     */
    public ProxiesRecord setAdress(String value) {
        set(1, value);
        return this;
    }

    /**
     * Getter for <code>public.proxies.adress</code>.
     */
    public String getAdress() {
        return (String) get(1);
    }

    /**
     * Setter for <code>public.proxies.port</code>.
     */
    public ProxiesRecord setPort(Integer value) {
        set(2, value);
        return this;
    }

    /**
     * Getter for <code>public.proxies.port</code>.
     */
    public Integer getPort() {
        return (Integer) get(2);
    }

    /**
     * Setter for <code>public.proxies.since</code>.
     */
    public ProxiesRecord setSince(Timestamp value) {
        set(3, value);
        return this;
    }

    /**
     * Getter for <code>public.proxies.since</code>.
     */
    public Timestamp getSince() {
        return (Timestamp) get(3);
    }

    /**
     * Setter for <code>public.proxies.to</code>.
     */
    public ProxiesRecord setTo(Timestamp value) {
        set(4, value);
        return this;
    }

    /**
     * Getter for <code>public.proxies.to</code>.
     */
    public Timestamp getTo() {
        return (Timestamp) get(4);
    }

    /**
     * Setter for <code>public.proxies.uses</code>.
     */
    public ProxiesRecord setUses(Integer value) {
        set(5, value);
        return this;
    }

    /**
     * Getter for <code>public.proxies.uses</code>.
     */
    public Integer getUses() {
        return (Integer) get(5);
    }

    // -------------------------------------------------------------------------
    // Record6 type implementation
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public Row6<Integer, String, Integer, Timestamp, Timestamp, Integer> fieldsRow() {
        return (Row6) super.fieldsRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Row6<Integer, String, Integer, Timestamp, Timestamp, Integer> valuesRow() {
        return (Row6) super.valuesRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Integer> field1() {
        return Proxies.PROXIES.ID;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field2() {
        return Proxies.PROXIES.ADRESS;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Integer> field3() {
        return Proxies.PROXIES.PORT;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Timestamp> field4() {
        return Proxies.PROXIES.SINCE;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Timestamp> field5() {
        return Proxies.PROXIES.TO;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Integer> field6() {
        return Proxies.PROXIES.USES;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer component1() {
        return getId();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String component2() {
        return getAdress();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer component3() {
        return getPort();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Timestamp component4() {
        return getSince();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Timestamp component5() {
        return getTo();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer component6() {
        return getUses();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer value1() {
        return getId();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value2() {
        return getAdress();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer value3() {
        return getPort();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Timestamp value4() {
        return getSince();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Timestamp value5() {
        return getTo();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer value6() {
        return getUses();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ProxiesRecord value1(Integer value) {
        setId(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ProxiesRecord value2(String value) {
        setAdress(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ProxiesRecord value3(Integer value) {
        setPort(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ProxiesRecord value4(Timestamp value) {
        setSince(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ProxiesRecord value5(Timestamp value) {
        setTo(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ProxiesRecord value6(Integer value) {
        setUses(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ProxiesRecord values(Integer value1, String value2, Integer value3, Timestamp value4, Timestamp value5, Integer value6) {
        value1(value1);
        value2(value2);
        value3(value3);
        value4(value4);
        value5(value5);
        value6(value6);
        return this;
    }

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    /**
     * Create a detached ProxiesRecord
     */
    public ProxiesRecord() {
        super(Proxies.PROXIES);
    }

    /**
     * Create a detached, initialised ProxiesRecord
     */
    public ProxiesRecord(Integer id, String adress, Integer port, Timestamp since, Timestamp to, Integer uses) {
        super(Proxies.PROXIES);

        set(0, id);
        set(1, adress);
        set(2, port);
        set(3, since);
        set(4, to);
        set(5, uses);
    }
}
/* * Copyright 2008-2009 LinkedIn, Inc * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package voldemort.store.metadata; import java.io.File; import java.io.IOException; import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import javax.management.MBeanOperationInfo; import org.apache.commons.io.FileUtils; import org.apache.log4j.Level; import org.apache.log4j.Logger; import voldemort.VoldemortException; import voldemort.annotations.jmx.JmxOperation; import voldemort.client.rebalance.RebalanceTaskInfo; import voldemort.cluster.Cluster; import voldemort.routing.RouteToAllStrategy; import voldemort.routing.RoutingStrategy; import voldemort.routing.RoutingStrategyFactory; import voldemort.server.rebalance.RebalancerState; import voldemort.store.AbstractStorageEngine; import voldemort.store.StorageEngine; import voldemort.store.Store; import voldemort.store.StoreCapabilityType; import voldemort.store.StoreDefinition; import voldemort.store.StoreUtils; import voldemort.store.configuration.ConfigurationStorageEngine; import voldemort.store.memory.InMemoryStorageEngine; import 
voldemort.store.system.SystemStoreConstants;
import voldemort.utils.ByteArray;
import voldemort.utils.ByteUtils;
import voldemort.utils.ClosableIterator;
import voldemort.utils.Pair;
import voldemort.utils.StoreDefinitionUtils;
import voldemort.utils.Utils;
import voldemort.versioning.Occurred;
import voldemort.versioning.VectorClock;
import voldemort.versioning.Version;
import voldemort.versioning.Versioned;
import voldemort.xml.ClusterMapper;
import voldemort.xml.StoreDefinitionsMapper;

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;

/**
 * MetadataStore maintains metadata for Voldemort Server. <br>
 * Metadata is persisted as strings in inner store for ease of readability.<br>
 * Metadata Store keeps an in memory write-through-cache for performance.
 */
public class MetadataStore extends AbstractStorageEngine<ByteArray, byte[], byte[]> {

    // Name under which this store registers itself.
    public static final String METADATA_STORE_NAME = "metadata";
    // Name of the sub-directory/engine holding one file per store definition.
    public static final String STORE_DEFINITIONS_STORE_NAME = "STORES";

    public static final String CLUSTER_KEY = "cluster.xml";
    public static final String STORES_KEY = "stores.xml";
    public static final String SYSTEM_STORES_KEY = "system.stores";
    public static final String SERVER_STATE_KEY = "server.state";
    public static final String NODE_ID_KEY = "node.id";
    public static final String SLOP_STREAMING_ENABLED_KEY = "slop.streaming.enabled";
    public static final String PARTITION_STREAMING_ENABLED_KEY = "partition.streaming.enabled";
    public static final String READONLY_FETCH_ENABLED_KEY = "readonly.fetch.enabled";
    public static final String QUOTA_ENFORCEMENT_ENABLED_KEY = "quota.enforcement.enabled";
    public static final String REBALANCING_STEAL_INFO = "rebalancing.steal.info.key";
    public static final String REBALANCING_SOURCE_CLUSTER_XML = "rebalancing.source.cluster.xml";
    public static final String REBALANCING_SOURCE_STORES_XML = "rebalancing.source.stores.xml";

    // Keys propagated between nodes by the gossip protocol.
    public static final Set<String> GOSSIP_KEYS = ImmutableSet.of(CLUSTER_KEY, STORES_KEY);

    // Keys that must exist for the server to start.
    public static final Set<String> REQUIRED_KEYS = ImmutableSet.of(CLUSTER_KEY, STORES_KEY);

    // Keys that receive defaults when absent (see init elsewhere in this class).
    public static final Set<String> OPTIONAL_KEYS = ImmutableSet.of(SERVER_STATE_KEY,
                                                                    NODE_ID_KEY,
                                                                    SLOP_STREAMING_ENABLED_KEY,
                                                                    PARTITION_STREAMING_ENABLED_KEY,
                                                                    READONLY_FETCH_ENABLED_KEY,
                                                                    QUOTA_ENFORCEMENT_ENABLED_KEY,
                                                                    REBALANCING_STEAL_INFO,
                                                                    REBALANCING_SOURCE_CLUSTER_XML,
                                                                    REBALANCING_SOURCE_STORES_XML);

    // Union of required and optional keys: every key put()/get() will accept
    // besides individual store names.
    public static final Set<Object> METADATA_KEYS = ImmutableSet.builder()
                                                                .addAll(REQUIRED_KEYS)
                                                                .addAll(OPTIONAL_KEYS)
                                                                .build();

    // helper keys for metadataCacheOnly (never persisted to the inner store)
    private static final String ROUTING_STRATEGY_KEY = "routing.strategy";
    private static final String SYSTEM_ROUTING_STRATEGY_KEY = "system.routing.strategy";

    /**
     * Identifies the Voldemort server state.
     *
     * NORMAL_SERVER is the default state; OFFLINE_SERVER is where online
     * services and slop pushing are turned off, only admin operations
     * permitted; REBALANCING_MASTER_SERVER is the server state during the
     * rebalancing operation.
     */
    public static enum VoldemortState {
        NORMAL_SERVER,
        OFFLINE_SERVER,
        REBALANCING_MASTER_SERVER
    }

    // Persistent backing store for the metadata keys (string-serialized).
    private final Store<String, String, String> innerStore;
    // Write-through cache: key -> deserialized object, guarded by 'lock'.
    private final Map<String, Versioned<Object>> metadataCache;

    // One entry (file) per store definition, keyed by store name.
    private final StorageEngine<String, String, String> storeDefinitionsStorageEngine;
    // Names of all currently defined stores; rebuilt by initStoreDefinitions.
    private final List<String> storeNames;

    private static final ClusterMapper clusterMapper = new ClusterMapper();
    private static final StoreDefinitionsMapper storeMapper = new StoreDefinitionsMapper();
    private static final RoutingStrategyFactory routingFactory = new RoutingStrategyFactory();

    // Guards mutations made to non-scalar objects e.g., lists stored in
    // innerStore
    private final ReadWriteLock lock = new ReentrantReadWriteLock();
    public final Lock readLock = lock.readLock();
    public final Lock writeLock = lock.writeLock();

    // Listeners notified when a store's definition or routing strategy changes.
    private final Map<String, List<MetadataStoreListener>> storeNameTolisteners;

    private static final Logger logger = Logger.getLogger(MetadataStore.class);

    /**
     * Builds a MetadataStore on top of the given inner store and store
     * definitions engine, then loads everything into the cache via init().
     */
    public MetadataStore(Store<String, String, String> innerStore,
                         StorageEngine<String, String, String> storeDefinitionsStorageEngine,
                         int nodeId) {
        super(innerStore.getName());
        this.innerStore = innerStore;
        this.metadataCache = new HashMap<String, Versioned<Object>>();
        this.storeNameTolisteners = new ConcurrentHashMap<String, List<MetadataStoreListener>>();
        this.storeDefinitionsStorageEngine = storeDefinitionsStorageEngine;
        this.storeNames = new ArrayList<String>();

        init(nodeId);
    }

    // This constructor is used exclusively by tests
    public MetadataStore(Store<String, String, String> innerStore, int nodeId) {
        super(innerStore.getName());
        this.innerStore = innerStore;
        this.metadataCache = new HashMap<String, Versioned<Object>>();
        this.storeNameTolisteners = new ConcurrentHashMap<String, List<MetadataStoreListener>>();
        this.storeNames = new ArrayList<String>();

        // Build an in-memory store-definitions engine from the stores.xml
        // value held by the inner store.
        StorageEngine<String, String, String> storesRepo = new InMemoryStorageEngine<String, String, String>("stores-repo");
        List<Versioned<String>> versionedStoreList = innerStore.get(STORES_KEY, "");
        if(versionedStoreList != null) {
            // NOTE(review): assumes a non-empty result list when non-null;
            // get(0) would throw on an empty list — confirm inner store's
            // contract for absent keys.
            String stores = versionedStoreList.get(0).getValue();
            StoreDefinitionsMapper mapper = new StoreDefinitionsMapper();
            List<StoreDefinition> storeDefinitions = mapper.readStoreList(new StringReader(stores));
            for(StoreDefinition storeDef: storeDefinitions) {
                Versioned<String> versionedStoreValue = new Versioned<String>(mapper.writeStore(storeDef));
                storesRepo.put(storeDef.getName(), versionedStoreValue, null);
            }
        }
        this.storeDefinitionsStorageEngine = storesRepo;

        init(nodeId);
    }

    /**
     * Registers a listener to be notified when the named store's definition
     * or routing strategy changes.
     */
    public void addMetadataStoreListener(String storeName, MetadataStoreListener listener) {
        // NOTE(review): storeNameTolisteners is final and always initialized,
        // so this check can never fire; the message also describes the
        // listener argument, which is never actually null-checked — confirm
        // intent.
        if(this.storeNameTolisteners == null)
            throw new VoldemortException("MetadataStoreListener must be non-null");

        if(!this.storeNameTolisteners.containsKey(storeName))
            this.storeNameTolisteners.put(storeName, new ArrayList<MetadataStoreListener>(2));
        this.storeNameTolisteners.get(storeName).add(listener);
    }

    /**
     * Drops all listeners registered for the named store.
     */
    public void removeMetadataStoreListener(String storeName) {
        // NOTE(review): same dead null-check as addMetadataStoreListener.
        if(this.storeNameTolisteners == null)
            throw new VoldemortException("MetadataStoreListener must be non-null");

        this.storeNameTolisteners.remove(storeName);
    }

    /**
     * Builds a MetadataStore from an on-disk config directory, creating the
     * STORES sub-directory from stores.xml if it does not exist yet.
     */
    public static MetadataStore readFromDirectory(File dir, int nodeId) {
        if(!Utils.isReadableDir(dir))
            throw new IllegalArgumentException("Metadata directory " + dir.getAbsolutePath()
                                               + " does not exist or can not be read.");

        String storeDefDirPath = dir.getAbsolutePath() + File.separator
                                 + MetadataStore.STORE_DEFINITIONS_STORE_NAME;

        // If config directory does not contain STORES sub directory, then
        // create one by parsing the stores.xml file
        // NOTE(review): dir.list() may return null (I/O error), in which case
        // Arrays.asList(null) throws NPE before the null check below ever
        // runs — the check should be on dir.list() itself.
        List<String> configurationFiles = Arrays.asList(dir.list());
        if(configurationFiles == null)
            throw new IllegalArgumentException("No configuration found in " + dir.getAbsolutePath()
                                               + ".");
        if(!configurationFiles.contains(STORE_DEFINITIONS_STORE_NAME)) {
            // parse stores.xml and create STORES sub-dir
            StoreDefinitionsMapper mapper = new StoreDefinitionsMapper();
            List<StoreDefinition> storeDefinitions = null;
            try {
                storeDefinitions = mapper.readStoreList(new File(dir.getAbsolutePath()
                                                                 + File.separator + STORES_KEY));
            } catch(IOException e) {
                throw new VoldemortException("Cannot parse the store definitions from " + STORES_KEY
                                             + " file ", e);
            }
            if(storeDefinitions == null) {
                throw new VoldemortException("Neither STORES nor stores.xml exist in the config directory");
            }

            // Create the STORES sub directory
            File storeDefinitionsDir = new File(storeDefDirPath);
            if(!storeDefinitionsDir.mkdir()) {
                throw new VoldemortException("Unable to create " + STORE_DEFINITIONS_STORE_NAME
                                             + " sub directory");
            }
            // Write one file per store definition into the new sub-directory.
            for(StoreDefinition storeDef: storeDefinitions) {
                try {
                    FileUtils.writeStringToFile(new File(storeDefDirPath + File.separator
                                                         + storeDef.getName()),
                                                mapper.writeStore(storeDef));
                } catch(IOException e) {
                    throw new VoldemortException("Cannot write store definition to file: "
                                                 + storeDef.getName(), e);
                }
            }
        }

        // Create a STORES configuration engine for STORES sub-directory
        StorageEngine<String, String, String> storesEngine = new ConfigurationStorageEngine(MetadataStore.STORE_DEFINITIONS_STORE_NAME,
                                                                                            storeDefDirPath);

        Store<String, String, String> innerStore = new ConfigurationStorageEngine(MetadataStore.METADATA_STORE_NAME,
                                                                                  dir.getAbsolutePath());

        return new MetadataStore(innerStore, storesEngine, nodeId);
    }

    @Override
    public String getName() {
        return METADATA_STORE_NAME;
    }

    /**
     * Validates a proposed stores.xml value: rejects duplicate store names and
     * warns (but does not fail) when stores would be added or removed relative
     * to the current set.
     */
    @SuppressWarnings("unchecked")
    public void validate(ByteArray keyBytes, Versioned<byte[]> valueBytes, byte[] transforms)
            throws VoldemortException {
        String key = ByteUtils.getString(keyBytes.get(), "UTF-8");
        Versioned<String> value = new Versioned<String>(ByteUtils.getString(valueBytes.getValue(),
                                                                            "UTF-8"),
                                                        valueBytes.getVersion());
        Versioned<Object> valueObject = convertStringToObject(key, value);
        if(key.equals(MetadataStore.STORES_KEY)) {
            List<StoreDefinition> storeDefinitions = (List<StoreDefinition>) valueObject.getValue();
            Set<String> existingStores = new HashSet<String>(this.storeNames);
            Set<String> specifiedStoreNames = new HashSet<String>();
            for(StoreDefinition storeDef: storeDefinitions) {
                String storeName = storeDef.getName();
                if(specifiedStoreNames.contains(storeName)) {
                    throw new VoldemortException(" Duplicate store names in Stores.xml for storeName "
                                                 + storeName);
                }
                specifiedStoreNames.add(storeName);
            }
            existingStores.removeAll(specifiedStoreNames);

            // Theoretically, add or delete stores in set metadata should throw
            // an error. But for operations we use it from time to time to block
            // access to some stores by removing a store from metadata and
            // adding it back to allow again.
            if(existingStores.size() > 0) {
                logger.warn(" Set metadata does not support store deletion. This will leave the store in an "
                            + "inconsistent state. Stores (Inconsistent) missing in set metadata "
                            + Arrays.toString(existingStores.toArray()));
            }

            specifiedStoreNames.removeAll(this.storeNames);
            if(specifiedStoreNames.size() > 0) {
                logger.warn(" Set metadata does not support store addition . This will leave the store in an "
                            + "inconsistent state. Stores (Inconsistent) added in set metadata "
                            + Arrays.toString(specifiedStoreNames.toArray()));
            }
        }
    }

    /**
     * helper function to convert strings to bytes as needed.
     *
     * @param key
     * @param value
     */
    // NOTE(review): the javadoc above is stale — this is the write-through
    // put for a single store definition, the whole stores.xml, or one of the
    // scalar metadata keys; it updates the inner store, the STORES engine,
    // the cache and the routing strategies under the write lock.
    @SuppressWarnings("unchecked")
    public void put(String key, Versioned<Object> value) {
        // acquire write lock
        writeLock.lock();

        try {
            if(this.storeNames.contains(key) || key.equals(STORES_KEY)) {

                // NOTE(review): this cast expects the value to be a
                // List<StoreDefinition> even when the key is a single store
                // name — confirm callers always pass a list here.
                List<StoreDefinition> storeDefinitions = (List<StoreDefinition>) value.getValue();

                // Check for backwards compatibility
                StoreDefinitionUtils.validateSchemasAsNeeded(storeDefinitions);

                // If the put is on the entire stores.xml key, delete the
                // additional stores which do not exist in the specified
                // stores.xml
                Set<String> storeNamesToDelete = new HashSet<String>();
                for(String storeName: this.storeNames) {
                    storeNamesToDelete.add(storeName);
                }

                // Add / update the list of store definitions specified in the
                // value
                StoreDefinitionsMapper mapper = new StoreDefinitionsMapper();

                // Update the STORES directory and the corresponding entry in
                // metadata cache
                Set<String> specifiedStoreNames = new HashSet<String>();
                for(StoreDefinition storeDef: storeDefinitions) {
                    specifiedStoreNames.add(storeDef.getName());
                    String storeDefStr = mapper.writeStore(storeDef);
                    Versioned<String> versionedValueStr = new Versioned<String>(storeDefStr,
                                                                                value.getVersion());
                    this.storeDefinitionsStorageEngine.put(storeDef.getName(),
                                                           versionedValueStr,
                                                           "");

                    // Update the metadata cache
                    this.metadataCache.put(storeDef.getName(), new Versioned<Object>(storeDefStr,
                                                                                     value.getVersion()));
                }

                if(key.equals(STORES_KEY)) {
                    storeNamesToDelete.removeAll(specifiedStoreNames);
                    resetStoreDefinitions(storeNamesToDelete);
                }

                // Re-initialize the store definitions
                initStoreDefinitions(value.getVersion());

                // Update routing strategies
                updateRoutingStrategies(getCluster(), getStoreDefList());
            } else if(METADATA_KEYS.contains(key)) {
                // try inserting into inner store first
                putInner(key, convertObjectToString(key, value));

                // cache all keys if innerStore put succeeded
                metadataCache.put(key, value);

                // do special stuff if needed
                if(CLUSTER_KEY.equals(key)) {
                    updateRoutingStrategies((Cluster) value.getValue(), getStoreDefList());
                } else if(SYSTEM_STORES_KEY.equals(key))
                    throw new VoldemortException("Cannot overwrite system store definitions");

            } else {
                throw new VoldemortException("Unhandled Key:" + key + " for MetadataStore put()");
            }
        } finally {
            writeLock.unlock();
        }
    }

    /**
     * Function to update store definitions. Unlike the put method, this
     * function does not delete any existing state. It only updates the state of
     * the stores specified in the given stores.xml
     *
     * @param valueBytes specifies the bytes of the stores.xml containing
     *        updates for the specified stores
     */
    @SuppressWarnings("unchecked")
    public void updateStoreDefinitions(Versioned<byte[]> valueBytes) {
        // acquire write lock
        writeLock.lock();

        try {
            Versioned<String> value = new Versioned<String>(ByteUtils.getString(valueBytes.getValue(),
                                                                                "UTF-8"),
                                                            valueBytes.getVersion());
            Versioned<Object> valueObject = convertStringToObject(STORES_KEY, value);
            StoreDefinitionsMapper mapper = new StoreDefinitionsMapper();
            List<StoreDefinition> storeDefinitions = (List<StoreDefinition>) valueObject.getValue();

            // Check for backwards compatibility
            StoreDefinitionUtils.validateSchemasAsNeeded(storeDefinitions);

            // Go through each store definition and do a corresponding put
            for(StoreDefinition storeDef: storeDefinitions) {
                if(!this.storeNames.contains(storeDef.getName())) {
                    throw new VoldemortException("Cannot update a store which does not exist !");
                }

                String storeDefStr = mapper.writeStore(storeDef);
                Versioned<String> versionedValueStr = new Versioned<String>(storeDefStr,
                                                                            value.getVersion());
                this.storeDefinitionsStorageEngine.put(storeDef.getName(), versionedValueStr, "");

                // Update the metadata cache
                this.metadataCache.put(storeDef.getName(), new Versioned<Object>(storeDefStr,
                                                                                 value.getVersion()));
            }

            // Re-initialize the store definitions
            initStoreDefinitions(value.getVersion());

            // Update routing strategies
            // TODO: Make this more fine grained.. i.e only update listeners for
            // a specific store.
            updateRoutingStrategies(getCluster(), getStoreDefList());
        } finally {
            writeLock.unlock();
        }
    }

    /**
     * helper function to read current version and put() after incrementing it
     * for local node.
     *
     * @param key
     * @param value
     */
    public void put(String key, Object value) {
        // acquire write lock
        writeLock.lock();

        try {
            if(METADATA_KEYS.contains(key)) {
                // Read the current vector clock, bump this node's entry, and
                // delegate to the versioned put.
                VectorClock version = (VectorClock) get(key, null).get(0).getVersion();
                put(key,
                    new Versioned<Object>(value, version.incremented(getNodeId(),
                                                                     System.currentTimeMillis())));
            } else {
                throw new VoldemortException("Unhandled Key:" + key + " for MetadataStore put()");
            }
        } finally {
            writeLock.unlock();
        }
    }

    /**
     * A write through put to inner-store.
     *
     * @param keyBytes : keyName strings serialized as bytes eg. 'cluster.xml'
     * @param valueBytes : versioned byte[] eg. UTF bytes for cluster xml
     *        definitions
     * @throws VoldemortException
     */
    @Override
    public void put(ByteArray keyBytes, Versioned<byte[]> valueBytes, byte[] transforms)
            throws VoldemortException {
        // acquire write lock
        writeLock.lock();
        try {
            // Deserialize key and value, then delegate to the object-level put.
            String key = ByteUtils.getString(keyBytes.get(), "UTF-8");
            Versioned<String> value = new Versioned<String>(ByteUtils.getString(valueBytes.getValue(),
                                                                                "UTF-8"),
                                                            valueBytes.getVersion());

            Versioned<Object> valueObject = convertStringToObject(key, value);

            this.put(key, valueObject);
        } finally {
            writeLock.unlock();
        }
    }

    @Override
    public void close() throws VoldemortException {
        innerStore.close();
    }

    @Override
    public Object getCapability(StoreCapabilityType capability) {
        return innerStore.getCapability(capability);
    }

    /**
     * @param keyBytes : keyName strings serialized as bytes eg. 'cluster.xml'
     * @return List of values (only 1 for Metadata) versioned byte[] eg.
UTF * bytes for cluster xml definitions * @throws VoldemortException */ @Override public List<Versioned<byte[]>> get(ByteArray keyBytes, byte[] transforms) throws VoldemortException { // acquire read lock readLock.lock(); try { // get a read lock this prevents any sort of interleaving\ // especially critical during reebalance when we set the new cluster // and store xml String key = ByteUtils.getString(keyBytes.get(), "UTF-8"); if(METADATA_KEYS.contains(key) || this.storeNames.contains(key)) { List<Versioned<byte[]>> values = Lists.newArrayList(); // Get the cached value and convert to string Versioned<String> value = convertObjectToString(key, metadataCache.get(key)); // Metadata debugging information if(logger.isTraceEnabled()) logger.trace("Key " + key + " requested, returning: " + value.getValue()); values.add(new Versioned<byte[]>(ByteUtils.getBytes(value.getValue(), "UTF-8"), value.getVersion())); return values; } else { throw new VoldemortException("Unhandled Key:" + key + " for MetadataStore get()"); } } catch(Exception e) { throw new VoldemortException("Failed to read metadata key:" + ByteUtils.getString(keyBytes.get(), "UTF-8") + " delete config/.temp config/.version directories and restart.", e); } finally { readLock.unlock(); } } public List<Versioned<byte[]>> get(String key, String transforms) throws VoldemortException { // acquire read lock readLock.lock(); try { return get(new ByteArray(ByteUtils.getBytes(key, "UTF-8")), transforms == null ? 
    // Tail of a method that begins before this chunk: serializes the
    // (possibly null) transforms argument as UTF-8 bytes and releases the
    // read lock taken at the start of that method.
    null : ByteUtils.getBytes(transforms, "UTF-8"));
        } finally {
            readLock.unlock();
        }
    }

    /**
     * Deletes every optional (rebalancing-related) metadata key except the
     * node id from the inner store, then re-initializes the cache for this
     * node. Exposed as a JMX operation.
     */
    @JmxOperation(description = "Clean all rebalancing server/cluster states from this node.", impact = MBeanOperationInfo.ACTION)
    public void cleanAllRebalancingState() {
        // acquire write lock
        writeLock.lock();
        try {
            for(String key: OPTIONAL_KEYS) {
                if(!key.equals(NODE_ID_KEY))
                    // each metadata key holds exactly one version; delete it
                    innerStore.delete(key,
                                      getVersions(new ByteArray(ByteUtils.getBytes(key, "UTF-8"))).get(0));
            }
            init(getNodeId());
        } finally {
            writeLock.unlock();
        }
    }

    /** Returns the versions of all values currently stored under {@code key}. */
    @Override
    public List<Version> getVersions(ByteArray key) {
        // acquire read lock
        readLock.lock();
        try {
            List<Versioned<byte[]>> values = get(key, null);
            List<Version> versions = new ArrayList<Version>(values.size());
            for(Versioned<?> value: values) {
                versions.add(value.getVersion());
            }
            return versions;
        } finally {
            readLock.unlock();
        }
    }

    /** Returns the cached cluster topology (CLUSTER_KEY). */
    public Cluster getCluster() {
        // acquire read lock
        readLock.lock();
        try {
            return (Cluster) metadataCache.get(CLUSTER_KEY).getValue();
        } finally {
            readLock.unlock();
        }
    }

    /** Returns the cached list of regular store definitions (STORES_KEY). */
    @SuppressWarnings("unchecked")
    public List<StoreDefinition> getStoreDefList() {
        // acquire read lock
        readLock.lock();
        try {
            return (List<StoreDefinition>) metadataCache.get(STORES_KEY).getValue();
        } finally {
            readLock.unlock();
        }
    }

    /** Returns the cached list of system store definitions (SYSTEM_STORES_KEY). */
    @SuppressWarnings("unchecked")
    public List<StoreDefinition> getSystemStoreDefList() {
        // acquire read lock
        readLock.lock();
        try {
            return (List<StoreDefinition>) metadataCache.get(SYSTEM_STORES_KEY).getValue();
        } finally {
            readLock.unlock();
        }
    }

    /** Returns this server's node id from the metadata cache. */
    public int getNodeId() {
        // acquire read lock
        readLock.lock();
        try {
            return (Integer) (metadataCache.get(NODE_ID_KEY).getValue());
        } finally {
            readLock.unlock();
        }
    }

    /**
     * Looks up a single store definition by name.
     *
     * @param storeName name of the store to look up
     * @return the matching store definition
     * @throws VoldemortException if no store with that name is known
     */
    public StoreDefinition getStoreDef(String storeName) {
        // acquire read lock
        readLock.lock();
        try {
            List<StoreDefinition> storeDefs = getStoreDefList();
            for(StoreDefinition storeDef: storeDefs) {
                if(storeDef.getName().equals(storeName))
                    return storeDef;
            }
            throw new VoldemortException("Store " + storeName + " not found in MetadataStore");
        } finally {
            readLock.unlock();
        }
    }

    /** Returns the server state (e.g. NORMAL / REBALANCING / OFFLINE) under the read lock. */
    public VoldemortState getServerStateLocked() {
        // acquire read lock
        readLock.lock();
        try {
            return VoldemortState.valueOf(metadataCache.get(SERVER_STATE_KEY).getValue().toString());
        } finally {
            readLock.unlock();
        }
    }

    /** Lock-free variant of {@link #getServerStateLocked()}; may observe a stale value. */
    public VoldemortState getServerStateUnlocked() {
        return VoldemortState.valueOf(metadataCache.get(SERVER_STATE_KEY).getValue().toString());
    }

    /** Whether slop streaming is enabled, read under the read lock. */
    public boolean getSlopStreamingEnabledLocked() {
        // acquire read lock
        readLock.lock();
        try {
            return Boolean.parseBoolean(metadataCache.get(SLOP_STREAMING_ENABLED_KEY)
                                                     .getValue()
                                                     .toString());
        } finally {
            readLock.unlock();
        }
    }

    /** Lock-free variant of {@link #getSlopStreamingEnabledLocked()}. */
    public boolean getSlopStreamingEnabledUnlocked() {
        return Boolean.parseBoolean(metadataCache.get(SLOP_STREAMING_ENABLED_KEY)
                                                 .getValue()
                                                 .toString());
    }

    /** Whether partition streaming is enabled, read under the read lock. */
    public boolean getPartitionStreamingEnabledLocked() {
        // acquire read lock
        readLock.lock();
        try {
            return Boolean.parseBoolean(metadataCache.get(PARTITION_STREAMING_ENABLED_KEY)
                                                     .getValue()
                                                     .toString());
        } finally {
            readLock.unlock();
        }
    }

    /** Lock-free variant of {@link #getPartitionStreamingEnabledLocked()}. */
    public boolean getPartitionStreamingEnabledUnlocked() {
        return Boolean.parseBoolean(metadataCache.get(PARTITION_STREAMING_ENABLED_KEY)
                                                 .getValue()
                                                 .toString());
    }

    /** Whether read-only store fetches are enabled, read under the read lock. */
    public boolean getReadOnlyFetchEnabledLocked() {
        // acquire read lock
        readLock.lock();
        try {
            return Boolean.parseBoolean(metadataCache.get(READONLY_FETCH_ENABLED_KEY)
                                                     .getValue()
                                                     .toString());
        } finally {
            readLock.unlock();
        }
    }

    /** Lock-free variant of {@link #getReadOnlyFetchEnabledLocked()}. */
    public boolean getReadOnlyFetchEnabledUnlocked() {
        return Boolean.parseBoolean(metadataCache.get(READONLY_FETCH_ENABLED_KEY)
                                                 .getValue()
                                                 .toString());
    }

    /** Whether quota enforcement is enabled, read under the read lock. */
    public boolean getQuotaEnforcingEnabledLocked() {
        // acquire read lock
        readLock.lock();
        try {
            return Boolean.parseBoolean(metadataCache.get(QUOTA_ENFORCEMENT_ENABLED_KEY)
                                                     .getValue()
                                                     .toString());
        } finally {
            readLock.unlock();
        }
    }

    /** Lock-free variant of {@link #getQuotaEnforcingEnabledLocked()}. */
    public boolean getQuotaEnforcingEnabledUnlocked() {
        return Boolean.parseBoolean(metadataCache.get(QUOTA_ENFORCEMENT_ENABLED_KEY)
                                                 .getValue()
                                                 .toString());
    }

    /** Returns the current rebalancer state; body continues on the next source line. */
    public RebalancerState getRebalancerState() { //
        // acquire read lock
        readLock.lock();
        try {
            return (RebalancerState) metadataCache.get(REBALANCING_STEAL_INFO).getValue();
        } finally {
            readLock.unlock();
        }
    }

    /** Returns the cluster XML captured as the rebalancing source, if any. */
    public Cluster getRebalancingSourceCluster() {
        // acquire read lock
        readLock.lock();
        try {
            return (Cluster) metadataCache.get(REBALANCING_SOURCE_CLUSTER_XML).getValue();
        } finally {
            readLock.unlock();
        }
    }

    /** Returns the store definitions captured as the rebalancing source, if any. */
    @SuppressWarnings("unchecked")
    public List<StoreDefinition> getRebalancingSourceStores() {
        // acquire read lock
        readLock.lock();
        try {
            return (List<StoreDefinition>) metadataCache.get(REBALANCING_SOURCE_STORES_XML)
                                                        .getValue();
        } finally {
            readLock.unlock();
        }
    }

    /*
     * First check in the map of regular stores. If not present, check in the
     * system stores map.
     */
    @SuppressWarnings("unchecked")
    public RoutingStrategy getRoutingStrategy(String storeName) {
        // acquire read lock
        readLock.lock();
        try {
            Map<String, RoutingStrategy> routingStrategyMap = (Map<String, RoutingStrategy>) metadataCache.get(ROUTING_STRATEGY_KEY)
                                                                                                          .getValue();
            RoutingStrategy strategy = routingStrategyMap.get(storeName);
            if(strategy == null) {
                // fall back to the system-store routing strategies
                Map<String, RoutingStrategy> systemRoutingStrategyMap = (Map<String, RoutingStrategy>) metadataCache.get(SYSTEM_ROUTING_STRATEGY_KEY)
                                                                                                                    .getValue();
                strategy = systemRoutingStrategyMap.get(storeName);
            }
            return strategy;
        } finally {
            readLock.unlock();
        }
    }

    /**
     * Returns the list of store defs as a map keyed by store name.
     *
     * @param storeDefs store definitions to index
     * @return map from store name to its definition
     */
    private HashMap<String, StoreDefinition> makeStoreDefinitionMap(List<StoreDefinition> storeDefs) {
        HashMap<String, StoreDefinition> storeDefMap = new HashMap<String, StoreDefinition>();
        for(StoreDefinition storeDef: storeDefs)
            storeDefMap.put(storeDef.getName(), storeDef);
        return storeDefMap;
    }

    /**
     * Changes to cluster OR store definition metadata results in routing
     * strategies changing. These changes need to be propagated to all the
     * listeners.
     *
     * @param cluster The updated cluster metadata
     * @param storeDefs The updated list of store definition
     */
    private void updateRoutingStrategies(Cluster cluster, List<StoreDefinition> storeDefs) {
        // acquire write lock
        writeLock.lock();
        try {
            VectorClock clock = new VectorClock();
            if(metadataCache.containsKey(ROUTING_STRATEGY_KEY))
                // carry the prior entry's version forward so the new one supersedes it
                clock = (VectorClock) metadataCache.get(ROUTING_STRATEGY_KEY).getVersion();
            logger.info("Updating routing strategy for all stores");
            HashMap<String, StoreDefinition> storeDefMap = makeStoreDefinitionMap(storeDefs);
            HashMap<String, RoutingStrategy> routingStrategyMap = createRoutingStrategyMap(cluster,
                                                                                           storeDefMap);
            this.metadataCache.put(ROUTING_STRATEGY_KEY,
                                   new Versioned<Object>(routingStrategyMap,
                                                         clock.incremented(getNodeId(),
                                                                           System.currentTimeMillis())));

            // notify registered listeners of the new strategy / definition
            for(String storeName: storeNameTolisteners.keySet()) {
                RoutingStrategy updatedRoutingStrategy = routingStrategyMap.get(storeName);
                if(updatedRoutingStrategy != null) {
                    try {
                        for(MetadataStoreListener listener: storeNameTolisteners.get(storeName)) {
                            listener.updateRoutingStrategy(updatedRoutingStrategy);
                            listener.updateStoreDefinition(storeDefMap.get(storeName));
                        }
                    } catch(Exception e) {
                        // a misbehaving listener must not abort the metadata update
                        if(logger.isEnabledFor(Level.WARN))
                            logger.warn(e, e);
                    }
                }
            }
        } finally {
            writeLock.unlock();
        }
    }

    /*
     * Initialize the routing strategy map for system stores. This is used
     * during get / put on system stores.
     */
    private void initSystemRoutingStrategies(Cluster cluster) {
        HashMap<String, RoutingStrategy> routingStrategyMap = createRoutingStrategyMap(cluster,
                                                                                       makeStoreDefinitionMap(getSystemStoreDefList()));
        this.metadataCache.put(SYSTEM_ROUTING_STRATEGY_KEY,
                               new Versioned<Object>(routingStrategyMap));
    }

    /**
     * Add the steal information to the rebalancer state
     *
     * @param stealInfo The steal information to add
     */
    public void addRebalancingState(final RebalanceTaskInfo stealInfo) {
        // acquire write lock
        writeLock.lock();
        try {
            // Move into rebalancing state
            if(ByteUtils.getString(get(SERVER_STATE_KEY, null).get(0).getValue(), "UTF-8")
                        .compareTo(VoldemortState.NORMAL_SERVER.toString()) == 0) {
                put(SERVER_STATE_KEY, VoldemortState.REBALANCING_MASTER_SERVER);
                initCache(SERVER_STATE_KEY);
            }

            // Add the steal information
            RebalancerState rebalancerState = getRebalancerState();
            if(!rebalancerState.update(stealInfo)) {
                throw new VoldemortException("Could not add steal information " + stealInfo
                                             + " since a plan for the same donor node "
                                             + stealInfo.getDonorId() + " ( "
                                             + rebalancerState.find(stealInfo.getDonorId())
                                             + " ) already exists");
            }
            put(MetadataStore.REBALANCING_STEAL_INFO, rebalancerState);
            initCache(REBALANCING_STEAL_INFO);
        } finally {
            writeLock.unlock();
        }
    }

    /**
     * Delete the partition steal information from the rebalancer state
     *
     * @param stealInfo The steal information to delete
     */
    public void deleteRebalancingState(RebalanceTaskInfo stealInfo) {
        // acquire write lock
        writeLock.lock();
        try {
            RebalancerState rebalancerState = getRebalancerState();

            if(!rebalancerState.remove(stealInfo))
                throw new IllegalArgumentException("Couldn't find " + stealInfo + " in "
                                                   + rebalancerState + " while deleting");

            if(rebalancerState.isEmpty()) {
                // last task removed: revert all rebalancing metadata entirely
                logger.debug("Cleaning all rebalancing state");
                cleanAllRebalancingState();
            } else {
                put(REBALANCING_STEAL_INFO, rebalancerState);
                initCache(REBALANCING_STEAL_INFO);
            }
        } finally {
            writeLock.unlock();
        }
    }

    /**
     * change server state between OFFLINE_SERVER and NORMAL_SERVER
     *
     * @param setToOffline True if set to OFFLINE_SERVER
     */
    public void setOfflineState(boolean setToOffline) {
        // acquire write lock
        writeLock.lock();
        try {
            String currentState = ByteUtils.getString(get(SERVER_STATE_KEY, null).get(0).getValue(),
                                                      "UTF-8");
            if(setToOffline) {
                // from NORMAL_SERVER to OFFLINE_SERVER
                if(currentState.equals(VoldemortState.NORMAL_SERVER.toString())) {
                    put(SERVER_STATE_KEY, VoldemortState.OFFLINE_SERVER);
                    initCache(SERVER_STATE_KEY);
                    // disable every background transfer mechanism while offline
                    put(SLOP_STREAMING_ENABLED_KEY, false);
                    initCache(SLOP_STREAMING_ENABLED_KEY);
                    put(PARTITION_STREAMING_ENABLED_KEY, false);
                    initCache(PARTITION_STREAMING_ENABLED_KEY);
                    put(READONLY_FETCH_ENABLED_KEY, false);
                    initCache(READONLY_FETCH_ENABLED_KEY);
                } else if(currentState.equals(VoldemortState.OFFLINE_SERVER.toString())) {
                    logger.warn("Already in OFFLINE_SERVER state.");
                    return;
                } else {
                    logger.error("Cannot enter OFFLINE_SERVER state from " + currentState);
                    throw new VoldemortException("Cannot enter OFFLINE_SERVER state from "
                                                 + currentState);
                }
            } else {
                // from OFFLINE_SERVER to NORMAL_SERVER
                if(currentState.equals(VoldemortState.NORMAL_SERVER.toString())) {
                    logger.warn("Already in NORMAL_SERVER state.");
                    return;
                } else if(currentState.equals(VoldemortState.OFFLINE_SERVER.toString())) {
                    put(SERVER_STATE_KEY, VoldemortState.NORMAL_SERVER);
                    initCache(SERVER_STATE_KEY);
                    // re-enable background transfers and rebuild cached state
                    put(SLOP_STREAMING_ENABLED_KEY, true);
                    initCache(SLOP_STREAMING_ENABLED_KEY);
                    put(PARTITION_STREAMING_ENABLED_KEY, true);
                    initCache(PARTITION_STREAMING_ENABLED_KEY);
                    put(READONLY_FETCH_ENABLED_KEY, true);
                    initCache(READONLY_FETCH_ENABLED_KEY);
                    init(getNodeId());
                } else {
                    logger.error("Cannot enter NORMAL_SERVER state from " + currentState);
                    throw new VoldemortException("Cannot enter NORMAL_SERVER state from "
                                                 + currentState);
                }
            }
        } finally {
            writeLock.unlock();
        }
    }

    /**
     * Function to add a new Store to the Metadata store. This involves
     *
     * 1. Create a new entry in the ConfigurationStorageEngine for STORES.
     *
     * 2.
     * Update the metadata cache.
     *
     * 3. Re-create the 'stores.xml' key
     *
     * @param storeDef defines the new store to be created
     */
    public void addStoreDefinition(StoreDefinition storeDef) {
        // acquire write lock
        writeLock.lock();
        try {
            // Check if store already exists
            if(this.storeNames.contains(storeDef.getName())) {
                throw new VoldemortException("Store already exists !");
            }

            // Check for backwards compatibility
            StoreDefinitionUtils.validateSchemaAsNeeded(storeDef);

            // Otherwise add to the STORES directory
            StoreDefinitionsMapper mapper = new StoreDefinitionsMapper();
            String storeDefStr = mapper.writeStore(storeDef);
            Versioned<String> versionedValueStr = new Versioned<String>(storeDefStr);
            this.storeDefinitionsStorageEngine.put(storeDef.getName(), versionedValueStr, null);

            // Update the metadata cache
            this.metadataCache.put(storeDef.getName(), new Versioned<Object>(storeDefStr));

            // Re-initialize the store definitions. This is primarily required
            // to re-create the value for key: 'stores.xml'. This is necessary
            // for backwards compatibility.
            initStoreDefinitions(null);

            updateRoutingStrategies(getCluster(), getStoreDefList());
        } finally {
            writeLock.unlock();
        }
    }

    /**
     * Function to delete the specified store from Metadata store. This involves
     *
     * 1. Remove entry from the ConfigurationStorageEngine for STORES.
     *
     * 2. Update the metadata cache.
     *
     * 3. Re-create the 'stores.xml' key
     *
     * @param storeName specifies name of the store to be deleted.
     */
    public void deleteStoreDefinition(String storeName) {
        // acquire write lock
        writeLock.lock();
        try {
            // Check if store exists
            if(!this.storeNames.contains(storeName)) {
                throw new VoldemortException("Requested store to be deleted does not exist !");
            }

            // Otherwise remove from the STORES directory. Note: The version
            // argument is not required here since the
            // ConfigurationStorageEngine simply ignores this.
            this.storeDefinitionsStorageEngine.delete(storeName, null);

            // Update the metadata cache
            this.metadataCache.remove(storeName);

            // Re-initialize the store definitions. This is primarily required
            // to re-create the value for key: 'stores.xml'. This is necessary
            // for backwards compatibility.
            initStoreDefinitions(null);
        } finally {
            writeLock.unlock();
        }
    }

    /** Metadata cannot be enumerated; entry iteration is intentionally unsupported. */
    @Override
    public ClosableIterator<Pair<ByteArray, Versioned<byte[]>>> entries() {
        throw new VoldemortException("You cannot iterate over all entries in Metadata");
    }

    /** Metadata keys cannot be enumerated; key iteration is intentionally unsupported. */
    @Override
    public ClosableIterator<ByteArray> keys() {
        throw new VoldemortException("You cannot iterate over all keys in Metadata");
    }

    /** Partition-scoped entry iteration is not meaningful for metadata. */
    @Override
    public ClosableIterator<Pair<ByteArray, Versioned<byte[]>>> entries(int partition) {
        throw new UnsupportedOperationException("Partition based entries scan not supported for this storage type");
    }

    /** Partition-scoped key iteration is not meaningful for metadata. */
    @Override
    public ClosableIterator<ByteArray> keys(int partition) {
        throw new UnsupportedOperationException("Partition based key scan not supported for this storage type");
    }

    /** Truncation would destroy the server's metadata; intentionally unsupported. */
    @Override
    public void truncate() {
        throw new VoldemortException("You cannot truncate entries in Metadata");
    }

    /** Direct deletes are forbidden; metadata keys are managed internally. */
    @Override
    public boolean delete(ByteArray key, Version version) throws VoldemortException {
        throw new VoldemortException("You cannot delete your metadata fool !!");
    }

    /** Batch get over metadata keys, performed under the read lock. */
    @Override
    public Map<ByteArray, List<Versioned<byte[]>>> getAll(Iterable<ByteArray> keys,
                                                          Map<ByteArray, byte[]> transforms)
            throws VoldemortException {
        // acquire read lock
        readLock.lock();
        try {
            StoreUtils.assertValidKeys(keys);
            return StoreUtils.getAll(this, keys, transforms);
        } finally {
            readLock.unlock();
        }
    }

    /**
     * Utility function to validate if the given store name exists in the store
     * name list managed by MetadataStore. This is used by the Admin service for
     * validation before serving a get-metadata request.
     *
     * @param name Name of the store to validate
     * @return True if the store name exists in the 'storeNames' list. False
     *         otherwise.
     */
    public boolean isValidStore(String name) {
        readLock.lock();
        try {
            if(this.storeNames.contains(name)) {
                return true;
            }
            return false;
        } finally {
            readLock.unlock();
        }
    }

    /**
     * Initializes the metadataCache for MetadataStore
     */
    private void init(int nodeId) {
        logger.info("metadata init().");

        // NOTE(review): unlike every other mutator in this class, the write
        // lock here is NOT released in a try/finally. If any step below throws
        // (including the RuntimeException for a node-id mismatch) the lock is
        // leaked and all subsequent metadata access deadlocks. Confirm and
        // consider wrapping the body in try { ... } finally { writeLock.unlock(); }.
        writeLock.lock();

        // Required keys
        initCache(CLUSTER_KEY);

        // If stores definition storage engine is not null, initialize metadata
        // Add the mapping from key to the storage engine used
        if(this.storeDefinitionsStorageEngine != null) {
            initStoreDefinitions(null);
        } else {
            initCache(STORES_KEY);
        }

        // Initialize system store in the metadata cache
        initSystemCache();
        initSystemRoutingStrategies(getCluster());

        initCache(NODE_ID_KEY, nodeId);
        if(getNodeId() != nodeId)
            throw new RuntimeException("Attempt to start previous node:"
                                       + getNodeId()
                                       + " as node:"
                                       + nodeId
                                       + " (Did you copy config directory ? try deleting .temp .version in config dir to force clean) aborting ...");

        // Initialize with default if not present
        initCache(SLOP_STREAMING_ENABLED_KEY, true);
        initCache(PARTITION_STREAMING_ENABLED_KEY, true);
        initCache(READONLY_FETCH_ENABLED_KEY, true);
        initCache(QUOTA_ENFORCEMENT_ENABLED_KEY, true);
        initCache(REBALANCING_STEAL_INFO, new RebalancerState(new ArrayList<RebalanceTaskInfo>()));
        initCache(SERVER_STATE_KEY, VoldemortState.NORMAL_SERVER.toString());
        initCache(REBALANCING_SOURCE_CLUSTER_XML, null);
        initCache(REBALANCING_SOURCE_STORES_XML, null);

        // set transient values
        updateRoutingStrategies(getCluster(), getStoreDefList());

        writeLock.unlock();
    }

    /**
     * Function to go through all the store definitions contained in the STORES
     * directory and
     *
     * 1. Update metadata cache.
     *
     * 2. Update STORES_KEY by stitching together all these keys.
     *
     * 3. Update 'storeNames' list.
     *
     * This method is not thread safe. It is expected that the caller of this
     * method will correctly handle concurrency issues. Currently this is not an
     * issue since its invoked by init, put, add and delete store all of which
     * use locks to deal with any concurrency related issues.
     */
    private void initStoreDefinitions(Version storesXmlVersion) {
        if(this.storeDefinitionsStorageEngine == null) {
            throw new VoldemortException("The store definitions directory is empty");
        }

        String allStoreDefinitions = "<stores>";
        Version finalStoresXmlVersion = null;
        if(storesXmlVersion != null) {
            finalStoresXmlVersion = storesXmlVersion;
        }
        this.storeNames.clear();

        ClosableIterator<Pair<String, Versioned<String>>> storesIterator = this.storeDefinitionsStorageEngine.entries();

        // Some test setups may result in duplicate entries for 'store' element.
        // Do the de-dup here
        Map<String, Versioned<String>> storeNameToDefMap = new HashMap<String, Versioned<String>>();
        Version maxVersion = null;
        while(storesIterator.hasNext()) {
            Pair<String, Versioned<String>> storeDetail = storesIterator.next();
            String storeName = storeDetail.getFirst();
            Versioned<String> versionedStoreDef = storeDetail.getSecond();
            storeNameToDefMap.put(storeName, versionedStoreDef);
            Version curVersion = versionedStoreDef.getVersion();

            // Get the highest version from all the store entries
            if(maxVersion == null) {
                maxVersion = curVersion;
            } else if(maxVersion.compare(curVersion) == Occurred.BEFORE) {
                maxVersion = curVersion;
            }
        }

        // If the specified version is null, assign highest Version to
        // 'stores.xml' key
        if(finalStoresXmlVersion == null) {
            finalStoresXmlVersion = maxVersion;
        }

        // Go through all the individual stores and update metadata
        for(Entry<String, Versioned<String>> storeEntry: storeNameToDefMap.entrySet()) {
            String storeName = storeEntry.getKey();
            Versioned<String> versionedStoreDef = storeEntry.getValue();

            // Add all the store names to the list of storeNames
            this.storeNames.add(storeName);

            this.metadataCache.put(storeName,
                                   new Versioned<Object>(versionedStoreDef.getValue(),
                                                         versionedStoreDef.getVersion()));
        }
        // Deterministic ordering of store names for the stitched stores.xml
        Collections.sort(this.storeNames);
        for(String storeName: this.storeNames) {
            Versioned<String> versionedStoreDef = storeNameToDefMap.get(storeName);
            // Stitch together to form the complete store definition list.
            allStoreDefinitions += versionedStoreDef.getValue();
        }

        allStoreDefinitions += "</stores>";

        // Update cache with the composite store definition list.
        metadataCache.put(STORES_KEY,
                          convertStringToObject(STORES_KEY,
                                                new Versioned<String>(allStoreDefinitions,
                                                                      finalStoresXmlVersion)));
    }

    /**
     * Function to clear all the metadata related to the given store
     * definitions. This is needed when a put on 'stores.xml' is called, thus
     * replacing the existing state.
     *
     * This method is not thread safe. It is expected that the caller of this
     * method will handle concurrency related issues.
     *
     * @param storeNamesToDelete names of the stores whose metadata is purged
     */
    private void resetStoreDefinitions(Set<String> storeNamesToDelete) {
        // Clear entries in the metadata cache
        for(String storeName: storeNamesToDelete) {
            this.metadataCache.remove(storeName);
            this.storeDefinitionsStorageEngine.delete(storeName, null);
            this.storeNames.remove(storeName);
        }
    }

    /** Parses the persisted value for {@code key} and caches the typed object. */
    private synchronized void initCache(String key) {
        metadataCache.put(key, convertStringToObject(key, getInnerValue(key)));
    }

    // Initialize the metadata cache with system store list
    private synchronized void initSystemCache() {
        List<StoreDefinition> value = storeMapper.readStoreList(new StringReader(SystemStoreConstants.SYSTEM_STORE_SCHEMA));
        metadataCache.put(SYSTEM_STORES_KEY, new Versioned<Object>(value));
    }

    /** Like {@link #initCache(String)} but falls back to {@code defaultValue} on any failure. */
    private void initCache(String key, Object defaultValue) {
        try {
            initCache(key);
        } catch(Exception e) {
            // put default value if failed to init
            this.put(key, new Versioned<Object>(defaultValue));
        }
    }

    /** Builds a store-name to routing-strategy map for the given cluster and stores. */
    private HashMap<String, RoutingStrategy> createRoutingStrategyMap(Cluster cluster,
                                                                      HashMap<String, StoreDefinition> storeDefs) {
        HashMap<String, RoutingStrategy> map = new HashMap<String, RoutingStrategy>();
        for(StoreDefinition store: storeDefs.values()) {
            map.put(store.getName(), routingFactory.updateRoutingStrategy(store, cluster));
        }

        // add metadata Store route to ALL routing strategy.
        map.put(METADATA_STORE_NAME, new RouteToAllStrategy(getCluster().getNodesShuffled()));

        return map;
    }

    /**
     * Converts Object to byte[] depending on the key
     * <p>
     * StoreRepository takes only StorageEngine<ByteArray,byte[]> and for
     * persistence on disk we need to convert them to String.<br>
     *
     * @param key metadata key that selects the serialization format
     * @param value typed value to serialize
     * @return the serialized value carrying the original version
     */
    @SuppressWarnings("unchecked")
    private Versioned<String> convertObjectToString(String key, Versioned<Object> value) {
        String valueStr = "";

        if(CLUSTER_KEY.equals(key)) {
            valueStr = clusterMapper.writeCluster((Cluster) value.getValue());
        } else if(STORES_KEY.equals(key)) {
            valueStr = storeMapper.writeStoreList((List<StoreDefinition>) value.getValue());
        } else if(REBALANCING_STEAL_INFO.equals(key)) {
            RebalancerState rebalancerState = (RebalancerState) value.getValue();
            valueStr = rebalancerState.toJsonString();
        } else if(SERVER_STATE_KEY.equals(key) || NODE_ID_KEY.equals(key)
                  || SLOP_STREAMING_ENABLED_KEY.equals(key)
                  || PARTITION_STREAMING_ENABLED_KEY.equals(key)
                  || READONLY_FETCH_ENABLED_KEY.equals(key)
                  || QUOTA_ENFORCEMENT_ENABLED_KEY.equals(key)) {
            valueStr = value.getValue().toString();
        } else if(REBALANCING_SOURCE_CLUSTER_XML.equals(key)) {
            if(value.getValue() != null) {
                valueStr = clusterMapper.writeCluster((Cluster) value.getValue());
            }
        } else if(REBALANCING_SOURCE_STORES_XML.equals(key)) {
            if(value.getValue() != null) {
                valueStr = storeMapper.writeStoreList((List<StoreDefinition>) value.getValue());
            }
        } else if(this.storeNames.contains(key)) {
            // individual store definition: wrap in a <stores> envelope
            valueStr = "<stores>";
            if(value.getValue() != null) {
                valueStr += value.getValue();
            }
            valueStr += "</stores>";
        } else {
            throw new VoldemortException("Unhandled key:'" + key
                                         + "' for Object to String serialization.");
        }

        return new Versioned<String>(valueStr, value.getVersion());
    }

    /**
     * convert Object to String depending on key.
     * <p>
     * StoreRepository takes only StorageEngine<ByteArray,byte[]> and for
     * persistence on disk we need to convert them to String.<br>
     *
     * @param key metadata key that selects the parsing rule
     * @param value serialized value to parse
     * @return the typed value carrying the original version
     */
    private Versioned<Object> convertStringToObject(String key, Versioned<String> value) {
        Object valueObject = null;

        if(CLUSTER_KEY.equals(key)) {
            valueObject = clusterMapper.readCluster(new StringReader(value.getValue()));
        } else if(STORES_KEY.equals(key)) {
            valueObject = storeMapper.readStoreList(new StringReader(value.getValue()));
        } else if(SERVER_STATE_KEY.equals(key)) {
            valueObject = VoldemortState.valueOf(value.getValue());
        } else if(NODE_ID_KEY.equals(key)) {
            valueObject = Integer.parseInt(value.getValue());
        } else if(SLOP_STREAMING_ENABLED_KEY.equals(key)
                  || PARTITION_STREAMING_ENABLED_KEY.equals(key)
                  || READONLY_FETCH_ENABLED_KEY.equals(key)
                  || QUOTA_ENFORCEMENT_ENABLED_KEY.equals(key)) {
            valueObject = Boolean.parseBoolean(value.getValue());
        } else if(REBALANCING_STEAL_INFO.equals(key)) {
            String valueString = value.getValue();
            if(valueString.startsWith("[")) {
                // JSON-array form: a full rebalancer state
                valueObject = RebalancerState.create(valueString);
            } else {
                // single-task form: wrap it in a one-element state
                valueObject = new RebalancerState(Arrays.asList(RebalanceTaskInfo.create(valueString)));
            }
        } else if(REBALANCING_SOURCE_CLUSTER_XML.equals(key)) {
            if(value.getValue() != null && value.getValue().length() > 0) {
                valueObject = clusterMapper.readCluster(new StringReader(value.getValue()));
            }
        } else if(REBALANCING_SOURCE_STORES_XML.equals(key)) {
            if(value.getValue() != null && value.getValue().length() > 0) {
                valueObject = storeMapper.readStoreList(new StringReader(value.getValue()));
            }
        } else {
            throw new VoldemortException("Unhandled key:'" + key
                                         + "' for String to Object serialization.");
        }

        return new Versioned<Object>(valueObject, value.getVersion());
    }

    /** Writes a raw serialized value straight to the inner store. */
    private void putInner(String key, Versioned<String> value) {
        innerStore.put(key, value, null);
    }

    /**
     * Reads the single persisted value for {@code key} from the inner store.
     *
     * @throws VoldemortException if more than one version exists, or the key
     *         is missing entirely
     */
    private Versioned<String> getInnerValue(String key) throws VoldemortException {
        List<Versioned<String>> values = innerStore.get(key, null);

        if(values.size() > 1)
            throw new VoldemortException("Inconsistent metadata found: expected 1 version but found "
                                         + values.size() + " for key:" + key);
        if(values.size() > 0)
            return values.get(0);

        throw new VoldemortException("No metadata found for required key:" + key);
    }
}
//
//  ========================================================================
//  Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
//  ------------------------------------------------------------------------
//  All rights reserved. This program and the accompanying materials
//  are made available under the terms of the Eclipse Public License v1.0
//  and Apache License v2.0 which accompanies this distribution.
//
//      The Eclipse Public License is available at
//      http://www.eclipse.org/legal/epl-v10.html
//
//      The Apache License v2.0 is available at
//      http://www.opensource.org/licenses/apache2.0.php
//
//  You may elect to redistribute this code under either of these licenses.
//  ========================================================================
//

package org.eclipse.jetty.util.resource;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.channels.ReadableByteChannel;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.StringTokenizer;

import org.eclipse.jetty.util.URIUtil;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;

/**
 * A collection of resources (dirs).
 * Allows webapps to have multiple (static) sources.
 * The first resource in the collection is the main resource.
 * If a resource is not found in the main resource, it looks it up in
 * the order the resources were constructed.
 */
public class ResourceCollection extends Resource
{
    private static final Logger LOG = Log.getLogger(ResourceCollection.class);
    private Resource[] _resources;

    /* ------------------------------------------------------------ */
    /**
     * Instantiates an empty resource collection.
     *
     * This constructor is used when configuring jetty-maven-plugin.
     */
    public ResourceCollection()
    {
        _resources = new Resource[0];
    }

    /* ------------------------------------------------------------ */
    /**
     * Instantiates a new resource collection. Nested ResourceCollections are
     * flattened into this one; every member must be an existing directory.
     *
     * @param resources the resources to be added to collection
     */
    public ResourceCollection(Resource... resources)
    {
        List<Resource> list = new ArrayList<Resource>();
        for (Resource r : resources)
        {
            if (r==null)
                continue;
            if (r instanceof ResourceCollection)
            {
                for (Resource r2 : ((ResourceCollection)r).getResources())
                    list.add(r2);
            }
            else
                list.add(r);
        }
        _resources = list.toArray(new Resource[list.size()]);
        for(Resource r : _resources)
        {
            if(!r.exists() || !r.isDirectory())
                throw new IllegalArgumentException(r + " is not an existing directory.");
        }
    }

    /* ------------------------------------------------------------ */
    /**
     * Instantiates a new resource collection.
     *
     * @param resources the resource strings to be added to collection
     */
    public ResourceCollection(String[] resources)
    {
        _resources = new Resource[resources.length];
        try
        {
            for(int i=0; i<resources.length; i++)
            {
                _resources[i] = Resource.newResource(resources[i]);
                if(!_resources[i].exists() || !_resources[i].isDirectory())
                    throw new IllegalArgumentException(_resources[i] + " is not an existing directory.");
            }
        }
        catch(IllegalArgumentException e)
        {
            throw e;
        }
        catch(Exception e)
        {
            throw new RuntimeException(e);
        }
    }

    /* ------------------------------------------------------------ */
    /**
     * Instantiates a new resource collection.
     *
     * @param csvResources the string containing comma-separated resource strings
     */
    public ResourceCollection(String csvResources)
    {
        setResourcesAsCSV(csvResources);
    }

    /* ------------------------------------------------------------ */
    /**
     * Retrieves the resource collection's resources.
     *
     * @return the resource array
     */
    public Resource[] getResources()
    {
        return _resources;
    }

    /* ------------------------------------------------------------ */
    /**
     * Sets the resource collection's resources.
     *
     * @param resources the new resource array
     */
    public void setResources(Resource[] resources)
    {
        _resources = resources != null ? resources : new Resource[0];
    }

    /* ------------------------------------------------------------ */
    /**
     * Sets the resources as string of comma-separated values.
     * This method should be used when configuring jetty-maven-plugin.
     * Non-existing or non-directory entries are logged and skipped.
     *
     * @param csvResources the comma-separated string containing
     *                     one or more resource strings.
     */
    public void setResourcesAsCSV(String csvResources)
    {
        StringTokenizer tokenizer = new StringTokenizer(csvResources, ",;");
        int len = tokenizer.countTokens();
        if(len==0)
        {
            throw new IllegalArgumentException("ResourceCollection@setResourcesAsCSV(String) " +
                    " argument must be a string containing one or more comma-separated resource strings.");
        }

        List<Resource> resources = new ArrayList<>();
        try
        {
            while(tokenizer.hasMoreTokens())
            {
                Resource resource = Resource.newResource(tokenizer.nextToken().trim());
                if(!resource.exists() || !resource.isDirectory())
                    LOG.warn(" !exist "+resource);
                else
                    resources.add(resource);
            }
        }
        catch(Exception e)
        {
            throw new RuntimeException(e);
        }
        _resources = resources.toArray(new Resource[resources.size()]);
    }

    /* ------------------------------------------------------------ */
    /**
     * @param path The path segment to add
     * @return The contained resource (found first) in the collection of resources
     */
    @Override
    public Resource addPath(String path) throws IOException, MalformedURLException
    {
        if(_resources==null)
            throw new IllegalStateException("*resources* not set.");

        if(path==null)
            throw new MalformedURLException();

        if(path.length()==0 || URIUtil.SLASH.equals(path))
            return this;

        Resource resource=null;
        ArrayList<Resource> resources = null;
        int i=0;
        for(; i<_resources.length; i++)
        {
            resource = _resources[i].addPath(path);
            if (resource.exists())
            {
                if (resource.isDirectory())
                    break;
                // first matching non-directory wins
                return resource;
            }
        }

        // collect any further matching directories so they can be merged
        for(i++; i<_resources.length; i++)
        {
            Resource r = _resources[i].addPath(path);
            if (r.exists() && r.isDirectory())
            {
                if (resources==null)
                    resources = new ArrayList<Resource>();

                if (resource!=null)
                {
                    resources.add(resource);
                    resource=null;
                }

                resources.add(r);
            }
        }

        if (resource!=null)
            return resource;
        if (resources!=null)
            return new ResourceCollection(resources.toArray(new Resource[resources.size()]));
        return null;
    }

    /* ------------------------------------------------------------ */
    /**
     * @param path path to look up in each member, in order
     * @return the resource(file) if found, returns a list of resource dirs if its a dir, else null.
     * @throws IOException
     * @throws MalformedURLException
     *
     * FIX: the previous implementation re-created the directory list on every
     * additional match (discarding entries already collected) and never
     * cleared {@code resource} after adding it, so the trailing
     * {@code if (resource!=null) return resource;} always returned only the
     * first directory and the documented list result was unreachable. The
     * loop now mirrors the correct accumulation logic used by
     * {@link #addPath(String)}.
     */
    protected Object findResource(String path) throws IOException, MalformedURLException
    {
        Resource resource=null;
        ArrayList<Resource> resources = null;
        int i=0;
        for(; i<_resources.length; i++)
        {
            resource = _resources[i].addPath(path);
            if (resource.exists())
            {
                if (resource.isDirectory())
                    break;
                return resource;
            }
        }

        for(i++; i<_resources.length; i++)
        {
            Resource r = _resources[i].addPath(path);
            if (r.exists() && r.isDirectory())
            {
                if (resources==null)
                    resources = new ArrayList<Resource>();

                if (resource!=null)
                {
                    resources.add(resource);
                    resource=null;
                }

                resources.add(r);
            }
        }

        if (resource!=null)
            return resource;
        if (resources!=null)
            return resources;
        return null;
    }

    /* ------------------------------------------------------------ */
    @Override
    public boolean delete() throws SecurityException
    {
        throw new UnsupportedOperationException();
    }

    /* ------------------------------------------------------------ */
    @Override
    public boolean exists()
    {
        if(_resources==null)
            throw new IllegalStateException("*resources* not set.");

        return true;
    }

    /* ------------------------------------------------------------ */
    /** Returns the first member's file, if any member resolves to one. */
    @Override
    public File getFile() throws IOException
    {
        if(_resources==null)
            throw new IllegalStateException("*resources* not set.");

        for(Resource r : _resources)
        {
            File f = r.getFile();
            if(f!=null)
                return f;
        }
        return null;
    }

    /* ------------------------------------------------------------ */
    /** Returns the first member's input stream, if any member provides one. */
    @Override
    public InputStream getInputStream() throws IOException
    {
        if(_resources==null)
            throw new IllegalStateException("*resources* not set.");

        for(Resource r : _resources)
        {
            InputStream is = r.getInputStream();
            if(is!=null)
                return is;
        }
        return null;
    }

    /* ------------------------------------------------------------ */
    @Override
    public ReadableByteChannel getReadableByteChannel() throws IOException
    {
        if(_resources==null)
            throw new IllegalStateException("*resources* not set.");

        for(Resource r : _resources)
        {
            ReadableByteChannel channel = r.getReadableByteChannel();
            if(channel!=null)
                return channel;
        }
        return null;
    }

    /* ------------------------------------------------------------ */
    @Override
    public String getName()
    {
        if(_resources==null)
            throw new IllegalStateException("*resources* not set.");

        for(Resource r : _resources)
        {
            String name = r.getName();
            if(name!=null)
                return name;
        }
        return null;
    }

    /* ------------------------------------------------------------ */
    @Override
    public URL getURL()
    {
        if(_resources==null)
            throw new IllegalStateException("*resources* not set.");

        for(Resource r : _resources)
        {
            URL url = r.getURL();
            if(url!=null)
                return url;
        }
        return null;
    }

    /* ------------------------------------------------------------ */
    /** A resource collection always represents a directory. */
    @Override
    public boolean isDirectory()
    {
        if(_resources==null)
            throw new IllegalStateException("*resources* not set.");

        return true;
    }

    /* ------------------------------------------------------------ */
    @Override
    public long lastModified()
    {
        if(_resources==null)
            throw new IllegalStateException("*resources* not set.");

        for(Resource r : _resources)
        {
            long lm = r.lastModified();
            if (lm!=-1)
                return lm;
        }
        return -1;
    }

    /* ------------------------------------------------------------ */
    @Override
    public long length()
    {
        return -1;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return The list of resource names(merged) contained in the collection of resources.
     */
    @Override
    public String[] list()
    {
        if(_resources==null)
            throw new IllegalStateException("*resources* not set.");

        HashSet<String> set = new HashSet<String>();
        for(Resource r : _resources)
        {
            for(String s : r.list())
                set.add(s);
        }
        String[] result=set.toArray(new String[set.size()]);
        Arrays.sort(result);
        return result;
    }

    /* ------------------------------------------------------------ */
    @Override
    public void close()
    {
        if(_resources==null)
            throw new IllegalStateException("*resources* not set.");

        for(Resource r : _resources)
            r.close();
    }

    /* ------------------------------------------------------------ */
    @Override
    public boolean renameTo(Resource dest) throws SecurityException
    {
        throw new UnsupportedOperationException();
    }

    /* ------------------------------------------------------------ */
    /** Copies members in reverse order so earlier members overwrite later ones. */
    @Override
    public void copyTo(File destination) throws IOException
    {
        for (int r=_resources.length;r-->0;)
            _resources[r].copyTo(destination);
    }

    /* ------------------------------------------------------------ */
    /**
     * @return the list of resources separated by a path separator
     */
    @Override
    public String toString()
    {
        if(_resources==null)
            return "[]";

        return String.valueOf(Arrays.asList(_resources));
    }

    /* ------------------------------------------------------------ */
    @Override
    public boolean isContainedIn(Resource r) throws MalformedURLException
    {
        // TODO could look at implementing the semantic of is this collection a subset of the Resource r?
        return false;
    }
}
package net.wolfesoftware.jax.parsing; import java.lang.reflect.Field; import java.util.*; import net.wolfesoftware.jax.ast.*; import net.wolfesoftware.jax.tokenization.Lang; public abstract class ExpressionOperator { public final int leftPrecedence; public final String text; public final int rightPrecedence; public ExpressionOperator(int leftPrecedence, String text, int rightPrecedence) { this.leftPrecedence = leftPrecedence; this.text = text; this.rightPrecedence = rightPrecedence; } public abstract ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression); public String toString() { return text; } private static final int PRECEDENCE_DEREFERENCE = 160, PRECEDENCE_POSTFIX = 140, PRECEDENCE_UNARY = 130, PRECEDENCE_MULTIPLICATIVE = 120, PRECEDENCE_ADDITIVE = 110, PRECEDENCE_RELATIONAL = 90, PRECEDENCE_EQUALITY = 80, PRECEDENCE_LOGICAL_AND = 40, PRECEDENCE_LOGICAL_OR = 30, PRECEDENCE_TERNARY = 20, PRECEDENCE_ASSIGNMENT = 10, PRECEDENCE_LOWEST = 1; /* Operation */ public static final ExpressionOperator addition = new ExpressionOperator(PRECEDENCE_ADDITIVE, Lang.SYMBOL_PLUS, PRECEDENCE_ADDITIVE + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new Addition(leftExpression, rightExpression); } }; public static final ExpressionOperator subtraction = new ExpressionOperator(PRECEDENCE_ADDITIVE, Lang.SYMBOL_MINUS, PRECEDENCE_ADDITIVE + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new Subtraction(leftExpression, rightExpression); } }; public static final ExpressionOperator multiplication = new ExpressionOperator(PRECEDENCE_MULTIPLICATIVE, Lang.SYMBOL_ASTERISK, PRECEDENCE_MULTIPLICATIVE + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, 
Expression rightExpression) { return new Multiplication(leftExpression, rightExpression); } }; public static final ExpressionOperator division = new ExpressionOperator(PRECEDENCE_MULTIPLICATIVE, Lang.SYMBOL_SLASH, PRECEDENCE_MULTIPLICATIVE + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new Division(leftExpression, rightExpression); } }; public static final ExpressionOperator preIncrement = new ExpressionOperator(-1, Lang.SYMBOL_PLUS_PLUS, PRECEDENCE_UNARY + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new AmbiguousPreIncrementDecrement(Lang.SYMBOL_PLUS_PLUS, rightExpression); } }; public static final ExpressionOperator preDecrement = new ExpressionOperator(-1, Lang.SYMBOL_MINUS_MINUS, PRECEDENCE_UNARY + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new AmbiguousPreIncrementDecrement(Lang.SYMBOL_MINUS_MINUS, rightExpression); } }; public static final ExpressionOperator postIncrement = new ExpressionOperator(PRECEDENCE_POSTFIX, Lang.SYMBOL_PLUS_PLUS, -1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new AmbiguousPostIncrementDecrement(leftExpression, Lang.SYMBOL_PLUS_PLUS); } }; public static final ExpressionOperator postDecrement = new ExpressionOperator(PRECEDENCE_POSTFIX, Lang.SYMBOL_MINUS_MINUS, -1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new AmbiguousPostIncrementDecrement(leftExpression, Lang.SYMBOL_MINUS_MINUS); } }; public static final ExpressionOperator lessThan = new ExpressionOperator(PRECEDENCE_RELATIONAL, Lang.SYMBOL_LESS_THAN, 
PRECEDENCE_RELATIONAL + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new LessThan(leftExpression, rightExpression); } }; public static final ExpressionOperator greaterThan = new ExpressionOperator(PRECEDENCE_RELATIONAL, Lang.SYMBOL_GREATER_THAN, PRECEDENCE_RELATIONAL + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new GreaterThan(leftExpression, rightExpression); } }; public static final ExpressionOperator lessThanOrEqual = new ExpressionOperator(PRECEDENCE_RELATIONAL, Lang.SYMBOL_LESS_THAN_EQUALS, PRECEDENCE_RELATIONAL + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new LessThanOrEqual(leftExpression, rightExpression); } }; public static final ExpressionOperator greaterThanOrEqual = new ExpressionOperator(PRECEDENCE_RELATIONAL, Lang.SYMBOL_GREATER_THAN_EQUALS, PRECEDENCE_RELATIONAL + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new GreaterThanOrEqual(leftExpression, rightExpression); } }; public static final ExpressionOperator equality = new ExpressionOperator(PRECEDENCE_EQUALITY, Lang.SYMBOL_EQUALS_EQUALS, PRECEDENCE_EQUALITY + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new Equality(leftExpression, rightExpression); } }; public static final ExpressionOperator inequality = new ExpressionOperator(PRECEDENCE_EQUALITY, Lang.SYMBOL_BANG_EQUALS, PRECEDENCE_EQUALITY + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new Inequality(leftExpression, 
rightExpression); } }; public static final ExpressionOperator negation = new ExpressionOperator(-1, Lang.SYMBOL_MINUS, PRECEDENCE_UNARY + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new Negation(rightExpression); } }; public static final ExpressionOperator booleanNot = new ExpressionOperator(-1, Lang.SYMBOL_BANG, PRECEDENCE_UNARY + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new BooleanNot(rightExpression); } }; public static final ExpressionOperator shortCircuitAnd = new ExpressionOperator(PRECEDENCE_LOGICAL_AND, Lang.SYMBOL_AMPERSAND_AMPERSAND, PRECEDENCE_LOGICAL_AND + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new ShortCircuitAnd(leftExpression, rightExpression); } }; public static final ExpressionOperator shortCircuitOr = new ExpressionOperator(PRECEDENCE_LOGICAL_OR, Lang.SYMBOL_PIPE_PIPE, PRECEDENCE_LOGICAL_OR + 1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new ShortCircuitOr(leftExpression, rightExpression); } }; public static final ExpressionOperator returnExpression = new ExpressionEnclosingOperator(-1, Lang.KEYWORD_RETURN, -1, Expression.TYPE) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new ReturnExpression((Expression)innerElements.get(0)); } }; public static final ExpressionOperator returnVoid = new ExpressionOperator(-1, Lang.KEYWORD_RETURN, -1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new ReturnVoid(); } }; public static final 
ExpressionOperator breakVoid = new ExpressionOperator(-1, Lang.KEYWORD_BREAK, -1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new BreakVoid(); } }; public static final ExpressionOperator continueVoid = new ExpressionOperator(-1, Lang.KEYWORD_CONTINUE, -1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new ContinueVoid(); } }; public static final ExpressionOperator _throw = new ExpressionOperator(-1, Lang.KEYWORD_THROW, PRECEDENCE_LOWEST) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new Throw(rightExpression); } }; public static final ExpressionOperator quantity = new ExpressionEnclosingOperator(-1, Lang.SYMBOL_OPEN_PARENS, -1, Expression.TYPE, Lang.SYMBOL_CLOSE_PARENS) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new Quantity((Expression)innerElements.get(0)); } }; public static final ExpressionOperator primitiveCast = new ExpressionEnclosingOperator(-1, Lang.SYMBOL_OPEN_PARENS, PRECEDENCE_UNARY + 1, PrimitiveType.TYPE, Lang.SYMBOL_CLOSE_PARENS, -1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new TypeCast((TypeId)innerElements.get(0), rightExpression); } }; public static final ExpressionOperator typeIdCast = new ExpressionEnclosingOperator(-1, Lang.SYMBOL_OPEN_PARENS, PRECEDENCE_UNARY + 1, TypeId.TYPE, Lang.SYMBOL_CLOSE_PARENS) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new TypeCast((TypeId)innerElements.get(0), rightExpression); } }; /* ControlStructure */ 
public static final ExpressionOperator ifThen = new ExpressionEnclosingOperator(-1, Lang.KEYWORD_IF, -1, Lang.SYMBOL_OPEN_PARENS, Expression.TYPE, Lang.SYMBOL_CLOSE_PARENS, Expression.TYPE) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new IfThen((Expression)innerElements.get(0), (Expression)innerElements.get(1)); } }; public static final ExpressionOperator ifThenElse = new ExpressionEnclosingOperator(-1, Lang.KEYWORD_IF, -1, Lang.SYMBOL_OPEN_PARENS, Expression.TYPE, Lang.SYMBOL_CLOSE_PARENS, Expression.TYPE, Lang.KEYWORD_ELSE, Expression.TYPE) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new IfThenElse((Expression)innerElements.get(0), (Expression)innerElements.get(1), (Expression)innerElements.get(2)); } }; public static final ExpressionOperator questionColon = new ExpressionEnclosingOperator(PRECEDENCE_TERNARY, Lang.SYMBOL_QUESTION, PRECEDENCE_TERNARY, Expression.TYPE, Lang.SYMBOL_COLON) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new QuestionColon(leftExpression, (Expression)innerElements.get(0), rightExpression); } }; public static final ExpressionOperator forLoop = new ExpressionEnclosingOperator(-1, Lang.KEYWORD_FOR, -1, Lang.SYMBOL_OPEN_PARENS, Expression.TYPE, Lang.SYMBOL_SEMICOLON, Expression.TYPE, Lang.SYMBOL_SEMICOLON, Expression.TYPE, Lang.SYMBOL_CLOSE_PARENS, Expression.TYPE) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new ForLoop((Expression)innerElements.get(0), (Expression)innerElements.get(1), (Expression)innerElements.get(2), (Expression)innerElements.get(3)); } }; public static final ExpressionOperator whileLoop = new 
ExpressionEnclosingOperator(-1, Lang.KEYWORD_WHILE, -1, Lang.SYMBOL_OPEN_PARENS, Expression.TYPE, Lang.SYMBOL_CLOSE_PARENS, Expression.TYPE) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new WhileLoop((Expression)innerElements.get(0), (Expression)innerElements.get(1)); } }; public static final ExpressionOperator doWhileLoop = new ExpressionEnclosingOperator(-1, Lang.KEYWORD_DO, -1, Expression.TYPE, Lang.KEYWORD_WHILE, Lang.SYMBOL_OPEN_PARENS, Expression.TYPE, Lang.SYMBOL_CLOSE_PARENS) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new DoWhileLoop((Expression)innerElements.get(0), (Expression)innerElements.get(1)); } }; public static final ExpressionOperator block = new ExpressionEnclosingOperator(-1, Lang.SYMBOL_OPEN_BRACE, -1, BlockContents.TYPE, Lang.SYMBOL_CLOSE_BRACE) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new Block((BlockContents)innerElements.get(0)); } }; public static final ExpressionOperator constructorInvocation = new ExpressionEnclosingOperator(-1, Lang.KEYWORD_NEW, -1, AmbiguousId.TYPE, Lang.SYMBOL_OPEN_PARENS, Arguments.TYPE, Lang.SYMBOL_CLOSE_PARENS) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new ConstructorInvocation((AmbiguousId)innerElements.get(0), (Arguments)innerElements.get(1)); } }; public static final ExpressionOperator constructorRedirectThis = new ExpressionEnclosingOperator(-1, Lang.KEYWORD_THIS, -1, Lang.SYMBOL_OPEN_PARENS, Arguments.TYPE, Lang.SYMBOL_CLOSE_PARENS) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new 
ConstructorRedirect(Lang.KEYWORD_THIS, (Arguments)innerElements.get(0)); } }; public static final ExpressionOperator constructorRedirectSuper = new ExpressionEnclosingOperator(-1, Lang.KEYWORD_SUPER, -1, Lang.SYMBOL_OPEN_PARENS, Arguments.TYPE, Lang.SYMBOL_CLOSE_PARENS) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new ConstructorRedirect(Lang.KEYWORD_SUPER, (Arguments)innerElements.get(0)); } }; public static final ExpressionOperator dereferenceField = new ExpressionEnclosingOperator(PRECEDENCE_DEREFERENCE, Lang.SYMBOL_PERIOD, -1, AmbiguousId.TYPE) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new AmbiguousFieldExpression(leftExpression, (AmbiguousId)innerElements.get(0)); } }; private static class ExpressionAssignmentOperator extends ExpressionOperator { public ExpressionAssignmentOperator(String text) { super(PRECEDENCE_ASSIGNMENT, text, PRECEDENCE_ASSIGNMENT); } public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new AmbiguousAssignment(leftExpression, text, rightExpression); } } // = += -= *= /= %= &= ^= |= <<= >>= >>>= public static final ExpressionOperator assignmentEquals = new ExpressionAssignmentOperator(Lang.SYMBOL_EQUALS), assignmentPlusEquals = new ExpressionAssignmentOperator(Lang.SYMBOL_PLUS_EQUALS), assignmentMinusEquals = new ExpressionAssignmentOperator(Lang.SYMBOL_MINUS_EQUALS), assignmentTimesEquals = new ExpressionAssignmentOperator(Lang.SYMBOL_ASTERISK_EQUALS), assignmentDivideEquals = new ExpressionAssignmentOperator(Lang.SYMBOL_SLASH_EQUALS), assignmentModEquals = new ExpressionAssignmentOperator(Lang.SYMBOL_PERCENT_EQUALS), assignmentAndEquals = new ExpressionAssignmentOperator(Lang.SYMBOL_AMPERSAND_EQUALS), assignmentXorEquals = new 
ExpressionAssignmentOperator(Lang.SYMBOL_CARET_EQUALS), assignmentOrEquals = new ExpressionAssignmentOperator(Lang.SYMBOL_PIPE_EQUALS), assignmentShiftLeftEquals = new ExpressionAssignmentOperator(Lang.SYMBOL_LESS_THAN_LESS_THAN_EQUALS), assignmentShiftRightEquals = new ExpressionAssignmentOperator(Lang.SYMBOL_GREATER_THAN_GREATER_THAN_EQUALS), assignmentShiftRightUnsignedEquals = new ExpressionAssignmentOperator(Lang.SYMBOL_GREATER_THAN_GREATER_THAN_GREATER_THAN_EQUALS); public static final ExpressionOperator instanceOf = new ExpressionEnclosingOperator(PRECEDENCE_DEREFERENCE, Lang.KEYWORD_INSTANCEOF, -1, TypeId.TYPE) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new InstanceOf(leftExpression, (TypeId)innerElements.get(0)); } }; public static final ExpressionOperator methodInvocation = new ExpressionEnclosingOperator(PRECEDENCE_DEREFERENCE, Lang.SYMBOL_PERIOD, -1, AmbiguousId.TYPE, Lang.SYMBOL_OPEN_PARENS, Arguments.TYPE, Lang.SYMBOL_CLOSE_PARENS) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new AmbiguousMethodInvocation(leftExpression, (AmbiguousId)innerElements.get(0), (Arguments)innerElements.get(1)); } }; public static final ExpressionOperator arrayDereference = new ExpressionEnclosingOperator(PRECEDENCE_DEREFERENCE, Lang.SYMBOL_OPEN_BRACKET, -1, Expression.TYPE, Lang.SYMBOL_CLOSE_BRACKET) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return new ArrayDereference(leftExpression, (Expression)innerElements.get(0)); } }; public static final ExpressionOperator tryCatch = new ExpressionEnclosingOperator(-1, Lang.KEYWORD_TRY, -1, TryPart.TYPE, CatchPart.TYPE) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, 
Expression rightExpression) { return new TryCatch((TryPart)innerElements.get(0), (CatchPart)innerElements.get(1)); } }; public static final ExpressionOperator _null = new ExpressionOperator(-1, Lang.KEYWORD_NULL, -1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return NullExpression.INSTANCE; } }; public static final ExpressionOperator _this = new ExpressionOperator(-1, Lang.KEYWORD_THIS, -1) { public ParseElement makeExpressionContent(Expression leftExpression, ArrayList<ParseElement> innerElements, Expression rightExpression) { return ThisExpression.INSTANCE; } }; public static final HashMap<String, List<ExpressionOperator>> OPEN_LEFT = new HashMap<String, List<ExpressionOperator>>(); public static final HashMap<String, List<ExpressionOperator>> CLOSED_LEFT = new HashMap<String, List<ExpressionOperator>>(); private static final Comparator<ExpressionOperator> ambiguityOrderingComparitor = new Comparator<ExpressionOperator>() { public int compare(ExpressionOperator o1, ExpressionOperator o2) { int len1 = (o1 instanceof ExpressionEnclosingOperator) ? ((ExpressionEnclosingOperator)o1).elements.length : 0; int len2 = (o2 instanceof ExpressionEnclosingOperator) ? ((ExpressionEnclosingOperator)o2).elements.length : 0; return len2 - len1; } }; private static final int PUBLIC_STATIC_FINAL = java.lang.reflect.Modifier.PUBLIC | java.lang.reflect.Modifier.STATIC | java.lang.reflect.Modifier.FINAL; static { Field[] fields = ExpressionOperator.class.getFields(); for (Field field : fields) { if (field.getModifiers() == PUBLIC_STATIC_FINAL && field.getType() == ExpressionOperator.class) { ExpressionOperator op; try { op = (ExpressionOperator)field.get(null); } catch (IllegalAccessException e) { throw new RuntimeException(e); } if (op == typeIdCast) continue; // don't count this one HashMap<String, List<ExpressionOperator>> operators = op.leftPrecedence == -1 ? 
CLOSED_LEFT : OPEN_LEFT; putOperator(operators, op); } } } private static void putOperator(HashMap<String, List<ExpressionOperator>> operators, ExpressionOperator op) { List<ExpressionOperator> list = operators.get(op.text); if (list == null) { list = new LinkedList<ExpressionOperator>(); list.clear(); operators.put(op.text, list); } list.add(op); if (1 < list.size()) Collections.sort(list, ambiguityOrderingComparitor); } }